├── .gitignore ├── .vscode └── settings.json ├── src ├── global │ ├── mod.rs │ ├── local.rs │ └── sync.rs ├── oom.rs ├── api.rs ├── lib.rs ├── arena │ ├── local.rs │ ├── sync.rs │ └── mod.rs ├── drop_list.rs ├── tests.rs ├── cache.rs ├── local.rs ├── sync.rs └── blink.rs ├── COPYING ├── .github └── workflows │ ├── release-please.yml │ ├── badge.yml │ ├── security.yml │ ├── fast-pr-check.yml │ ├── check-platforms.yml │ ├── check-toolchains.yml │ ├── test-features.yml │ ├── check-targets.yml │ └── lints.yml ├── examples └── global.rs ├── license ├── APACHE └── MIT ├── Cargo.toml ├── CHANGELOG.md ├── BENCHMARKS.md ├── README.md └── benches └── bench.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "rust-analyzer.cargo.features": [ 3 | "sync", 4 | "alloc" 5 | ] 6 | } -------------------------------------------------------------------------------- /src/global/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module provide types suitable for use as `#[global_allocator]`. 2 | //! 3 | 4 | pub mod local; 5 | #[cfg(feature = "sync")] 6 | pub mod sync; 7 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | Copyright 2022 The blink-alloc Project Developers 2 | 3 | Licensed under the Apache License, Version 2.0, or the MIT license , at your option. This file may not be 6 | copied, modified, or distributed except according to those terms. 
7 | -------------------------------------------------------------------------------- /.github/workflows/release-please.yml: -------------------------------------------------------------------------------- 1 | name: release-please 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | release-please: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: google-github-actions/release-please-action@v3 13 | with: 14 | release-type: node 15 | package-name: release-please-action 16 | -------------------------------------------------------------------------------- /examples/global.rs: -------------------------------------------------------------------------------- 1 | use blink_alloc::GlobalBlinkAlloc; 2 | 3 | #[global_allocator] 4 | static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new(); 5 | 6 | fn main() { 7 | unsafe { 8 | GLOBAL_ALLOC.blink_mode(); 9 | } 10 | 11 | let _ = Box::new(42); 12 | let _ = vec![1, 2, 3]; 13 | 14 | unsafe { 15 | GLOBAL_ALLOC.direct_mode(); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /.github/workflows/badge.yml: -------------------------------------------------------------------------------- 1 | name: badge 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | test: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Install stable toolchain 16 | uses: actions-rust-lang/setup-rust-toolchain@v1 17 | - name: Run cargo test 18 | run: cargo test --all --features=sync 19 | -------------------------------------------------------------------------------- /.github/workflows/security.yml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | branches: [ main ] 7 | paths: 8 | - '**/Cargo.toml' 9 | 10 | env: 11 | CARGO_TERM_COLOR: always 12 | 13 | jobs: 14 | security_audit: 15 | if: ${{ 
github.event.label.name == 'ready-to-merge' }} 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: actions-rust-lang/audit@v1 20 | with: 21 | TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | -------------------------------------------------------------------------------- /.github/workflows/fast-pr-check.yml: -------------------------------------------------------------------------------- 1 | name: Fast PR check 2 | 3 | on: 4 | pull_request: 5 | types: [ opened, edited ] 6 | branches: [ main ] 7 | paths: 8 | - '**.rs' 9 | - '**/Cargo.toml' 10 | 11 | env: 12 | CARGO_TERM_COLOR: always 13 | 14 | jobs: 15 | test: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Install stable toolchain 20 | uses: actions-rust-lang/setup-rust-toolchain@v1 21 | - name: Run cargo test 22 | run: cargo test --all --features=sync 23 | -------------------------------------------------------------------------------- /src/oom.rs: -------------------------------------------------------------------------------- 1 | use core::convert::Infallible; 2 | 3 | #[cfg_attr(feature = "alloc", inline(always))] 4 | #[cfg_attr(not(feature = "alloc"), inline(never))] 5 | #[cold] 6 | pub(crate) fn handle_alloc_error(layout: core::alloc::Layout) -> ! { 7 | #[cfg(feature = "alloc")] 8 | alloc::alloc::handle_alloc_error(layout); 9 | 10 | #[cfg(not(feature = "alloc"))] 11 | panic!("allocation of {:?} failed", layout); 12 | } 13 | 14 | #[inline(never)] 15 | #[cold] 16 | pub(crate) fn size_overflow() -> Infallible { 17 | panic!("Size overflow") 18 | } 19 | -------------------------------------------------------------------------------- /license/APACHE: -------------------------------------------------------------------------------- 1 | Copyright 2022 The blink-alloc project developers 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /.github/workflows/check-platforms.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple platforms 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-targets: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest, windows-latest, macOS-latest] 16 | runs-on: ${{ matrix.os }} 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Install stable toolchain 20 | uses: actions-rust-lang/setup-rust-toolchain@v1 21 | - name: Run cargo check 22 | run: cargo check --all --features=sync 23 | -------------------------------------------------------------------------------- /.github/workflows/check-toolchains.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple toolchains 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-toolchains: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | rust-toolchain: [stable, nightly] 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Install ${{ matrix.rust-toolchain }} toolchain 20 | uses: actions-rust-lang/setup-rust-toolchain@v1 21 | with: 22 | toolchain: ${{ matrix.rust-toolchain }} 23 | - name: Run cargo check 24 | run: cargo check --all --features=sync 
25 | -------------------------------------------------------------------------------- /.github/workflows/test-features.yml: -------------------------------------------------------------------------------- 1 | name: Test feature permutations 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | test-features: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | std: ["", "alloc,", "std,"] 17 | sync: ["", "sync,"] 18 | nightly: ["", "nightly,"] 19 | steps: 20 | - uses: actions/checkout@v4 21 | - name: Install nightly toolchain 22 | uses: actions-rust-lang/setup-rust-toolchain@v1 23 | with: 24 | toolchain: nightly 25 | - name: Run cargo test 26 | run: cargo test --all --features=${{ matrix.std }}${{ matrix.sync }}${{ matrix.nightly }} 27 | -------------------------------------------------------------------------------- /.github/workflows/check-targets.yml: -------------------------------------------------------------------------------- 1 | name: Check multiple targets 2 | 3 | on: 4 | pull_request: 5 | types: [ labeled ] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | check-targets: 12 | if: ${{ github.event.label.name == 'ready-to-merge' }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | target: 17 | - i686-pc-windows-gnu 18 | - i686-pc-windows-msvc 19 | - i686-unknown-linux-gnu 20 | - x86_64-apple-darwin 21 | - x86_64-pc-windows-gnu 22 | - x86_64-pc-windows-msvc 23 | - x86_64-unknown-linux-gnu 24 | - wasm32-unknown-unknown 25 | steps: 26 | - uses: actions/checkout@v4 27 | - name: Install stable toolchain 28 | uses: actions-rust-lang/setup-rust-toolchain@v1 29 | with: 30 | target: ${{ matrix.target }} 31 | - name: Run cargo check 32 | run: cargo check --all --features=sync 33 | -------------------------------------------------------------------------------- /.github/workflows/lints.yml: 
-------------------------------------------------------------------------------- 1 | name: Lints 2 | 3 | on: 4 | pull_request: 5 | types: [ opened, edited ] 6 | branches: [ main ] 7 | paths: 8 | - '**.rs' 9 | - '**/Cargo.toml' 10 | 11 | env: 12 | CARGO_TERM_COLOR: always 13 | 14 | jobs: 15 | fmt: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Ensure rustfmt is installed 20 | uses: actions-rust-lang/setup-rust-toolchain@v1 21 | with: 22 | toolchain: nightly 23 | components: rustfmt 24 | - name: Rustfmt check 25 | uses: actions-rust-lang/rustfmt@v1 26 | 27 | clippy: 28 | runs-on: ubuntu-latest 29 | steps: 30 | - uses: actions/checkout@v4 31 | - name: Ensure clippy is installed 32 | uses: actions-rust-lang/setup-rust-toolchain@v1 33 | with: 34 | toolchain: nightly 35 | components: clippy 36 | - name: Run cargo clippy 37 | uses: giraffate/clippy-action@v1 38 | with: 39 | github_token: ${{ secrets.GITHUB_TOKEN }} 40 | clippy_flags: --all --all-features -- -D warnings 41 | reporter: 'github-pr-review' 42 | -------------------------------------------------------------------------------- /license/MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2022 The blink-alloc project developers 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 
16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "blink-alloc" 3 | version = "0.4.0" 4 | edition = "2018" 5 | authors = ["Zakarum "] 6 | license = "MIT OR Apache-2.0" 7 | documentation = "https://docs.rs/blink-alloc" 8 | homepage = "https://github.com/zakarumych/blink-alloc" 9 | repository = "https://github.com/zakarumych/blink-alloc" 10 | readme = "README.md" 11 | description = "Fast, concurrent, arena-based allocator with drop support" 12 | keywords = ["allocator", "arena", "concurrent", "drop", "no-std"] 13 | categories = ["memory-management", "no-std"] 14 | 15 | [features] 16 | nightly = ["bumpalo/allocator_api"] 17 | alloc = ["allocator-api2/alloc"] 18 | std = ["alloc", "allocator-api2/std"] 19 | sync = ["parking_lot", "std"] 20 | 21 | default = ["std"] 22 | 23 | [dependencies] 24 | parking_lot = { version = "0.12", optional = true } 25 | allocator-api2 = { version = "0.4.0", default-features = false } 26 | 27 | [dev-dependencies] 28 | criterion = "0.4" 29 | bumpalo = "3.19" 30 | 31 | [[bench]] 32 | name = "bench" 33 | harness = false 34 | required-features = ["alloc", "sync", "nightly"] 35 | 36 | [[example]] 37 | name = "global" 38 | required-features = ["std", "sync"] 39 | 40 | [package.metadata.docs.rs] 41 | all-features = true 42 | 
-------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [0.3.2] - 2025-12-11 8 | 9 | ### Changed 10 | 11 | - Updated to allocator-api2 v0.4.0, explicitly use `allocator_api` lang feature under "nightly" crate feature. 12 | 13 | ## [0.3.0] - 2023-05-10 14 | 15 | ### Changed 16 | 17 | - API cleanup. 18 | - Update to semi-stable version of `allocator-api2`. 19 | 20 | ## [0.2.5] - 2023-03-08 21 | 22 | ### Fixed 23 | - Allocated memory length calculation. 24 | 25 | ## [0.2.4] - 2023-03-07 26 | 27 | ### Fixed 28 | 29 | - build warnings 30 | 31 | 32 | ## [0.2.3] - 2023-03-07 33 | 34 | ### Added 35 | - `GlobalBlinkAlloc` - allocator suitable for use as `#[global_allocator]` 36 | Based on `SyncBlinkAlloc`. Provides local proxies that may be `'static` 37 | Unsafe `reset` method requires that all local proxies 38 | and previous allocations are dropped. 39 | - `UnsafeGlobalAlloc` - more unsafe version of `GlobalBlinkAlloc`. 40 | It must never be used concurrently. Only really usable in single-threaded 41 | applications. 42 | Requires `unsafe` block to initialize. 43 | 44 | ## [0.1.0] - 2023-02-27 45 | 46 | Initial implementation of blink-allocators. 47 | `BlinkAlloc` for thread-local usage. 48 | `SyncBlinkAlloc` for multi-threaded usage. 49 | `LocalBlinkAlloc` thread-local proxy for `SyncBlinkAlloc`. 50 | `Blink` - friendly allocator adaptor for use without collections. 51 | `BlinkAllocCache` - a cache of `BlinkAlloc` instances to keep them warm 
53 | -------------------------------------------------------------------------------- /src/api.rs: -------------------------------------------------------------------------------- 1 | 2 | #[cfg(feature = "nightly")] 3 | use core::alloc::Allocator; 4 | 5 | #[cfg(not(feature = "nightly"))] 6 | use allocator_api2::alloc::Allocator; 7 | 8 | /// Extension trait for [`Allocator`] that defines blink allocator API. 9 | /// Blink-allocators are allocators with cheap allocation 10 | /// and potentially no-op deallocation. 11 | /// Blink-allocator *can* reuse deallocated memory, but not required to. 12 | /// Typically deallocation is either no-op or processed only if deallocating 13 | /// the very last allocated memory block. 14 | /// The [`reset`][BlinkAllocator::reset] 15 | /// method deallocates all memory allocated from this instance at once. 16 | /// 17 | /// Additional guarantees are provided that 18 | /// 19 | /// * [`Allocator::shrink`] will always succeed and never move memory 20 | /// when `ptr` is already aligned to `new_layout.align()`. 21 | /// 22 | /// # Safety 23 | /// 24 | /// Draws most requirements from [`Allocator`] super-trait. 25 | /// The [`reset`][BlinkAllocator::reset] method 26 | /// may invalidate currently allocated memory if allocator is not cloneable. 27 | pub unsafe trait BlinkAllocator: Allocator { 28 | /// Resets allocator potentially invalidating all allocations 29 | /// made from this instance. 30 | /// This is no-op if allocator is [`Clone`][core::clone::Clone] 31 | /// (typically shared reference to blink-allocator). 32 | /// 33 | /// # Safety 34 | /// 35 | /// Caller must guarantee that all allocations made from this instance 36 | /// won't be used after this call. 37 | /// The potential invalid memory access will happen through 38 | /// raw pointer and requires `unsafe`. 39 | /// 40 | /// This method requires mutable borrow of the blink-allocator 41 | /// and so cannot be called while collection types reference it. 
42 | /// So it is safe to use reference to blink allocator as allocator type 43 | /// for, say, [`Vec`] and later then call `reset` safely. 44 | /// The `Vec` would have to be dropped (or forgotten) before calling `reset` 45 | /// due to mutable borrow. 46 | /// 47 | /// [`Vec`]: alloc::vec::Vec 48 | fn reset(&mut self); 49 | } 50 | 51 | unsafe impl BlinkAllocator for &A 52 | where 53 | A: BlinkAllocator, 54 | { 55 | #[inline] 56 | fn reset(&mut self) {} 57 | } 58 | 59 | unsafe impl<'a, A> BlinkAllocator for &'a mut A 60 | where 61 | &'a mut A: Allocator, 62 | A: BlinkAllocator, 63 | { 64 | #[inline] 65 | fn reset(&mut self) { 66 | A::reset(self); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | #![cfg_attr(not(feature = "std"), no_std)] 3 | #![cfg_attr(feature = "nightly", feature(allocator_api))] 4 | 5 | #[cfg(feature = "alloc")] 6 | extern crate alloc; 7 | 8 | macro_rules! feature_switch { 9 | ( ($feature:literal => $with:path | $without:path) ($($args:tt)*)) => { 10 | #[cfg(feature = $feature)] 11 | $with!($($args)*); 12 | 13 | #[cfg(not(feature = $feature))] 14 | $without!($($args)*); 15 | }; 16 | } 17 | 18 | #[allow(unused)] 19 | macro_rules! with_default { 20 | ($(#[$meta:meta])* $v:vis struct $name:ident<$($lt:lifetime,)* $($generic:ident $(: $bound:path $(: $bounds:path )*)? $(= +$default:ty)? $(= $default_type:ty)?),+> { $($(#[$fmeta:meta])* $fvis:vis $fname:ident: $ftype:ty),* $(,)? }) => { 21 | $(#[$meta])* 22 | $v struct $name<$($lt,)* $($generic $(: $bound $(+ $bounds)*)? $(= $default)? $(= $default_type)?)+> { 23 | $($(#[$fmeta])* $fvis $fname: $ftype,)* 24 | } 25 | }; 26 | } 27 | 28 | #[allow(unused)] 29 | macro_rules! without_default { 30 | ($(#[$meta:meta])* $v:vis struct $name:ident<$($lt:lifetime,)* $($generic:ident $(: $bound:path $(: $bounds:path )*)? 
$(= +$default:ty)? $(= $default_type:ty)?),+> { $($(#[$fmeta:meta])* $fvis:vis $fname:ident: $ftype:ty),* $(,)? }) => { 31 | $(#[$meta])* 32 | $v struct $name<$($lt,)* $($generic $(: $bound $(+ $bounds)*)? $(= $default_type)?)+> { 33 | $($(#[$fmeta])* $fvis $fname: $ftype,)* 34 | } 35 | }; 36 | } 37 | 38 | macro_rules! switch_alloc_default { 39 | ($($args:tt)*) => { 40 | feature_switch!{("alloc" => with_default | without_default) ($($args)*)} 41 | }; 42 | } 43 | 44 | macro_rules! switch_std_default { 45 | ($($args:tt)*) => { 46 | feature_switch!{("std" => with_default | without_default) ($($args)*)} 47 | }; 48 | } 49 | 50 | mod api; 51 | mod arena; 52 | mod blink; 53 | mod drop_list; 54 | mod global; 55 | mod local; 56 | 57 | #[cfg(feature = "sync")] 58 | mod sync; 59 | 60 | #[cfg(all(feature = "sync", feature = "alloc"))] 61 | mod cache; 62 | 63 | #[cfg(test)] 64 | mod tests; 65 | 66 | #[cfg(not(no_global_oom_handling))] 67 | mod oom; 68 | 69 | pub use self::{ 70 | api::BlinkAllocator, 71 | blink::{Blink, Emplace, IteratorExt, SendBlink}, 72 | global::local::UnsafeGlobalBlinkAlloc, 73 | local::BlinkAlloc, 74 | }; 75 | 76 | #[cfg(feature = "sync")] 77 | pub use self::sync::{LocalBlinkAlloc, SyncBlinkAlloc}; 78 | 79 | #[cfg(feature = "sync")] 80 | pub use self::global::sync::GlobalBlinkAlloc; 81 | 82 | #[cfg(all(feature = "sync", feature = "alloc"))] 83 | pub use self::cache::BlinkAllocCache; 84 | 85 | pub(crate) trait ResultExt { 86 | fn safe_ok(self) -> T; 87 | } 88 | 89 | impl ResultExt for Result { 90 | #[inline] 91 | fn safe_ok(self) -> T { 92 | match self { 93 | Ok(value) => value, 94 | Err(never) => match never {}, 95 | } 96 | } 97 | } 98 | 99 | #[inline] 100 | unsafe fn in_place<'a, T, I>(ptr: *mut T, init: I, f: impl FnOnce(I) -> T) -> &'a mut T { 101 | // Ask compiler very nicely to store return directly into memory. 
102 | core::ptr::write(ptr, f(init)); 103 | &mut *ptr 104 | } 105 | 106 | #[cold] 107 | fn cold() {} 108 | 109 | // #[cfg(debug_assertions)] 110 | // #[track_caller] 111 | // unsafe fn unreachable_unchecked() -> ! { 112 | // unreachable!() 113 | // } 114 | 115 | // #[cfg(not(debug_assertions))] 116 | // unsafe fn unreachable_unchecked() -> ! { 117 | // unsafe { core::hint::unreachable_unchecked() } 118 | // } 119 | -------------------------------------------------------------------------------- /BENCHMARKS.md: -------------------------------------------------------------------------------- 1 | # Benchmarks 2 | 3 | ## Table of Contents 4 | 5 | - [Benchmark Results](#benchmark-results) 6 | - [allocation](#allocation) 7 | - [warm-up](#warm-up) 8 | - [vec](#vec) 9 | - [from-iter](#from-iter) 10 | 11 | ## Benchmark Results 12 | 13 | ### allocation 14 | 15 | | | `blink_alloc::BlinkAlloc` | `blink_alloc::SyncBlinkAlloc` | `bumpalo::Bump` | 16 | |:-----------------------------------|:-----------------------------------|:---------------------------------------|:--------------------------------- | 17 | | **`alloc x 17453`** | `23.79 us` (✅ **1.00x**) | `169.62 us` (❌ *7.13x slower*) | `91.14 us` (❌ *3.83x slower*) | 18 | | **`grow same align x 17453`** | `59.37 us` (✅ **1.00x**) | `340.33 us` (❌ *5.73x slower*) | `152.35 us` (❌ *2.57x slower*) | 19 | | **`grow smaller align x 17453`** | `59.49 us` (✅ **1.00x**) | `340.34 us` (❌ *5.72x slower*) | `151.47 us` (❌ *2.55x slower*) | 20 | | **`grow larger align x 17453`** | `99.36 us` (✅ **1.00x**) | `341.84 us` (❌ *3.44x slower*) | `183.13 us` (❌ *1.84x slower*) | 21 | | **`shrink same align x 17453`** | `53.86 us` (✅ **1.00x**) | `340.96 us` (❌ *6.33x slower*) | `103.17 us` (❌ *1.92x slower*) | 22 | | **`shrink smaller align x 17453`** | `53.82 us` (✅ **1.00x**) | `341.15 us` (❌ *6.34x slower*) | `100.82 us` (❌ *1.87x slower*) | 23 | | **`shrink larger align x 17453`** | `87.50 us` (✅ **1.00x**) | `342.38 us` (❌ *3.91x slower*) 
| `fails` | 24 | 25 | ### warm-up 26 | 27 | | | `blink_alloc::BlinkAlloc` | `blink_alloc::SyncBlinkAlloc` | `bumpalo::Bump` | 28 | |:----------------------------|:-----------------------------------|:---------------------------------------|:-------------------------------- | 29 | | **`alloc 4 bytes x 17453`** | `24.39 us` (✅ **1.00x**) | `170.02 us` (❌ *6.97x slower*) | `91.73 us` (❌ *3.76x slower*) | 30 | 31 | ### vec 32 | 33 | | | `blink_alloc::BlinkAlloc` | `blink_alloc::SyncBlinkAlloc` | `bumpalo::Bump` | 34 | |:-------------------------------|:-----------------------------------|:---------------------------------------|:--------------------------------- | 35 | | **`push x 17453`** | `36.96 us` (✅ **1.00x**) | `37.01 us` (✅ **1.00x slower**) | `42.26 us` (❌ *1.14x slower*) | 36 | | **`reserve_exact(1) x 17453`** | `63.87 us` (✅ **1.00x**) | `169.10 us` (❌ *2.65x slower*) | `8.64 ms` (❌ *135.21x slower*) | 37 | 38 | 39 | ### from-iter 40 | 41 | | | `blink_alloc::BlinkAlloc` | `blink_alloc::SyncBlinkAlloc` | `bumpalo::Bump` | 42 | |:---------------------------------|:-----------------------------------|:---------------------------------------|:------------------------------- | 43 | | **`basic x 17453`** | `1.11 ms` (✅ **1.00x**) | `1.11 ms` (✅ **1.00x faster**) | `N/A` | 44 | | **`no-drop x 17453`** | `1.10 ms` (✅ **1.00x**) | `1.12 ms` (✅ **1.02x slower**) | `1.36 ms` (❌ *1.24x slower*) | 45 | | **`bad-filter x 17453`** | `1.67 ms` (✅ **1.00x**) | `1.77 ms` (✅ **1.06x slower**) | `N/A` | 46 | | **`bad-filter no-drop x 17453`** | `1.67 ms` (✅ **1.00x**) | `1.77 ms` (✅ **1.06x slower**) | `N/A` | 47 | 48 | --- 49 | Made with [criterion-table](https://github.com/nu11ptr/criterion-table) 50 | 51 | -------------------------------------------------------------------------------- /src/arena/local.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | with_cursor!(Cell<*mut u8>); 4 | 5 | /// Thread-local arena 
allocator. 6 | pub struct ArenaLocal { 7 | root: Cell>>, 8 | min_chunk_size: Cell, 9 | } 10 | 11 | /// It is safe to send `ArenaLocal` between threads. 12 | unsafe impl Send for ArenaLocal {} 13 | 14 | impl Drop for ArenaLocal { 15 | #[inline(always)] 16 | fn drop(&mut self) { 17 | debug_assert!( 18 | self.root.get().is_none(), 19 | "Owner must reset `ArenaLocal` with `keep_last` set to `false` before drop" 20 | ); 21 | } 22 | } 23 | 24 | impl ArenaLocal { 25 | #[inline(always)] 26 | pub const fn new() -> Self { 27 | ArenaLocal { 28 | root: Cell::new(None), 29 | min_chunk_size: Cell::new(CHUNK_START_SIZE), 30 | } 31 | } 32 | 33 | #[inline(always)] 34 | pub const fn with_chunk_size(min_chunk_size: usize) -> Self { 35 | ArenaLocal { 36 | root: Cell::new(None), 37 | min_chunk_size: Cell::new(min_chunk_size), 38 | } 39 | } 40 | 41 | #[inline(always)] 42 | #[cfg(feature = "sync")] 43 | pub fn last_chunk_size(&self) -> usize { 44 | match self.root.get() { 45 | None => 0, 46 | Some(root) => { 47 | // Safety: `root` is a valid pointer to chunk allocation. 
48 | unsafe { root.as_ref().cap() } 49 | } 50 | } 51 | } 52 | 53 | #[inline(always)] 54 | pub unsafe fn alloc_fast(&self, layout: Layout) -> Option> { 55 | if let Some(root) = self.root.get() { 56 | return unsafe { ChunkHeader::alloc(root, layout) }; 57 | } 58 | None 59 | } 60 | 61 | #[inline(always)] 62 | pub unsafe fn alloc_slow( 63 | &self, 64 | layout: Layout, 65 | allocator: impl Allocator, 66 | ) -> Result, AllocError> { 67 | alloc_slow(&self.root, self.min_chunk_size.get(), layout, allocator) 68 | } 69 | 70 | #[inline(always)] 71 | pub unsafe fn resize_fast( 72 | &self, 73 | ptr: NonNull, 74 | old_layout: Layout, 75 | new_layout: Layout, 76 | ) -> Option> { 77 | if let Some(root) = self.root.get() { 78 | return unsafe { ChunkHeader::resize(root, ptr, old_layout, new_layout) }; 79 | } 80 | None 81 | } 82 | 83 | #[inline(always)] 84 | pub unsafe fn resize_slow( 85 | &self, 86 | ptr: NonNull, 87 | old_layout: Layout, 88 | new_layout: Layout, 89 | allocator: impl Allocator, 90 | ) -> Result, AllocError> { 91 | resize_slow( 92 | &self.root, 93 | self.min_chunk_size.get(), 94 | ptr, 95 | old_layout, 96 | new_layout, 97 | allocator, 98 | ) 99 | } 100 | 101 | #[inline(always)] 102 | pub unsafe fn dealloc(&self, ptr: NonNull, size: usize) { 103 | dealloc(self.root.get(), ptr, size) 104 | } 105 | 106 | #[inline(always)] 107 | pub unsafe fn reset(&mut self, keep_last: bool, allocator: impl Allocator) { 108 | unsafe { reset(&self.root, keep_last, allocator) } 109 | } 110 | 111 | #[inline(always)] 112 | pub unsafe fn reset_unchecked(&self, keep_last: bool, allocator: impl Allocator) { 113 | unsafe { reset(&self.root, keep_last, allocator) } 114 | } 115 | 116 | #[cfg(feature = "sync")] 117 | #[inline(always)] 118 | pub fn reset_leak(&mut self, keep_last: bool) { 119 | reset_leak(&self.root, keep_last) 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/arena/sync.rs: 
-------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | with_cursor!(AtomicPtr); 4 | 5 | struct Inner { 6 | root: Option>, 7 | min_chunk_size: usize, 8 | } 9 | 10 | unsafe impl Send for Inner {} 11 | unsafe impl Sync for Inner {} 12 | 13 | /// Multi-threaded arena allocator. 14 | pub struct ArenaSync { 15 | inner: RwLock, 16 | } 17 | 18 | impl Drop for ArenaSync { 19 | #[inline(always)] 20 | fn drop(&mut self) { 21 | debug_assert!( 22 | self.inner.get_mut().root.is_none(), 23 | "Owner must reset `ArenaSync` with `keep_last` set to `false` before drop" 24 | ); 25 | } 26 | } 27 | 28 | impl ArenaSync { 29 | #[inline(always)] 30 | pub const fn new() -> Self { 31 | ArenaSync { 32 | inner: RwLock::new(Inner { 33 | root: None, 34 | min_chunk_size: CHUNK_START_SIZE, 35 | }), 36 | } 37 | } 38 | 39 | #[inline(always)] 40 | pub const fn with_chunk_size(min_chunk_size: usize) -> Self { 41 | ArenaSync { 42 | inner: RwLock::new(Inner { 43 | root: None, 44 | min_chunk_size, 45 | }), 46 | } 47 | } 48 | 49 | #[inline(always)] 50 | pub unsafe fn alloc_fast(&self, layout: Layout) -> Option> { 51 | let inner = self.inner.read(); 52 | 53 | if let Some(root) = inner.root { 54 | return unsafe { ChunkHeader::alloc(root, layout) }; 55 | } 56 | 57 | None 58 | } 59 | 60 | #[inline(always)] 61 | pub unsafe fn alloc_slow( 62 | &self, 63 | layout: Layout, 64 | allocator: impl Allocator, 65 | ) -> Result, AllocError> { 66 | let mut guard = self.inner.write(); 67 | let inner = &mut *guard; 68 | 69 | alloc_slow( 70 | Cell::from_mut(&mut inner.root), 71 | inner.min_chunk_size, 72 | layout, 73 | &allocator, 74 | ) 75 | } 76 | 77 | #[inline(always)] 78 | pub unsafe fn resize_fast( 79 | &self, 80 | ptr: NonNull, 81 | old_layout: Layout, 82 | new_layout: Layout, 83 | ) -> Option> { 84 | let inner = self.inner.read(); 85 | 86 | if let Some(root) = inner.root { 87 | return unsafe { ChunkHeader::resize(root, ptr, old_layout, new_layout) }; 88 | } 89 | 
None 90 | } 91 | 92 | #[inline(always)] 93 | pub unsafe fn resize_slow( 94 | &self, 95 | ptr: NonNull, 96 | old_layout: Layout, 97 | new_layout: Layout, 98 | allocator: impl Allocator, 99 | ) -> Result, AllocError> { 100 | let mut guard = self.inner.write(); 101 | let inner = &mut *guard; 102 | 103 | resize_slow( 104 | Cell::from_mut(&mut inner.root), 105 | inner.min_chunk_size, 106 | ptr, 107 | old_layout, 108 | new_layout, 109 | &allocator, 110 | ) 111 | } 112 | 113 | #[inline(always)] 114 | pub unsafe fn dealloc(&self, ptr: NonNull, size: usize) { 115 | dealloc(self.inner.read().root, ptr, size) 116 | } 117 | 118 | #[inline(always)] 119 | pub unsafe fn reset(&mut self, keep_last: bool, allocator: impl Allocator) { 120 | unsafe { 121 | reset( 122 | Cell::from_mut(&mut self.inner.get_mut().root), 123 | keep_last, 124 | allocator, 125 | ) 126 | } 127 | } 128 | 129 | #[inline(always)] 130 | pub unsafe fn reset_unchecked(&self, keep_last: bool, allocator: impl Allocator) { 131 | let mut guard = self.inner.write(); 132 | unsafe { reset(Cell::from_mut(&mut guard.root), keep_last, allocator) } 133 | } 134 | 135 | // #[inline(always)] 136 | // pub fn reset_leak(&mut self, keep_last: bool) { 137 | // reset_leak(Cell::from_mut(&mut self.inner.get_mut().root), keep_last) 138 | // } 139 | } 140 | -------------------------------------------------------------------------------- /src/drop_list.rs: -------------------------------------------------------------------------------- 1 | //! This crate provides `DropList` type which is 2 | //! an intrusive linked list of drop functions. 3 | //! 4 | 5 | use core::{ 6 | cell::Cell, 7 | mem::MaybeUninit, 8 | ptr::{self, addr_of_mut, slice_from_raw_parts_mut, NonNull}, 9 | }; 10 | 11 | /// Single drop item. 12 | /// Drops associated value when invoked. 13 | struct Drops { 14 | /// Number of values pointed by `ptr`. 15 | count: usize, 16 | 17 | /// Drop function. 18 | drop: unsafe fn(NonNull, usize), 19 | 20 | /// Next item in the list. 
21 | next: Option>, 22 | } 23 | 24 | impl Drops { 25 | unsafe fn drop(ptr: NonNull) -> Option> { 26 | let Drops { count, drop, next } = *ptr.as_ref(); 27 | // Safety: `DropItem` constructed as part of `DropItemValue`. 28 | // And `drop` is set to `drop_in_place::`. 29 | unsafe { (drop)(ptr, count) }; 30 | next 31 | } 32 | } 33 | 34 | #[repr(C)] 35 | pub struct DropItem { 36 | drops: Drops, 37 | pub value: T, 38 | } 39 | 40 | impl DropItem { 41 | pub unsafe fn init_value<'a, I>( 42 | mut ptr: NonNull>, 43 | init: I, 44 | f: impl FnOnce(&mut MaybeUninit, I), 45 | ) -> &'a mut Self { 46 | let drops_ptr = addr_of_mut!((*ptr.as_ptr()).drops); 47 | f(&mut *addr_of_mut!((*ptr.as_ptr()).value).cast(), init); 48 | ptr::write( 49 | drops_ptr, 50 | Drops { 51 | count: 1, 52 | drop: drop_from_item::, 53 | next: None, 54 | }, 55 | ); 56 | ptr.as_mut() 57 | } 58 | } 59 | 60 | impl DropItem<[T; 0]> { 61 | pub unsafe fn init_slice<'a>( 62 | mut ptr: NonNull>, 63 | count: usize, 64 | ) -> (&'a mut Self, &'a mut [T]) { 65 | debug_assert_ne!( 66 | count, 0, 67 | "DropItem<[T]> should not be constructed with count 0" 68 | ); 69 | ptr::write( 70 | ptr.as_ptr().cast(), 71 | Drops { 72 | count, 73 | drop: drop_from_item::, 74 | next: None, 75 | }, 76 | ); 77 | let slice = core::slice::from_raw_parts_mut(ptr.as_ptr().add(1).cast(), count); 78 | (ptr.as_mut(), slice) 79 | } 80 | } 81 | 82 | /// Intrusive linked list of drop functions. 83 | pub struct DropList { 84 | // Root item of the list. 85 | // Contains `None` if list is empty. 86 | // Lifetime of the items is bound to `DropList::reset` method calls. 87 | root: Cell>>, 88 | } 89 | 90 | impl DropList { 91 | pub const fn new() -> Self { 92 | DropList { 93 | root: Cell::new(None), 94 | } 95 | } 96 | 97 | /// Adds new drop item for given typed pointer. 98 | /// 99 | /// # Safety 100 | /// 101 | /// `item` reference must be valid until next call to [`DropList::reset`]. 
102 | #[allow(clippy::mut_from_ref)] 103 | pub unsafe fn add<'a, 'b: 'a, T: ?Sized>(&'a self, item: &'b mut DropItem) -> &'a mut T { 104 | item.drops.next = self.root.take(); 105 | let item = NonNull::from(item); 106 | self.root.set(Some(item.cast())); 107 | &mut *addr_of_mut!((*item.as_ptr()).value) 108 | } 109 | 110 | /// Drops all items in the list. 111 | pub fn reset(&mut self) { 112 | let mut next = self.root.take(); 113 | 114 | while let Some(item_ptr) = next { 115 | // Safety: `item` is a valid pointer to `DropItem`. 116 | // And it didn't move since it was added to the list. 117 | unsafe { 118 | next = Drops::drop(item_ptr); 119 | } 120 | } 121 | } 122 | } 123 | 124 | /// Type-erased `core::ptr::drop_in_place` wrapper. 125 | unsafe fn drop_from_item(ptr: NonNull, count: usize) { 126 | let ptr = ptr.cast::>(); 127 | let value_ptr = addr_of_mut!((*ptr.as_ptr()).value); 128 | core::ptr::drop_in_place(slice_from_raw_parts_mut(value_ptr, count)) 129 | } 130 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "alloc")] 2 | 3 | use core::{alloc::Layout, cell::Cell, mem::size_of, ptr::NonNull}; 4 | 5 | #[cfg(feature = "nightly")] 6 | use alloc::{ 7 | alloc::{AllocError, Allocator, Global}, 8 | vec::Vec, 9 | }; 10 | #[cfg(not(feature = "nightly"))] 11 | use allocator_api2::{ 12 | alloc::{AllocError, Allocator, Global}, 13 | vec::Vec, 14 | }; 15 | 16 | use crate::{blink::Blink, local::BlinkAlloc}; 17 | 18 | #[test] 19 | fn test_local_alloc() { 20 | let mut blink = BlinkAlloc::new(); 21 | 22 | let ptr = blink 23 | .allocate(Layout::new::()) 24 | .unwrap() 25 | .cast::(); 26 | unsafe { 27 | core::ptr::write(ptr.as_ptr(), 42); 28 | } 29 | 30 | blink.reset(); 31 | } 32 | 33 | #[test] 34 | fn test_bad_iter() { 35 | struct OneTimeGlobal { 36 | served: Cell, 37 | } 38 | 39 | unsafe impl Allocator for OneTimeGlobal { 40 | fn 
allocate(&self, layout: Layout) -> Result, AllocError> { 41 | if self.served.get() { 42 | Err(AllocError) 43 | } else { 44 | self.served.set(true); 45 | Global.allocate(layout) 46 | } 47 | } 48 | 49 | unsafe fn deallocate(&self, ptr: core::ptr::NonNull, layout: Layout) { 50 | Global.deallocate(ptr, layout) 51 | } 52 | } 53 | 54 | const ELEMENT_COUNT: usize = 2000; 55 | const ELEMENT_SIZE: usize = size_of::(); 56 | 57 | let mut blink = Blink::new_in(BlinkAlloc::with_chunk_size_in( 58 | ELEMENT_SIZE * ELEMENT_COUNT, 59 | OneTimeGlobal { 60 | served: Cell::new(false), 61 | }, 62 | )); 63 | 64 | blink 65 | .emplace() 66 | .from_iter((0..ELEMENT_COUNT as u32).filter(|_| true)); 67 | 68 | blink.reset(); 69 | } 70 | 71 | #[test] 72 | fn test_reuse() { 73 | struct ControlledGlobal { 74 | enabled: Cell, 75 | last: Cell, 76 | } 77 | 78 | unsafe impl Allocator for ControlledGlobal { 79 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 80 | if !self.enabled.get() { 81 | return Err(AllocError); 82 | } 83 | if self.last.get() { 84 | self.enabled.set(false); 85 | } 86 | Global.allocate(layout) 87 | } 88 | 89 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 90 | Global.deallocate(ptr, layout) 91 | } 92 | } 93 | 94 | let allocator = ControlledGlobal { 95 | enabled: Cell::new(true), 96 | last: Cell::new(false), 97 | }; 98 | 99 | let mut alloc = BlinkAlloc::with_chunk_size_in(0, &allocator); 100 | 101 | for _ in 0..123 { 102 | alloc.allocate(Layout::new::()).unwrap(); 103 | } 104 | alloc.reset(); 105 | 106 | allocator.last.set(false); 107 | 108 | for _ in 0..123 { 109 | alloc.allocate(Layout::new::()).unwrap(); 110 | } 111 | } 112 | 113 | #[test] 114 | fn test_emplace_no_drop() { 115 | use alloc::{borrow::ToOwned, string::String}; 116 | 117 | struct Foo<'a>(&'a String); 118 | 119 | impl Drop for Foo<'_> { 120 | fn drop(&mut self) { 121 | panic!("Dropped"); 122 | } 123 | } 124 | 125 | let mut blink = Blink::new(); 126 | let s = "Hello".to_owned(); 127 | let 
foo = blink.emplace_no_drop().value(Foo(&s)); 128 | assert_eq!(foo.0, "Hello"); 129 | let world = blink.put("World".to_owned()); 130 | // Would be unsound if `foo` could be dropped. 131 | foo.0 = world; 132 | blink.reset(); 133 | // assert_eq!(foo.0, "Universe"); // Cannot compile. `foo` does not outlive reset. 134 | } 135 | 136 | #[test] 137 | fn test_vec() { 138 | let mut blink_alloc = BlinkAlloc::new(); 139 | let mut vec = Vec::new_in(&blink_alloc); 140 | vec.extend([1, 2, 3]); 141 | 142 | vec.push(4); 143 | vec.extend(5..6); 144 | vec.push(6); 145 | 146 | assert_eq!(vec, [1, 2, 3, 4, 5, 6]); 147 | drop(vec); 148 | blink_alloc.reset(); 149 | } 150 | -------------------------------------------------------------------------------- /src/cache.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | cell::UnsafeCell, 3 | mem::{replace, ManuallyDrop, MaybeUninit}, 4 | sync::atomic::{AtomicUsize, Ordering}, 5 | }; 6 | 7 | use alloc::vec::Vec; 8 | 9 | #[cfg(feature = "nightly")] 10 | use alloc::alloc::{Allocator, Global}; 11 | #[cfg(not(feature = "nightly"))] 12 | use allocator_api2::alloc::{Allocator, Global}; 13 | 14 | use parking_lot::RwLock; 15 | 16 | use crate::local::BlinkAlloc; 17 | 18 | struct Inner { 19 | /// Array of [`BlinkAlloc`] instances ready to pop. 20 | pop_array: Vec>>>, 21 | 22 | /// Index of the next pop [`BlinkAlloc`] instance to pop. 23 | next_pop: AtomicUsize, 24 | 25 | /// Array of [`BlinkAlloc`] instances that are released. 26 | push_array: Vec>>>, 27 | 28 | /// Index to push next [`BlinkAlloc`] instance. 29 | next_push: AtomicUsize, 30 | } 31 | 32 | unsafe impl Sync for Inner 33 | where 34 | A: Allocator, 35 | BlinkAlloc: Send, 36 | { 37 | } 38 | 39 | /// Multi-thread cache for [`BlinkAlloc`] instances. 40 | /// Stores pushed [`BlinkAlloc`] instances and returns them on pop. 41 | /// Blink-allocators are kept warm in the cache. 
42 | /// 43 | /// This type is internally synchronized with hybrid 44 | /// blocking + wait-free algorithm. 45 | pub struct BlinkAllocCache { 46 | inner: RwLock>, 47 | } 48 | 49 | impl Default for BlinkAllocCache 50 | where 51 | A: Allocator, 52 | { 53 | fn default() -> Self { 54 | Self::new() 55 | } 56 | } 57 | 58 | impl BlinkAllocCache 59 | where 60 | A: Allocator, 61 | { 62 | /// Creates a new empty [`BlinkAllocCache`]. 63 | pub const fn new() -> Self { 64 | BlinkAllocCache { 65 | inner: RwLock::new(Inner { 66 | pop_array: Vec::new(), 67 | next_pop: AtomicUsize::new(0), 68 | push_array: Vec::new(), 69 | next_push: AtomicUsize::new(0), 70 | }), 71 | } 72 | } 73 | 74 | /// Acquires some [`BlinkAlloc`] instance from the cache. 75 | /// Returns none if the cache is empty. 76 | pub fn pop(&self) -> Option> { 77 | let inner = self.inner.read(); 78 | 79 | if !inner.pop_array.is_empty() { 80 | // Acquire index to pop. 81 | let idx = inner.next_pop.fetch_add(1, Ordering::Acquire); 82 | 83 | if idx < inner.pop_array.len() { 84 | // Pop the [`BlinkAlloc`] instance. 85 | 86 | // Safety: Acquired exclusive index to this instance. 87 | let blink = unsafe { ManuallyDrop::take(&mut *inner.pop_array[idx].get()) }; 88 | 89 | return Some(blink); 90 | } 91 | 92 | prevent_overflow(&inner.next_pop, idx, inner.pop_array.len()); 93 | } 94 | 95 | if inner.next_push.load(Ordering::Relaxed) == 0 { 96 | return None; 97 | } 98 | 99 | drop(inner); 100 | let mut inner = self.inner.write(); 101 | 102 | Self::flush(&mut inner); 103 | 104 | inner 105 | .pop_array 106 | .pop() 107 | .map(|cell| ManuallyDrop::into_inner(cell.into_inner())) 108 | } 109 | 110 | pub fn push(&self, blink: BlinkAlloc) { 111 | let inner = self.inner.read(); 112 | 113 | if !inner.push_array.is_empty() { 114 | // Acquire index to push. 115 | let idx = inner.next_push.fetch_add(1, Ordering::Acquire); 116 | 117 | if idx < inner.push_array.len() { 118 | // Push the [`BlinkAlloc`] instance. 
119 | 120 | // Safety: Acquired exclusive index to this instance. 121 | MaybeUninit::write(unsafe { &mut *inner.push_array[idx].get() }, blink); 122 | return; 123 | } 124 | 125 | prevent_overflow(&inner.next_push, idx, inner.push_array.len()); 126 | } 127 | 128 | drop(inner); 129 | let mut inner = self.inner.write(); 130 | 131 | Self::flush(&mut inner); 132 | 133 | inner 134 | .pop_array 135 | .push(UnsafeCell::new(ManuallyDrop::new(blink))); 136 | } 137 | 138 | fn flush(inner: &mut Inner) { 139 | let pushed = replace(inner.next_push.get_mut(), 0).min(inner.push_array.len()); 140 | let popped = replace(inner.next_pop.get_mut(), 0).min(inner.pop_array.len()); 141 | 142 | let pushed_iter = inner.push_array.drain(..pushed); 143 | 144 | inner.pop_array.splice( 145 | ..popped, 146 | pushed_iter.map(|cell| { 147 | // rustc, please, optimized this call to no-op. 148 | 149 | // Safety: `next_push` equals the number of elements for which push started. 150 | // Exclusive lock ensures initialization finished. 
151 | UnsafeCell::new(ManuallyDrop::new(unsafe { 152 | cell.into_inner().assume_init() 153 | })) 154 | }), 155 | ); 156 | } 157 | } 158 | 159 | fn prevent_overflow(atomic: &AtomicUsize, current: usize, upper: usize) { 160 | #[cold] 161 | fn cold_store(atomic: &AtomicUsize, upper: usize) { 162 | atomic.store(upper, Ordering::Relaxed); 163 | } 164 | 165 | if current >= isize::MAX as usize { 166 | cold_store(atomic, upper); 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # blink-alloc 2 | 3 | [![crates](https://img.shields.io/crates/v/blink-alloc.svg?style=for-the-badge&label=blink-alloc)](https://crates.io/crates/blink-alloc) 4 | [![docs](https://img.shields.io/badge/docs.rs-blink--alloc-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white)](https://docs.rs/blink-alloc) 5 | [![actions](https://img.shields.io/github/actions/workflow/status/zakarumych/blink-alloc/badge.yml?branch=main&style=for-the-badge)](https://github.com/zakarumych/blink-alloc/actions/workflows/badge.yml) 6 | [![MIT/Apache](https://img.shields.io/badge/license-MIT%2FApache-blue.svg?style=for-the-badge)](./COPYING) 7 | ![loc](https://img.shields.io/tokei/lines/github/zakarumych/blink-alloc?style=for-the-badge) 8 | 9 | Blink-alloc is [extremely fast](./BENCHMARKS.md) allocator based on the common idea of 10 | allocating linearly zipping a cursor through memory chunk and 11 | reset everything at once by setting cursor back to start. 12 | 13 | With Rust's borrow checker this idea can be implemented safely, 14 | preventing resets to occur while allocated memory is in use. 15 | 16 | ### [Jump to examples](#examples) 17 | 18 | # Design 19 | 20 | The blink-allocator acts as an adaptor to some lower-level allocator. 21 | Grabs chunks of memory from underlying allocator 22 | and serves allocations from them. 
23 | When chunk is exhausted blink-allocator requests new larger chunk of memory 24 | from lower-level allocator. 25 | 26 | On reset all but last chunks are returned back to underlying allocator. 27 | The goal is to allocate a single chunk large enough to serve all allocations 28 | between resets, so that lower-level allocator is not touched after 29 | initial warm-up phase. Making allocations and resets almost free. 30 | 31 | The way blink-allocators are implemented offers cheap allocation shrinks when 32 | new layout fits into currently allocated memory block. 33 | Which is certainly the case for things like [`Vec::shrink_to`] and [`Vec::shrink_to_fit`]. 34 | When done on the tip of allocation it also frees the memory for reuse. 35 | 36 | Additionally fast allocation grows is possible when done on the tip of allocation. 37 | Which is easy to control when using thread-local version. 38 | This opens a way for blazing fast [`Vec::push`] call even at full [`Vec`] capacity. 39 | 40 | The simple implementation uses unsynchronized interior mutability 41 | and thus only works in single-thread scenario. 42 | It can be send to another thread but not shared. 43 | 44 | Maintaining an instance per thread is one way to tackle this problem. 45 | Yet not always possible or desireable. 46 | 47 | This crate provides additional implementation for multi-threaded 48 | scenario with slight performance penalty. To negate it, a local copy 49 | of single-threaded blink-allocator can be created to take memory from 50 | shared multi-threaded blink-allocator, allowing reusing memory across threads 51 | while keeping blazing fast allocations of single-threaded version. 52 | It works best for fork-join type of parallelism since it requires 53 | a point where the multi-threaded blink-allocator is not shared 54 | to reset it. 55 | 56 | For task-based parallelism a cache of blink-allocators 57 | can be constructed. 58 | Task may borrow single-threaded blink-allocator and return it when its done. 
59 | This will keep cache full of pre-warmed blink-allocators. 60 | 61 | # Single-threaded 62 | 63 | [`BlinkAlloc`] is a single-threaded version of the allocator. 64 | It uses unsynchronized interior mutability for fastest performance. 65 | [`BlinkAlloc`] can be sent to other threads but not shared. 66 | 67 | For multi-threading an instance of [`BlinkAlloc`] per thread/task 68 | can be created. 69 | Though it may be not possible or practical in some ways, so consider 70 | alternatives below. 71 | 72 | # Multi-threaded 73 | 74 | [`SyncBlinkAlloc`] works in multithreaded environment and shares memory 75 | from one chunk between threads. Reduces overall memory usage, 76 | skips warm-up phase for new threads and tasks. 77 | [`SyncBlinkAlloc`] can spawn [`LocalBlinkAlloc`] instances 78 | that work similarly to [`BlinkAlloc`]. 79 | [`LocalBlinkAlloc`] instances fetch memory chunks from shared [`SyncBlinkAlloc`]. 80 | [`SyncBlinkAlloc`] still requires exclusive access to reset. 81 | Works best for fork-join style of parallelism, where reset happens 82 | after joining all threads. 83 | 84 | For task/based parallelism this crate provides [`BlinkAllocCache`] type 85 | which is a cache of [`BlinkAlloc`] instances. 86 | Tasks may fetch blink allocator from cache, 87 | use it and then return it back to cache. 88 | Cache keeps [`BlinkAlloc`] instances warmed up. 89 | 90 | # Allocator API 91 | 92 | Allocators implement [`Allocator`] interface from [`alloc`] crate 93 | or a copy of it when feature `"nightly"` is not enabled. 94 | `"nightly"` requires Rust feature [`allocator_api`] 95 | and works only on nightly. 96 | Once [`Allocator`] trait is stable the feature will do nothing and 97 | removed in next major release. 98 | 99 | # Blink without collections 100 | 101 | [`BlinkAlloc`] and friends implement [`Allocator`] from [`allocator_api`] 102 | unstable feature. It is only available when `"nightly"` feature is enabled 103 | for this crate. 
Otherwise [`Allocator`] is not [`core::alloc::Allocator`] 104 | but a duplicate defined in the crate. 105 | With [`allocator_api`] it is possible to use [`BlinkAlloc`] and others 106 | for allocator type in collection types that support one. 107 | Currently [`Vec`], [`VecDeque`], [`BTreeMap`] and [`BTreeSet`] can use 108 | user-provided allocator. 109 | Also [`hashbrown::HashMap`] and [`hashbrown::HashSet`] support it with 110 | `"nightly"` feature. 111 | 112 | However on stable it cannot be used right now. 113 | 114 | It is still possible to use blink-allocators in safe manner - 115 | meet [`Blink`] allocator adaptor. 116 | Put anything into memory allocated by [`Blink`]. 117 | Values, iterators, closures to construct a value, 118 | slices and strings. 119 | It works with everything*. 120 | Uses underlying blink-allocator and returns mutable reference 121 | to values placed into allocated memory. 122 | By default it drops placed values on reset. 123 | 124 | \* Ask for API extension if doesn't work for your use case. 125 | 126 | # Examples 127 | 128 | Usage of [`Blink`] allocator adaptor. 129 | Initialize and start putting values there. 130 | 131 | ```rust 132 | use blink_alloc::Blink; 133 | 134 | #[cfg(feature = "alloc")] 135 | fn main() { 136 | // `Blink::new` uses `BlinkAlloc` 137 | let mut blink = Blink::new(); 138 | 139 | // Allocates memory and moves value there. 140 | // Returns mutable reference to it. 141 | let x = blink.put(42); 142 | assert_eq!(*x, 42); 143 | *x = 11; 144 | 145 | // Copies string slice to the blink-allocated memory. 146 | let string = blink.copy_str("Hello world"); 147 | 148 | // Mutable reference allows string mutation 149 | string.make_ascii_lowercase(); 150 | assert_eq!(string, "hello world"); 151 | 152 | // Consumes iterator and returns all values from it in slice. 153 | // Works fine on problematic iterators with terrible size hint. 
154 | let slice = blink.emplace().from_iter((0..10).filter(|x| x % 3 != 0)); 155 | assert_eq!(&*slice, &[1, 2, 4, 5, 7, 8]); 156 | blink.reset(); 157 | } 158 | #[cfg(not(feature = "alloc"))] fn main() {} 159 | ``` 160 | 161 | ```rust 162 | #![cfg_attr(feature = "nightly", feature(allocator_api))] 163 | use blink_alloc::BlinkAlloc; 164 | 165 | #[cfg(feature = "alloc")] 166 | fn main() { 167 | #[cfg(feature = "nightly")] 168 | use std::vec::Vec; 169 | #[cfg(not(feature = "nightly"))] 170 | use allocator_api2::vec::Vec; 171 | 172 | let mut blink = BlinkAlloc::new(); 173 | let mut vec = Vec::new_in(&blink); 174 | vec.extend((1..10).map(|x| x * 3 - 2)); 175 | 176 | drop(vec); 177 | blink.reset(); 178 | } 179 | #[cfg(not(feature = "alloc"))] fn main() {} 180 | ``` 181 | 182 | # No-std 183 | 184 | This crate supports `no_std` environment. 185 | `"alloc"` feature is enabled by default and adds 186 | dependency on [`alloc`] crate. 187 | 188 | ## License 189 | 190 | Licensed under either of 191 | 192 | * Apache License, Version 2.0, ([license/APACHE](license/APACHE) or ) 193 | * MIT license ([license/MIT](license/MIT) or ) 194 | 195 | at your option. 196 | 197 | ## Contributions 198 | 199 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
200 | 201 | [`Vec::shrink_to`]: https://doc.rust-lang.org/alloc/vec/struct.Vec.html#method.shrink_to 202 | [`Vec::shrink_to_fit`]: https://doc.rust-lang.org/alloc/vec/struct.Vec.html#method.shrink_to_fit 203 | [`Vec::push`]: https://doc.rust-lang.org/alloc/vec/struct.Vec.html#method.push 204 | [`Vec`]: https://doc.rust-lang.org/alloc/vec/struct.Vec.html 205 | [`BlinkAlloc`]: https://docs.rs/blink-alloc/latest/blink_alloc/struct.BlinkAlloc.html 206 | [`SyncBlinkAlloc`]: https://docs.rs/blink-alloc/latest/blink_alloc/struct.SyncBlinkAlloc.html 207 | [`LocalBlinkAlloc`]: https://docs.rs/blink-alloc/latest/blink_alloc/struct.LocalBlinkAlloc.html 208 | [`BlinkAllocCache`]: https://docs.rs/blink-alloc/latest/blink_alloc/struct.BlinkAllocCache.html 209 | [`Blink`]: https://docs.rs/blink-alloc/latest/blink_alloc/struct.Blink.html 210 | [`Allocator`]: https://docs.rs/allocator-api2/latest/allocator_api2/ 211 | [`allocator_api`]: https://doc.rust-lang.org/beta/unstable-book/library-features/allocator-api.html 212 | [`core::alloc::Allocator`]: https://doc.rust-lang.org/core/alloc/trait.Allocator.html 213 | [`Vec`]: https://doc.rust-lang.org/alloc/vec/struct.Vec.html 214 | [`VecDeque`]: https://doc.rust-lang.org/alloc/collections/vec_deque/struct.VecDeque.html 215 | [`BTreeMap`]: https://doc.rust-lang.org/alloc/collections/btree_map/struct.BTreeMap.html 216 | [`BTreeSet`]: https://doc.rust-lang.org/alloc/collections/btree_set/struct.BTreeSet.html 217 | [`hashbrown::HashMap`]: https://docs.rs/hashbrown/latest/hashbrown/hash_map/struct.HashMap.html 218 | [`hashbrown::HashSet`]: https://docs.rs/hashbrown/latest/hashbrown/hash_set/struct.HashSet.html 219 | [`alloc`]: https://doc.rust-lang.org/alloc/index.html 220 | -------------------------------------------------------------------------------- /src/local.rs: -------------------------------------------------------------------------------- 1 | //! This module provides multi-threaded blink allocator\ 2 | //! with sync resets. 
3 | 4 | use core::{alloc::Layout, mem::ManuallyDrop, ptr::NonNull}; 5 | 6 | #[cfg(feature = "nightly")] 7 | use core::alloc::{AllocError, Allocator}; 8 | 9 | #[cfg(not(feature = "nightly"))] 10 | use allocator_api2::alloc::{AllocError, Allocator}; 11 | 12 | #[cfg(all(feature = "nightly", feature = "alloc"))] 13 | use alloc::alloc::Global; 14 | 15 | #[cfg(all(not(feature = "nightly"), feature = "alloc"))] 16 | use allocator_api2::alloc::Global; 17 | 18 | use crate::{api::BlinkAllocator, arena::ArenaLocal}; 19 | 20 | switch_alloc_default! { 21 | /// Single-threaded blink allocator. 22 | /// 23 | /// Blink-allocator is arena-based allocator that 24 | /// allocates memory in growing chunks and serve allocations from them. 25 | /// When chunk is exhausted a new larger chunk is allocated. 26 | /// 27 | /// Deallocation is no-op. [`BlinkAlloc`] can be reset 28 | /// to free all chunks except the last one, that will be reused. 29 | /// 30 | /// Blink allocator aims to allocate a chunk large enough to 31 | /// serve all allocations between resets. 32 | /// 33 | /// A shared and mutable reference to the [`BlinkAlloc`] implement 34 | /// [`Allocator`] trait. 35 | /// When "nightly" feature is enabled, [`Allocator`] trait is 36 | /// [`core::alloc::Allocator`]. Otherwise it is duplicated trait defined 37 | /// in [`allocator-api2`](allocator_api2). 38 | /// 39 | /// Resetting blink allocator requires mutable borrow, so it is not possible 40 | /// to do while shared borrow is alive. That matches requirement of 41 | /// [`Allocator`] trait - while [`Allocator`] instance 42 | /// (a shared reference to [`BlinkAlloc`]) or any of its clones are alive, 43 | /// allocated memory must be valid. 44 | /// 45 | /// This version of blink-allocator is single-threaded. It is possible 46 | /// to send to another thread, but cannot be shared. 47 | /// Internally it uses [`Cell`](core::cell::Cell) for interior mutability and requires 48 | /// that state cannot be changed from another thread. 
49 | /// 50 | #[cfg_attr(feature = "sync", doc = "For multi-threaded version see [`SyncBlinkAlloc`](crate::sync::SyncBlinkAlloc).")] 51 | #[cfg_attr(not(feature = "sync"), doc = "For multi-threaded version see `SyncBlinkAlloc`.")] 52 | /// Requires `"sync"` feature. 53 | /// 54 | /// # Example 55 | /// 56 | /// ``` 57 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 58 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 59 | /// # #[cfg(feature = "alloc")] fn main() { 60 | /// # use blink_alloc::BlinkAlloc; 61 | /// # use std::ptr::NonNull; 62 | /// 63 | /// let mut blink = BlinkAlloc::new(); 64 | /// let layout = std::alloc::Layout::new::<[u32; 8]>(); 65 | /// let ptr = blink.allocate(layout).unwrap(); 66 | /// let ptr = NonNull::new(ptr.as_ptr() as *mut u8).unwrap(); // Method for this is unstable. 67 | /// 68 | /// unsafe { 69 | /// std::ptr::write(ptr.as_ptr().cast(), [1, 2, 3, 4, 5, 6, 7, 8]); 70 | /// } 71 | /// 72 | /// blink.reset(); 73 | /// # } 74 | /// ``` 75 | /// 76 | /// # Example that uses nightly's `allocator_api` 77 | /// 78 | /// ``` 79 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 80 | /// # #[cfg(feature = "alloc")] 81 | /// # fn main() { 82 | /// # use blink_alloc::BlinkAlloc; 83 | /// # #[cfg(feature = "nightly")] 84 | /// # use std::vec::Vec; 85 | /// # #[cfg(not(feature = "nightly"))] 86 | /// # use allocator_api2::vec::Vec; 87 | /// let mut blink = BlinkAlloc::new(); 88 | /// let mut vec = Vec::new_in(&blink); 89 | /// vec.push(1); 90 | /// vec.extend(1..3); 91 | /// vec.extend(3..10); 92 | /// drop(vec); 93 | /// blink.reset(); 94 | /// # } 95 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 96 | /// ``` 97 | pub struct BlinkAlloc { 98 | arena: ArenaLocal, 99 | allocator: A, 100 | } 101 | } 102 | 103 | impl Drop for BlinkAlloc 104 | where 105 | A: Allocator, 106 | { 107 | #[inline] 108 | fn drop(&mut self) { 109 | // Safety: 110 | // Same instance is used for all allocations and resets. 
111 | unsafe { 112 | self.arena.reset(false, &self.allocator); 113 | } 114 | } 115 | } 116 | 117 | impl Default for BlinkAlloc 118 | where 119 | A: Allocator + Default, 120 | { 121 | #[inline] 122 | fn default() -> Self { 123 | Self::new_in(Default::default()) 124 | } 125 | } 126 | 127 | #[cfg(feature = "alloc")] 128 | impl BlinkAlloc { 129 | /// Creates new blink allocator that uses global allocator 130 | /// to allocate memory chunks. 131 | /// 132 | /// See [`BlinkAlloc::new_in`] for using custom allocator. 133 | #[inline] 134 | pub const fn new() -> Self { 135 | BlinkAlloc::new_in(Global) 136 | } 137 | 138 | /// Creates new blink allocator that uses global allocator 139 | /// to allocate memory chunks. 140 | /// With this method you can specify initial chunk size. 141 | /// 142 | /// See [`BlinkAlloc::new_in`] for using custom allocator. 143 | #[inline] 144 | pub const fn with_chunk_size(chunk_size: usize) -> Self { 145 | BlinkAlloc::with_chunk_size_in(chunk_size, Global) 146 | } 147 | } 148 | 149 | impl BlinkAlloc 150 | where 151 | A: Allocator, 152 | { 153 | /// Creates new blink allocator that uses provided allocator 154 | /// to allocate memory chunks. 155 | /// 156 | /// See [`BlinkAlloc::new`] for using global allocator. 157 | #[inline] 158 | pub const fn new_in(allocator: A) -> Self { 159 | BlinkAlloc { 160 | arena: ArenaLocal::new(), 161 | allocator, 162 | } 163 | } 164 | 165 | /// Returns reference to the underlying allocator used by this blink allocator. 166 | #[inline(always)] 167 | pub const fn inner(&self) -> &A { 168 | &self.allocator 169 | } 170 | 171 | /// Creates new blink allocator that uses global allocator 172 | /// to allocate memory chunks. 173 | /// With this method you can specify initial chunk size. 174 | /// 175 | /// See [`BlinkAlloc::new_in`] for using custom allocator. 
176 | #[inline] 177 | pub const fn with_chunk_size_in(chunk_size: usize, allocator: A) -> Self { 178 | BlinkAlloc { 179 | arena: ArenaLocal::with_chunk_size(chunk_size), 180 | allocator, 181 | } 182 | } 183 | 184 | /// Allocates memory with specified layout from this allocator. 185 | /// If needed it will allocate new chunk using underlying allocator. 186 | /// If chunk allocation fails, it will return `Err`. 187 | #[inline(always)] 188 | pub fn allocate(&self, layout: Layout) -> Result, AllocError> { 189 | // Safety: 190 | // Same instance is used for all allocations and resets. 191 | if let Some(ptr) = unsafe { self.arena.alloc_fast(layout) } { 192 | return Ok(ptr); 193 | } 194 | unsafe { self.arena.alloc_slow(layout, &self.allocator) } 195 | } 196 | 197 | /// Resizes memory allocation. 198 | /// Potentially happens in-place. 199 | /// 200 | /// # Safety 201 | /// 202 | /// `ptr` must be a pointer previously returned by [`allocate`](BlinkAlloc::allocate). 203 | /// `old_size` must be in range `layout.size()..=slice.len()` 204 | /// where `layout` is the layout used in the call to [`allocate`](BlinkAlloc::allocate). 205 | /// and `slice` is the slice pointer returned by [`allocate`](BlinkAlloc::allocate). 206 | /// 207 | /// On success, the old pointer is invalidated and the new pointer is returned. 208 | /// On error old allocation is still valid. 209 | #[inline(always)] 210 | pub unsafe fn resize( 211 | &self, 212 | ptr: NonNull, 213 | old_layout: Layout, 214 | new_layout: Layout, 215 | ) -> Result, AllocError> { 216 | if let Some(ptr) = unsafe { self.arena.resize_fast(ptr, old_layout, new_layout) } { 217 | return Ok(ptr); 218 | } 219 | 220 | // Safety: 221 | // Same instance is used for all allocations and resets. 222 | // `ptr` was allocated by this allocator. 223 | unsafe { 224 | self.arena 225 | .resize_slow(ptr, old_layout, new_layout, &self.allocator) 226 | } 227 | } 228 | 229 | /// Deallocates memory previously allocated from this allocator. 
230 | /// 231 | /// This call may not actually free memory. 232 | /// All memory is guaranteed to be freed on [`reset`](BlinkAlloc::reset) call. 233 | /// 234 | /// # Safety 235 | /// 236 | /// `ptr` must be a pointer previously returned by [`allocate`](BlinkAlloc::allocate). 237 | /// `size` must be in range `layout.size()..=slice.len()` 238 | /// where `layout` is the layout used in the call to [`allocate`](BlinkAlloc::allocate). 239 | /// and `slice` is the slice pointer returned by [`allocate`](BlinkAlloc::allocate). 240 | #[inline(always)] 241 | pub unsafe fn deallocate(&self, ptr: NonNull, size: usize) { 242 | // Safety: 243 | // `ptr` was allocated by this allocator. 244 | unsafe { 245 | self.arena.dealloc(ptr, size); 246 | } 247 | } 248 | 249 | /// Resets this allocator, deallocating all chunks except the last one. 250 | /// Last chunk will be reused. 251 | /// With steady memory usage after few iterations 252 | /// one chunk should be sufficient for all allocations between resets. 253 | #[inline(always)] 254 | pub fn reset(&mut self) { 255 | // Safety: 256 | // Same instance is used for all allocations and resets. 257 | unsafe { 258 | self.arena.reset(true, &self.allocator); 259 | } 260 | } 261 | 262 | /// Resets this allocator, deallocating all chunks. 263 | #[inline(always)] 264 | pub fn reset_final(&mut self) { 265 | // Safety: 266 | // Same instance is used for all allocations and resets. 267 | unsafe { 268 | self.arena.reset(false, &self.allocator); 269 | } 270 | } 271 | 272 | /// Resets this allocator, deallocating all chunks except the last one. 273 | /// Last chunk will be reused. 274 | /// With steady memory usage after few iterations 275 | /// one chunk should be sufficient for all allocations between resets. 276 | /// 277 | /// # Safety 278 | /// 279 | /// Blink-allocators guarantee that memory can be used while shared 280 | /// borrow to the allocator is held, preventing safe `fn reset` call. 
281 | /// 282 | /// With this method it becomes caller responsibility to ensure 283 | /// that allocated memory won't be used after reset. 284 | #[inline(always)] 285 | pub unsafe fn reset_unchecked(&self) { 286 | // Safety: 287 | // Same instance is used for all allocations and resets. 288 | unsafe { 289 | self.arena.reset_unchecked(true, &self.allocator); 290 | } 291 | } 292 | 293 | /// Unwrap this allocator, returning the underlying allocator. 294 | /// Leaks allocated chunks. 295 | /// 296 | /// To deallocate all chunks call [`reset_final`](BlinkAlloc::reset_final) first. 297 | pub fn into_inner(self) -> A { 298 | let me = ManuallyDrop::new(self); 299 | unsafe { core::ptr::read(&me.allocator) } 300 | } 301 | } 302 | 303 | unsafe impl Allocator for BlinkAlloc 304 | where 305 | A: Allocator, 306 | { 307 | #[inline(always)] 308 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 309 | BlinkAlloc::allocate(self, layout) 310 | } 311 | 312 | #[inline(always)] 313 | unsafe fn shrink( 314 | &self, 315 | ptr: NonNull, 316 | old_layout: Layout, 317 | new_layout: Layout, 318 | ) -> Result, AllocError> { 319 | BlinkAlloc::resize(self, ptr, old_layout, new_layout) 320 | } 321 | 322 | #[inline(always)] 323 | unsafe fn grow( 324 | &self, 325 | ptr: NonNull, 326 | old_layout: Layout, 327 | new_layout: Layout, 328 | ) -> Result, AllocError> { 329 | BlinkAlloc::resize(self, ptr, old_layout, new_layout) 330 | } 331 | 332 | #[inline(always)] 333 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 334 | BlinkAlloc::deallocate(self, ptr, layout.size()); 335 | } 336 | } 337 | 338 | unsafe impl BlinkAllocator for BlinkAlloc 339 | where 340 | A: Allocator, 341 | { 342 | #[inline(always)] 343 | fn reset(&mut self) { 344 | BlinkAlloc::reset(self) 345 | } 346 | } 347 | -------------------------------------------------------------------------------- /src/global/local.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | 
alloc::{GlobalAlloc, Layout}, 3 | cell::UnsafeCell, 4 | ptr::{null_mut, NonNull}, 5 | }; 6 | 7 | #[cfg(debug_assertions)] 8 | use core::cell::Cell; 9 | 10 | #[cfg(feature = "nightly")] 11 | use core::alloc::{AllocError, Allocator}; 12 | 13 | #[cfg(not(feature = "nightly"))] 14 | use allocator_api2::alloc::{AllocError, Allocator}; 15 | 16 | use crate::{cold, local::BlinkAlloc}; 17 | 18 | struct State { 19 | blink: BlinkAlloc, 20 | enabled: bool, 21 | } 22 | 23 | impl State { 24 | #[inline(always)] 25 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 26 | match self.enabled { 27 | true => self.blink.allocate(layout), 28 | false => { 29 | cold(); 30 | self.blink.inner().allocate(layout) 31 | } 32 | } 33 | } 34 | 35 | #[inline(always)] 36 | fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { 37 | match self.enabled { 38 | true => self.blink.allocate_zeroed(layout), 39 | false => { 40 | cold(); 41 | self.blink.inner().allocate_zeroed(layout) 42 | } 43 | } 44 | } 45 | 46 | #[inline(always)] 47 | unsafe fn resize( 48 | &self, 49 | ptr: NonNull, 50 | old_layout: Layout, 51 | new_layout: Layout, 52 | ) -> Result, AllocError> { 53 | match self.enabled { 54 | true => self.blink.resize(ptr, old_layout, new_layout), 55 | false => { 56 | cold(); 57 | if old_layout.size() >= new_layout.size() { 58 | self.blink.inner().grow(ptr, old_layout, new_layout) 59 | } else { 60 | self.blink.inner().shrink(ptr, old_layout, new_layout) 61 | } 62 | } 63 | } 64 | } 65 | 66 | #[inline(always)] 67 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 68 | match self.enabled { 69 | true => self.blink.deallocate(ptr, layout.size()), 70 | false => { 71 | cold(); 72 | self.blink.inner().deallocate(ptr, layout) 73 | } 74 | } 75 | } 76 | } 77 | 78 | switch_std_default! { 79 | /// [`GlobalAlloc`] implementation based on [`BlinkAlloc`]. 
80 | pub struct UnsafeGlobalBlinkAlloc { 81 | state: UnsafeCell>, 82 | #[cfg(debug_assertions)] 83 | allocations: Cell, 84 | } 85 | } 86 | 87 | // The user is responsible for ensuring that this allocator 88 | // won't be used concurrently. 89 | // To make this sound, `UnsafeGlobalBlinkAlloc::new` and `UnsafeGlobalBlinkAlloc::new_in` 90 | // are marked unsafe. 91 | unsafe impl Send for UnsafeGlobalBlinkAlloc {} 92 | unsafe impl Sync for UnsafeGlobalBlinkAlloc {} 93 | 94 | #[cfg(feature = "std")] 95 | impl UnsafeGlobalBlinkAlloc { 96 | /// Create a new [`UnsafeGlobalBlinkAlloc`]. 97 | /// 98 | /// Const function can be used to initialize a static variable. 99 | /// 100 | /// # Safety 101 | /// 102 | /// This method is unsafe because this type is not thread-safe 103 | /// but implements `Sync`. 104 | /// Allocator returned by this method must not be used concurrently. 105 | /// 106 | /// For safer alternative see [`GlobalBlinkAlloc`](https://docs.rs/blink-alloc/0.2.2/blink_alloc/struct.GlobalBlinkAlloc.html). 107 | /// 108 | /// # Example 109 | /// 110 | /// ``` 111 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 112 | /// 113 | /// // Safety: This program is single-threaded. 114 | /// #[global_allocator] 115 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new() }; 116 | /// 117 | /// let _ = Box::new(42); 118 | /// let _ = vec![1, 2, 3]; 119 | /// ``` 120 | pub const unsafe fn new() -> Self { 121 | UnsafeGlobalBlinkAlloc::new_in(std::alloc::System) 122 | } 123 | 124 | /// Create a new [`UnsafeGlobalBlinkAlloc`]. 125 | /// 126 | /// This method allows to specify initial chunk size. 127 | /// 128 | /// Const function can be used to initialize a static variable. 129 | /// 130 | /// # Safety 131 | /// 132 | /// This method is unsafe because this type is not thread-safe 133 | /// but implements `Sync`. 134 | /// Allocator returned by this method must not be used concurrently. 
135 | /// 136 | /// For safer alternative see [`GlobalBlinkAlloc`](https://docs.rs/blink-alloc/0.2.2/blink_alloc/struct.GlobalBlinkAlloc.html). 137 | /// 138 | /// # Example 139 | /// 140 | /// ``` 141 | /// # #[cfg(feature = "std")] fn main() { 142 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 143 | /// 144 | /// // Safety: This program is single-threaded. 145 | /// #[global_allocator] 146 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new_in(std::alloc::System) }; 147 | /// 148 | /// let _ = Box::new(42); 149 | /// let _ = vec![1, 2, 3]; 150 | /// # } 151 | /// # #[cfg(not(feature = "std"))] fn main() {} 152 | /// ``` 153 | pub const unsafe fn with_chunk_size(chunk_size: usize) -> Self { 154 | UnsafeGlobalBlinkAlloc::with_chunk_size_in(chunk_size, std::alloc::System) 155 | } 156 | } 157 | 158 | impl UnsafeGlobalBlinkAlloc 159 | where 160 | A: Allocator, 161 | { 162 | /// Create a new [`UnsafeGlobalBlinkAlloc`] 163 | /// with specified underlying allocator. 164 | /// 165 | /// Const function can be used to initialize a static variable. 166 | /// 167 | /// # Safety 168 | /// 169 | /// This method is unsafe because this type is not thread-safe 170 | /// but implements `Sync`. 171 | /// Allocator returned by this method must not be used concurrently. 172 | /// 173 | /// For safer alternative see [`GlobalBlinkAlloc`](https://docs.rs/blink-alloc/0.2.2/blink_alloc/struct.GlobalBlinkAlloc.html). 174 | /// 175 | /// # Example 176 | /// 177 | /// ``` 178 | /// # #[cfg(feature = "std")] fn main() { 179 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 180 | /// 181 | /// // Safety: This program is single-threaded. 
182 | /// #[global_allocator] 183 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new_in(std::alloc::System) }; 184 | /// 185 | /// let _ = Box::new(42); 186 | /// let _ = vec![1, 2, 3]; 187 | /// # } 188 | /// # #[cfg(not(feature = "std"))] fn main() {} 189 | /// ``` 190 | pub const unsafe fn new_in(allocator: A) -> Self { 191 | UnsafeGlobalBlinkAlloc { 192 | state: UnsafeCell::new(State { 193 | blink: BlinkAlloc::new_in(allocator), 194 | enabled: false, 195 | }), 196 | #[cfg(debug_assertions)] 197 | allocations: Cell::new(0), 198 | } 199 | } 200 | 201 | /// Create a new [`UnsafeGlobalBlinkAlloc`] 202 | /// with specified underlying allocator. 203 | /// 204 | /// This method allows to specify initial chunk size. 205 | /// 206 | /// Const function can be used to initialize a static variable. 207 | /// 208 | /// # Safety 209 | /// 210 | /// This method is unsafe because this type is not thread-safe 211 | /// but implements `Sync`. 212 | /// Allocator returned by this method must not be used concurrently. 213 | /// 214 | /// For safer alternative see [`GlobalBlinkAlloc`](https://docs.rs/blink-alloc/0.2.2/blink_alloc/struct.GlobalBlinkAlloc.html). 215 | /// 216 | /// # Example 217 | /// 218 | /// ``` 219 | /// # #[cfg(feature = "std")] fn main() { 220 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 221 | /// 222 | /// // Safety: This program is single-threaded. 
223 | /// #[global_allocator] 224 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new_in(std::alloc::System) }; 225 | /// 226 | /// let _ = Box::new(42); 227 | /// let _ = vec![1, 2, 3]; 228 | /// # } 229 | /// # #[cfg(not(feature = "std"))] fn main() {} 230 | /// ``` 231 | pub const unsafe fn with_chunk_size_in(chunk_size: usize, allocator: A) -> Self { 232 | UnsafeGlobalBlinkAlloc { 233 | state: UnsafeCell::new(State { 234 | blink: BlinkAlloc::with_chunk_size_in(chunk_size, allocator), 235 | enabled: false, 236 | }), 237 | #[cfg(debug_assertions)] 238 | allocations: Cell::new(0), 239 | } 240 | } 241 | 242 | /// Resets this allocator, deallocating all chunks except the last one. 243 | /// Last chunk will be reused. 244 | /// With steady memory usage after few iterations 245 | /// one chunk should be sufficient for all allocations between resets. 246 | /// 247 | /// # Safety 248 | /// 249 | /// Memory allocated from this allocator in blink mode becomes invalidated. 250 | /// The user is responsible to ensure that previously allocated memory 251 | /// won't be used after reset. 252 | /// 253 | /// # Example 254 | /// 255 | /// ``` 256 | /// # #[cfg(feature = "std")] fn main() { 257 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 258 | /// 259 | /// #[global_allocator] 260 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new() }; 261 | /// 262 | /// unsafe { GLOBAL_ALLOC.blink_mode() }; 263 | /// 264 | /// let b = Box::new(42); 265 | /// let v = vec![1, 2, 3]; 266 | /// drop(b); 267 | /// drop(v); 268 | /// 269 | /// // Safety: memory allocated in blink mode won't be used after reset. 
    /// unsafe {
    ///     GLOBAL_ALLOC.reset();
    ///     GLOBAL_ALLOC.direct_mode();
    /// };
    /// # }
    /// # #[cfg(not(feature = "std"))] fn main() {}
    /// ```
    #[inline(always)]
    pub unsafe fn reset(&self) {
        #[cfg(debug_assertions)]
        {
            assert_eq!(self.allocations.get(), 0, "Not everything was deallocated");
        }

        (*self.state.get()).blink.reset_unchecked();
    }

    /// Switches allocator to blink mode.
    /// All allocations will be served by blink-allocator.
    ///
    /// The type is created in direct mode.
    /// When used as global allocator, user may manually switch into blink mode
    /// in `main` or at any point later.
    ///
    /// However user must switch back to direct mode before returning from `main`.
    ///
    /// # Safety
    ///
    /// Must be externally synchronized with other threads accessing this allocator.
    /// Memory allocated in direct mode must not be deallocated while in blink mode.
    #[inline(always)]
    pub unsafe fn blink_mode(&self) {
        (*self.state.get()).enabled = true;
    }

    /// Switches allocator to direct mode.
    /// All allocations will be served by underlying allocator.
    ///
    /// The type is created in direct mode.
    /// When used as global allocator, user may manually switch into blink mode
    /// in `main` or at any point later.
    ///
    /// However user must switch back to direct mode before returning from `main`.
    ///
    /// # Safety
    ///
    /// Must be externally synchronized with other threads accessing this allocator.
    /// Memory allocated in blink mode must not be deallocated while in direct mode.
318 | #[inline(always)] 319 | pub unsafe fn direct_mode(&self) { 320 | self.reset(); 321 | (*self.state.get()).enabled = false; 322 | } 323 | } 324 | 325 | unsafe impl GlobalAlloc for UnsafeGlobalBlinkAlloc 326 | where 327 | A: Allocator, 328 | { 329 | #[inline] 330 | unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 { 331 | match (*self.state.get()).allocate(layout) { 332 | Ok(ptr) => { 333 | #[cfg(debug_assertions)] 334 | if (*self.state.get()).enabled { 335 | self.allocations.set(self.allocations.get() + 1); 336 | } 337 | ptr.as_ptr().cast() 338 | } 339 | Err(_) => null_mut(), 340 | } 341 | } 342 | 343 | #[inline] 344 | unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) { 345 | let ptr = NonNull::new_unchecked(ptr); 346 | (*self.state.get()).deallocate(ptr, layout); 347 | #[cfg(debug_assertions)] 348 | if (*self.state.get()).enabled { 349 | self.allocations 350 | .set(self.allocations.get().saturating_sub(1)); 351 | } 352 | } 353 | 354 | #[inline] 355 | unsafe fn alloc_zeroed(&self, layout: core::alloc::Layout) -> *mut u8 { 356 | match (*self.state.get()).allocate_zeroed(layout) { 357 | Ok(ptr) => { 358 | #[cfg(debug_assertions)] 359 | if (*self.state.get()).enabled { 360 | self.allocations.set(self.allocations.get() + 1); 361 | } 362 | ptr.as_ptr().cast() 363 | } 364 | Err(_) => null_mut(), 365 | } 366 | } 367 | 368 | #[inline] 369 | unsafe fn realloc( 370 | &self, 371 | ptr: *mut u8, 372 | layout: core::alloc::Layout, 373 | new_size: usize, 374 | ) -> *mut u8 { 375 | let Ok(new_layout) = Layout::from_size_align(new_size, layout.align()) else { 376 | return null_mut(); 377 | }; 378 | 379 | let result = match NonNull::new(ptr) { 380 | None => (*self.state.get()).allocate(new_layout), 381 | Some(ptr) => (*self.state.get()).resize(ptr, layout, new_layout), 382 | }; 383 | 384 | match result { 385 | Ok(ptr) => ptr.as_ptr().cast(), 386 | Err(_) => null_mut(), 387 | } 388 | } 389 | } 390 | 
--------------------------------------------------------------------------------
/src/global/sync.rs:
--------------------------------------------------------------------------------
use core::{
    alloc::{GlobalAlloc, Layout},
    cell::UnsafeCell,
    ptr::{null_mut, NonNull},
};

#[cfg(debug_assertions)]
use core::sync::atomic::{AtomicU64, Ordering};

#[cfg(feature = "nightly")]
use core::alloc::{AllocError, Allocator};

#[cfg(not(feature = "nightly"))]
use allocator_api2::alloc::{AllocError, Allocator};

use crate::{cold, sync::SyncBlinkAlloc, LocalBlinkAlloc};

/// Inner state of the global allocator wrapper:
/// the thread-safe blink allocator plus a flag selecting the current mode.
struct State<A: Allocator> {
    blink: SyncBlinkAlloc<A>,
    // `true` — blink mode: serve from the blink arena.
    // `false` — direct mode: pass through to the underlying allocator.
    enabled: bool,
}

impl<A: Allocator> State<A> {
    /// Allocates per the current mode.
    /// Direct mode is the cold path — the wrapper is expected to spend
    /// most of its time in blink mode.
    #[inline(always)]
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        match self.enabled {
            true => self.blink.allocate(layout),
            false => {
                cold();
                self.blink.inner().allocate(layout)
            }
        }
    }

    /// Same as [`State::allocate`] but returns zero-initialized memory.
    #[inline(always)]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        match self.enabled {
            true => self.blink.allocate_zeroed(layout),
            false => {
                cold();
                self.blink.inner().allocate_zeroed(layout)
            }
        }
    }

    /// Resizes an existing allocation per the current mode.
    ///
    /// # Safety
    ///
    /// Same contract as [`Allocator::grow`] / [`Allocator::shrink`]:
    /// `ptr` must denote a block currently allocated via this state
    /// with `old_layout`.
    #[inline(always)]
    unsafe fn resize(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        match self.enabled {
            true => self.blink.resize(ptr, old_layout, new_layout),
            false => {
                cold();
                // BUGFIX: the branches were swapped. `Allocator::grow` requires
                // `new_layout.size() >= old_layout.size()` and `shrink` requires
                // the opposite; calling `grow` for a shrinking realloc (as the
                // old code did) violates the trait's safety contract.
                if new_layout.size() >= old_layout.size() {
                    self.blink.inner().grow(ptr, old_layout, new_layout)
                } else {
                    self.blink.inner().shrink(ptr, old_layout, new_layout)
                }
            }
        }
    }

    /// Deallocates per the current mode.
    /// Blink-mode deallocation only needs the size; direct mode forwards
    /// the full layout to the underlying allocator.
    #[inline(always)]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        match self.enabled {
            true => self.blink.deallocate(ptr, layout.size()),
            false => {
                cold();
                self.blink.inner().deallocate(ptr, layout)
            }
74 | } 75 | } 76 | } 77 | 78 | switch_std_default! { 79 | /// [`GlobalAlloc`] implementation based on [`SyncBlinkAlloc`]. 80 | /// 81 | /// # Example 82 | /// 83 | /// ``` 84 | /// use blink_alloc::GlobalBlinkAlloc; 85 | /// 86 | /// #[global_allocator] 87 | /// static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new(); 88 | /// 89 | /// fn main() { 90 | /// let _ = Box::new(42); 91 | /// let _ = vec![1, 2, 3]; 92 | /// } 93 | /// ``` 94 | pub struct GlobalBlinkAlloc { 95 | state: UnsafeCell>, 96 | #[cfg(debug_assertions)] 97 | allocations: AtomicU64, 98 | } 99 | } 100 | 101 | unsafe impl Send for GlobalBlinkAlloc {} 102 | unsafe impl Sync for GlobalBlinkAlloc {} 103 | 104 | #[cfg(feature = "std")] 105 | impl GlobalBlinkAlloc { 106 | /// Create a new [`GlobalBlinkAlloc`]. 107 | /// 108 | /// Const function can be used to initialize a static variable. 109 | /// 110 | /// # Example 111 | /// 112 | /// ``` 113 | /// use blink_alloc::GlobalBlinkAlloc; 114 | /// 115 | /// #[global_allocator] 116 | /// static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new(); 117 | /// 118 | /// fn main() { 119 | /// let _ = Box::new(42); 120 | /// let _ = vec![1, 2, 3]; 121 | /// } 122 | /// ``` 123 | pub const fn new() -> Self { 124 | GlobalBlinkAlloc::new_in(std::alloc::System) 125 | } 126 | 127 | /// Create a new [`GlobalBlinkAlloc`]. 128 | /// 129 | /// This method allows to specify initial chunk size. 130 | /// 131 | /// Const function can be used to initialize a static variable. 
132 | /// 133 | /// # Example 134 | /// 135 | /// ``` 136 | /// use blink_alloc::GlobalBlinkAlloc; 137 | /// 138 | /// #[global_allocator] 139 | /// static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new(); 140 | /// 141 | /// fn main() { 142 | /// let _ = Box::new(42); 143 | /// let _ = vec![1, 2, 3]; 144 | /// } 145 | /// ``` 146 | pub const fn with_chunk_size(chunk_size: usize) -> Self { 147 | GlobalBlinkAlloc::with_chunk_size_in(chunk_size, std::alloc::System) 148 | } 149 | } 150 | 151 | impl GlobalBlinkAlloc 152 | where 153 | A: Allocator, 154 | { 155 | /// Create a new [`GlobalBlinkAlloc`] 156 | /// with specified underlying allocator. 157 | /// 158 | /// Const function can be used to initialize a static variable. 159 | /// 160 | /// # Example 161 | /// 162 | /// ``` 163 | /// use blink_alloc::GlobalBlinkAlloc; 164 | /// 165 | /// #[global_allocator] 166 | /// static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new_in(std::alloc::System); 167 | /// 168 | /// fn main() { 169 | /// let _ = Box::new(42); 170 | /// let _ = vec![1, 2, 3]; 171 | /// } 172 | /// ``` 173 | pub const fn new_in(allocator: A) -> Self { 174 | GlobalBlinkAlloc { 175 | state: UnsafeCell::new(State { 176 | blink: SyncBlinkAlloc::new_in(allocator), 177 | enabled: false, 178 | }), 179 | #[cfg(debug_assertions)] 180 | allocations: AtomicU64::new(0), 181 | } 182 | } 183 | 184 | /// Create a new [`GlobalBlinkAlloc`] 185 | /// with specified underlying allocator. 186 | /// 187 | /// This method allows to specify initial chunk size. 188 | /// 189 | /// Const function can be used to initialize a static variable. 
190 | /// 191 | /// # Example 192 | /// 193 | /// ``` 194 | /// use blink_alloc::GlobalBlinkAlloc; 195 | /// 196 | /// #[global_allocator] 197 | /// static GLOBAL_ALLOC: GlobalBlinkAlloc = GlobalBlinkAlloc::new_in(std::alloc::System); 198 | /// 199 | /// fn main() { 200 | /// let _ = Box::new(42); 201 | /// let _ = vec![1, 2, 3]; 202 | /// } 203 | /// ``` 204 | pub const fn with_chunk_size_in(chunk_size: usize, allocator: A) -> Self { 205 | GlobalBlinkAlloc { 206 | state: UnsafeCell::new(State { 207 | blink: SyncBlinkAlloc::with_chunk_size_in(chunk_size, allocator), 208 | enabled: false, 209 | }), 210 | #[cfg(debug_assertions)] 211 | allocations: AtomicU64::new(0), 212 | } 213 | } 214 | 215 | /// Resets this allocator, deallocating all chunks except the last one. 216 | /// Last chunk will be reused. 217 | /// With steady memory usage after few iterations 218 | /// one chunk should be sufficient for all allocations between resets. 219 | /// 220 | /// # Safety 221 | /// 222 | /// Memory allocated from this allocator in blink mode becomes invalidated. 223 | /// The user is responsible to ensure that previously allocated memory 224 | /// won't be used after reset. 225 | /// 226 | /// # Example 227 | /// 228 | /// ``` 229 | /// # #[cfg(feature = "std")] fn main() { 230 | /// use blink_alloc::UnsafeGlobalBlinkAlloc; 231 | /// 232 | /// #[global_allocator] 233 | /// static GLOBAL_ALLOC: UnsafeGlobalBlinkAlloc = unsafe { UnsafeGlobalBlinkAlloc::new() }; 234 | /// 235 | /// unsafe { GLOBAL_ALLOC.blink_mode() }; 236 | /// 237 | /// let b = Box::new(42); 238 | /// let v = vec![1, 2, 3]; 239 | /// drop(b); 240 | /// drop(v); 241 | /// 242 | /// // Safety: memory allocated in blink mode won't be used after reset. 
243 | /// unsafe { 244 | /// GLOBAL_ALLOC.reset(); 245 | /// GLOBAL_ALLOC.direct_mode(); 246 | /// }; 247 | /// # } 248 | /// # #[cfg(not(feature = "std"))] fn main() {} 249 | /// ``` 250 | #[inline(always)] 251 | pub unsafe fn reset(&self) { 252 | #[cfg(debug_assertions)] 253 | { 254 | assert_eq!( 255 | self.allocations.load(Ordering::SeqCst), 256 | 0, 257 | "Not everything was deallocated" 258 | ); 259 | } 260 | 261 | (*self.state.get()).blink.reset_unchecked(); 262 | } 263 | 264 | /// Switches allocator to blink mode. 265 | /// All allocations will be served by blink-allocator. 266 | /// 267 | /// The type is created in direct mode. 268 | /// When used as global allocator, user may manually switch into blink mode 269 | /// in `main` or at any point later. 270 | /// 271 | /// However user must switch back to direct mode before returning from `main`. 272 | /// 273 | /// # Safety 274 | /// 275 | /// Must be externally synchronized with other threads accessing this allocator. 276 | /// Memory allocated in direct mode must not be deallocated while in blink mode. 277 | #[inline(always)] 278 | pub unsafe fn blink_mode(&self) { 279 | (*self.state.get()).enabled = true; 280 | } 281 | 282 | /// Switches allocator to direct mode. 283 | /// All allocations will be served by underlying allocator. 284 | /// 285 | /// The type is created in direct mode. 286 | /// When used as global allocator, user may manually switch into blink mode 287 | /// in `main` or at any point later. 288 | /// 289 | /// However user must switch back to direct mode before returning from `main`. 290 | /// 291 | /// # Safety 292 | /// 293 | /// Must be externally synchronized with other threads accessing this allocator. 294 | /// Memory allocated in blink mode must not be deallocated while in direct mode. 
295 | #[inline(always)] 296 | pub unsafe fn direct_mode(&self) { 297 | self.reset(); 298 | (*self.state.get()).enabled = false; 299 | } 300 | 301 | /// Creates a new thread-local blink allocator proxy 302 | /// that borrows from this multi-threaded allocator. 303 | /// 304 | /// The local proxy allocator works faster and 305 | /// allows more consistent memory reuse. 306 | /// It can be recreated without resetting the multi-threaded allocator, 307 | /// allowing [`SyncBlinkAlloc`] to be warm-up and serve all allocations 308 | /// from a single chunk without ever blocking. 309 | /// 310 | /// Best works for fork-join style of parallelism. 311 | /// Create a local allocator for each thread/task. 312 | /// Reset after all threads/tasks are finished. 313 | /// 314 | /// # Examples 315 | /// 316 | /// ``` 317 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 318 | /// # use blink_alloc::GlobalBlinkAlloc; 319 | /// # #[cfg(feature = "nightly")] 320 | /// # use std::vec::Vec; 321 | /// # #[cfg(not(feature = "nightly"))] 322 | /// # use allocator_api2::vec::Vec; 323 | /// # fn main() { 324 | /// static BLINK: GlobalBlinkAlloc = GlobalBlinkAlloc::new(); 325 | /// 326 | /// for _ in 0..3 { 327 | /// for i in 0..16 { 328 | /// let mut blink = BLINK.local(); // Sendable and 'static. 329 | /// std::thread::scope(move |_| { 330 | /// let mut vec = Vec::new_in(&blink); 331 | /// vec.push(i); 332 | /// for j in i*2..i*30 { 333 | /// vec.push(j); // Proxy will allocate enough memory to grow vec without reallocating on 2nd iteration and later. 334 | /// } 335 | /// drop(vec); // Without this line it will fail to borrow mutable on next line. 336 | /// blink.reset(); 337 | /// }); 338 | /// 339 | /// // Safety: Proxies and allocations are dropped. 
340 | /// unsafe { BLINK.reset() }; 341 | /// } 342 | /// } 343 | /// # } 344 | /// ``` 345 | pub fn local(&self) -> LocalBlinkAlloc<'_, A> { 346 | unsafe { (*self.state.get()).blink.local() } 347 | } 348 | } 349 | 350 | unsafe impl GlobalAlloc for GlobalBlinkAlloc 351 | where 352 | A: Allocator, 353 | { 354 | #[inline] 355 | unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 { 356 | match (*self.state.get()).allocate(layout) { 357 | Ok(ptr) => { 358 | #[cfg(debug_assertions)] 359 | if (*self.state.get()).enabled { 360 | self.allocations.fetch_add(1, Ordering::SeqCst); 361 | } 362 | ptr.as_ptr().cast() 363 | } 364 | Err(_) => null_mut(), 365 | } 366 | } 367 | 368 | #[inline] 369 | unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) { 370 | let ptr = NonNull::new_unchecked(ptr); 371 | (*self.state.get()).deallocate(ptr, layout); 372 | #[cfg(debug_assertions)] 373 | { 374 | if (*self.state.get()).enabled { 375 | let _ = self.allocations.fetch_sub(1, Ordering::SeqCst); 376 | } 377 | } 378 | } 379 | 380 | #[inline] 381 | unsafe fn alloc_zeroed(&self, layout: core::alloc::Layout) -> *mut u8 { 382 | match (*self.state.get()).allocate_zeroed(layout) { 383 | Ok(ptr) => { 384 | #[cfg(debug_assertions)] 385 | if (*self.state.get()).enabled { 386 | self.allocations.fetch_add(1, Ordering::SeqCst); 387 | } 388 | ptr.as_ptr().cast() 389 | } 390 | Err(_) => null_mut(), 391 | } 392 | } 393 | 394 | #[inline] 395 | unsafe fn realloc( 396 | &self, 397 | ptr: *mut u8, 398 | layout: core::alloc::Layout, 399 | new_size: usize, 400 | ) -> *mut u8 { 401 | let Ok(new_layout) = Layout::from_size_align(new_size, layout.align()) else { 402 | return null_mut(); 403 | }; 404 | 405 | let result = match NonNull::new(ptr) { 406 | None => (*self.state.get()).allocate(new_layout), 407 | Some(ptr) => (*self.state.get()).resize(ptr, layout, new_layout), 408 | }; 409 | 410 | match result { 411 | Ok(ptr) => ptr.as_ptr().cast(), 412 | Err(_) => null_mut(), 413 | } 414 | } 415 
| } 416 | -------------------------------------------------------------------------------- /benches/bench.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(feature = "nightly", feature(allocator_api))] 2 | 3 | use std::{ 4 | alloc::System, 5 | any::TypeId, 6 | sync::atomic::{AtomicUsize, Ordering}, 7 | }; 8 | 9 | #[cfg(feature = "nightly")] 10 | use std::{ 11 | alloc::{Allocator, Layout}, 12 | vec::Vec, 13 | }; 14 | 15 | #[cfg(not(feature = "nightly"))] 16 | use allocator_api2::{ 17 | alloc::{Allocator, Layout}, 18 | vec::Vec, 19 | }; 20 | 21 | use blink_alloc::*; 22 | use criterion::*; 23 | 24 | /// GlobalAlloc that counts the number of allocations and deallocations 25 | /// and number of bytes allocated and deallocated. 26 | #[cfg(feature = "bench-with-counting-allocator")] 27 | struct CountingGlobalAlloc { 28 | allocations: AtomicUsize, 29 | deallocations: AtomicUsize, 30 | 31 | bytes_allocated: AtomicUsize, 32 | bytes_deallocated: AtomicUsize, 33 | } 34 | 35 | #[cfg(feature = "bench-with-counting-allocator")] 36 | impl CountingGlobalAlloc { 37 | pub const fn new() -> Self { 38 | CountingGlobalAlloc { 39 | allocations: AtomicUsize::new(0), 40 | deallocations: AtomicUsize::new(0), 41 | bytes_allocated: AtomicUsize::new(0), 42 | bytes_deallocated: AtomicUsize::new(0), 43 | } 44 | } 45 | 46 | pub fn reset_stat(&self) { 47 | self.allocations.store(0, Ordering::Relaxed); 48 | self.deallocations.store(0, Ordering::Relaxed); 49 | self.bytes_allocated.store(0, Ordering::Relaxed); 50 | self.bytes_deallocated.store(0, Ordering::Relaxed); 51 | } 52 | 53 | pub fn print_stat(&self) { 54 | let allocations = self.allocations.load(Ordering::Relaxed); 55 | let deallocations = self.deallocations.load(Ordering::Relaxed); 56 | let bytes_allocated = self.bytes_allocated.load(Ordering::Relaxed); 57 | let bytes_deallocated = self.bytes_deallocated.load(Ordering::Relaxed); 58 | 59 | eprintln!( 60 | "allocations: {allocations}, 61 | 
deallocations: {deallocations}, 62 | bytes_allocated: {bytes_allocated}, 63 | bytes_deallocated: {bytes_deallocated}" 64 | ) 65 | } 66 | } 67 | 68 | #[cfg(feature = "bench-with-counting-allocator")] 69 | unsafe impl core::alloc::GlobalAlloc for CountingGlobalAlloc { 70 | unsafe fn alloc(&self, layout: Layout) -> *mut u8 { 71 | let ptr = System.alloc(layout); 72 | if !ptr.is_null() { 73 | self.allocations.fetch_add(1, Ordering::Relaxed); 74 | self.bytes_allocated 75 | .fetch_add(layout.size(), Ordering::Relaxed); 76 | } 77 | ptr 78 | } 79 | 80 | unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { 81 | System.dealloc(ptr, layout); 82 | self.deallocations.fetch_add(1, Ordering::Relaxed); 83 | self.bytes_deallocated 84 | .fetch_add(layout.size(), Ordering::Relaxed); 85 | } 86 | } 87 | 88 | #[cfg(feature = "bench-with-counting-allocator")] 89 | #[global_allocator] 90 | static COUNTING_ALLOCATOR: CountingGlobalAlloc = CountingGlobalAlloc::new(); 91 | 92 | #[inline] 93 | fn print_mem_stat() { 94 | #[cfg(feature = "bench-with-counting-allocator")] 95 | COUNTING_ALLOCATOR.print_stat(); 96 | } 97 | 98 | #[inline] 99 | fn reset_mem_stat() { 100 | #[cfg(feature = "bench-with-counting-allocator")] 101 | COUNTING_ALLOCATOR.reset_stat(); 102 | } 103 | 104 | trait BumpAllocator 105 | where 106 | for<'a> &'a Self: Allocator, 107 | { 108 | fn reset(&mut self); 109 | } 110 | 111 | impl BumpAllocator for BlinkAlloc { 112 | #[inline(always)] 113 | fn reset(&mut self) { 114 | self.reset(); 115 | } 116 | } 117 | 118 | impl BumpAllocator for SyncBlinkAlloc { 119 | #[inline(always)] 120 | fn reset(&mut self) { 121 | self.reset(); 122 | } 123 | } 124 | 125 | impl BumpAllocator for LocalBlinkAlloc<'_> { 126 | #[inline(always)] 127 | fn reset(&mut self) { 128 | self.reset(); 129 | } 130 | } 131 | 132 | impl BumpAllocator for bumpalo::Bump { 133 | #[inline(always)] 134 | fn reset(&mut self) { 135 | self.reset(); 136 | } 137 | } 138 | 139 | trait SyncBumpAllocator: BumpAllocator + Sync 140 
| where 141 | for<'a> &'a Self: Allocator, 142 | { 143 | type Local<'a>: BlinkAllocator 144 | where 145 | Self: 'a; 146 | 147 | fn local(&self) -> Self::Local<'_>; 148 | } 149 | 150 | trait Adaptor { 151 | const CAN_DROP: bool; 152 | const ANY_ITER: bool; 153 | 154 | fn put(&self, value: T) -> &mut T; 155 | fn put_no_drop(&self, value: T) -> &mut T; 156 | fn copy_slice(&self, slice: &[T]) -> &mut [T]; 157 | fn copy_str(&self, string: &str) -> &mut str; 158 | fn from_iter(&self, iter: impl Iterator) -> &mut [T]; 159 | fn from_iter_no_drop(&self, iter: impl Iterator) -> &mut [T]; 160 | 161 | #[inline(always)] 162 | fn from_exact_size_iter_no_drop(&self, iter: impl ExactSizeIterator) -> &mut [T] { 163 | self.from_iter_no_drop(iter) 164 | } 165 | 166 | fn reset(&mut self); 167 | } 168 | 169 | impl Adaptor for Blink 170 | where 171 | A: BlinkAllocator, 172 | { 173 | const CAN_DROP: bool = true; 174 | const ANY_ITER: bool = true; 175 | 176 | #[inline(always)] 177 | fn put(&self, value: T) -> &mut T { 178 | self.put(value) 179 | } 180 | 181 | #[inline(always)] 182 | fn put_no_drop(&self, value: T) -> &mut T { 183 | self.emplace_no_drop().value(value) 184 | } 185 | 186 | #[inline(always)] 187 | fn copy_slice(&self, slice: &[T]) -> &mut [T] { 188 | self.copy_slice(slice) 189 | } 190 | 191 | #[inline(always)] 192 | fn copy_str(&self, string: &str) -> &mut str { 193 | self.copy_str(string) 194 | } 195 | 196 | #[inline(always)] 197 | fn from_iter(&self, iter: impl Iterator) -> &mut [T] { 198 | self.emplace().from_iter(iter) 199 | } 200 | 201 | #[inline(always)] 202 | fn from_iter_no_drop(&self, iter: impl Iterator) -> &mut [T] { 203 | self.emplace_no_drop().from_iter(iter) 204 | } 205 | 206 | #[inline(always)] 207 | fn reset(&mut self) { 208 | self.reset(); 209 | } 210 | } 211 | 212 | impl Adaptor for bumpalo::Bump { 213 | const CAN_DROP: bool = false; 214 | const ANY_ITER: bool = false; 215 | 216 | #[inline(always)] 217 | fn put(&self, _value: T) -> &mut T { 218 | 
unimplemented!() 219 | } 220 | 221 | #[inline(always)] 222 | fn put_no_drop(&self, value: T) -> &mut T { 223 | self.alloc(value) 224 | } 225 | 226 | #[inline(always)] 227 | fn copy_slice(&self, slice: &[T]) -> &mut [T] { 228 | self.alloc_slice_copy(slice) 229 | } 230 | 231 | #[inline(always)] 232 | fn copy_str(&self, string: &str) -> &mut str { 233 | self.alloc_str(string) 234 | } 235 | 236 | #[inline(always)] 237 | fn from_iter(&self, _iter: impl Iterator) -> &mut [T] { 238 | unimplemented!() 239 | } 240 | 241 | #[inline(always)] 242 | fn from_iter_no_drop(&self, _iter: impl Iterator) -> &mut [T] { 243 | unimplemented!() 244 | } 245 | 246 | fn from_exact_size_iter_no_drop(&self, iter: impl ExactSizeIterator) -> &mut [T] { 247 | self.alloc_slice_fill_iter(iter) 248 | } 249 | 250 | #[inline(always)] 251 | fn reset(&mut self) { 252 | self.reset(); 253 | } 254 | } 255 | 256 | const SIZE: usize = 17453; 257 | 258 | fn bench_alloc(name: &str, c: &mut Criterion) 259 | where 260 | for<'a> &'a A: Allocator, 261 | A: BumpAllocator + Default + 'static, 262 | { 263 | let mut group = c.benchmark_group(format!("allocation/{name}")); 264 | 265 | reset_mem_stat(); 266 | let mut alloc = A::default(); 267 | 268 | // Pre-warm the allocator 269 | (&alloc).allocate(Layout::new::<[u32; 65536]>()).unwrap(); 270 | alloc.reset(); 271 | 272 | group.bench_function(format!("alloc x {SIZE}"), |b| { 273 | b.iter(|| { 274 | for _ in 0..SIZE { 275 | black_box((&alloc).allocate(Layout::new::()).unwrap()); 276 | } 277 | alloc.reset(); 278 | }) 279 | }); 280 | 281 | print_mem_stat(); 282 | reset_mem_stat(); 283 | // let mut alloc = A::default(); 284 | 285 | group.bench_function(format!("grow same align x {SIZE}"), |b| { 286 | b.iter(|| { 287 | for _ in 0..SIZE { 288 | unsafe { 289 | let ptr = (&alloc).allocate(Layout::new::()).unwrap(); 290 | let ptr = (&alloc) 291 | .grow(ptr.cast(), Layout::new::(), Layout::new::<[u32; 2]>()) 292 | .unwrap(); 293 | black_box(ptr); 294 | } 295 | } 296 | 
alloc.reset(); 297 | }) 298 | }); 299 | 300 | group.bench_function(format!("grow smaller align x {SIZE}"), |b| { 301 | b.iter(|| { 302 | for _ in 0..SIZE { 303 | unsafe { 304 | let ptr = (&alloc).allocate(Layout::new::()).unwrap(); 305 | let ptr = (&alloc) 306 | .grow(ptr.cast(), Layout::new::(), Layout::new::<[u16; 4]>()) 307 | .unwrap(); 308 | black_box(ptr); 309 | } 310 | } 311 | alloc.reset(); 312 | }) 313 | }); 314 | 315 | group.bench_function(format!("grow larger align x {SIZE}"), |b| { 316 | b.iter(|| { 317 | for _ in 0..SIZE { 318 | unsafe { 319 | let ptr = (&alloc).allocate(Layout::new::()).unwrap(); 320 | let ptr = (&alloc) 321 | .grow(ptr.cast(), Layout::new::(), Layout::new::()) 322 | .unwrap(); 323 | black_box(ptr); 324 | } 325 | } 326 | alloc.reset(); 327 | }) 328 | }); 329 | 330 | group.bench_function(format!("shrink same align x {SIZE}"), |b| { 331 | b.iter(|| { 332 | for _ in 0..SIZE { 333 | unsafe { 334 | let ptr = (&alloc).allocate(Layout::new::<[u32; 2]>()).unwrap(); 335 | let ptr = (&alloc) 336 | .shrink(ptr.cast(), Layout::new::<[u32; 2]>(), Layout::new::()) 337 | .unwrap(); 338 | black_box(ptr); 339 | } 340 | } 341 | alloc.reset(); 342 | }) 343 | }); 344 | 345 | group.bench_function(format!("shrink smaller align x {SIZE}"), |b| { 346 | b.iter(|| { 347 | for _ in 0..SIZE { 348 | unsafe { 349 | let ptr = (&alloc).allocate(Layout::new::()).unwrap(); 350 | let ptr = (&alloc) 351 | .shrink(ptr.cast(), Layout::new::(), Layout::new::()) 352 | .unwrap(); 353 | black_box(ptr); 354 | } 355 | } 356 | alloc.reset(); 357 | }) 358 | }); 359 | 360 | if TypeId::of::() != TypeId::of::() { 361 | group.bench_function(format!("shrink larger align x {SIZE}"), |b| { 362 | b.iter(|| { 363 | for _ in 0..SIZE { 364 | unsafe { 365 | let ptr = (&alloc).allocate(Layout::new::<[u32; 4]>()).unwrap(); 366 | let ptr = (&alloc) 367 | .shrink(ptr.cast(), Layout::new::<[u32; 4]>(), Layout::new::()) 368 | .unwrap(); 369 | black_box(ptr); 370 | } 371 | } 372 | alloc.reset(); 373 
| }) 374 | }); 375 | } 376 | 377 | print_mem_stat(); 378 | 379 | group.finish(); 380 | } 381 | 382 | fn bench_warm_up(name: &str, c: &mut Criterion) 383 | where 384 | for<'a> &'a A: Allocator, 385 | A: BumpAllocator + Default, 386 | { 387 | let mut group = c.benchmark_group(format!("warm-up/{name}")); 388 | 389 | reset_mem_stat(); 390 | 391 | group.bench_function(format!("alloc 4 bytes x {SIZE}"), |b| { 392 | b.iter(|| { 393 | let alloc = A::default(); 394 | for _ in 0..SIZE { 395 | black_box((&alloc).allocate(Layout::new::()).unwrap()); 396 | } 397 | }) 398 | }); 399 | 400 | print_mem_stat(); 401 | group.finish(); 402 | } 403 | 404 | fn bench_vec(name: &str, c: &mut Criterion) 405 | where 406 | for<'a> &'a A: Allocator, 407 | A: BumpAllocator + Default, 408 | { 409 | let mut group = c.benchmark_group(format!("vec/{name}")); 410 | 411 | reset_mem_stat(); 412 | let mut alloc = A::default(); 413 | 414 | // Pre-warm the allocator 415 | (&alloc).allocate(Layout::new::<[u32; 65536]>()).unwrap(); 416 | alloc.reset(); 417 | 418 | group.bench_function(format!("push x {SIZE}"), |b| { 419 | b.iter(|| { 420 | let mut vec = Vec::new_in(&alloc); 421 | for i in 0..SIZE { 422 | vec.push(i); 423 | } 424 | drop(vec); 425 | alloc.reset(); 426 | }) 427 | }); 428 | 429 | group.bench_function(format!("reserve_exact(1) x {SIZE}"), |b| { 430 | b.iter(|| { 431 | let mut vec = Vec::::new_in(&alloc); 432 | for i in 0..SIZE { 433 | vec.reserve_exact(i); 434 | } 435 | drop(vec); 436 | alloc.reset(); 437 | }) 438 | }); 439 | 440 | print_mem_stat(); 441 | reset_mem_stat(); 442 | 443 | group.finish(); 444 | } 445 | 446 | fn bench_from_iter(name: &str, c: &mut Criterion) 447 | where 448 | A: Adaptor + Default, 449 | { 450 | let mut group = c.benchmark_group(format!("from-iter/{name}")); 451 | 452 | reset_mem_stat(); 453 | let mut adaptor = A::default(); 454 | 455 | // Pre-warm the allocator 456 | adaptor.from_exact_size_iter_no_drop((0..65536).map(|_| 0u32)); 457 | adaptor.reset(); 458 | 459 | if 
A::CAN_DROP && A::ANY_ITER { 460 | group.bench_function(format!("basic x {SIZE}"), |b| { 461 | b.iter(|| { 462 | for _ in 0..SIZE { 463 | black_box(adaptor.from_iter((0..111).map(|_| black_box(0u32)))); 464 | } 465 | adaptor.reset(); 466 | }) 467 | }); 468 | 469 | print_mem_stat(); 470 | reset_mem_stat(); 471 | } 472 | 473 | group.bench_function(format!("no-drop x {SIZE}"), |b| { 474 | b.iter(|| { 475 | for _ in 0..SIZE { 476 | black_box(adaptor.from_exact_size_iter_no_drop((0..111).map(|_| black_box(0u32)))); 477 | } 478 | adaptor.reset(); 479 | }) 480 | }); 481 | 482 | print_mem_stat(); 483 | reset_mem_stat(); 484 | 485 | if A::CAN_DROP && A::ANY_ITER { 486 | group.bench_function(format!("bad-filter x {SIZE}"), |b| { 487 | b.iter(|| { 488 | for _ in 0..SIZE { 489 | black_box( 490 | adaptor.from_iter( 491 | (0..111) 492 | .filter(|_| black_box(true)) 493 | .map(|_| black_box(0u32)), 494 | ), 495 | ); 496 | } 497 | adaptor.reset(); 498 | }) 499 | }); 500 | 501 | print_mem_stat(); 502 | reset_mem_stat(); 503 | } 504 | 505 | if A::ANY_ITER { 506 | group.bench_function(format!("bad-filter no-drop x {SIZE}"), |b| { 507 | b.iter(|| { 508 | for _ in 0..SIZE { 509 | black_box( 510 | adaptor.from_iter_no_drop( 511 | (0..111) 512 | .filter(|_| black_box(true)) 513 | .map(|_| black_box(0u32)), 514 | ), 515 | ); 516 | } 517 | adaptor.reset(); 518 | }) 519 | }); 520 | 521 | print_mem_stat(); 522 | } 523 | 524 | group.finish(); 525 | } 526 | 527 | pub fn criterion_benchmark(c: &mut Criterion) { 528 | bench_alloc::("blink_alloc::BlinkAlloc", c); 529 | bench_alloc::("blink_alloc::SyncBlinkAlloc", c); 530 | bench_alloc::("bumpalo::Bump", c); 531 | 532 | bench_warm_up::("blink_alloc::BlinkAlloc", c); 533 | bench_warm_up::("blink_alloc::SyncBlinkAlloc", c); 534 | bench_warm_up::("bumpalo::Bump", c); 535 | 536 | bench_vec::("blink_alloc::BlinkAlloc", c); 537 | bench_vec::("blink_alloc::SyncBlinkAlloc", c); 538 | bench_vec::("bumpalo::Bump", c); 539 | 540 | 
bench_from_iter::>("blink_alloc::BlinkAlloc", c); 541 | bench_from_iter::>("blink_alloc::SyncBlinkAlloc", c); 542 | bench_from_iter::("bumpalo::Bump", c); 543 | } 544 | 545 | criterion_group!(benches, criterion_benchmark); 546 | criterion_main!(benches); 547 | -------------------------------------------------------------------------------- /src/sync.rs: -------------------------------------------------------------------------------- 1 | //! This module provides multi-threaded blink allocator. 2 | 3 | use core::{ 4 | alloc::Layout, 5 | mem::ManuallyDrop, 6 | ptr::NonNull, 7 | sync::atomic::{AtomicUsize, Ordering}, 8 | }; 9 | 10 | #[cfg(feature = "nightly")] 11 | use core::alloc::{AllocError, Allocator}; 12 | #[cfg(not(feature = "nightly"))] 13 | use allocator_api2::alloc::{AllocError, Allocator}; 14 | 15 | #[cfg(all(feature = "nightly", feature = "alloc"))] 16 | use alloc::alloc::Global; 17 | 18 | #[cfg(all(not(feature = "nightly"), feature = "alloc"))] 19 | use allocator_api2::alloc::Global; 20 | 21 | use crate::{ 22 | api::BlinkAllocator, 23 | arena::{ArenaLocal, ArenaSync}, 24 | }; 25 | 26 | switch_alloc_default! { 27 | /// Multi-threaded blink allocator. 28 | /// 29 | /// Blink-allocator is arena-based allocator that 30 | /// allocates memory in growing chunks and serve allocations from them. 31 | /// When chunk is exhausted a new larger chunk is allocated. 32 | /// 33 | /// Deallocation is no-op. [`BlinkAllocator`] can be reset 34 | /// to free all chunks except the last one, that will be reused. 35 | /// 36 | /// Blink allocator aims to allocate a chunk large enough to 37 | /// serve all allocations between resets. 38 | /// 39 | /// A shared and mutable reference to the [`SyncBlinkAlloc`] implement 40 | /// [`Allocator`] trait. 41 | /// When "nightly" feature is enabled, [`Allocator`] trait is 42 | /// [`core::alloc::Allocator`]. Otherwise it is duplicated trait defined 43 | /// in [`allocator-api2`](allocator_api2).
44 | /// 45 | /// Resetting blink allocator requires mutable borrow, so it is not possible 46 | /// to do while shared borrow is alive. That matches requirement of 47 | /// [`Allocator`] trait - while [`Allocator`] instance 48 | /// (a shared reference to `BlinkAlloc`) or any of its clones are alive, 49 | /// allocated memory must be valid. 50 | /// 51 | /// This version of blink-allocator is multi-threaded. 52 | /// It can be used from multiple threads concurrently to allocate memory. 53 | /// As mutable borrow is required to reset the allocator, 54 | /// it is not possible to do when shared. 55 | /// Internally it uses [`RwLock`] and [`AtomicUsize`] for synchronized 56 | /// interior mutability. [`RwLock`] is only write-locked when new chunk 57 | /// must be allocated. The arena allocation is performed using lock-free 58 | /// algorithm. 59 | /// 60 | /// Still it is slower than single-threaded version [`BlinkAlloc`]. 61 | /// 62 | /// For best of both worlds [`LocalBlinkAlloc`] can be created from 63 | /// this allocator. [`LocalBlinkAlloc`] will allocate chunks from this 64 | /// allocator, but is single-threaded by itself. 65 | /// 66 | /// [`RwLock`]: parking_lot::RwLock 67 | /// [`AtomicUsize`]: core::sync::atomic::AtomicUsize 68 | /// [`BlinkAlloc`]: crate::local::BlinkAlloc 69 | /// [`LocalBlinkAlloc`]: crate::sync::LocalBlinkAlloc 70 | /// 71 | /// # Example 72 | /// 73 | /// ``` 74 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 75 | /// # use blink_alloc::SyncBlinkAlloc; 76 | /// # use std::ptr::NonNull; 77 | /// 78 | /// let mut blink = SyncBlinkAlloc::new(); 79 | /// let layout = std::alloc::Layout::new::<[u32; 8]>(); 80 | /// let ptr = blink.allocate(layout).unwrap(); 81 | /// let ptr = NonNull::new(ptr.as_ptr() as *mut u8).unwrap(); // Method for this is unstable. 
82 | /// 83 | /// unsafe { 84 | /// std::ptr::write(ptr.as_ptr().cast(), [1, 2, 3, 4, 5, 6, 7, 8]); 85 | /// } 86 | /// 87 | /// blink.reset(); 88 | /// ``` 89 | /// 90 | /// # Example that uses nightly's `allocator_api` 91 | /// 92 | /// ``` 93 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 94 | /// # use blink_alloc::SyncBlinkAlloc; 95 | /// # #[cfg(feature = "nightly")] 96 | /// # use std::vec::Vec; 97 | /// # #[cfg(not(feature = "nightly"))] 98 | /// # use allocator_api2::vec::Vec; 99 | /// # fn main() { 100 | /// let mut blink = SyncBlinkAlloc::new(); 101 | /// let mut vec = Vec::new_in(&blink); 102 | /// vec.push(1); 103 | /// vec.extend(1..3); 104 | /// vec.extend(3..10); 105 | /// drop(vec); 106 | /// blink.reset(); 107 | /// # } 108 | /// ``` 109 | pub struct SyncBlinkAlloc { 110 | arena: ArenaSync, 111 | allocator: A, 112 | max_local_alloc: AtomicUsize, 113 | } 114 | } 115 | 116 | impl Drop for SyncBlinkAlloc { 117 | fn drop(&mut self) { 118 | unsafe { 119 | self.arena.reset(false, &self.allocator); 120 | } 121 | } 122 | } 123 | 124 | #[test] 125 | fn check_sync() { 126 | fn for_sync_alloc() { 127 | fn is_sink() {} 128 | is_sink::>(); 129 | } 130 | for_sync_alloc::(); 131 | } 132 | 133 | impl Default for SyncBlinkAlloc 134 | where 135 | A: Allocator + Default, 136 | { 137 | #[inline(always)] 138 | fn default() -> Self { 139 | Self::new_in(Default::default()) 140 | } 141 | } 142 | 143 | #[cfg(feature = "alloc")] 144 | impl SyncBlinkAlloc { 145 | /// Creates new blink allocator that uses global allocator 146 | /// to allocate memory chunks. 147 | /// 148 | /// See [`SyncBlinkAlloc::new_in`] for using custom allocator. 149 | #[inline(always)] 150 | pub const fn new() -> Self { 151 | SyncBlinkAlloc::new_in(Global) 152 | } 153 | } 154 | 155 | impl SyncBlinkAlloc 156 | where 157 | A: Allocator, 158 | { 159 | /// Creates new blink allocator that uses provided allocator 160 | /// to allocate memory chunks. 
161 | /// 162 | /// See [`SyncBlinkAlloc::new`] for using global allocator. 163 | #[inline(always)] 164 | pub const fn new_in(allocator: A) -> Self { 165 | SyncBlinkAlloc { 166 | arena: ArenaSync::new(), 167 | allocator, 168 | max_local_alloc: AtomicUsize::new(0), 169 | } 170 | } 171 | 172 | /// Returns reference to the underlying allocator used by this blink allocator. 173 | #[inline(always)] 174 | pub const fn inner(&self) -> &A { 175 | &self.allocator 176 | } 177 | 178 | /// Creates new blink allocator that uses global allocator 179 | /// to allocate memory chunks. 180 | /// With this method you can specify initial chunk size. 181 | /// 182 | /// See [`SyncBlinkAlloc::new_in`] for using custom allocator. 183 | #[inline(always)] 184 | pub const fn with_chunk_size_in(chunk_size: usize, allocator: A) -> Self { 185 | SyncBlinkAlloc { 186 | arena: ArenaSync::with_chunk_size(chunk_size), 187 | allocator, 188 | max_local_alloc: AtomicUsize::new(0), 189 | } 190 | } 191 | 192 | /// Creates a new thread-local blink allocator proxy 193 | /// that borrows from this multi-threaded allocator. 194 | /// 195 | /// The local proxy allocator works faster and 196 | /// allows more consistent memory reuse. 197 | /// It can be recreated without resetting the multi-threaded allocator, 198 | /// allowing [`SyncBlinkAlloc`] to be warm-up and serve all allocations 199 | /// from a single chunk without ever blocking. 200 | /// 201 | /// Best works for fork-join style of parallelism. 202 | /// Create a local allocator for each thread/task. 203 | /// Reset after all threads/tasks are finished. 
204 | /// 205 | /// # Examples 206 | /// 207 | /// ``` 208 | /// # #![cfg_attr(feature = "nightly", feature(allocator_api))] 209 | /// # use blink_alloc::SyncBlinkAlloc; 210 | /// # #[cfg(feature = "nightly")] 211 | /// # use std::vec::Vec; 212 | /// # #[cfg(not(feature = "nightly"))] 213 | /// # use allocator_api2::vec::Vec; 214 | /// # #[cfg(feature = "alloc")] fn main() { 215 | /// let mut blink = SyncBlinkAlloc::new(); 216 | /// for _ in 0..3 { 217 | /// for i in 0..16 { 218 | /// std::thread::scope(|_| { 219 | /// let blink = blink.local(); 220 | /// let mut vec = Vec::new_in(&blink); 221 | /// vec.push(i); 222 | /// for j in i*2..i*30 { 223 | /// vec.push(j); // Proxy will allocate enough memory to grow vec without reallocating on 2nd iteration and later. 224 | /// } 225 | /// }); 226 | /// } 227 | /// blink.reset(); 228 | /// } 229 | /// # } 230 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 231 | /// ``` 232 | #[inline(always)] 233 | pub fn local(&self) -> LocalBlinkAlloc<'_, A> { 234 | LocalBlinkAlloc { 235 | arena: ArenaLocal::with_chunk_size(self.max_local_alloc.load(Ordering::Relaxed)), 236 | shared: self, 237 | } 238 | } 239 | 240 | /// Allocates memory with specified layout from this allocator. 241 | /// If needed it will allocate new chunk using underlying allocator. 242 | /// If chunk allocation fails, it will return `Err`. 243 | #[inline(always)] 244 | pub fn allocate(&self, layout: Layout) -> Result, AllocError> { 245 | // Safety: 246 | // Same instance is used for all allocations and resets. 247 | if let Some(ptr) = unsafe { self.arena.alloc_fast(layout) } { 248 | return Ok(ptr); 249 | } 250 | unsafe { self.arena.alloc_slow(layout, &self.allocator) } 251 | } 252 | 253 | /// Resizes memory allocation. 254 | /// Potentially happens in-place. 255 | /// 256 | /// # Safety 257 | /// 258 | /// `ptr` must be a pointer previously returned by [`allocate`](SyncBlinkAlloc::allocate). 
259 | /// `old_size` must be in range `layout.size()..=slice.len()` 260 | /// where `layout` is the layout used in the call to [`allocate`](SyncBlinkAlloc::allocate). 261 | /// and `slice` is the slice pointer returned by [`allocate`](SyncBlinkAlloc::allocate). 262 | /// 263 | /// On success, the old pointer is invalidated and the new pointer is returned. 264 | /// On error old allocation is still valid. 265 | #[inline(always)] 266 | pub unsafe fn resize( 267 | &self, 268 | ptr: NonNull, 269 | old_layout: Layout, 270 | new_layout: Layout, 271 | ) -> Result, AllocError> { 272 | if let Some(ptr) = unsafe { self.arena.resize_fast(ptr, old_layout, new_layout) } { 273 | return Ok(ptr); 274 | } 275 | 276 | // Safety: 277 | // Same instance is used for all allocations and resets. 278 | // `ptr` was allocated by this allocator. 279 | unsafe { 280 | self.arena 281 | .resize_slow(ptr, old_layout, new_layout, &self.allocator) 282 | } 283 | } 284 | 285 | /// Deallocates memory previously allocated from this allocator. 286 | /// 287 | /// This call may not actually free memory. 288 | /// All memory is guaranteed to be freed on [`reset`](SyncBlinkAlloc::reset) call. 289 | /// 290 | /// # Safety 291 | /// 292 | /// `ptr` must be a pointer previously returned by [`allocate`](SyncBlinkAlloc::allocate). 293 | /// `size` must be in range `layout.size()..=slice.len()` 294 | /// where `layout` is the layout used in the call to [`allocate`](SyncBlinkAlloc::allocate). 295 | /// and `slice` is the slice pointer returned by [`allocate`](SyncBlinkAlloc::allocate). 296 | #[inline(always)] 297 | pub unsafe fn deallocate(&self, ptr: NonNull, size: usize) { 298 | // Safety: 299 | // `ptr` was allocated by this allocator. 300 | unsafe { 301 | self.arena.dealloc(ptr, size); 302 | } 303 | } 304 | 305 | /// Resets this allocator, deallocating all chunks except the last one. 306 | /// Last chunk will be reused. 
307 | /// With steady memory usage after few iterations 308 | /// one chunk should be sufficient for all allocations between resets. 309 | #[inline(always)] 310 | pub fn reset(&mut self) { 311 | // Safety: 312 | // Same instance is used for all allocations and resets. 313 | unsafe { 314 | self.arena.reset(true, &self.allocator); 315 | } 316 | } 317 | 318 | /// Resets this allocator, deallocating all chunks. 319 | #[inline(always)] 320 | pub fn reset_final(&mut self) { 321 | // Safety: 322 | // Same instance is used for all allocations and resets. 323 | unsafe { 324 | self.arena.reset(false, &self.allocator); 325 | } 326 | } 327 | 328 | /// Resets this allocator, deallocating all chunks except the last one. 329 | /// Last chunk will be reused. 330 | /// With steady memory usage after few iterations 331 | /// one chunk should be sufficient for all allocations between resets. 332 | /// 333 | /// # Safety 334 | /// 335 | /// Blink-allocators guarantee that memory can be used while shared 336 | /// borrow to the allocator is held, preventing safe `fn reset` call. 337 | /// 338 | /// With this method it becomes caller responsibility to ensure 339 | /// that allocated memory won't be used after reset. 340 | #[inline(always)] 341 | pub unsafe fn reset_unchecked(&self) { 342 | // Safety: 343 | // Same instance is used for all allocations and resets. 344 | unsafe { 345 | self.arena.reset_unchecked(true, &self.allocator); 346 | } 347 | } 348 | 349 | /// Unwrap this allocator, returning the underlying allocator. 350 | /// Leaks allocated chunks. 351 | /// 352 | /// To deallocate all chunks call [`reset_final`](BlinkAlloc::reset_final) first. 353 | /// 354 | /// The second returned value will use global allocator, so 355 | /// use with care if this method is used inside global allocator. 356 | pub fn into_inner(self) -> A { 357 | let me = ManuallyDrop::new(self); 358 | unsafe { core::ptr::read(&me.allocator) } 359 | } 360 | 361 | /// Update maximum local allocation size. 
362 | /// Can be used by thread-local blink-allocators that use 363 | /// this shared blink-allocator. 364 | #[inline(always)] 365 | pub fn update_max_local_alloc(&self, max_local_alloc: usize) { 366 | self.max_local_alloc 367 | .fetch_max(max_local_alloc, Ordering::Relaxed); 368 | } 369 | } 370 | 371 | unsafe impl Allocator for SyncBlinkAlloc 372 | where 373 | A: Allocator, 374 | { 375 | #[inline(always)] 376 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 377 | SyncBlinkAlloc::allocate(self, layout) 378 | } 379 | 380 | #[inline(always)] 381 | unsafe fn shrink( 382 | &self, 383 | ptr: NonNull, 384 | old_layout: Layout, 385 | new_layout: Layout, 386 | ) -> Result, AllocError> { 387 | SyncBlinkAlloc::resize(self, ptr, old_layout, new_layout) 388 | } 389 | 390 | #[inline(always)] 391 | unsafe fn grow( 392 | &self, 393 | ptr: NonNull, 394 | old_layout: Layout, 395 | new_layout: Layout, 396 | ) -> Result, AllocError> { 397 | SyncBlinkAlloc::resize(self, ptr, old_layout, new_layout) 398 | } 399 | 400 | #[inline(always)] 401 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 402 | SyncBlinkAlloc::deallocate(self, ptr, layout.size()); 403 | } 404 | } 405 | 406 | unsafe impl BlinkAllocator for SyncBlinkAlloc 407 | where 408 | A: Allocator, 409 | { 410 | #[inline(always)] 411 | fn reset(&mut self) { 412 | SyncBlinkAlloc::reset(self) 413 | } 414 | } 415 | 416 | switch_alloc_default! { 417 | /// Thread-local proxy for [`SyncBlinkAlloc`]. 418 | /// 419 | /// Using proxy can yield better performance when 420 | /// it is possible to create proxy once to use for many allocations. 421 | /// 422 | /// See [`SyncBlinkAlloc::local`] for more details. 
423 | pub struct LocalBlinkAlloc<'a, A: Allocator = +Global> { 424 | arena: ArenaLocal, 425 | shared: &'a SyncBlinkAlloc, 426 | } 427 | } 428 | 429 | impl Drop for LocalBlinkAlloc<'_, A> 430 | where 431 | A: Allocator, 432 | { 433 | fn drop(&mut self) { 434 | self.shared 435 | .update_max_local_alloc(self.arena.last_chunk_size()); 436 | self.arena.reset_leak(false); 437 | } 438 | } 439 | 440 | impl LocalBlinkAlloc<'_, A> 441 | where 442 | A: Allocator, 443 | { 444 | /// Allocates memory with specified layout from this allocator. 445 | /// If needed it will allocate new chunk using underlying allocator. 446 | /// If chunk allocation fails, it will return `Err`. 447 | #[inline(always)] 448 | pub fn allocate(&self, layout: Layout) -> Result, AllocError> { 449 | // Safety: 450 | // Same instance is used for all allocations and resets. 451 | if let Some(ptr) = unsafe { self.arena.alloc_fast(layout) } { 452 | return Ok(ptr); 453 | } 454 | unsafe { self.arena.alloc_slow(layout, self.shared) } 455 | } 456 | 457 | /// Resizes memory allocation. 458 | /// Potentially happens in-place. 459 | /// 460 | /// # Safety 461 | /// 462 | /// `ptr` must be a pointer previously returned by [`allocate`](LocalBlinkAlloc::allocate). 463 | /// `old_size` must be in range `layout.size()..=slice.len()` 464 | /// where `layout` is the layout used in the call to [`allocate`](LocalBlinkAlloc::allocate). 465 | /// and `slice` is the slice pointer returned by [`allocate`](LocalBlinkAlloc::allocate). 466 | /// 467 | /// On success, the old pointer is invalidated and the new pointer is returned. 468 | /// On error old allocation is still valid. 
469 | #[inline(always)] 470 | pub unsafe fn resize( 471 | &self, 472 | ptr: NonNull, 473 | old_layout: Layout, 474 | new_layout: Layout, 475 | ) -> Result, AllocError> { 476 | if let Some(ptr) = unsafe { self.arena.resize_fast(ptr, old_layout, new_layout) } { 477 | return Ok(ptr); 478 | } 479 | 480 | // Safety: 481 | // Same instance is used for all allocations and resets. 482 | // `ptr` was allocated by this allocator. 483 | unsafe { 484 | self.arena 485 | .resize_slow(ptr, old_layout, new_layout, self.shared) 486 | } 487 | } 488 | 489 | /// Deallocates memory previously allocated from this allocator. 490 | /// 491 | /// This call may not actually free memory. 492 | /// All memory is guaranteed to be freed on [`reset`](LocalBlinkAlloc::reset) call. 493 | /// 494 | /// # Safety 495 | /// 496 | /// `ptr` must be a pointer previously returned by [`allocate`](LocalBlinkAlloc::allocate). 497 | /// `size` must be in range `layout.size()..=slice.len()` 498 | /// where `layout` is the layout used in the call to [`allocate`](LocalBlinkAlloc::allocate). 499 | /// and `slice` is the slice pointer returned by [`allocate`](LocalBlinkAlloc::allocate). 500 | #[inline(always)] 501 | pub unsafe fn deallocate(&self, ptr: NonNull, size: usize) { 502 | // Safety: 503 | // `ptr` was allocated by this allocator. 504 | unsafe { 505 | self.arena.dealloc(ptr, size); 506 | } 507 | } 508 | 509 | /// Resets this allocator, deallocating all chunks except the last one. 510 | /// Last chunk will be reused. 511 | /// With steady memory usage after few iterations 512 | /// one chunk should be sufficient for all allocations between resets. 513 | #[inline(always)] 514 | pub fn reset(&mut self) { 515 | self.shared 516 | .update_max_local_alloc(self.arena.last_chunk_size()); 517 | self.arena.reset_leak(true); 518 | } 519 | 520 | /// Resets this allocator, deallocating all chunks except the last one. 521 | /// Last chunk will be reused. 
522 | /// With steady memory usage after few iterations 523 | /// one chunk should be sufficient for all allocations between resets. 524 | /// 525 | /// # Safety 526 | /// 527 | /// Blink-allocators guarantee that memory can be used while shared 528 | /// borrow to the allocator is held, preventing safe `fn reset` call. 529 | /// 530 | /// With this method it becomes caller responsibility to ensure 531 | /// that allocated memory won't be used after reset. 532 | #[inline(always)] 533 | pub unsafe fn reset_unchecked(&self) { 534 | // Safety: 535 | // Same instance is used for all allocations and resets. 536 | unsafe { 537 | self.arena.reset_unchecked(true, self.shared); 538 | } 539 | } 540 | } 541 | 542 | unsafe impl Allocator for LocalBlinkAlloc<'_, A> 543 | where 544 | A: Allocator, 545 | { 546 | #[inline(always)] 547 | fn allocate(&self, layout: Layout) -> Result, AllocError> { 548 | LocalBlinkAlloc::allocate(self, layout) 549 | } 550 | 551 | #[inline(always)] 552 | unsafe fn shrink( 553 | &self, 554 | ptr: NonNull, 555 | old_layout: Layout, 556 | new_layout: Layout, 557 | ) -> Result, AllocError> { 558 | LocalBlinkAlloc::resize(self, ptr, old_layout, new_layout) 559 | } 560 | 561 | #[inline(always)] 562 | unsafe fn grow( 563 | &self, 564 | ptr: NonNull, 565 | old_layout: Layout, 566 | new_layout: Layout, 567 | ) -> Result, AllocError> { 568 | LocalBlinkAlloc::resize(self, ptr, old_layout, new_layout) 569 | } 570 | 571 | #[inline(always)] 572 | unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { 573 | LocalBlinkAlloc::deallocate(self, ptr, layout.size()) 574 | } 575 | } 576 | 577 | unsafe impl BlinkAllocator for LocalBlinkAlloc<'_, A> 578 | where 579 | A: Allocator, 580 | { 581 | #[inline(always)] 582 | fn reset(&mut self) { 583 | LocalBlinkAlloc::reset(self) 584 | } 585 | } 586 | -------------------------------------------------------------------------------- /src/arena/mod.rs: 
-------------------------------------------------------------------------------- 1 | //! This module provides `ArenaLocal` and `ArenaSync` 2 | //! types which share implementation 3 | //! but use `Cell` and `RwLock` + `AtomicUsize` respectively, 4 | //! for the interior mutability. 5 | 6 | use core::{ 7 | alloc::Layout, 8 | cell::Cell, 9 | mem::{align_of, size_of}, 10 | ptr::{self, NonNull}, 11 | sync::atomic::{AtomicPtr, Ordering}, 12 | }; 13 | 14 | #[cfg(feature = "nightly")] 15 | use core::alloc::{AllocError, Allocator}; 16 | #[cfg(not(feature = "nightly"))] 17 | use allocator_api2::alloc::{AllocError, Allocator}; 18 | 19 | use crate::cold; 20 | 21 | #[cfg(feature = "sync")] 22 | use parking_lot::RwLock; 23 | 24 | #[inline(always)] 25 | fn is_aligned_to(value: usize, align: usize) -> bool { 26 | debug_assert!(align.is_power_of_two()); 27 | let mask = align - 1; 28 | value & mask == 0 29 | } 30 | 31 | #[inline(always)] 32 | fn align_up(value: usize, align: usize) -> Option { 33 | debug_assert!(align.is_power_of_two()); 34 | let mask = align - 1; 35 | Some(value.checked_add(mask)? & !mask) 36 | } 37 | 38 | #[inline(always)] 39 | fn align_down(value: usize, align: usize) -> usize { 40 | debug_assert!(align.is_power_of_two()); 41 | let mask = align - 1; 42 | value & !mask 43 | } 44 | 45 | /// A sum of layout size and align mask. 46 | #[inline(always)] 47 | fn layout_sum(layout: &Layout) -> usize { 48 | // Layout constraints guarantee that this won't overflow.
49 | layout.size() + (layout.align() - 1) 50 | } 51 | 52 | pub(crate) trait CasPtr { 53 | #[allow(dead_code)] 54 | fn new(value: *mut u8) -> Self; 55 | fn load(&self, order: Ordering) -> *mut u8; 56 | fn set(&mut self, value: *mut u8); 57 | fn compare_exchange( 58 | &self, 59 | old: *mut u8, 60 | new: *mut u8, 61 | success: Ordering, 62 | failure: Ordering, 63 | ) -> Result<(), *mut u8>; 64 | 65 | fn compare_exchange_weak( 66 | &self, 67 | old: *mut u8, 68 | new: *mut u8, 69 | success: Ordering, 70 | failure: Ordering, 71 | ) -> Result<(), *mut u8>; 72 | } 73 | 74 | impl CasPtr for Cell<*mut u8> { 75 | #[inline(always)] 76 | fn new(value: *mut u8) -> Self { 77 | Cell::new(value) 78 | } 79 | 80 | #[inline(always)] 81 | fn load(&self, _: Ordering) -> *mut u8 { 82 | self.get() 83 | } 84 | 85 | #[inline(always)] 86 | fn set(&mut self, value: *mut u8) { 87 | *self.get_mut() = value; 88 | } 89 | 90 | #[inline(always)] 91 | fn compare_exchange( 92 | &self, 93 | old: *mut u8, 94 | new: *mut u8, 95 | _: Ordering, 96 | _: Ordering, 97 | ) -> Result<(), *mut u8> { 98 | if old == self.get() { 99 | self.set(new); 100 | Ok(()) 101 | } else { 102 | Err(self.get()) 103 | } 104 | } 105 | 106 | #[inline(always)] 107 | fn compare_exchange_weak( 108 | &self, 109 | old: *mut u8, 110 | new: *mut u8, 111 | _: Ordering, 112 | _: Ordering, 113 | ) -> Result<(), *mut u8> { 114 | debug_assert_eq!( 115 | old, 116 | self.get(), 117 | "Must be used only in loop where `old` is last loaded value" 118 | ); 119 | self.set(new); 120 | Ok(()) 121 | } 122 | } 123 | 124 | impl CasPtr for AtomicPtr { 125 | #[inline(always)] 126 | fn new(value: *mut u8) -> Self { 127 | AtomicPtr::new(value) 128 | } 129 | 130 | #[inline(always)] 131 | fn load(&self, order: Ordering) -> *mut u8 { 132 | self.load(order) 133 | } 134 | 135 | #[inline(always)] 136 | fn set(&mut self, value: *mut u8) { 137 | *self.get_mut() = value; 138 | } 139 | 140 | #[inline(always)] 141 | fn compare_exchange( 142 | &self, 143 | old: *mut 
u8, 144 | new: *mut u8, 145 | success: Ordering, 146 | failure: Ordering, 147 | ) -> Result<(), *mut u8> { 148 | self.compare_exchange(old, new, success, failure)?; 149 | Ok(()) 150 | } 151 | 152 | #[inline(always)] 153 | fn compare_exchange_weak( 154 | &self, 155 | old: *mut u8, 156 | new: *mut u8, 157 | success: Ordering, 158 | failure: Ordering, 159 | ) -> Result<(), *mut u8> { 160 | self.compare_exchange_weak(old, new, success, failure)?; 161 | Ok(()) 162 | } 163 | } 164 | 165 | /// 0.25 KB. Initial chunk size. 166 | const CHUNK_START_SIZE: usize = 256; 167 | 168 | /// 16 KB. After this size, new chunk size is not aligned to next power of two. 169 | const CHUNK_POWER_OF_TWO_THRESHOLD: usize = 1 << 14; 170 | 171 | /// 1/16 KB. Minimum chunk size growth step. 172 | const CHUNK_MIN_GROW_STEP: usize = 64; 173 | 174 | macro_rules! with_cursor { 175 | ($cursor:ty) => { 176 | #[repr(C)] 177 | pub struct ChunkHeader { 178 | cursor: $cursor, 179 | end: *mut u8, 180 | prev: Option>, 181 | cumulative_size: usize, 182 | } 183 | 184 | impl ChunkHeader { 185 | #[inline] 186 | unsafe fn alloc_chunk( 187 | size: usize, 188 | allocator: impl Allocator, 189 | prev: Option>, 190 | ) -> Result, AllocError> { 191 | let Some(size) = align_up(size, align_of::()) else { 192 | return Err(AllocError); 193 | }; 194 | 195 | // Safety: 196 | // size + (align - 1) hasn't overflow above. 197 | // `align_of` returns valid align value. 198 | let layout = unsafe { Layout::from_size_align_unchecked(size, align_of::()) }; 199 | let slice = allocator.allocate(layout)?; 200 | Ok(Self::init_chunk(slice, prev)) 201 | } 202 | 203 | #[inline] 204 | unsafe fn dealloc_chunk( 205 | chunk: NonNull, 206 | allocator: impl Allocator, 207 | ) -> Option> { 208 | let me = unsafe { chunk.as_ref() }; 209 | let prev = me.prev; 210 | 211 | let size = unsafe { me.end.offset_from(chunk.as_ptr().cast()) } as usize; 212 | 213 | // Safety: 214 | // Making layout of actual allocation. 
215 | let layout = unsafe { Layout::from_size_align_unchecked(size, align_of::()) }; 216 | 217 | allocator.deallocate(chunk.cast(), layout); 218 | prev 219 | } 220 | 221 | /// # Safety 222 | /// 223 | /// `ptr` must be a pointer to the valid chunk allocation. 224 | /// `ptr` must be aligned for `ChunkHeader` structure. 225 | /// `size` must be the size of the allocation. 226 | /// `size` must be large enough to fit `Chunk` structure. 227 | #[inline] 228 | unsafe fn init_chunk( 229 | slice: NonNull<[u8]>, 230 | prev: Option>, 231 | ) -> NonNull { 232 | let len = slice.len(); 233 | let ptr = slice.as_ptr().cast::(); 234 | debug_assert!(is_aligned_to(ptr as usize, align_of::())); 235 | debug_assert!(len > size_of::()); 236 | 237 | let end = ptr.add(len); 238 | 239 | let header_ptr = ptr.cast::(); 240 | let base = header_ptr.add(1).cast::(); 241 | 242 | let cumulative_size = match prev { 243 | None => 0, 244 | Some(prev) => { 245 | let prev = unsafe { prev.as_ref() }; 246 | prev.cap() + prev.cumulative_size 247 | } 248 | }; 249 | 250 | ptr::write( 251 | header_ptr, 252 | ChunkHeader { 253 | cursor: <$cursor>::new(base), 254 | end, 255 | prev, 256 | cumulative_size, 257 | }, 258 | ); 259 | NonNull::new_unchecked(header_ptr) 260 | } 261 | 262 | #[inline(always)] 263 | fn base(&self) -> *const u8 { 264 | unsafe { <*const Self>::add(self, 1).cast() } 265 | } 266 | 267 | /// # Safety 268 | /// 269 | /// `ptr` must be a pointer withing the usable memory of the chunk. 270 | /// e.g. it must be between `base` and `self`. 271 | #[inline(always)] 272 | unsafe fn offset_from_end(&self, ptr: *const u8) -> usize { 273 | // Safety: end and base belong to the same memory chunk. 274 | let offset = unsafe { self.end.offset_from(ptr) }; 275 | offset as usize 276 | } 277 | 278 | #[inline(always)] 279 | fn cap(&self) -> usize { 280 | // Safety: `base` fits `base..=self` range. 
281 | unsafe { self.offset_from_end(self.base()) } 282 | } 283 | 284 | // Safety: `chunk` must be a pointer to the valid chunk allocation. 285 | #[inline(always)] 286 | unsafe fn alloc(chunk: NonNull, layout: Layout) -> Option> { 287 | // Safety: `chunk` is a valid pointer to chunk allocation. 288 | let me = unsafe { chunk.as_ref() }; 289 | let mut cursor = me.cursor.load(Ordering::Relaxed); 290 | 291 | loop { 292 | let cursor_addr = cursor as usize; 293 | 294 | let layout_sum = layout_sum(&layout); 295 | 296 | let unaligned = cursor_addr.checked_add(layout_sum)?; 297 | 298 | let aligned_addr = align_down(unaligned - layout.size(), layout.align()); 299 | 300 | debug_assert!( 301 | aligned_addr >= cursor_addr, 302 | "aligned_addr addr must not be less than cursor" 303 | ); 304 | debug_assert!( 305 | (aligned_addr - cursor_addr) < layout.align(), 306 | "Cannot waste space more than alignment size" 307 | ); 308 | 309 | let next_addr = aligned_addr + layout.size(); 310 | 311 | let end_addr = me.end as usize; 312 | if next_addr > end_addr { 313 | return None; 314 | } 315 | 316 | let aligned = unsafe { cursor.add(aligned_addr - cursor_addr) }; 317 | let next = unsafe { aligned.add(layout.size()) }; 318 | 319 | if let Err(updated) = me.cursor.compare_exchange_weak( 320 | cursor, 321 | next, 322 | Ordering::Acquire, // Memory access valid only *after* this succeeds. 323 | Ordering::Relaxed, 324 | ) { 325 | cursor = updated; 326 | continue; 327 | }; 328 | 329 | // Actual allocation length. 330 | let len = next_addr - aligned_addr; 331 | debug_assert!(len >= layout.size()); 332 | 333 | // Safety: 334 | // offset is within unused allocated memory range starting from base. 335 | // base is not null. 
336 | let slice = unsafe {
337 | debug_assert_eq!(aligned_addr % layout.align(), 0);
338 | let slice = core::ptr::slice_from_raw_parts_mut(aligned, len);
339 | NonNull::new_unchecked(slice)
340 | };
341 | 
342 | return Some(slice);
343 | }
344 | }
345 | 
346 | /// Optimistic resize for arena-allocated memory.
347 | /// Handles grows and shrinks; if the new alignment requirement is not met, shifts.
348 | /// When alignment requirement is already met (checked for pointer itself)
349 | /// shifts do not happen for both shrinks and grows.
350 | /// Even more, cheap shrinks are always successful if alignment is met by `ptr`.
351 | /// Cheap grows are successful if this is the last allocation in the chunk
352 | /// and there is enough space for the new allocation.
353 | /// If cheap shrink or grow is not possible - reallocates.
354 | ///
355 | /// Safety: `chunk` must be a pointer to the valid chunk allocation.
356 | /// `ptr` must be a pointer to the allocated memory of at least `old_layout.size()` bytes.
357 | /// `ptr` may be allocated from a different chunk.
358 | #[inline]
359 | unsafe fn resize(
360 | chunk: NonNull,
361 | ptr: NonNull,
362 | old_layout: Layout,
363 | new_layout: Layout,
364 | ) -> Option> {
365 | // Safety: `chunk` is a valid pointer to chunk allocation.
366 | let me = unsafe { chunk.as_ref() };
367 | 
368 | let addr = ptr.as_ptr() as usize;
369 | if old_layout.align() >= new_layout.align() {
370 | if new_layout.size() <= old_layout.size() {
371 | // Shrink with alignment already satisfied: keep allocation as-is.
372 | let slice =
373 | core::ptr::slice_from_raw_parts_mut(ptr.as_ptr(), old_layout.size());
374 | return Some(NonNull::new_unchecked(slice));
375 | } else {
376 | // Safety:
377 | // `ptr + old_layout.size()` is within allocation or one past the end.
377 | let old_end = unsafe { ptr.as_ptr().add(old_layout.size()) }; 378 | 379 | let cursor = me.cursor.load(Ordering::Relaxed); 380 | if cursor == old_end { 381 | let next_addr = addr.checked_add(new_layout.size())?; 382 | 383 | let end_addr = me.end as usize; 384 | if next_addr > end_addr { 385 | // Not enough space. 386 | return None; 387 | } 388 | 389 | let next = unsafe { ptr.as_ptr().add(new_layout.size()) }; 390 | 391 | let result = CasPtr::compare_exchange( 392 | &me.cursor, 393 | cursor, 394 | next, 395 | Ordering::Acquire, // Acquire more memory. 396 | Ordering::Relaxed, 397 | ); 398 | 399 | if let Ok(()) = result { 400 | let len = next_addr - addr; 401 | debug_assert!(len >= new_layout.size()); 402 | 403 | let slice = core::ptr::slice_from_raw_parts_mut(ptr.as_ptr(), len); 404 | return Some(NonNull::new_unchecked(slice)); 405 | } 406 | cold(); 407 | } 408 | } 409 | } else { 410 | cold(); 411 | } 412 | 413 | // Have to reallocate. 414 | let new_ptr = ChunkHeader::alloc(chunk, new_layout)?; 415 | 416 | // Copy bytes from old location to new. 417 | // Separate allocations cannot overlap. 418 | core::ptr::copy_nonoverlapping( 419 | ptr.as_ptr(), 420 | new_ptr.as_ptr().cast(), 421 | new_layout.size().min(old_layout.size()), 422 | ); 423 | 424 | // Deallocation is not possible. 425 | Some(new_ptr) 426 | } 427 | 428 | // Safety: `chunk` must be a pointer to the valid chunk allocation. 429 | #[inline(always)] 430 | unsafe fn reset(mut chunk: NonNull) -> Option> { 431 | let me = chunk.as_mut(); 432 | let base = me.end.sub(me.cap()); 433 | me.cursor.set(base); 434 | me.cumulative_size = 0; 435 | me.prev.take() 436 | } 437 | 438 | // Safety: `chunk` must be a pointer to the valid chunk allocation. 439 | // `ptr` must be a pointer to the allocated memory of at least `size` bytes. 440 | // `ptr` may be allocated from different chunk. 
441 | #[inline(always)] 442 | unsafe fn dealloc(chunk: NonNull, ptr: NonNull, size: usize) { 443 | // Safety: `chunk` is a valid pointer to chunk allocation. 444 | let me = unsafe { chunk.as_ref() }; 445 | 446 | // Safety: `ptr` is a valid pointer to the allocated memory of at least `size` bytes. 447 | let new = unsafe { ptr.as_ptr().add(size) }; 448 | 449 | // Single attempt to update cursor. 450 | // Fails if `ptr` is not the last memory allocated from this chunk. 451 | // Spurious failures in multithreaded environment are possible 452 | // but do not affect correctness. 453 | let _ = me.cursor.compare_exchange( 454 | ptr.as_ptr(), 455 | new, 456 | Ordering::Release, // Released some memory. 457 | Ordering::Relaxed, 458 | ); 459 | } 460 | } 461 | 462 | #[cold] 463 | pub unsafe fn alloc_slow( 464 | root: &Cell>>, 465 | mut chunk_size: usize, 466 | layout: Layout, 467 | allocator: impl Allocator, 468 | ) -> Result, AllocError> { 469 | if let Some(root) = root.get() { 470 | chunk_size = chunk_size.max(root.as_ref().cumulative_size); 471 | chunk_size = chunk_size 472 | .checked_add(layout.size().max(CHUNK_MIN_GROW_STEP)) 473 | .ok_or(AllocError)?; 474 | } else { 475 | chunk_size = chunk_size.max(layout.size()); 476 | } 477 | 478 | if layout.align() > align_of::() { 479 | chunk_size = chunk_size.checked_add(layout.align()).ok_or(AllocError)?; 480 | } 481 | 482 | let Some(mut chunk_size) = chunk_size.checked_add(size_of::()) else { 483 | return Err(AllocError); 484 | }; 485 | 486 | // Grow size exponentially until a threshold. 487 | if chunk_size < CHUNK_POWER_OF_TWO_THRESHOLD { 488 | chunk_size = chunk_size.next_power_of_two(); 489 | } else { 490 | chunk_size = 491 | align_up(chunk_size, CHUNK_POWER_OF_TWO_THRESHOLD).unwrap_or(chunk_size); 492 | } 493 | 494 | debug_assert_eq!(chunk_size % align_of::(), 0); 495 | let new_chunk = ChunkHeader::alloc_chunk(chunk_size, allocator, root.get())?; 496 | 497 | // Safety: `chunk` is a valid pointer to chunk allocation. 
498 | let ptr = unsafe { ChunkHeader::alloc(new_chunk, layout).unwrap_unchecked() }; 499 | 500 | root.set(Some(new_chunk)); 501 | Ok(ptr) 502 | } 503 | 504 | #[cold] 505 | pub unsafe fn resize_slow( 506 | root: &Cell>>, 507 | chunk_size: usize, 508 | ptr: NonNull, 509 | old_layout: Layout, 510 | new_layout: Layout, 511 | allocator: impl Allocator, 512 | ) -> Result, AllocError> { 513 | let new_ptr = alloc_slow(root, chunk_size, new_layout, allocator)?; 514 | core::ptr::copy_nonoverlapping( 515 | ptr.as_ptr(), 516 | new_ptr.as_ptr().cast(), 517 | new_layout.size().min(old_layout.size()), 518 | ); 519 | // Deallocation is impossible. 520 | Ok(new_ptr) 521 | } 522 | 523 | #[inline(always)] 524 | pub unsafe fn dealloc(root: Option>, ptr: NonNull, size: usize) { 525 | if let Some(root) = root { 526 | // Safety: 527 | // `chunk` is a valid pointer to chunk allocation. 528 | // `ptr` is a valid pointer to the allocated memory of at least `size` bytes. 529 | unsafe { 530 | ChunkHeader::dealloc(root, ptr, size); 531 | } 532 | } 533 | } 534 | 535 | /// Safety: 536 | /// `allocator` must be the same allocator that was used in `alloc`. 537 | #[inline(always)] 538 | pub unsafe fn reset( 539 | root: &Cell>>, 540 | keep_last: bool, 541 | allocator: A, 542 | ) where 543 | A: Allocator, 544 | { 545 | let mut prev = if keep_last { 546 | let Some(root) = root.get() else { 547 | return; 548 | }; 549 | 550 | // Safety: `chunk` is a valid pointer to chunk allocation. 551 | // This function owns mutable reference to `self`. 552 | unsafe { ChunkHeader::reset(root) } 553 | } else { 554 | root.take() 555 | }; 556 | 557 | while let Some(chunk) = prev { 558 | // Safety: `chunk` is a valid pointer to chunk allocation. 559 | // Allocated from this allocator with this layout. 
560 | prev = unsafe { ChunkHeader::dealloc_chunk(chunk, &allocator) }; 561 | } 562 | } 563 | 564 | #[allow(dead_code)] 565 | #[inline(always)] 566 | pub fn reset_leak(root: &Cell>>, keep_last: bool) { 567 | if keep_last { 568 | let Some(chunk) = root.get() else { 569 | return; 570 | }; 571 | 572 | // Safety: `chunk` is a valid pointer to chunk allocation. 573 | // This function owns mutable reference to `self`. 574 | unsafe { 575 | ChunkHeader::reset(chunk); 576 | } 577 | } else { 578 | root.set(None); 579 | }; 580 | } 581 | }; 582 | } 583 | 584 | mod local; 585 | pub use self::local::ArenaLocal; 586 | 587 | #[cfg(feature = "sync")] 588 | mod sync; 589 | 590 | #[cfg(feature = "sync")] 591 | pub use self::sync::ArenaSync; 592 | -------------------------------------------------------------------------------- /src/blink.rs: -------------------------------------------------------------------------------- 1 | //! Provides `Blink` allocator adaptor. 2 | 3 | use core::{ 4 | alloc::Layout, 5 | convert::{identity, Infallible}, 6 | marker::PhantomData, 7 | mem::{needs_drop, size_of, ManuallyDrop, MaybeUninit}, 8 | ptr::{self, NonNull}, 9 | }; 10 | 11 | #[cfg(all(feature = "nightly",feature = "alloc"))] 12 | use alloc::alloc::Global; 13 | 14 | #[cfg(all(not(feature = "nightly"),feature = "alloc"))] 15 | use allocator_api2::alloc::Global; 16 | 17 | use crate::{ 18 | api::BlinkAllocator, 19 | cold, 20 | drop_list::{DropItem, DropList}, 21 | in_place, 22 | }; 23 | 24 | #[cfg(not(no_global_oom_handling))] 25 | use crate::ResultExt; 26 | 27 | #[cfg(feature = "alloc")] 28 | use crate::local::BlinkAlloc; 29 | 30 | #[cfg(not(no_global_oom_handling))] 31 | use crate::oom::{handle_alloc_error, size_overflow}; 32 | 33 | type EmplaceType = Result>; 34 | type EmplaceSlot = MaybeUninit>; 35 | 36 | pub trait CoerceFromMut<'a, T: ?Sized> { 37 | fn coerce(t: &'a mut T) -> Self; 38 | } 39 | 40 | impl<'a, T: ?Sized> CoerceFromMut<'a, T> for &'a mut T { 41 | #[inline(always)] 42 | fn coerce(t: 
&'a mut T) -> Self { 43 | t 44 | } 45 | } 46 | 47 | impl<'a, T: ?Sized> CoerceFromMut<'a, T> for &'a T { 48 | #[inline(always)] 49 | fn coerce(t: &'a mut T) -> Self { 50 | t 51 | } 52 | } 53 | 54 | /// Iterator extension trait for collecting iterators into blink allocator. 55 | /// 56 | /// # Examples 57 | /// 58 | /// ``` 59 | /// # #[cfg(feature = "alloc")] fn main() { 60 | /// # use blink_alloc::{Blink, IteratorExt}; 61 | /// let mut blink = Blink::new(); 62 | /// let slice = (0..10).filter(|x| x % 3 != 0).collect_to_blink(&mut blink); 63 | /// assert_eq!(slice, [1, 2, 4, 5, 7, 8]); 64 | /// slice[0] = 10; 65 | /// assert_eq!(slice, [10, 2, 4, 5, 7, 8]); 66 | /// # } 67 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 68 | /// ``` 69 | /// 70 | /// For non-static data with drop. 71 | /// 72 | /// ``` 73 | /// # #[cfg(feature = "alloc")] fn main() { 74 | /// # use blink_alloc::{Blink, IteratorExt}; 75 | /// let mut blink = Blink::new(); 76 | /// let slice = (0..10).filter(|x| x % 3 != 0).collect_to_blink_shared(&mut blink); 77 | /// assert_eq!(slice, [1, 2, 4, 5, 7, 8]); 78 | /// # } 79 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 80 | /// ``` 81 | /// 82 | /// For non-static data and no drop. 83 | /// 84 | /// ``` 85 | /// # #[cfg(feature = "alloc")] fn main() { 86 | /// # use blink_alloc::{Blink, IteratorExt}; 87 | /// let mut blink = Blink::new(); 88 | /// let slice = (0..10).filter(|x| x % 3 != 0).collect_to_blink_no_drop(&mut blink); 89 | /// assert_eq!(slice, [1, 2, 4, 5, 7, 8]); 90 | /// slice[0] = 10; 91 | /// assert_eq!(slice, [10, 2, 4, 5, 7, 8]); 92 | /// # } 93 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 94 | /// ``` 95 | pub trait IteratorExt: Iterator { 96 | /// Collect iterator into blink allocator and return slice reference. 
97 | #[cfg(not(no_global_oom_handling))] 98 | #[inline(always)] 99 | fn collect_to_blink(self, blink: &mut Blink) -> &mut [Self::Item] 100 | where 101 | Self: Sized, 102 | Self::Item: 'static, 103 | { 104 | blink.emplace().from_iter(self) 105 | } 106 | 107 | /// Collect iterator into blink allocator and return slice reference. 108 | #[cfg(not(no_global_oom_handling))] 109 | #[inline(always)] 110 | fn collect_to_blink_shared(self, blink: &mut Blink) -> &[Self::Item] 111 | where 112 | Self: Sized, 113 | { 114 | blink.emplace_shared().from_iter(self) 115 | } 116 | 117 | /// Collect iterator into blink allocator and return slice reference. 118 | #[cfg(not(no_global_oom_handling))] 119 | #[inline(always)] 120 | fn collect_to_blink_no_drop(self, blink: &mut Blink) -> &mut [Self::Item] 121 | where 122 | Self: Sized, 123 | { 124 | blink.emplace_no_drop().from_iter(self) 125 | } 126 | 127 | /// Attempts to collect iterator into blink allocator and return slice reference. 128 | #[inline(always)] 129 | #[allow(clippy::type_complexity)] 130 | fn try_collect_to_blink( 131 | self, 132 | blink: &mut Blink, 133 | ) -> Result<&mut [Self::Item], (&mut [Self::Item], Option)> 134 | where 135 | Self: Sized, 136 | Self::Item: 'static, 137 | { 138 | blink.emplace().try_from_iter(self) 139 | } 140 | 141 | /// Attempts to collect iterator into blink allocator and return slice reference. 142 | #[inline(always)] 143 | #[allow(clippy::type_complexity)] 144 | fn try_collect_to_blink_shared( 145 | self, 146 | blink: &mut Blink, 147 | ) -> Result<&[Self::Item], (&[Self::Item], Option)> 148 | where 149 | Self: Sized, 150 | { 151 | blink.emplace_shared().try_from_iter(self) 152 | } 153 | 154 | /// Attempts to collect iterator into blink allocator and return slice reference. 
155 | #[inline(always)]
156 | #[allow(clippy::type_complexity)]
157 | fn try_collect_to_blink_no_drop(
158 | self,
159 | blink: &mut Blink,
160 | ) -> Result<&mut [Self::Item], (&mut [Self::Item], Option)>
161 | where
162 | Self: Sized,
163 | {
164 | blink.emplace_no_drop().try_from_iter(self)
165 | }
166 | }
167 | 
168 | impl IteratorExt for I where I: Iterator {}
169 | 
170 | switch_alloc_default! {
171 | /// An allocator adaptor designed for blink allocators.
172 | /// Provides user-friendly methods to emplace values into allocated memory.
173 | /// Supports emplacing existing values, and constructing values in allocated memory directly or indirectly.
174 | /// And also emplacing items yielded from iterators into contiguous memory and returning slice reference.
175 | ///
176 | /// [`Blink`] calls [`Drop::drop`] for emplaced values when reset or dropped.
177 | /// This allows using [`Blink`] instead of collections in some scenarios without needing to enable [`allocation_api`] feature.
178 | ///
179 | /// A blink-allocator adapter for user-friendly safe allocations
180 | /// without use of collections.
181 | ///
182 | /// Provides an ability to emplace values into allocated memory,
183 | /// backed by the associated blink-allocator instance.
184 | /// And returns mutable reference to the value.
185 | /// Values can be emplaced by move, by construction in allocated memory
186 | /// (when compiler likes us), from iterators etc.
187 | /// Most operations are provided in two flavors:
188 | /// `try_` prefixed methods return `Result` with allocation errors.
189 | /// And non-prefixed methods call [`handle_alloc_error`] method
190 | /// (which panics when the "alloc" feature is not enabled).
191 | /// Non-prefixed methods require that the "no_global_oom_handling" feature cfg is disabled.
192 | ///
193 | /// [`Blink`] can be reset by calling `reset` method.
194 | /// It drops all emplaced values and resets associated allocator instance.
195 | /// If allocator instance is shared, resetting it will have no effect.
196 | ///
197 | /// [`handle_alloc_error`]: alloc::alloc::handle_alloc_error
198 | /// [`allocation_api`]: https://doc.rust-lang.org/beta/unstable-book/library-features/allocator-api.html
199 | pub struct Blink> {
200 | drop_list: DropList,
201 | alloc: A,
202 | }
203 | }
204 | 
205 | impl Drop for Blink {
206 | #[inline(always)]
207 | fn drop(&mut self) {
208 | self.drop_all();
209 | }
210 | }
211 | 
212 | // Safety: `Blink` is not auto-send because of `DropList`.
213 | // The `DropList` contains pointers to objects allocated from `Blink`.
214 | // If `Blink` is moved to another thread (or `&mut Blink`) then all returned pointers
215 | // to allocated objects are invalidated.
216 | unsafe impl Send for Blink where A: Send {}
217 | 
218 | impl Default for Blink
219 | where
220 | A: Default,
221 | {
222 | fn default() -> Self {
223 | Blink::new_in(Default::default())
224 | }
225 | }
226 | 
227 | #[cfg(feature = "alloc")]
228 | impl Blink> {
229 | /// Creates new blink instance with `BlinkAlloc` backed by `Global`
230 | /// allocator.
231 | ///
232 | /// # Examples
233 | ///
234 | /// ```
235 | /// use blink_alloc::Blink;
236 | /// let mut blink = Blink::new();
237 | ///
238 | /// blink.put(42);
239 | /// ```
240 | #[inline(always)]
241 | pub const fn new() -> Self {
242 | Blink::new_in(BlinkAlloc::new())
243 | }
244 | 
245 | /// Creates new blink instance with `BlinkAlloc` backed by `Global`
246 | /// allocator.
247 | /// `BlinkAlloc` receives starting chunk size.
248 | ///
249 | /// # Examples
250 | ///
251 | /// ```
252 | /// use blink_alloc::Blink;
253 | /// let mut blink = Blink::with_chunk_size(16);
254 | ///
255 | /// blink.put(42);
255 | /// ```
256 | #[inline(always)]
257 | pub const fn with_chunk_size(capacity: usize) -> Self {
258 | Blink::new_in(BlinkAlloc::with_chunk_size(capacity))
259 | }
260 | }
261 | 
262 | impl Blink {
263 | /// Creates new blink instance with provided allocator instance.
264 | #[inline(always)] 265 | pub const fn new_in(alloc: A) -> Self { 266 | Blink { 267 | drop_list: DropList::new(), 268 | alloc, 269 | } 270 | } 271 | 272 | /// Returns reference to allocator instance. 273 | #[inline(always)] 274 | pub fn allocator(&self) -> &A { 275 | &self.alloc 276 | } 277 | 278 | /// Drops all allocated values. 279 | /// 280 | /// Prefer to use `reset` method if associated allocator instance supports it. 281 | #[inline(always)] 282 | pub fn drop_all(&mut self) { 283 | self.drop_list.reset(); 284 | } 285 | } 286 | 287 | impl Blink 288 | where 289 | A: BlinkAllocator, 290 | { 291 | /// Drops all allocated values. 292 | /// And resets associated allocator instance. 293 | #[inline(always)] 294 | pub fn reset(&mut self) { 295 | self.drop_list.reset(); 296 | self.alloc.reset(); 297 | } 298 | 299 | /// Allocates memory for a copy of the slice. 300 | /// If allocation fails, returns `Err`. 301 | /// Otherwise copies the slice into the allocated memory and returns 302 | /// mutable reference to the copy. 
303 | #[inline] 304 | unsafe fn _try_copy_slice<'a, T, E>( 305 | &'a self, 306 | slice: &[T], 307 | alloc_err: impl FnOnce(Layout) -> E, 308 | ) -> Result<&'a mut [T], E> 309 | where 310 | T: Copy, 311 | { 312 | let layout = Layout::for_value(slice); 313 | let Ok(ptr) = self.alloc.allocate(layout) else { 314 | return Err(alloc_err(layout)); 315 | }; 316 | 317 | let ptr = ptr.as_ptr().cast(); 318 | core::ptr::copy_nonoverlapping(slice.as_ptr(), ptr, slice.len()); 319 | Ok(core::slice::from_raw_parts_mut(ptr, slice.len())) 320 | } 321 | 322 | unsafe fn _try_emplace_drop<'a, T, I, G: 'a, E>( 323 | &'a self, 324 | init: I, 325 | f: impl FnOnce(&mut EmplaceSlot, I), 326 | err: impl FnOnce(G) -> E, 327 | alloc_err: impl FnOnce(I, Layout) -> E, 328 | ) -> Result<&'a mut T, E> { 329 | let layout = Layout::new::>>>(); 330 | 331 | let Ok(ptr) = self.alloc.allocate(layout) else { 332 | return Err(alloc_err(init, layout)); 333 | }; 334 | 335 | // Safety: `item_ptr` is a valid pointer to allocated memory for type `DropItem`. 336 | let item = unsafe { DropItem::init_value(ptr.cast(), init, f) }; 337 | 338 | if item.value.is_ok() { 339 | match self.drop_list.add(item) { 340 | Ok(value) => return Ok(value), 341 | _ => unreachable!(), 342 | } 343 | } 344 | 345 | match &mut item.value { 346 | Err(g) => { 347 | let err = err(unsafe { ManuallyDrop::take(g) }); 348 | // Give memory back. 349 | self.alloc.deallocate(ptr.cast(), layout); 350 | Err(err) 351 | } 352 | _ => unreachable!(), 353 | } 354 | } 355 | 356 | unsafe fn _try_emplace_no_drop<'a, T, I, G: 'a, E>( 357 | &self, 358 | init: I, 359 | f: impl FnOnce(&mut EmplaceSlot, I), 360 | err: impl FnOnce(G) -> E, 361 | alloc_err: impl FnOnce(I, Layout) -> E, 362 | ) -> Result<&'a mut T, E> { 363 | let layout = Layout::new::(); 364 | let Ok(ptr) = self.alloc.allocate(layout) else { 365 | return Err(alloc_err(init, layout)); 366 | }; 367 | 368 | // Safety: `ptr` is a valid pointer to allocated memory. 
369 | // Allocated with this `T`'s layout. 370 | // Duration of the allocation is until next call to [`BlinkAlloc::reset`]. 371 | 372 | let uninit = &mut *ptr.as_ptr().cast(); 373 | f(uninit, init); 374 | 375 | match uninit.assume_init_mut() { 376 | Ok(value) => Ok(value), 377 | Err(g) => { 378 | let err = err(unsafe { ManuallyDrop::take(g) }); 379 | // Give memory back. 380 | self.alloc.deallocate(ptr.cast(), layout); 381 | Err(err) 382 | } 383 | } 384 | } 385 | 386 | /// Allocates memory for a value and emplaces value into the memory 387 | /// using init value and provided closure. 388 | /// If allocation fails, returns `Err(init)`. 389 | /// Otherwise calls closure consuming `init` 390 | /// and initializes memory with closure result. 391 | #[inline(always)] 392 | unsafe fn _try_emplace<'a, T, I, G: 'a, E>( 393 | &'a self, 394 | init: I, 395 | f: impl FnOnce(&mut EmplaceSlot, I), 396 | no_drop: bool, 397 | err: impl FnOnce(G) -> E, 398 | alloc_err: impl FnOnce(I, Layout) -> E, 399 | ) -> Result<&'a mut T, E> { 400 | if !needs_drop::() || no_drop { 401 | self._try_emplace_no_drop(init, f, err, alloc_err) 402 | } else { 403 | self._try_emplace_drop(init, f, err, alloc_err) 404 | } 405 | } 406 | 407 | unsafe fn _try_emplace_drop_from_iter<'a, T: 'a, I, E>( 408 | &'a self, 409 | mut iter: I, 410 | err: impl FnOnce(&'a mut [T], Option, Option) -> E, 411 | ) -> Result<&'a mut [T], E> 412 | where 413 | I: Iterator, 414 | { 415 | if size_of::() == 0 { 416 | let item_layout = Layout::new::>(); 417 | let Ok(ptr) = self.alloc.allocate(item_layout) else { 418 | return Err(err(&mut [], None, Some(item_layout))); 419 | }; 420 | // Drain elements from iterator. 421 | // Stop at `usize::MAX`. 422 | // Drop exactly this number of elements on reset. 
423 | let count = saturating_drain_iter(iter); 424 | let (item, slice) = DropItem::init_slice(ptr.cast(), count); 425 | self.drop_list.add(item); 426 | return Ok(slice); 427 | } 428 | 429 | struct Guard<'a, T: 'a, A: BlinkAllocator> { 430 | ptr: Option>>, 431 | count: usize, 432 | cap: usize, 433 | layout: Layout, 434 | alloc: &'a A, 435 | drop_list: &'a DropList, 436 | } 437 | 438 | impl<'a, T, A> Drop for Guard<'a, T, A> 439 | where 440 | A: BlinkAllocator, 441 | { 442 | #[inline(always)] 443 | fn drop(&mut self) { 444 | self.flush(); 445 | } 446 | } 447 | 448 | impl<'a, T, A> Guard<'a, T, A> 449 | where 450 | A: BlinkAllocator, 451 | { 452 | #[inline(always)] 453 | fn flush(&mut self) -> &'a mut [T] { 454 | match self.ptr.take() { 455 | Some(ptr) if self.count != 0 => { 456 | // if self.count < self.cap { 457 | // // shrink the allocation to the actual size. 458 | // // `BlinkAllocator` guarantees that this will not fail 459 | // // be a no-op. 460 | 461 | // let item_layout = Layout::new::>(); 462 | 463 | // let (new_layout, _) = Layout::array::(self.count) 464 | // .and_then(|array| item_layout.extend(array)) 465 | // .expect("Smaller than actual allocation"); 466 | 467 | // // Safety: 468 | // // Shrinking the allocation to the actual used size. 469 | // let new_ptr = 470 | // unsafe { self.alloc.shrink(ptr.cast(), self.layout, new_layout) } 471 | // .expect("BlinkAllocator guarantees this will succeed"); 472 | 473 | // ptr = new_ptr.cast(); 474 | // } 475 | 476 | // Safety: `item` was properly initialized. 
477 | let (item, slice) = unsafe { DropItem::init_slice(ptr, self.count) }; 478 | unsafe { 479 | self.drop_list.add(item); 480 | } 481 | slice 482 | } 483 | Some(ptr) => unsafe { 484 | self.alloc.deallocate(ptr.cast(), self.layout); 485 | &mut [] 486 | }, 487 | None => &mut [], 488 | } 489 | } 490 | 491 | #[inline(always)] 492 | fn fill( 493 | &mut self, 494 | size_hint: usize, 495 | one_more_elem: &mut Option, 496 | iter: &mut impl Iterator, 497 | ) -> Result<(), Option> { 498 | let Ok(array_layout) = Layout::array::(size_hint) else { 499 | return Err(None); 500 | }; 501 | 502 | let item_layout = Layout::new::>(); 503 | let Ok((full_layout, array_offset)) = item_layout.extend(array_layout) else { 504 | return Err(None); 505 | }; 506 | 507 | debug_assert_eq!(array_offset, size_of::>()); 508 | 509 | let res = match self.ptr { 510 | None => self.alloc.allocate(full_layout), 511 | Some(ptr) => unsafe { self.alloc.grow(ptr.cast(), self.layout, full_layout) }, 512 | }; 513 | 514 | let Ok(ptr) = res else { 515 | return Err(Some(full_layout)); 516 | }; 517 | self.layout = full_layout; 518 | 519 | let item_ptr = ptr.cast(); 520 | self.ptr = Some(item_ptr); 521 | 522 | let len = ptr.len(); 523 | if len > full_layout.size() { 524 | self.cap = (len - size_of::>()) / size_of::() 525 | } else { 526 | debug_assert_eq!(len, full_layout.size()); 527 | self.cap = size_hint; 528 | }; 529 | 530 | let array_ptr = unsafe { item_ptr.as_ptr().add(1).cast::() }; 531 | 532 | if let Some(one_more_elem) = one_more_elem.take() { 533 | debug_assert!(self.count < self.cap); 534 | 535 | // Safety: `array_ptr` is a valid pointer to allocated memory for type `[T; hint]`. 536 | // And `hint` is larger than `self.count` 537 | unsafe { 538 | ptr::write(array_ptr.add(self.count), one_more_elem); 539 | } 540 | self.count += 1; 541 | } 542 | 543 | for idx in self.count..self.cap { 544 | if Layout::new::>() == Layout::new::() { 545 | // Putting elements directly into the array. 
546 | 547 | let elem = unsafe { 548 | in_place(array_ptr.add(idx).cast(), &mut *iter, Iterator::next) 549 | }; 550 | 551 | if elem.is_none() { 552 | break; 553 | } 554 | } else { 555 | match iter.next() { 556 | None => break, 557 | Some(elem) => unsafe { ptr::write(array_ptr.add(idx), elem) }, 558 | } 559 | } 560 | self.count = idx + 1; 561 | } 562 | 563 | Ok(()) 564 | } 565 | } 566 | 567 | let mut guard = Guard { 568 | ptr: None, 569 | count: 0, 570 | cap: 0, 571 | layout: Layout::new::<()>(), 572 | alloc: &self.alloc, 573 | drop_list: &self.drop_list, 574 | }; 575 | 576 | let (lower, _) = iter.size_hint(); 577 | 578 | if lower != 0 { 579 | if let Err(layout) = guard.fill(lower.max(FASTER_START), &mut None, &mut iter) { 580 | return Err(err(guard.flush(), None, layout)); 581 | } 582 | } 583 | 584 | let mut one_more = iter.next(); 585 | if one_more.is_none() { 586 | return Ok(guard.flush()); 587 | }; 588 | cold(); 589 | 590 | loop { 591 | let (lower, upper) = iter.size_hint(); 592 | let Some(size_hint) = size_hint_and_one(lower, upper, guard.count.max(FASTER_START)) 593 | else { 594 | return Err(err(guard.flush(), one_more, None)); 595 | }; 596 | 597 | if let Err(layout) = guard.fill(size_hint, &mut one_more, &mut iter) { 598 | return Err(err(guard.flush(), one_more, layout)); 599 | } 600 | 601 | one_more = iter.next(); 602 | if one_more.is_none() { 603 | return Ok(guard.flush()); 604 | }; 605 | } 606 | } 607 | 608 | unsafe fn _try_emplace_no_drop_from_iter<'a, T: 'a, I, E>( 609 | &'a self, 610 | mut iter: I, 611 | err: impl FnOnce(&'a mut [T], Option, Option) -> E, 612 | ) -> Result<&'a mut [T], E> 613 | where 614 | I: Iterator, 615 | { 616 | if size_of::() == 0 { 617 | // Drain elements from iterator. 618 | // Stop at `usize::MAX`. 619 | // Drop exactly this number of elements on reset. 
620 | let count = saturating_drain_iter(iter); 621 | let ptr = NonNull::::dangling(); 622 | let slice = core::slice::from_raw_parts_mut(ptr.as_ptr(), count); 623 | return Ok(slice); 624 | } 625 | 626 | struct Guard<'a, T: 'a, A: BlinkAllocator> { 627 | ptr: Option>, 628 | count: usize, 629 | cap: usize, 630 | layout: Layout, 631 | alloc: &'a A, 632 | } 633 | 634 | impl<'a, T, A> Guard<'a, T, A> 635 | where 636 | A: BlinkAllocator, 637 | { 638 | #[inline(always)] 639 | fn flush(&mut self) -> &'a mut [T] { 640 | match self.ptr.take() { 641 | Some(ptr) if self.count != 0 => { 642 | // if self.count < self.cap { 643 | // // shrink the allocation to the actual size. 644 | // // `BlinkAllocator` guarantees that this will not fail 645 | // // be a no-op. 646 | 647 | // let new_layout = Layout::array::(self.count) 648 | // .expect("Smaller than actual allocation"); 649 | 650 | // // Safety: 651 | // // Shrinking the allocation to the actual used size. 652 | // let new_ptr = 653 | // unsafe { self.alloc.shrink(ptr.cast(), self.layout, new_layout) } 654 | // .expect("BlinkAllocator guarantees this will succeed"); 655 | 656 | // ptr = new_ptr.cast(); 657 | // } 658 | 659 | // Safety: reallocated for slice of size `self.count` 660 | unsafe { &mut *core::slice::from_raw_parts_mut(ptr.as_ptr(), self.count) } 661 | } 662 | Some(ptr) => { 663 | unsafe { self.alloc.deallocate(ptr.cast(), self.layout) }; 664 | &mut [] 665 | } 666 | None => &mut [], 667 | } 668 | } 669 | 670 | #[inline(always)] 671 | fn fill( 672 | &mut self, 673 | size_hint: usize, 674 | one_more_elem: &mut Option, 675 | iter: &mut impl Iterator, 676 | ) -> Result<(), Option> { 677 | let Ok(full_layout) = Layout::array::(size_hint) else { 678 | return Err(None); 679 | }; 680 | 681 | let res = match self.ptr { 682 | None => self.alloc.allocate(full_layout), 683 | Some(ptr) => unsafe { self.alloc.grow(ptr.cast(), self.layout, full_layout) }, 684 | }; 685 | 686 | let Ok(ptr) = res else { 687 | return 
Err(Some(full_layout)); 688 | }; 689 | 690 | self.layout = full_layout; 691 | self.ptr = Some(ptr.cast()); 692 | 693 | let len = ptr.len(); 694 | if len > full_layout.size() { 695 | self.cap = len / size_of::() 696 | } else { 697 | debug_assert_eq!(len, full_layout.size()); 698 | self.cap = size_hint; 699 | }; 700 | 701 | let array_ptr = ptr.as_ptr().cast::(); 702 | 703 | if let Some(one_more_elem) = one_more_elem.take() { 704 | debug_assert!(self.count < self.cap); 705 | 706 | // Safety: `array_ptr` is a valid pointer to allocated memory for type `[T; hint]`. 707 | // And `hint` is larger than `self.count` 708 | unsafe { 709 | ptr::write(array_ptr.add(self.count), one_more_elem); 710 | } 711 | self.count += 1; 712 | } 713 | 714 | for idx in self.count..size_hint { 715 | if Layout::new::>() == Layout::new::() { 716 | // Putting elements directly into the array. 717 | let elem = unsafe { 718 | in_place(array_ptr.add(idx).cast(), &mut *iter, Iterator::next) 719 | }; 720 | if elem.is_none() { 721 | break; 722 | } 723 | } else { 724 | match iter.next() { 725 | None => break, 726 | Some(elem) => unsafe { ptr::write(array_ptr.add(idx), elem) }, 727 | } 728 | } 729 | self.count = idx + 1; 730 | } 731 | 732 | Ok(()) 733 | } 734 | } 735 | 736 | let mut guard = Guard { 737 | ptr: None, 738 | count: 0, 739 | cap: 0, 740 | layout: Layout::new::(), 741 | alloc: &self.alloc, 742 | }; 743 | 744 | let (lower, _) = iter.size_hint(); 745 | 746 | if lower != 0 { 747 | if let Err(layout) = guard.fill(lower.max(FASTER_START), &mut None, &mut iter) { 748 | return Err(err(guard.flush(), None, layout)); 749 | } 750 | } 751 | 752 | let mut one_more = iter.next(); 753 | if one_more.is_none() { 754 | return Ok(guard.flush()); 755 | }; 756 | cold(); 757 | 758 | loop { 759 | let (lower, upper) = iter.size_hint(); 760 | let Some(size_hint) = size_hint_and_one(lower, upper, guard.count.max(FASTER_START)) 761 | else { 762 | return Err(err(guard.flush(), one_more, None)); 763 | }; 764 | 765 | if 
let Err(layout) = guard.fill(size_hint, &mut one_more, &mut iter) {
                return Err(err(guard.flush(), one_more, layout));
            }

            one_more = iter.next();
            if one_more.is_none() {
                return Ok(guard.flush());
            };
        }
    }

    /// Emplaces values from the iterator into memory allocated from the
    /// blink allocator, dispatching to the dropping or non-dropping
    /// implementation.
    ///
    /// If `T` does not need to be dropped, or `no_drop` is requested,
    /// values are placed without registering them in the drop list.
    /// On failure `err` receives the slice of values emplaced so far,
    /// the value pulled from the iterator but not yet emplaced (if any),
    /// and the layout of the failed allocation (if allocation failed).
    #[inline(always)]
    unsafe fn _try_emplace_from_iter<'a, T: 'a, I, E>(
        &'a self,
        iter: I,
        no_drop: bool,
        err: impl FnOnce(&'a mut [T], Option<T>, Option<Layout>) -> E,
    ) -> Result<&'a mut [T], E>
    where
        I: IntoIterator<Item = T>,
    {
        if !needs_drop::<T>() || no_drop {
            self._try_emplace_no_drop_from_iter(iter.into_iter(), err)
        } else {
            self._try_emplace_drop_from_iter(iter.into_iter(), err)
        }
    }
}

/// Provides interface for emplacing values.
/// Created by [`Blink::emplace`], [`Blink::emplace_no_drop`]
/// and [`Blink::emplace_unchecked`].
pub struct Emplace<'a, A, T, R = &'a mut T, S = &'a mut [T]> {
    blink: &'a Blink<A>,
    no_drop: bool,
    // `fn() -> (R, S)` keeps `Emplace` covariant in `R` and `S`
    // without owning values of those types.
    marker: PhantomData<fn() -> (R, S)>,
}

impl<'a, A, T, R, S> Emplace<'a, A, T, R, S>
where
    A: BlinkAllocator,
    T: 'a,
    R: CoerceFromMut<'a, T>,
    S: CoerceFromMut<'a, [T]>,
{
    /// Allocates memory for a value and moves `value` into the memory.
    /// If allocation fails, returns `Err(value)`.
    /// On success returns reference to the emplaced value.
    #[inline(always)]
    pub fn try_value(&self, value: T) -> Result<R, T> {
        unsafe {
            self.blink._try_emplace(
                value,
                |slot, value| {
                    slot.write(Ok::<_, ManuallyDrop<Infallible>>(value));
                },
                self.no_drop,
                |never| match never {},
                |init, _| init,
            )
        }
        .map(R::coerce)
    }

    /// Allocates memory for a value and moves `value` into the memory.
    /// Returns reference to the emplaced value.
    /// If allocation fails, diverges.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn value(&self, value: T) -> R {
        R::coerce(
            unsafe {
                self.blink._try_emplace(
                    value,
                    |slot, value| {
                        slot.write(Ok::<_, ManuallyDrop<Infallible>>(value));
                    },
                    self.no_drop,
                    identity,
                    |_, layout| handle_alloc_error(layout),
                )
            }
            .safe_ok(),
        )
    }

    /// Allocates memory for a value.
    /// On success invokes closure and initialize the value.
    /// Returns reference to the value.
    /// If allocation fails, returns error with closure.
    #[inline(always)]
    pub fn try_with<F>(&self, f: F) -> Result<R, F>
    where
        F: FnOnce() -> T,
    {
        unsafe {
            self.blink._try_emplace(
                f,
                |slot, f| {
                    slot.write(Ok::<_, ManuallyDrop<Infallible>>(f()));
                },
                self.no_drop,
                never,
                |f, _| f,
            )
        }
        .map(R::coerce)
    }

    /// Allocates memory for a value.
    /// On success invokes closure and initialize the value.
    /// Returns reference to the value.
    /// If allocation fails, diverges.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn with<F>(&self, f: F) -> R
    where
        F: FnOnce() -> T,
    {
        R::coerce(
            unsafe {
                self.blink._try_emplace(
                    f,
                    |slot, f| {
                        slot.write(Ok::<_, ManuallyDrop<Infallible>>(f()));
                    },
                    self.no_drop,
                    never,
                    |_, layout| handle_alloc_error(layout),
                )
            }
            .safe_ok(),
        )
    }

    /// Allocates memory for a value.
    /// If allocation fails, returns error with closure.
    /// On success invokes closure and initialize the value.
    /// If closure fails, returns the error.
    /// Returns reference to the value.
    #[inline(always)]
    pub fn try_with_fallible<F, E>(&self, f: F) -> Result<R, Result<E, F>>
    where
        F: FnOnce() -> Result<T, E>,
        E: 'a,
    {
        unsafe {
            self.blink._try_emplace(
                f,
                |slot, f| {
                    slot.write(f().map_err(ManuallyDrop::new));
                },
                self.no_drop,
                |err| Ok(err),
                |f, _| Err(f),
            )
        }
        .map(R::coerce)
    }

    /// Allocates memory for a value.
    /// If allocation fails, diverges.
    /// On success invokes closure and initialize the value.
    /// If closure fails, returns the error.
    /// Returns reference to the value.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn with_fallible<F, E>(&self, f: F) -> Result<R, E>
    where
        F: FnOnce() -> Result<T, E>,
        E: 'a,
    {
        unsafe {
            self.blink._try_emplace(
                f,
                |slot, f| {
                    slot.write(f().map_err(ManuallyDrop::new));
                },
                self.no_drop,
                identity,
                |_, layout| handle_alloc_error(layout),
            )
        }
        .map(R::coerce)
    }

    /// Allocates memory for an array and initializes it with
    /// values from iterator.
    /// Uses iterator hints to allocate memory.
    /// If iterator yields more values than allocated array can hold,
    /// grows allocation and moves next values to extended array.
    /// Repeats until iterator is exhausted.
    /// Works best on iterators that report accurate upper size hint.
    /// Grows allocated memory potentially reducing number of allocations
    /// and copies.
    /// If allocation fails, returns slice of values emplaced so far.
    /// And one element that was taken from iterator and not emplaced.
    #[inline(always)]
    pub fn try_from_iter<I>(&self, iter: I) -> Result<S, (S, Option<T>)>
    where
        I: IntoIterator<Item = T>,
    {
        unsafe {
            self.blink
                ._try_emplace_from_iter(iter, self.no_drop, |slice: &'a mut [T], value, _| {
                    (S::coerce(slice), value)
                })
        }
        .map(S::coerce)
    }

    /// Allocates memory for an array and initializes it with
    /// values from iterator.
    /// Uses iterator hints to allocate memory.
    /// If iterator yields more values than allocated array can hold,
    /// grows allocation and moves next values to extended array.
    /// Repeats until iterator is exhausted.
    /// Works best on iterators that report accurate upper size hint.
    /// Grows allocated memory potentially reducing number of allocations
    /// and copies.
    /// If allocation fails, diverges.
    /// Values already emplaced will be dropped.
    /// One last value that was taken from iterator and not emplaced
    /// is dropped before this method returns.
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    pub fn from_iter<I>(&self, iter: I) -> S
    where
        I: Iterator<Item = T>,
    {
        S::coerce(
            unsafe {
                self.blink
                    ._try_emplace_from_iter(iter, self.no_drop, |_, _, layout| match layout {
                        Some(layout) => handle_alloc_error(layout),
                        None => size_overflow(),
                    })
            }
            .safe_ok(),
        )
    }
}

impl<A> Blink<A>
where
    A: BlinkAllocator,
{
    /// Puts value into this `Blink` instance.
    /// Returns reference to the value.
    ///
    /// Effectively extends lifetime of the value
    /// from local scope to the reset scope.
    ///
    /// For more flexible value placement see
    /// [`Blink::emplace`], [`Blink::emplace_no_drop`] and
    /// [`Blink::emplace_unchecked`].
    ///
    /// # Example
    ///
    /// ```
    /// # #[cfg(feature = "alloc")] fn main() {
    /// # use blink_alloc::Blink;
    /// let mut blink = Blink::new();
    /// let foo = blink.put(42);
    /// assert_eq!(*foo, 42);
    /// *foo = 24;
    /// blink.reset();
    /// // assert_eq!(*foo, 24); // Cannot compile. `foo` does not outlive reset.
    /// # }
    /// # #[cfg(not(feature = "alloc"))] fn main() {}
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[inline(always)]
    #[allow(clippy::mut_from_ref)]
    pub fn put<T: 'static>(&self, value: T) -> &mut T {
        unsafe {
            self._try_emplace(
                value,
                |slot, value| {
                    slot.write(Ok::<_, ManuallyDrop<Infallible>>(value));
                },
                false,
                identity,
                |_, layout| handle_alloc_error(layout),
            )
        }
        .safe_ok()
    }

    /// Puts value into this `Blink` instance.
    /// Returns reference to the value.
    ///
    /// The value will not be dropped when `Blink` is reset.
1062 | /// 1063 | /// Effectively extends lifetime of the value 1064 | /// from local scope to the reset scope. 1065 | /// 1066 | /// For more flexible value placement see 1067 | /// [`Blink::emplace`], [`Blink::emplace_no_drop`] and 1068 | /// [`Blink::emplace_unchecked`]. 1069 | /// 1070 | /// # Example 1071 | /// 1072 | /// ``` 1073 | /// # #[cfg(feature = "alloc")] fn main() { 1074 | /// # use blink_alloc::Blink; 1075 | /// let mut blink = Blink::new(); 1076 | /// let foo = blink.put(42); 1077 | /// assert_eq!(*foo, 42); 1078 | /// *foo = 24; 1079 | /// blink.reset(); 1080 | /// // assert_eq!(*foo, 24); // Cannot compile. `foo` does not outlive reset. 1081 | /// # } 1082 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1083 | /// ``` 1084 | #[cfg(not(no_global_oom_handling))] 1085 | #[inline(always)] 1086 | #[allow(clippy::mut_from_ref)] 1087 | pub fn put_no_drop(&self, value: T) -> &mut T { 1088 | unsafe { 1089 | self._try_emplace( 1090 | value, 1091 | |slot, value| { 1092 | slot.write(Ok::<_, ManuallyDrop>(value)); 1093 | }, 1094 | true, 1095 | identity, 1096 | |_, layout| handle_alloc_error(layout), 1097 | ) 1098 | } 1099 | .safe_ok() 1100 | } 1101 | 1102 | /// Allocates memory for a value. 1103 | /// Returns some reference to the uninitialized value. 1104 | /// If allocation fails, returns none. 1105 | #[inline(always)] 1106 | pub fn try_uninit(&self) -> Option<&mut MaybeUninit> { 1107 | let layout = Layout::new::(); 1108 | let ptr = self.alloc.allocate(layout).ok()?; 1109 | 1110 | // Safety: 1111 | // - `ptr` is valid for `layout`. 1112 | // - `MaybeUninit` is always initialized. 1113 | Some(unsafe { &mut *ptr.as_ptr().cast() }) 1114 | } 1115 | 1116 | /// Allocates memory for a value. 1117 | /// Returns reference to the uninitialized value. 
1118 | #[cfg(not(no_global_oom_handling))] 1119 | #[inline(always)] 1120 | #[allow(clippy::mut_from_ref)] 1121 | pub fn uninit(&self) -> &mut MaybeUninit { 1122 | let layout = Layout::new::(); 1123 | let ptr = self 1124 | .alloc 1125 | .allocate(layout) 1126 | .unwrap_or_else(|_| handle_alloc_error(layout)); 1127 | 1128 | // Safety: 1129 | // - `ptr` is valid for `layout`. 1130 | // - `MaybeUninit` is always initialized. 1131 | unsafe { &mut *ptr.as_ptr().cast() } 1132 | } 1133 | 1134 | /// Copies the slice to the allocated memory 1135 | /// and returns reference to the new slice. 1136 | #[cfg(not(no_global_oom_handling))] 1137 | #[inline(always)] 1138 | #[allow(clippy::mut_from_ref)] 1139 | pub fn copy_slice(&self, slice: &[T]) -> &mut [T] 1140 | where 1141 | T: Copy, 1142 | { 1143 | let result = unsafe { self._try_copy_slice(slice, handle_alloc_error) }; 1144 | match result { 1145 | Ok(slice) => slice, 1146 | Err(never) => never, 1147 | } 1148 | } 1149 | 1150 | /// Allocates memory for a copy of the slice. 1151 | /// Copies the slice to the allocated memory 1152 | /// and returns reference to the new slice. 1153 | /// If allocation fails, returns `None`. 1154 | #[inline(always)] 1155 | pub fn try_copy_slice(&self, slice: &[T]) -> Option<&mut [T]> 1156 | where 1157 | T: Copy, 1158 | { 1159 | unsafe { self._try_copy_slice(slice, |_| ()) }.ok() 1160 | } 1161 | 1162 | /// Copies the slice to the allocated memory 1163 | /// and returns reference to the new slice. 1164 | #[cfg(not(no_global_oom_handling))] 1165 | #[inline(always)] 1166 | #[allow(clippy::mut_from_ref)] 1167 | pub fn copy_str(&self, string: &str) -> &mut str { 1168 | let result = unsafe { self._try_copy_slice(string.as_bytes(), handle_alloc_error) }; 1169 | match result { 1170 | Ok(slice) => unsafe { core::str::from_utf8_unchecked_mut(slice) }, 1171 | Err(never) => never, 1172 | } 1173 | } 1174 | 1175 | /// Allocates memory for a copy of the slice. 
1176 | /// Copies the slice to the allocated memory 1177 | /// and returns reference to the new slice. 1178 | /// If allocation fails, returns `None`. 1179 | #[inline(always)] 1180 | pub fn try_copy_str(&self, string: &str) -> Option<&mut str> { 1181 | unsafe { self._try_copy_slice(string.as_bytes(), |_| ()) } 1182 | .ok() 1183 | .map(|bytes| unsafe { core::str::from_utf8_unchecked_mut(bytes) }) 1184 | } 1185 | 1186 | /// Returns an `Emplace` adaptor that can emplace values into 1187 | /// the blink allocator. 1188 | /// 1189 | /// This version requires the value type to be `'static`. 1190 | /// To use with non-static types consider using one of the following: 1191 | /// 1192 | /// * [`Blink::emplace_no_drop`] 1193 | /// Causes emplaced value to not be dropped on reset. 1194 | /// Avoiding potential unsoundness in `Drop` implementation. 1195 | /// * [`Blink::emplace_shared`] 1196 | /// Returns shared reference to emplaced values. 1197 | /// * [`Blink::emplace_unchecked`] 1198 | /// Unsafe version of `emplace`. 1199 | /// User must guarantee that the value won't have access to references 1200 | /// allocated by the blink allocator later. 1201 | /// 1202 | /// # Example 1203 | /// 1204 | /// ``` 1205 | /// # #[cfg(feature = "alloc")] fn main() { 1206 | /// # use blink_alloc::Blink; 1207 | /// let mut blink = Blink::new(); 1208 | /// let foo = blink.put(42); 1209 | /// assert_eq!(*foo, 42); 1210 | /// *foo = 24; 1211 | /// blink.reset(); 1212 | /// // assert_eq!(*foo, 24); // Cannot compile. `foo` does not outlive reset. 1213 | /// # } 1214 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1215 | /// ``` 1216 | #[inline(always)] 1217 | pub fn emplace(&self) -> Emplace<'_, A, T> { 1218 | Emplace { 1219 | blink: self, 1220 | no_drop: false, 1221 | marker: PhantomData, 1222 | } 1223 | } 1224 | 1225 | /// Returns an `Emplace` adaptor that can emplace values into 1226 | /// the blink allocator. 
1227 | /// 1228 | /// This version causes emplaced value to be not-dropped on reset. 1229 | /// To drop returned value on reset, consider one of the following: 1230 | /// 1231 | /// * [`Blink::emplace`] 1232 | /// Requires the value type to be `'static`. 1233 | /// * [`Blink::emplace_shared`] 1234 | /// Returns shared reference to emplaced values. 1235 | /// * [`Blink::emplace_unchecked`] 1236 | /// Unsafe version of `emplace`. 1237 | /// User must guarantee that the value won't have access to references 1238 | /// allocated by the blink allocator later. 1239 | /// 1240 | /// # Example 1241 | /// 1242 | /// ``` 1243 | /// # #[cfg(feature = "alloc")] fn main() { 1244 | /// # use blink_alloc::Blink; 1245 | /// struct Foo<'a>(&'a String); 1246 | /// 1247 | /// impl Drop for Foo<'_> { 1248 | /// fn drop(&mut self) { 1249 | /// println!("{}", self.0); 1250 | /// } 1251 | /// } 1252 | /// 1253 | /// let mut blink = Blink::new(); 1254 | /// let s = "Hello".to_owned(); 1255 | /// let foo = blink.emplace_no_drop().value(Foo(&s)); 1256 | /// assert_eq!(foo.0, "Hello"); 1257 | /// let world = blink.put("World".to_owned()); 1258 | /// // Would be unsound if `foo` could be dropped. 1259 | /// foo.0 = world; 1260 | /// blink.reset(); 1261 | /// // assert_eq!(foo.0, "Universe"); // Cannot compile. `foo` does not outlive reset. 1262 | /// # } 1263 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1264 | /// ``` 1265 | #[inline(always)] 1266 | pub fn emplace_no_drop(&self) -> Emplace<'_, A, T> { 1267 | Emplace { 1268 | blink: self, 1269 | no_drop: true, 1270 | marker: PhantomData, 1271 | } 1272 | } 1273 | 1274 | /// Returns an `Emplace` adaptor that can emplace values into 1275 | /// the blink allocator. 1276 | /// 1277 | /// This version returns shared references to emplaced values. 1278 | /// Lifts the `'static` requirement. 1279 | /// Still allows emplaced values to be dropped on reset. 
1280 | /// 1281 | /// To drop returned value on reset, consider one of the following: 1282 | /// 1283 | /// * [`Blink::emplace`] 1284 | /// Requires the value type to be `'static`. 1285 | /// * [`Blink::emplace_no_drop`] 1286 | /// Causes emplaced value to not be dropped on reset. 1287 | /// Avoiding potential unsoundness in `Drop` implementation. 1288 | /// * [`Blink::emplace_unchecked`] 1289 | /// Unsafe version of `emplace`. 1290 | /// User must guarantee that the value won't have access to references 1291 | /// allocated by the blink allocator later. 1292 | /// 1293 | /// 1294 | /// ``` 1295 | /// # #[cfg(feature = "alloc")] fn main() { 1296 | /// # use blink_alloc::Blink; 1297 | /// struct Foo<'a>(&'a String); 1298 | /// 1299 | /// impl Drop for Foo<'_> { 1300 | /// fn drop(&mut self) { 1301 | /// println!("{}", self.0); 1302 | /// } 1303 | /// } 1304 | /// 1305 | /// let mut blink = Blink::new(); 1306 | /// let s = "Hello".to_owned(); 1307 | /// let foo = blink.emplace_no_drop().value(Foo(&s)); 1308 | /// assert_eq!(foo.0, "Hello"); 1309 | /// let world = blink.put("World".to_owned()); 1310 | /// // Would be unsound if `foo` was mutable. 1311 | /// // foo.0 = world; 1312 | /// blink.reset(); 1313 | /// // assert_eq!(foo.0, "Universe"); // Cannot compile. `foo` does not outlive reset. 1314 | /// # } 1315 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1316 | /// ``` 1317 | #[inline(always)] 1318 | pub fn emplace_shared(&self) -> Emplace<'_, A, T, &T, &[T]> { 1319 | Emplace { 1320 | blink: self, 1321 | no_drop: true, 1322 | marker: PhantomData, 1323 | } 1324 | } 1325 | 1326 | /// Returns an `Emplace` adaptor that can emplace values into 1327 | /// the blink allocator. 1328 | /// 1329 | /// This is unsafe version of [`Blink::emplace`]. 
1330 | /// User must guarantee that values won't attempt to access 1331 | /// memory allocated by the blink allocator later in their [`Drop::drop`] 1332 | /// For safe code consider using one of the following: 1333 | /// 1334 | /// * [`Blink::emplace`] 1335 | /// Requires the value type to be `'static`. 1336 | /// * [`Blink::emplace_no_drop`] 1337 | /// Causes emplaced value to not be dropped on reset. 1338 | /// Avoiding potential unsoundness in `Drop` implementation. 1339 | /// * [`Blink::emplace_shared`] 1340 | /// Returns shared reference to emplaced values. 1341 | /// 1342 | /// # Safety 1343 | /// 1344 | /// Avoid incorrect usage. See below. 1345 | /// 1346 | /// # Incorrect usage example 1347 | /// 1348 | /// Other emplace methods are safe as they guarantee following case 1349 | /// is impossible. 1350 | /// 1351 | /// ```no_run 1352 | /// # #[cfg(feature = "alloc")] fn main() { 1353 | /// # use blink_alloc::Blink; 1354 | /// struct Foo<'a>(&'a String); 1355 | /// 1356 | /// impl Drop for Foo<'_> { 1357 | /// fn drop(&mut self) { 1358 | /// println!("{}", self.0); 1359 | /// } 1360 | /// } 1361 | /// 1362 | /// let mut blink = Blink::new(); 1363 | /// let s = "Hello".to_owned(); 1364 | /// let foo = blink.emplace_no_drop().value(Foo(&s)); 1365 | /// assert_eq!(foo.0, "Hello"); 1366 | /// let world = blink.put("World".to_owned()); 1367 | /// // Unsound since `foo` would access `world` in `Drop` 1368 | /// // and `world` is dropped earlier. 1369 | /// foo.0 = world; 1370 | /// blink.reset(); 1371 | /// // assert_eq!(foo.0, "Universe"); // Cannot compile. `foo` does not outlive reset. 1372 | /// # } 1373 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1374 | /// ``` 1375 | #[inline(always)] 1376 | pub unsafe fn emplace_unchecked(&self) -> Emplace<'_, A, T> { 1377 | Emplace { 1378 | blink: self, 1379 | no_drop: false, 1380 | marker: PhantomData, 1381 | } 1382 | } 1383 | } 1384 | 1385 | /// Wrapper for [`Blink`] that implements [`Send`]. 
1386 | /// 1387 | /// Normally it is impossible to send [`Blink`] to another thread 1388 | /// due to the fact that it will drop non-sendable types on reset. 1389 | /// 1390 | /// This wrapper resets [`Blink`] on construction and thus safe to send. 1391 | /// 1392 | /// # Example 1393 | /// 1394 | /// ``` 1395 | /// # #[cfg(feature = "alloc")] fn main() { 1396 | /// # use blink_alloc::{SendBlink, Blink}; 1397 | /// let mut blink = Blink::new(); 1398 | /// let rc = std::rc::Rc::new(42); 1399 | /// let rc = blink.put(rc); 1400 | /// assert_eq!(**rc, 42); 1401 | /// let send_blink = SendBlink::new(blink); 1402 | /// 1403 | /// std::thread::scope(move |_| { 1404 | /// let mut blink = send_blink.into_inner(); 1405 | /// blink.put(42); 1406 | /// }); 1407 | /// # } 1408 | /// # #[cfg(not(feature = "alloc"))] fn main() {} 1409 | /// ```` 1410 | pub struct SendBlink { 1411 | blink: Blink, 1412 | } 1413 | 1414 | impl SendBlink 1415 | where 1416 | A: BlinkAllocator, 1417 | { 1418 | /// Creates new [`SendBlink`] from [`Blink`]. 1419 | /// Resets the blink allocator to avoid dropping non-sendable types on other threads. 1420 | #[inline(always)] 1421 | pub fn new(mut blink: Blink) -> Self { 1422 | blink.reset(); 1423 | SendBlink { blink } 1424 | } 1425 | 1426 | /// Returns inner [`Blink`] value. 1427 | #[inline(always)] 1428 | pub fn into_inner(self) -> Blink { 1429 | self.blink 1430 | } 1431 | } 1432 | 1433 | #[inline(always)] 1434 | fn never(never: Infallible) -> T { 1435 | match never {} 1436 | } 1437 | 1438 | const FASTER_START: usize = 8; 1439 | 1440 | #[inline] 1441 | fn size_hint_and_one(lower: usize, upper: Option, count: usize) -> Option { 1442 | // Upper bound is limited by current size. 1443 | // Constant for faster start. 1444 | let upper = upper.map_or(count, |upper| upper.min(count)); 1445 | let size_hint = lower.max(upper); 1446 | 1447 | // Add one more element to size hint. 
1448 | let size_hint = size_hint.checked_add(1)?; 1449 | 1450 | // Sum with current count. 1451 | count.checked_add(size_hint) 1452 | } 1453 | 1454 | #[inline] 1455 | fn saturating_drain_iter(mut iter: impl Iterator) -> usize { 1456 | let mut drained = 0; 1457 | loop { 1458 | let (lower, _) = iter.size_hint(); 1459 | if lower == 0 { 1460 | match iter.next() { 1461 | None => return drained, 1462 | Some(_) => drained += 1, 1463 | } 1464 | continue; 1465 | } 1466 | // Don't drink too much. 1467 | let lower = lower.min(usize::MAX - drained); 1468 | match iter.nth(lower - 1) { 1469 | None => { 1470 | // This bastard lied about lower bound. 1471 | // No idea how many elements were actually drained. 1472 | return drained; 1473 | } 1474 | Some(_) => { 1475 | drained += lower; 1476 | } 1477 | } 1478 | if drained == usize::MAX { 1479 | // Enough is enough. 1480 | return usize::MAX; 1481 | } 1482 | } 1483 | } 1484 | 1485 | #[test] 1486 | fn test_iter_drain() { 1487 | assert_eq!(5, saturating_drain_iter(0..5)); 1488 | assert_eq!(usize::MAX, saturating_drain_iter(0..usize::MAX)); 1489 | assert_eq!(usize::MAX, saturating_drain_iter(core::iter::repeat(1))); 1490 | } 1491 | --------------------------------------------------------------------------------