├── .github
│   └── workflows
│       └── ci.yml
├── .gitignore
├── CHANGELOG.md
├── Cargo.toml
├── LICENSE
├── README.md
├── src
│   ├── atomics.rs
│   ├── lib.rs
│   └── smart_ptrs.rs
└── tests
    └── integration_tests.rs

/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
on: [ push, pull_request ]

env:
  RUST_BACKTRACE: 1

jobs:
  basic-checks-and-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: cargo check
      - run: >
          rustup component add rustfmt &&
          cargo fmt --all --check
      - run: >
          rustup component add clippy &&
          cargo clippy --all-targets --all-features -- -D warnings
      - run: cargo test -- --test-threads=1
      - run: rustup +nightly component add miri
      - run: cargo +nightly miri test --many-seeds=0..3

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/.idea
/target

Cargo.lock
rust-toolchain.toml

--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
## v0.3.1 - 2024-10-14

* moved all SMR-related functionality to a separate crate - `fast-smr`.
* brought back custom Arc and Weak implementations.
* renamed `Snapshot` to `Guard`.

## v0.2.1 - 2024-04-05

* fixed `upgrade` method of `AtomicWeak`: disallow loading `Snapshot`.

## v0.2.0 - 2024-03-31

* removed custom Arc and Weak implementations.
* removed memory ordering parameters from all methods due to potential UB if the user did not
  provide strict enough orderings.
* renamed marker traits to "SmartPtr" and "StrongPtr".
* fixed incorrect Send / Sync auto impls on atomics: they were previously omitted.
* fixed bug in compare_exchange methods: potential UB in the failure case.
* added support for multiple critical sections per thread (e.g. during signal handling).
* added thread-local handles to vacate slots automatically on exit.
* replaced boxed Fns with fn ptrs and cache to eliminate unnecessary allocations.
* changed smr trait methods to use RAII guards instead of functions.
* removed unnecessary Release trait.

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "aarc"
version = "0.3.2"
edition = "2021"
description = "Atomically updatable variants of Arc and Weak for lock-free concurrency."
homepage = "https://github.com/aarc-rs/aarc"
repository = "https://github.com/aarc-rs/aarc"
license = "MIT"
keywords = ["atomic", "arc", "thread-safe", "sync", "lock-free"]
categories = ["concurrency", "memory-management", "data-structures", "algorithms"]
exclude = [".github/", ".gitignore", "target/"]

[dependencies]
fast-smr = "0.2.2"

[dev-dependencies]
rand = "0.8"

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2024 aarc-rs

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# aarc

- [Quickstart](#quickstart)
- [Motivation](#motivation)
- [Examples](#examples)
- [Roadmap](#roadmap)
- [Resources](#resources)

### Quickstart

- [`Arc`](https://docs.rs/aarc/latest/aarc/struct.Arc.html) /
  [`Weak`](https://docs.rs/aarc/latest/aarc/struct.Weak.html): drop-in replacements for the standard library's `Arc`
  and `Weak`, but implemented with deferred reclamation semantics.
- [`AtomicArc`](https://docs.rs/aarc/latest/aarc/struct.AtomicArc.html) /
  [`AtomicWeak`](https://docs.rs/aarc/latest/aarc/struct.AtomicWeak.html): variants of `Arc` and
  `Weak` with atomically updatable pointers, supporting standard atomic operations like `load` and `compare_exchange`.
- [`Guard`](https://docs.rs/aarc/latest/aarc/struct.Guard.html): A novel smart pointer that can be loaded from
  `AtomicArc` or `AtomicWeak`, designed to reduce contention when multiple threads operate on the same atomic variable.
  It prevents deallocation but does not contribute to reference counts. (This was renamed from `Snapshot` in an earlier
  version, to reduce confusion.)
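
A minimal sketch of how these pieces fit together (the values are illustrative; the same pattern
appears in the `AtomicArc` API docs):

```rust
use aarc::{Arc, AtomicArc, RefCount};

// An AtomicArc holds one strong reference to its current allocation (if any).
let atomic = AtomicArc::new(42);

// `load` returns a Guard: it dereferences like an Arc but does not touch the counts.
let guard = atomic.load().unwrap();
assert_eq!(*guard, 42);

// Promote the Guard to a full Arc when longer-lived ownership is needed.
let arc = Arc::from(&guard);
assert_eq!(arc.strong_count(), 2); // one for `atomic`, one for `arc`
```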

### Motivation

Data structures built with `Arc` typically require locks for synchronization, as only
the reference counts may be atomically updated, not the pointer itself or the contained data. While locks
are often the right approach, lock-free data structures can have better theoretical and practical
performance guarantees in highly-contended settings.

Instead of protecting in-place updates with locks, an alternative approach is to perform copy-on-write updates by
atomically installing pointers. To avoid use-after-free, mechanisms for safe memory reclamation (SMR) are typically
utilized (e.g. hazard pointers, epoch-based reclamation). `aarc` uses the blazingly fast algorithm provided by the
[`fast-smr`](https://github.com/aarc-rs/fast-smr) crate and builds on top of it, hiding unsafety and providing
convenient RAII semantics through reference-counted pointers.

### Examples

Example 1: [Treiber Stack](https://en.wikipedia.org/wiki/Treiber_stack)

```rust no_run
use std::ptr::null;
use aarc::{Arc, AsPtr, AtomicArc, Guard};

struct StackNode {
    val: usize,
    next: Option<Arc<StackNode>>,
}

struct Stack {
    top: AtomicArc<StackNode>,
}

impl Stack {
    fn push(&self, val: usize) {
        let mut top = self.top.load();
        loop {
            let top_ptr = top.as_ref().map_or(null(), AsPtr::as_ptr);
            let new_node = Arc::new(StackNode {
                val,
                next: top.as_ref().map(Arc::from),
            });
            match self.top.compare_exchange(top_ptr, Some(&new_node)) {
                Ok(()) => break,
                Err(before) => top = before,
            }
        }
    }
    fn pop(&self) -> Option<Guard<StackNode>> {
        let mut top = self.top.load();
        while let Some(top_node) = top.as_ref() {
            match self
                .top
                .compare_exchange(top_node.as_ptr(), top_node.next.as_ref())
            {
                Ok(()) => return top,
                Err(actual_top) => top = actual_top,
            }
        }
        None
    }
}
```
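
A brief, hypothetical driver for this stack (not part of the crate; it assumes the definitions
above are in scope):

```rust ignore
// Construct an empty stack; `AtomicArc::new(None)` stores a null pointer.
let stack = Stack { top: AtomicArc::new(None) };

stack.push(1);
stack.push(2);

// `pop` hands back a Guard to the removed node: the node remains readable while
// the Guard is alive, even if other threads have since dropped every Arc to it.
assert_eq!(stack.pop().map(|node| node.val), Some(2));
assert_eq!(stack.pop().map(|node| node.val), Some(1));
assert!(stack.pop().is_none());
```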

### Roadmap

- [ ] relax atomic orderings from SeqCst to Acq/Rel
- [ ] add tagged pointers
- [ ] add more tests and stabilize APIs

### Resources

1. [Anderson, Daniel, et al. "Concurrent Deferred Reference Counting with Constant-Time Overhead."](https://dl.acm.org/doi/10.1145/3453483.3454060)
2. [Anderson, Daniel, et al. "Turning Manual Concurrent Memory Reclamation into Automatic Reference Counting."](https://dl.acm.org/doi/10.1145/3519939.3523730)

--------------------------------------------------------------------------------
/src/atomics.rs:
--------------------------------------------------------------------------------
use std::marker::PhantomData;
use std::ptr;
use std::ptr::{null, null_mut, NonNull};
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::{Relaxed, SeqCst};

use fast_smr::smr::{load, protect};

use crate::smart_ptrs::{Arc, AsPtr, Guard, Weak};
use crate::StrongPtr;

/// An [`Arc`] with an atomically updatable pointer.
///
/// Usage notes:
/// * An `AtomicArc` can intrinsically store `None` (a hypothetical `Option<AtomicArc<T>>` would
///   no longer be atomic).
/// * An `AtomicArc` contributes to the strong count of the pointed-to allocation, if any. However,
///   it does not implement `Deref`, so methods like `load` must be used to obtain a [`Guard`]
///   through which the data can be accessed.
/// * `T` must be `Sized` for compatibility with `AtomicPtr`. This may be relaxed in the future.
/// * When an `AtomicArc` is updated or dropped, the strong count of the previously pointed-to
///   object may not be immediately decremented. Thus:
///     * `T` must be `'static` to support delayed deallocations.
///     * The value returned by `strong_count` will likely be an overestimate.
///
/// # Examples
/// ```
/// use aarc::{Arc, AtomicArc, Guard, RefCount};
///
/// let atomic = AtomicArc::new(53);
///
/// let guard = atomic.load().unwrap(); // guard doesn't affect strong count
/// assert_eq!(*guard, 53);
///
/// let arc = Arc::from(&guard);
/// assert_eq!(arc.strong_count(), 2);
///
/// assert_eq!(*arc, *guard);
/// ```
#[derive(Default)]
pub struct AtomicArc<T: 'static> {
    ptr: AtomicPtr<T>,
    phantom: PhantomData<T>,
}

impl<T: 'static> AtomicArc<T> {
    /// Similar to [`Arc::new`], but `None` is a valid input, in which case the `AtomicArc` will
    /// store a null pointer.
    ///
    /// To create an `AtomicArc` from an existing `Arc`, use `from`.
    pub fn new<D: Into<Option<T>>>(data: D) -> Self {
        let ptr = data.into().map_or(null(), |x| Arc::into_raw(Arc::new(x)));
        Self {
            ptr: AtomicPtr::new(ptr.cast_mut()),
            phantom: PhantomData,
        }
    }

    /// If `self` and `current` point to the same object, `new`'s pointer will be stored into
    /// `self` and the result will be an empty `Ok`. Otherwise, a `load` occurs, and an `Err`
    /// containing a [`Guard`] will be returned.
    pub fn compare_exchange<N: AsPtr<Target = T> + StrongPtr>(
        &self,
        current: *const T,
        new: Option<&N>,
    ) -> Result<(), Option<Guard<T>>> {
        let c = current.cast_mut();
        let n = new.map_or(null(), N::as_ptr).cast_mut();
        match self.ptr.compare_exchange(c, n, SeqCst, SeqCst) {
            Ok(before) => unsafe {
                Self::after_swap(n, before);
                Ok(())
            },
            Err(actual) => {
                let mut opt = None;
                if let Some(ptr) = NonNull::new(actual) {
                    if let Some(guard) = protect(&self.ptr, ptr) {
                        opt = Some(Guard { guard })
                    }
                }
                Err(opt)
            }
        }
    }

    /// Loads a [`Guard`], which allows the pointed-to value to be accessed. `None` indicates that
    /// the inner atomic pointer is null.
    pub fn load(&self) -> Option<Guard<T>> {
        let guard = load(&self.ptr)?;
        Some(Guard { guard })
    }

    /// Stores `new`'s pointer (or `None`) into `self`.
    pub fn store<N: AsPtr<Target = T> + StrongPtr>(&self, new: Option<&N>) {
        // TODO: rework this method to possibly take ownership of new (avoid increment).
        let n = new.map_or(null(), N::as_ptr);
        let before = self.ptr.swap(n.cast_mut(), SeqCst);
        unsafe {
            Self::after_swap(n, before);
        }
    }

    unsafe fn after_swap(new: *const T, before: *const T) {
        if !ptr::eq(new, before) {
            if !new.is_null() {
                Arc::increment_strong_count(new);
            }
            if !before.is_null() {
                drop(Arc::from_raw(before));
            }
        }
    }
}

impl<T: 'static> Clone for AtomicArc<T> {
    fn clone(&self) -> Self {
        let ptr = if let Some(guard) = self.load() {
            unsafe {
                Arc::increment_strong_count(guard.as_ptr());
            }
            guard.as_ptr().cast_mut()
        } else {
            null_mut()
        };
        Self {
            ptr: AtomicPtr::new(ptr),
            phantom: PhantomData,
        }
    }
}

impl<T: 'static> Drop for AtomicArc<T> {
    fn drop(&mut self) {
        if let Some(ptr) = NonNull::new(self.ptr.load(Relaxed)) {
            unsafe {
                drop(Arc::from_raw(ptr.as_ptr()));
            }
        }
    }
}

unsafe impl<T: Send + Sync + 'static> Send for AtomicArc<T> {}

unsafe impl<T: Send + Sync + 'static> Sync for AtomicArc<T> {}
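
Editor's note (illustration only, not part of this file): the `load` / `compare_exchange` API
above is typically driven in a read-copy-update loop. A hypothetical helper, written against the
signatures defined above, might look like this:

```rust
use std::ptr::null;
use aarc::{Arc, AsPtr, AtomicArc};

// Replace the current value with `f(current)`, retrying if another thread wins
// the race. Note that `f` may run several times under contention.
fn update<T: 'static>(atomic: &AtomicArc<T>, f: impl Fn(Option<&T>) -> T) {
    let mut current = atomic.load();
    loop {
        let current_ptr = current.as_ref().map_or(null(), AsPtr::as_ptr);
        let new = Arc::new(f(current.as_deref()));
        match atomic.compare_exchange(current_ptr, Some(&new)) {
            Ok(()) => return,
            // Another pointer was installed first; retry against what was observed.
            Err(actual) => current = actual,
        }
    }
}
```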

/// A [`Weak`] with an atomically updatable pointer.
///
/// See [`AtomicArc`] for usage notes. `AtomicWeak` differs only in that it contributes to the weak
/// count instead of the strong count.
///
/// # Examples
/// ```
/// use aarc::{Arc, AtomicWeak, RefCount, Weak};
///
/// let arc = Arc::new(53);
///
/// let atomic = AtomicWeak::from(&arc); // +1 weak count
///
/// let guard = atomic.load().unwrap();
///
/// assert_eq!(*arc, *guard);
/// assert_eq!(arc.weak_count(), 1);
/// ```
#[derive(Default)]
pub struct AtomicWeak<T: 'static> {
    ptr: AtomicPtr<T>,
}

impl<T: 'static> AtomicWeak<T> {
    /// If `self` and `current` point to the same object, `new`'s pointer will be stored into
    /// `self` and the result will be an empty `Ok`. Otherwise, a load will be attempted and a
    /// [`Guard`] will be returned if possible. See `load`.
    pub fn compare_exchange<N: AsPtr<Target = T>>(
        &self,
        current: *const T,
        new: Option<&N>,
    ) -> Result<(), Option<Guard<T>>> {
        let c = current.cast_mut();
        let n = new.map_or(null(), N::as_ptr).cast_mut();
        match self.ptr.compare_exchange(c, n, SeqCst, SeqCst) {
            Ok(before) => unsafe {
                Self::after_swap(n, before);
                Ok(())
            },
            Err(actual) => unsafe {
                let mut opt = None;
                if let Some(ptr) = NonNull::new(actual) {
                    if let Some(guard) = protect(&self.ptr, ptr) {
                        opt = (Arc::strong_count_raw(guard.as_ptr()) > 0).then_some(Guard { guard })
                    }
                }
                Err(opt)
            },
        }
    }

    /// Attempts to load a [`Guard`]. This method differs from the one on `AtomicArc` in that
    /// `None` may indicate one of two things:
    /// * The `AtomicWeak` is indeed not pointing to anything (null pointer).
    /// * The pointer is not null, but the strong count is 0, so a `Guard` cannot be loaded.
    ///
    /// There is currently no way for the user to differentiate between the two cases (this may
    /// change in the future).
    pub fn load(&self) -> Option<Guard<T>> {
        let guard = load(&self.ptr)?;
        unsafe { (Arc::strong_count_raw(guard.as_ptr()) > 0).then_some(Guard { guard }) }
    }

    /// Stores `new`'s pointer (or `None`) into `self`.
    pub fn store<N: AsPtr<Target = T>>(&self, new: Option<&N>) {
        let n = new.map_or(null(), N::as_ptr);
        let before = self.ptr.swap(n.cast_mut(), SeqCst);
        unsafe {
            Self::after_swap(n, before);
        }
    }

    unsafe fn after_swap(new: *const T, before: *const T) {
        if !ptr::eq(new, before) {
            if !new.is_null() {
                Weak::increment_weak_count(new);
            }
            if !before.is_null() {
                drop(Weak::from_raw(before));
            }
        }
    }
}
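
Editor's note (illustration only, not part of this file): the two meanings of `None` documented on
`load` can be seen in a short snippet. Because decrements are deferred, exactly when the second
case begins returning `None` after the last `Arc` is dropped is not specified:

```rust
use aarc::{Arc, AtomicWeak};

// Case 1: a default (null) AtomicWeak has nothing to load.
let empty: AtomicWeak<i32> = AtomicWeak::default();
assert!(empty.load().is_none());

// Case 2: a non-null AtomicWeak whose target is still strongly referenced.
let arc = Arc::new(7);
let weak = AtomicWeak::from(&arc);
assert_eq!(*weak.load().unwrap(), 7);

// Once every Arc is gone and the deferred decrement has been applied, `load`
// returns `None` here as well; the caller cannot tell the two cases apart.
drop(arc);
```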

impl<T: 'static> Clone for AtomicWeak<T> {
    fn clone(&self) -> Self {
        let ptr = if let Some(guard) = self.load() {
            unsafe {
                Weak::increment_weak_count(guard.as_ptr());
            }
            guard.as_ptr().cast_mut()
        } else {
            null_mut()
        };
        Self {
            ptr: AtomicPtr::new(ptr),
        }
    }
}

impl<T: 'static> Drop for AtomicWeak<T> {
    fn drop(&mut self) {
        if let Some(ptr) = NonNull::new(self.ptr.load(Relaxed)) {
            unsafe {
                drop(Weak::from_raw(ptr.as_ptr()));
            }
        }
    }
}

impl<T: 'static, P: AsPtr<Target = T> + StrongPtr> From<&P> for AtomicArc<T> {
    fn from(value: &P) -> Self {
        unsafe {
            let ptr = P::as_ptr(value);
            Arc::increment_strong_count(ptr);
            Self {
                ptr: AtomicPtr::new(ptr.cast_mut()),
                phantom: PhantomData,
            }
        }
    }
}

impl<T: 'static, P: AsPtr<Target = T>> From<&P> for AtomicWeak<T> {
    fn from(value: &P) -> Self {
        unsafe {
            let ptr = P::as_ptr(value);
            Weak::increment_weak_count(ptr);
            Self {
                ptr: AtomicPtr::new(ptr.cast_mut()),
            }
        }
    }
}

unsafe impl<T: Send + Sync + 'static> Send for AtomicWeak<T> {}

unsafe impl<T: Send + Sync + 'static> Sync for AtomicWeak<T> {}

--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
#![doc = include_str!("../README.md")]

pub use atomics::AtomicArc;
pub use atomics::AtomicWeak;
pub use smart_ptrs::Arc;
pub use smart_ptrs::AsPtr;
pub use smart_ptrs::Guard;
pub use smart_ptrs::RefCount;
pub use smart_ptrs::StrongPtr;
pub use smart_ptrs::Weak;

pub(crate) mod atomics;

pub(crate) mod smart_ptrs;

--------------------------------------------------------------------------------
/src/smart_ptrs.rs:
--------------------------------------------------------------------------------
use std::alloc::{dealloc, Layout};
use std::marker::PhantomData;
use std::mem::{forget, ManuallyDrop};
use std::ops::Deref;
use std::ptr::{addr_of_mut, drop_in_place, NonNull};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Relaxed, SeqCst};

use fast_smr::smr;
use fast_smr::smr::{load_era, retire};

/// An [`Arc`]-like smart pointer that can be loaded from atomics.
///
/// Usage notes:
/// * A `Guard` should be used as a temporary variable within a local scope, not as a replacement
///   for [`Arc`] in a data structure.
/// * `Guard` implements `Deref` and prevents deallocation like [`Arc`], but it does not contribute
///   to the strong count.
pub struct Guard<T: 'static> {
    pub(crate) guard: smr::Guard<T>,
}

impl<T: 'static> Deref for Guard<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        unsafe { &*self.guard.as_ptr() }
    }
}

impl<T: 'static> From<&Guard<T>> for Arc<T> {
    fn from(value: &Guard<T>) -> Self {
        unsafe {
            let ptr = value.guard.as_ptr();
            Self::increment_strong_count(ptr);
            Self {
                ptr: NonNull::new_unchecked(find_inner_ptr(ptr).cast_mut()),
                phantom: PhantomData,
            }
        }
    }
}

impl<T: 'static> From<&Guard<T>> for Weak<T> {
    fn from(value: &Guard<T>) -> Self {
        unsafe {
            let ptr = value.guard.as_ptr();
            Self::increment_weak_count(ptr);
            Self {
                ptr: NonNull::new_unchecked(find_inner_ptr(ptr).cast_mut()),
            }
        }
    }
}

/// A drop-in replacement for [`std::sync::Arc`].
pub struct Arc<T: 'static> {
    ptr: NonNull<ArcInner<T>>,
    phantom: PhantomData<ArcInner<T>>,
}

impl<T: 'static> Arc<T> {
    pub fn new(data: T) -> Self {
        unsafe {
            let ptr = NonNull::new_unchecked(Box::into_raw(Box::new(ArcInner {
                strong_count: AtomicUsize::new(1),
                weak_count: AtomicUsize::new(1),
                birth_era: load_era(),
                data,
            })));
            Self {
                ptr,
                phantom: PhantomData,
            }
        }
    }
    pub fn into_raw(this: Self) -> *const T {
        let ptr = this.as_ptr();
        forget(this);
        ptr
    }
    /// # Safety
    /// See [`std::sync::Arc::from_raw`].
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        Self {
            ptr: NonNull::new_unchecked(find_inner_ptr(ptr).cast_mut()),
            phantom: PhantomData,
        }
    }
    pub(crate) unsafe fn strong_count_raw(ptr: *const T) -> usize {
        (*find_inner_ptr(ptr)).strong_count.load(SeqCst)
    }
    pub(crate) unsafe fn increment_strong_count(ptr: *const T) {
        _ = ManuallyDrop::new(Self::from_raw(ptr)).clone();
    }
}

impl<T: 'static> Clone for Arc<T> {
    fn clone(&self) -> Self {
        unsafe {
            self.ptr.as_ref().strong_count.fetch_add(1, SeqCst);
        }
        Self {
            ptr: self.ptr,
            phantom: PhantomData,
        }
    }
}

impl<T: 'static> Deref for Arc<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        unsafe { &self.ptr.as_ref().data }
    }
}

impl<T: 'static> Drop for Arc<T> {
    fn drop(&mut self) {
        let birth_era = unsafe { self.ptr.as_ref().birth_era };
        retire(self.ptr.cast(), decrement_strong_count::<T>, birth_era);
    }
}

fn decrement_strong_count<T>(ptr: NonNull<u8>) {
    unsafe {
        let inner = ptr.cast::<ArcInner<T>>().as_ptr();
        if (*inner).strong_count.fetch_sub(1, SeqCst) == 1 {
            drop_in_place(&mut (*inner).data);
            decrement_weak_count::<T>(ptr);
        }
    }
}

/// A drop-in replacement for [`std::sync::Weak`].
pub struct Weak<T: 'static> {
    ptr: NonNull<ArcInner<T>>,
}

impl<T: 'static> Weak<T> {
    pub(crate) unsafe fn increment_weak_count(ptr: *const T) {
        _ = ManuallyDrop::new(Self::from_raw(ptr)).clone();
    }
    pub(crate) unsafe fn from_raw(ptr: *const T) -> Self {
        Self {
            ptr: NonNull::new_unchecked(find_inner_ptr(ptr).cast_mut()),
        }
    }
}

impl<T: 'static> Clone for Weak<T> {
    fn clone(&self) -> Self {
        unsafe {
            self.ptr.as_ref().weak_count.fetch_add(1, SeqCst);
        }
        Self { ptr: self.ptr }
    }
}

impl<T: 'static> Drop for Weak<T> {
    fn drop(&mut self) {
        let birth_era = unsafe { self.ptr.as_ref().birth_era };
        retire(self.ptr.cast(), decrement_weak_count::<T>, birth_era);
    }
}

fn decrement_weak_count<T>(ptr: NonNull<u8>) {
    unsafe {
        let inner = ptr.cast::<ArcInner<T>>().as_ptr();
        if (*inner).weak_count.fetch_sub(1, SeqCst) == 1 {
            dealloc(ptr.as_ptr(), Layout::new::<ArcInner<T>>());
        }
    }
}

unsafe fn find_inner_ptr<T>(ptr: *const T) -> *const ArcInner<T> {
    let layout = Layout::new::<ArcInner<()>>();
    let offset = layout.size() + padding_needed_for(&layout, align_of::<T>());
    ptr.byte_sub(offset) as *const ArcInner<T>
}

// See: [`Layout::padding_needed_for`]
fn padding_needed_for(layout: &Layout, align: usize) -> usize {
    let len = layout.size();
    let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
    len_rounded_up.wrapping_sub(len)
}

#[repr(C)]
struct ArcInner<T> {
    strong_count: AtomicUsize,
    weak_count: AtomicUsize,
    birth_era: u64,
    data: T,
}

/// A trait for extracting a raw pointer from a smart pointer.
pub trait AsPtr {
    type Target;

    fn as_ptr(&self) -> *const Self::Target;
}

impl<T: 'static> AsPtr for Arc<T> {
    type Target = T;

    fn as_ptr(&self) -> *const T {
        unsafe { addr_of_mut!((*self.ptr.as_ptr()).data) }
    }
}

impl<T: 'static> AsPtr for Weak<T> {
    type Target = T;

    fn as_ptr(&self) -> *const T {
        unsafe { addr_of_mut!((*self.ptr.as_ptr()).data) }
    }
}

impl<T: 'static> AsPtr for Guard<T> {
    type Target = T;

    fn as_ptr(&self) -> *const T {
        self.guard.as_ptr().cast_const()
    }
}

/// A marker trait for types that prevent deallocation ([`Arc`] and [`Guard`]).
pub trait StrongPtr {}
impl<T: 'static> StrongPtr for Arc<T> {}
impl<T: 'static> StrongPtr for Guard<T> {}

pub trait RefCount {
    fn strong_count(&self) -> usize;
    fn weak_count(&self) -> usize;
}

impl<T: AsPtr> RefCount for T {
    fn strong_count(&self) -> usize {
        unsafe {
            let inner = find_inner_ptr(self.as_ptr());
            (*inner).strong_count.load(Relaxed)
        }
    }

    fn weak_count(&self) -> usize {
        unsafe {
            let inner = find_inner_ptr(self.as_ptr());
            (*inner).weak_count.load(Relaxed) - 1
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::smart_ptrs::Arc;

    #[test]
    fn test_arc() {
        let x = Arc::new(55usize);
        assert_eq!(*x, 55);
        unsafe {
            let y = Arc::from_raw(Arc::into_raw(x));
            assert_eq!(*y, 55);
        }
    }
}

--------------------------------------------------------------------------------
/tests/integration_tests.rs:
--------------------------------------------------------------------------------
use std::ptr::{null, null_mut};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::Relaxed;
use std::thread;

use rand::random;

use aarc::{Arc, AsPtr, AtomicArc, AtomicWeak, Guard};

fn test_stack(threads_count: usize, iters_per_thread: usize) {
    #[derive(Default)]
    struct StackNode {
        val: usize,
        next: Option<Arc<StackNode>>,
    }

    #[derive(Default)]
    struct Stack {
        top: AtomicArc<StackNode>,
    }

    unsafe impl Send for Stack {}
    unsafe impl Sync for Stack {}

    impl Stack {
        fn push(&self, val: usize) {
            let mut top = self.top.load();
            loop {
                let top_ptr = top.as_ref().map_or(null(), AsPtr::as_ptr);
                let new_node = Arc::new(StackNode {
                    val,
                    next: top.as_ref().map(Arc::from),
                });
                match self.top.compare_exchange(top_ptr, Some(&new_node)) {
                    Ok(()) => break,
                    Err(before) => top = before,
                }
            }
        }
        fn pop(&self) -> Option<Guard<StackNode>> {
            let mut top = self.top.load();
            while let Some(top_node) = top.as_ref() {
                match self
                    .top
                    .compare_exchange(top_node.as_ptr(), top_node.next.as_ref())
                {
                    Ok(()) => return top,
                    Err(actual_top) => top = actual_top,
                }
            }
            None
        }
    }

    let stack = Stack::default();

    thread::scope(|s| {
        for _ in 0..threads_count {
            s.spawn(|| {
                for i in 0..iters_per_thread {
                    stack.push(i);
                }
            });
        }
    });

    let val_counts: Vec<AtomicUsize> = (0..iters_per_thread)
        .map(|_| AtomicUsize::default())
        .collect();
    thread::scope(|s| {
        for _ in 0..threads_count {
            s.spawn(|| {
                for _ in 0..iters_per_thread {
                    let node = stack.pop().unwrap();
                    val_counts[node.val].fetch_add(1, Relaxed);
                }
            });
        }
    });

    // Verify that no nodes were lost.

    for count in &val_counts {
        assert_eq!(count.load(Relaxed), threads_count);
    }
}

#[test]
fn test_stack_small() {
    test_stack(5, 10);
}

#[test]
#[cfg_attr(miri, ignore)]
fn test_stack_full() {
    test_stack(8, 500);
}

fn test_sorted_linked_list(threads_count: usize, iters_per_thread: usize) {
    #[derive(Default)]
    struct ListNode {
        val: usize,
        prev: AtomicWeak<ListNode>,
        next: AtomicArc<ListNode>,
    }

    struct LinkedList {
        head: AtomicArc<ListNode>,
    }

    impl LinkedList {
        fn insert_sorted(&self, val: usize) {
            let mut curr_node = self.head.load().unwrap();
            let mut next = curr_node.next.load();
            loop {
                if next.is_none() || val < next.as_ref().unwrap().val {
                    let new = Arc::new(ListNode {
                        val,
                        prev: AtomicWeak::from(&curr_node),
                        next: next.as_ref().map_or(AtomicArc::default(), AtomicArc::from),
                    });
                    match curr_node.next.compare_exchange(
                        next.as_ref().map_or(null_mut(), Guard::as_ptr),
                        Some(&new),
                    ) {
                        Ok(()) => {
                            if let Some(next_node) = next {
                                // This is technically incorrect; another node could've been
                                // inserted, but it's not crucial for this test.
                                next_node.prev.store(Some(&new));
                            }
                            break;
                        }
                        Err(actual_next) => next = actual_next,
                    }
                } else {
                    curr_node = next.unwrap();
                    next = curr_node.next.load();
                }
            }
        }
    }

    let list = LinkedList {
        head: AtomicArc::new(Some(ListNode::default())),
    };

    thread::scope(|s| {
        for _ in 0..threads_count {
            s.spawn(|| {
                for _ in 0..iters_per_thread {
                    list.insert_sorted(random::<usize>());
                }
            });
        }
    });

    // Verify that no nodes were lost and that the list is in sorted order.
    let mut i = 0;
    let mut curr_node = list.head.load().unwrap();
    loop {
        let next = curr_node.next.load();
        if let Some(next_node) = next {
            assert!(curr_node.val <= next_node.val);
            curr_node = next_node;
            i += 1;
        } else {
            break;
        }
    }
    assert_eq!(threads_count * iters_per_thread, i);
    // Iterate in reverse order using the weak ptrs.
    while let Some(prev_node) = curr_node.prev.load() {
        assert!(curr_node.val >= prev_node.val);
        curr_node = prev_node;
    }
}

#[test]
fn test_sorted_linked_list_small() {
    test_sorted_linked_list(5, 10);
}

#[test]
#[cfg_attr(miri, ignore)]
fn test_sorted_linked_list_full() {
    test_sorted_linked_list(8, 500);
}

--------------------------------------------------------------------------------