├── .github └── workflows │ └── ci.yml ├── .gitignore ├── Cargo.toml ├── LICENCE-APACHE ├── LICENCE-MIT ├── README.md └── src ├── lib.rs ├── map.rs ├── sync.rs ├── tests.rs └── unsync.rs /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: CI 4 | 5 | jobs: 6 | check: 7 | name: Run tests and clippy 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v4 11 | 12 | - run: cargo clippy --all-features -- -D warnings 13 | - run: cargo test 14 | - run: cargo test --all-features 15 | 16 | check-no_std: 17 | name: Check no_std 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Install toolchain 23 | run: rustup target add aarch64-unknown-none 24 | 25 | - name: cargo check 26 | run: cargo check --target aarch64-unknown-none --no-default-features --features ahash 27 | 28 | fmt: 29 | name: Check formatting 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v4 33 | 34 | - run: cargo fmt -- --check 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "once_map" 3 | version = "0.4.21" 4 | edition = "2021" 5 | rust-version = "1.63" 6 | license = "MIT OR Apache-2.0" 7 | readme = "README.md" 8 | description = "Single assignment and lazy maps" 9 | 10 | repository = "https://github.com/a1phyr/once_map" 11 | documentation = "https://docs.rs/once_map" 12 | 13 | categories = ["caching", "concurrency", "data-structures", "rust-patterns", "no-std"] 14 | keywords = ["map", "once", "lazy"] 15 | 16 | 17 | [features] 18 | default = ["std", "ahash"] 19 | 20 | std = ["dep:parking_lot", "stable_deref_trait/std", "ahash?/std", "ahash?/runtime-rng"] 21 | ahash = ["dep:ahash"] 22 | 23 | rayon = ["dep:rayon", "hashbrown/rayon", "std"] 24 | 25 | [dependencies] 26 | ahash = { version = "0.8", default-features = false, optional = true } 27 | hashbrown = { version = "0.15", default-features = false } 28 | stable_deref_trait = { version = "1.2", features = ["alloc"], default-features = false } 29 | 30 | equivalent = { version = "1.0", optional = true } 31 | 32 | parking_lot = { version = "0.12", optional = true } 33 | rayon = { version = "1.6", optional = true } 34 | serde = { version = "1.0.210", optional = true, default-features = false } 35 | 36 | 37 | [package.metadata.docs.rs] 38 | all-features = true 39 | -------------------------------------------------------------------------------- /LICENCE-APACHE: -------------------------------------------------------------------------------- 1 | Copyright 2022 Benoît du Garreau 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
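The `check-no_std` job in the CI configuration above builds the crate for `aarch64-unknown-none` with `--no-default-features --features ahash`. As a rough, illustrative sketch (not part of the repository), a downstream `no_std` crate using that feature set is limited to the `unsync` half of the API and the `ahash`-backed default hasher; something along these lines should keep compiling under that configuration:

```rust
// Hypothetical no_std consumer (crate-level `#![no_std]` assumed), depending on
// once_map = { version = "0.4", default-features = false, features = ["ahash"] }
extern crate alloc;

use alloc::string::{String, ToString};
use once_map::unsync::OnceMap;

fn cache_demo() -> String {
    // The `sync` module requires `std`, but the single-threaded map is available.
    let map: OnceMap<u32, String> = OnceMap::new();
    let three: &str = map.insert(3, |n| n.to_string());
    three.to_string()
}
```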
14 | -------------------------------------------------------------------------------- /LICENCE-MIT: -------------------------------------------------------------------------------- 1 | Copyright 2022 Benoît du Garreau 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `once_map` 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/once_map.svg)](https://crates.io/crates/once_map) 4 | [![Docs.rs](https://docs.rs/once_map/badge.svg)](https://docs.rs/once_map/) 5 | ![Minimum rustc version](https://img.shields.io/badge/rustc-1.63+-lightgray.svg) 6 | 7 | This crate provides `OnceMap`, a type of `HashMap` whose entries can be written through a shared reference, 8 | but only once. This is similar to [`once_cell`], but for a map. 9 | This makes it possible to hold references to values inside the map for the lifetime of the map, 10 | without any further locking. 11 | 12 | This makes the type well suited for implementing caches; a `LazyMap` type is provided for such cases. 13 | 14 | The crate provides a version of this map that is heavily optimized for concurrent use, as well as a single-threaded one. 15 | 16 | [`once_cell`]: https://docs.rs/once_cell 17 | 18 | # Example 19 | 20 | ```rust 21 | let map = OnceMap::new(); 22 | 23 | // All of these are `&str` pointing directly into the map. 24 | // Note that we don't need a mutable reference, so we can have several of 25 | // them at the same time.
26 | let roses = map.insert(String::from("rose"), |_| String::from("red")); 27 | let violets = map.insert(String::from("violets"), |_| String::from("blue")); 28 | let sugar = map.insert(String::from("sugar"), |_| String::from("sweet")); 29 | 30 | assert_eq!(roses, "red"); 31 | assert_eq!(violets, "blue"); 32 | assert_eq!(sugar, "sweet"); 33 | 34 | // The closure is never run here, because we already have a value for "rose" 35 | let roses = map.insert(String::from("rose"), |_| String::from("green")); 36 | // The old value did not change 37 | assert_eq!(roses, "red"); 38 | ``` 39 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(feature = "std"), no_std)] 2 | #![cfg_attr(docsrs, feature(doc_cfg))] 3 | 4 | extern crate alloc; 5 | 6 | #[cfg(feature = "std")] 7 | pub mod sync; 8 | 9 | #[cfg(feature = "std")] 10 | pub use sync::{LazyMap, OnceMap}; 11 | 12 | pub mod unsync; 13 | 14 | mod map; 15 | 16 | #[cfg(test)] 17 | mod tests; 18 | 19 | use core::hash::{BuildHasher, Hash, Hasher}; 20 | 21 | #[cfg(feature = "equivalent")] 22 | pub use equivalent::Equivalent; 23 | 24 | /// Generalization of `Borrow` that works with more types. 25 | #[cfg(not(feature = "equivalent"))] 26 | pub trait Equivalent<K: ?Sized> { 27 | fn equivalent(&self, key: &K) -> bool; 28 | } 29 | 30 | #[cfg(not(feature = "equivalent"))] 31 | impl<Q, K> Equivalent<K> for Q 32 | where 33 | Q: Eq + ?Sized, 34 | K: core::borrow::Borrow<Q> + ?Sized, 35 | { 36 | fn equivalent(&self, key: &K) -> bool { 37 | self == key.borrow() 38 | } 39 | } 40 | 41 | /// Generalization of `ToOwned` that works with more types. 42 | pub trait ToOwnedEquivalent<K>: Equivalent<K> { 43 | fn to_owned_equivalent(&self) -> K; 44 | } 45 | 46 | impl<Q> ToOwnedEquivalent<Q::Owned> for Q 47 | where 48 | Q: alloc::borrow::ToOwned + Eq + ?Sized, 49 | { 50 | fn to_owned_equivalent(&self) -> Q::Owned { 51 | self.to_owned() 52 | } 53 | } 54 | 55 | fn hash_one<S: BuildHasher, Q: Hash + ?Sized>(hash_builder: &S, key: &Q) -> u64 { 56 | let mut hasher = hash_builder.build_hasher(); 57 | key.hash(&mut hasher); 58 | hasher.finish() 59 | } 60 | 61 | trait InfallibleResult { 62 | type Ok; 63 | 64 | fn unwrap_infallible(self) -> Self::Ok; 65 | } 66 | 67 | impl<T> InfallibleResult for Result<T, core::convert::Infallible> { 68 | type Ok = T; 69 | 70 | #[inline] 71 | fn unwrap_infallible(self) -> T { 72 | match self { 73 | Ok(v) => v, 74 | Err(void) => match void {}, 75 | } 76 | } 77 | } 78 | 79 | #[cfg(feature = "ahash")] 80 | use ahash::{AHasher as HasherInner, RandomState as RandomStateInner}; 81 | 82 | #[cfg(all(not(feature = "ahash"), feature = "std"))] 83 | use std::collections::hash_map::{DefaultHasher as HasherInner, RandomState as RandomStateInner}; 84 | 85 | #[cfg(all(not(feature = "ahash"), not(feature = "std")))] 86 | compile_error!("Either feature `ahash` or `std` must be enabled"); 87 | 88 | /// The default hasher used by this crate.
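An aside on the `Equivalent`/`ToOwnedEquivalent` traits above: they play the role of `Borrow`/`ToOwned` in the lookup APIs, which is what lets a map with `String` keys be queried with a plain `&str`. A minimal sketch of the cache-style usage the README mentions (the `lookup` function is illustrative, not part of the crate):

```rust
use once_map::OnceMap;

// Values are computed at most once per key and can be borrowed
// for as long as the map itself lives.
fn lookup<'a>(cache: &'a OnceMap<String, String>, word: &str) -> &'a str {
    // `get` accepts a `&str` even though the keys are `String`s,
    // thanks to the `Equivalent` bound on lookups.
    if let Some(cached) = cache.get(word) {
        return cached;
    }
    cache.insert(word.to_owned(), |w| w.len().to_string())
}
```

The `map_insert_ref` method (and `LazyMap`) can do the same without allocating an owned key when the entry already exists; the two-step version above is just the simplest form.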
89 | #[derive(Debug, Clone)] 90 | pub struct RandomState(RandomStateInner); 91 | 92 | #[derive(Debug, Clone, Default)] 93 | pub struct DefaultHasher(HasherInner); 94 | 95 | impl RandomState { 96 | #[inline] 97 | pub fn new() -> Self { 98 | Self(RandomStateInner::new()) 99 | } 100 | } 101 | 102 | impl Default for RandomState { 103 | #[inline] 104 | fn default() -> Self { 105 | Self::new() 106 | } 107 | } 108 | 109 | impl core::hash::BuildHasher for RandomState { 110 | type Hasher = DefaultHasher; 111 | 112 | #[inline] 113 | fn build_hasher(&self) -> Self::Hasher { 114 | DefaultHasher(self.0.build_hasher()) 115 | } 116 | } 117 | 118 | impl core::hash::Hasher for DefaultHasher { 119 | #[inline] 120 | fn finish(&self) -> u64 { 121 | self.0.finish() 122 | } 123 | 124 | #[inline] 125 | fn write(&mut self, bytes: &[u8]) { 126 | self.0.write(bytes) 127 | } 128 | 129 | #[inline] 130 | fn write_u8(&mut self, i: u8) { 131 | self.0.write_u8(i) 132 | } 133 | 134 | #[inline] 135 | fn write_u16(&mut self, i: u16) { 136 | self.0.write_u16(i) 137 | } 138 | 139 | #[inline] 140 | fn write_u32(&mut self, i: u32) { 141 | self.0.write_u32(i) 142 | } 143 | 144 | #[inline] 145 | fn write_u64(&mut self, i: u64) { 146 | self.0.write_u64(i) 147 | } 148 | 149 | #[inline] 150 | fn write_u128(&mut self, i: u128) { 151 | self.0.write_u128(i) 152 | } 153 | 154 | #[inline] 155 | fn write_usize(&mut self, i: usize) { 156 | self.0.write_usize(i) 157 | } 158 | } 159 | 160 | /// ```compile_fail 161 | /// fn assert_send() {} 162 | /// assert_send::>(); 163 | /// ``` 164 | struct PhantomUnsend(core::marker::PhantomData<*const ()>); 165 | unsafe impl Sync for PhantomUnsend {} 166 | -------------------------------------------------------------------------------- /src/map.rs: -------------------------------------------------------------------------------- 1 | use crate::Equivalent; 2 | use core::{ 3 | fmt, 4 | hash::{BuildHasher, Hash}, 5 | }; 6 | use hashbrown::hash_table; 7 | #[cfg(feature = "rayon")] 8 | use rayon::prelude::*; 9 | 10 | #[inline] 11 | fn equivalent(key: &Q) -> impl Fn(&(K, V)) -> bool + '_ 12 | where 13 | Q: Hash + Equivalent + ?Sized, 14 | { 15 | |(k, _)| key.equivalent(k) 16 | } 17 | 18 | /// Unfortunately `hashbrown` can drop elements if the hash function panics. 19 | /// 20 | /// We prevent this by: 21 | /// - Catching unwinds when `std` is enabled and returning a dummy hash 22 | /// - Panicking in panic when `std` is not enabled, which result in an abort 23 | /// 24 | /// See https://github.com/a1phyr/once_map/issues/3 25 | #[inline] 26 | fn hash_one(hasher: &S) -> impl Fn(&(K, V)) -> u64 + '_ { 27 | #[cfg(feature = "std")] 28 | let hash_one = |(k, _): &(K, V)| { 29 | std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| crate::hash_one(hasher, k))) 30 | .unwrap_or(0) 31 | }; 32 | 33 | #[cfg(not(feature = "std"))] 34 | let hash_one = |(k, _): &(K, V)| { 35 | struct Guard; 36 | impl Drop for Guard { 37 | #[inline] 38 | fn drop(&mut self) { 39 | panic!("Hash implementation panicked"); 40 | } 41 | } 42 | 43 | let guard = Guard; 44 | let h = crate::hash_one(hasher, k); 45 | core::mem::forget(guard); 46 | h 47 | }; 48 | 49 | hash_one 50 | } 51 | 52 | /// This is just like std's `HashMap`, but it does not store its `BuildHasher`, 53 | /// so it has to be provided (or a hash) for each operation. 
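The comment above about `hashbrown` dropping elements when the hash function panics lists two guards; the `no_std` branch relies on the rule that panicking while already unwinding aborts the process. A small self-contained sketch of that guard pattern (illustrative, separate from the crate's own code):

```rust
// Runs `f`; if `f` unwinds, `Guard` is dropped during unwinding and its own
// panic turns the unwind into an abort instead of letting it propagate.
fn abort_on_unwind<T>(f: impl FnOnce() -> T) -> T {
    struct Guard;
    impl Drop for Guard {
        fn drop(&mut self) {
            panic!("computation panicked");
        }
    }

    let guard = Guard;
    let value = f();
    core::mem::forget(guard); // success: defuse the guard
    value
}
```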
54 | pub struct HashMap(hash_table::HashTable<(K, V)>); 55 | 56 | impl HashMap { 57 | #[inline] 58 | pub const fn new() -> Self { 59 | Self(hash_table::HashTable::new()) 60 | } 61 | 62 | #[inline] 63 | pub fn len(&self) -> usize { 64 | self.0.len() 65 | } 66 | 67 | #[inline] 68 | pub fn is_empty(&self) -> bool { 69 | self.0.is_empty() 70 | } 71 | 72 | #[inline] 73 | pub fn iter(&self) -> impl Iterator { 74 | self.0.iter().map(|(k, v)| (k, v)) 75 | } 76 | 77 | #[inline] 78 | pub fn iter_mut(&mut self) -> impl Iterator { 79 | self.0.iter_mut().map(|(k, v)| (&*k, v)) 80 | } 81 | 82 | #[inline] 83 | pub fn keys(&self) -> impl Iterator { 84 | self.0.iter().map(|(k, _)| k) 85 | } 86 | 87 | #[inline] 88 | pub fn values(&self) -> impl Iterator { 89 | self.0.iter().map(|(_, v)| v) 90 | } 91 | 92 | #[inline] 93 | pub fn values_mut(&mut self) -> impl Iterator { 94 | self.0.iter_mut().map(|(_, v)| v) 95 | } 96 | 97 | #[inline] 98 | pub fn clear(&mut self) { 99 | self.0.clear(); 100 | } 101 | } 102 | 103 | #[cfg(feature = "rayon")] 104 | impl HashMap 105 | where 106 | K: Send, 107 | V: Send, 108 | { 109 | #[inline] 110 | pub fn into_par_iter(self) -> impl rayon::iter::ParallelIterator { 111 | self.0.into_par_iter() 112 | } 113 | } 114 | 115 | #[cfg(feature = "rayon")] 116 | impl HashMap 117 | where 118 | K: Sync, 119 | V: Sync, 120 | { 121 | #[inline] 122 | pub fn par_iter(&self) -> impl rayon::iter::ParallelIterator + '_ { 123 | self.0.par_iter().map(|(k, v)| (k, v)) 124 | } 125 | 126 | #[inline] 127 | pub fn par_keys(&self) -> impl rayon::iter::ParallelIterator + '_ { 128 | self.0.par_iter().map(|(k, _)| k) 129 | } 130 | 131 | #[inline] 132 | pub fn par_values(&self) -> impl rayon::iter::ParallelIterator + '_ { 133 | self.0.par_iter().map(|(_, v)| v) 134 | } 135 | } 136 | 137 | impl HashMap 138 | where 139 | K: Eq + Hash, 140 | { 141 | #[inline] 142 | #[allow(clippy::manual_map)] 143 | pub fn get(&self, hash: u64, k: &Q) -> Option<&V> 144 | where 145 | Q: Hash + Equivalent + ?Sized, 146 | { 147 | match self.0.find(hash, equivalent(k)) { 148 | Some((_, v)) => Some(v), 149 | None => None, 150 | } 151 | } 152 | 153 | #[inline] 154 | #[allow(clippy::manual_map)] 155 | pub fn get_key_value(&self, hash: u64, k: &Q) -> Option<(&K, &V)> 156 | where 157 | Q: Hash + Equivalent + ?Sized, 158 | { 159 | match self.0.find(hash, equivalent(k)) { 160 | Some((k, v)) => Some((k, v)), 161 | None => None, 162 | } 163 | } 164 | 165 | #[inline] 166 | pub fn contains_key(&self, hash: u64, k: &Q) -> bool 167 | where 168 | Q: Hash + Equivalent + ?Sized, 169 | { 170 | self.0.find(hash, equivalent(k)).is_some() 171 | } 172 | 173 | #[inline] 174 | pub fn entry(&mut self, hash: u64, k: &Q, hasher: &S) -> Entry 175 | where 176 | Q: Hash + Equivalent + ?Sized, 177 | S: core::hash::BuildHasher, 178 | { 179 | match self.0.entry(hash, equivalent(k), hash_one(hasher)) { 180 | hash_table::Entry::Occupied(e) => Entry::Occupied(OccupiedEntry(e)), 181 | hash_table::Entry::Vacant(e) => Entry::Vacant(VacantEntry(e)), 182 | } 183 | } 184 | 185 | #[inline] 186 | pub fn remove(&mut self, hash: u64, k: &Q) -> Option 187 | where 188 | Q: Hash + Equivalent + ?Sized, 189 | { 190 | match self.0.find_entry(hash, equivalent(k)) { 191 | Ok(e) => Some(e.remove().0 .1), 192 | Err(_) => None, 193 | } 194 | } 195 | 196 | #[inline] 197 | pub fn remove_entry(&mut self, hash: u64, k: &Q) -> Option<(K, V)> 198 | where 199 | Q: Hash + Equivalent + ?Sized, 200 | { 201 | match self.0.find_entry(hash, equivalent(k)) { 202 | Ok(e) => Some(e.remove().0), 203 | 
Err(_) => None, 204 | } 205 | } 206 | } 207 | 208 | impl IntoIterator for HashMap { 209 | type Item = (K, V); 210 | type IntoIter = hash_table::IntoIter<(K, V)>; 211 | 212 | #[inline] 213 | fn into_iter(self) -> Self::IntoIter { 214 | self.0.into_iter() 215 | } 216 | } 217 | 218 | impl fmt::Debug for HashMap 219 | where 220 | K: fmt::Debug, 221 | V: fmt::Debug, 222 | { 223 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 224 | f.debug_map().entries(self.iter()).finish() 225 | } 226 | } 227 | 228 | pub enum Entry<'a, K, V> { 229 | Vacant(VacantEntry<'a, K, V>), 230 | Occupied(OccupiedEntry<'a, K, V>), 231 | } 232 | 233 | pub struct OccupiedEntry<'a, K, V>(hash_table::OccupiedEntry<'a, (K, V)>); 234 | 235 | impl OccupiedEntry<'_, K, V> { 236 | #[inline] 237 | pub fn get(&self) -> &V { 238 | &self.0.get().1 239 | } 240 | } 241 | 242 | pub struct VacantEntry<'a, K, V>(hash_table::VacantEntry<'a, (K, V)>); 243 | 244 | impl<'a, K, V> VacantEntry<'a, K, V> 245 | where 246 | K: Hash, 247 | { 248 | #[inline] 249 | pub fn insert(self, key: K, value: V) -> &'a mut (K, V) { 250 | self.0.insert((key, value)).into_mut() 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /src/sync.rs: -------------------------------------------------------------------------------- 1 | use crate::{map, map::HashMap, Equivalent, InfallibleResult, ToOwnedEquivalent}; 2 | use alloc::boxed::Box; 3 | use core::{ 4 | borrow::Borrow, 5 | fmt, 6 | hash::{BuildHasher, Hash, Hasher}, 7 | ptr::NonNull, 8 | }; 9 | use parking_lot::{Condvar, Mutex, MutexGuard, RwLock}; 10 | #[cfg(feature = "rayon")] 11 | use rayon::prelude::*; 12 | use stable_deref_trait::StableDeref; 13 | 14 | #[cfg(feature = "std")] 15 | #[inline] 16 | fn default_shards_amount() -> usize { 17 | use std::{ 18 | sync::atomic::{AtomicUsize, Ordering}, 19 | thread::available_parallelism, 20 | }; 21 | 22 | // We want to cache the result to compute it only once 23 | static N_SHARDS: AtomicUsize = AtomicUsize::new(0); 24 | 25 | #[cold] 26 | fn load_slow() -> usize { 27 | // This is racy but that's fine 28 | let n = available_parallelism() 29 | .ok() 30 | .and_then(|n| n.get().checked_next_power_of_two()) 31 | .and_then(|n| n.checked_mul(4)) 32 | .unwrap_or(16); 33 | N_SHARDS.store(n, Ordering::Relaxed); 34 | n 35 | } 36 | 37 | let n = N_SHARDS.load(Ordering::Relaxed); 38 | if n != 0 { 39 | return n; 40 | } 41 | 42 | load_slow() 43 | } 44 | 45 | #[cfg(not(feature = "std"))] 46 | #[inline] 47 | fn default_shards_amount() -> usize { 48 | 32 49 | } 50 | 51 | unsafe fn extend_lifetime<'a, T: StableDeref>(ptr: &T) -> &'a T::Target { 52 | &*(&**ptr as *const T::Target) 53 | } 54 | 55 | struct ValidPtr(NonNull); 56 | 57 | unsafe impl Send for ValidPtr {} 58 | unsafe impl Sync for ValidPtr {} 59 | 60 | impl core::ops::Deref for ValidPtr { 61 | type Target = T; 62 | 63 | fn deref(&self) -> &T { 64 | unsafe { self.0.as_ref() } 65 | } 66 | } 67 | 68 | impl Clone for ValidPtr { 69 | fn clone(&self) -> Self { 70 | *self 71 | } 72 | } 73 | 74 | impl Copy for ValidPtr {} 75 | 76 | impl Borrow for ValidPtr { 77 | fn borrow(&self) -> &T { 78 | self 79 | } 80 | } 81 | 82 | impl Hash for ValidPtr { 83 | fn hash(&self, state: &mut H) { 84 | (**self).hash(state); 85 | } 86 | } 87 | 88 | impl PartialEq for ValidPtr { 89 | fn eq(&self, other: &Self) -> bool { 90 | **self == **other 91 | } 92 | } 93 | 94 | impl Eq for ValidPtr {} 95 | 96 | /// Looks like a Condvar, but wait for all notified threads to wake up when 97 | /// calling 
`notify_all`. 98 | struct WaitingBarrier { 99 | condvar: Condvar, 100 | n_waiters: Mutex, 101 | } 102 | 103 | struct Waiter<'a> { 104 | guard: MutexGuard<'a, usize>, 105 | condvar: &'a Condvar, 106 | } 107 | 108 | impl WaitingBarrier { 109 | fn new() -> Self { 110 | Self { 111 | condvar: Condvar::new(), 112 | n_waiters: Mutex::new(0), 113 | } 114 | } 115 | 116 | /// Registers ourselves as willing to wait 117 | fn prepare_waiting(&self) -> Waiter { 118 | let mut guard = self.n_waiters.lock(); 119 | *guard += 1; 120 | Waiter { 121 | guard, 122 | condvar: &self.condvar, 123 | } 124 | } 125 | 126 | /// Notifies all waiters and wait for them to wake up 127 | fn notify_all(&self) { 128 | let mut n = self.n_waiters.lock(); 129 | self.condvar.notify_all(); 130 | while *n != 0 { 131 | self.condvar.wait(&mut n); 132 | } 133 | } 134 | } 135 | 136 | impl Waiter<'_> { 137 | fn wait(mut self) { 138 | self.condvar.wait(&mut self.guard); 139 | } 140 | } 141 | 142 | impl Drop for Waiter<'_> { 143 | fn drop(&mut self) { 144 | *self.guard -= 1; 145 | if *self.guard == 0 { 146 | self.condvar.notify_one(); 147 | } 148 | } 149 | } 150 | 151 | struct BarrierGuard<'a>(&'a WaitingBarrier); 152 | 153 | impl Drop for BarrierGuard<'_> { 154 | fn drop(&mut self) { 155 | self.0.notify_all(); 156 | } 157 | } 158 | 159 | type Waiters = Mutex, ValidPtr>>; 160 | 161 | struct WaitersGuard<'a, K: Eq + Hash> { 162 | waiters: &'a Waiters, 163 | key: &'a K, 164 | hash: u64, 165 | } 166 | 167 | impl Drop for WaitersGuard<'_, K> { 168 | fn drop(&mut self) { 169 | let mut writing = self.waiters.lock(); 170 | writing.remove(self.hash, self.key); 171 | } 172 | } 173 | 174 | #[repr(align(64))] 175 | struct Shard { 176 | map: RwLock>, 177 | 178 | // This lock should always be taken after `map` 179 | waiters: Waiters, 180 | } 181 | 182 | impl Shard { 183 | fn new() -> Self { 184 | Self { 185 | map: RwLock::new(HashMap::new()), 186 | waiters: Mutex::new(HashMap::new()), 187 | } 188 | } 189 | } 190 | 191 | impl Shard 192 | where 193 | K: Hash + Eq, 194 | { 195 | fn get(&self, hash: u64, key: &Q, with_result: impl FnOnce(&K, &V) -> T) -> Option 196 | where 197 | Q: Hash + Equivalent + ?Sized, 198 | { 199 | let this = self.map.read(); 200 | let (k, v) = this.get_key_value(hash, key)?; 201 | Some(with_result(k, v)) 202 | } 203 | 204 | fn try_get(&self, hash: u64, key: &Q, data: T, with_result: G) -> Result 205 | where 206 | Q: Hash + Equivalent + ?Sized, 207 | G: FnOnce(T, &K, &V) -> U, 208 | { 209 | let this = self.map.read(); 210 | match this.get_key_value(hash, key) { 211 | Some((k, v)) => Ok(with_result(data, k, v)), 212 | None => Err((data, with_result)), 213 | } 214 | } 215 | 216 | #[cold] 217 | fn get_or_try_insert( 218 | &self, 219 | hash: u64, 220 | key: K, 221 | data: T, 222 | hasher: &impl BuildHasher, 223 | on_vacant: impl FnOnce(T, &K) -> Result<(V, U), E>, 224 | on_occupied: impl FnOnce(T, &K, &V) -> U, 225 | ) -> Result { 226 | let barrier = WaitingBarrier::new(); 227 | 228 | loop { 229 | // If a value already exists, we're done 230 | let map = self.map.read(); 231 | if let Some((key, value)) = map.get_key_value(hash, &key) { 232 | return Ok(on_occupied(data, key, value)); 233 | } 234 | 235 | // Else try to register ourselves as willing to write 236 | let mut writing = self.waiters.lock(); 237 | 238 | drop(map); 239 | 240 | match writing.entry(hash, &key, hasher) { 241 | map::Entry::Occupied(entry) => { 242 | // Somebody is already writing this value ! Wait until it 243 | // is done, then start again. 
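// Note: waking up here does not guarantee that the value was inserted.
// The writer may have returned an error or panicked, in which case the
// barrier is still notified but nothing was stored; the `continue` below
// re-checks the map, and this thread may become the writer on the next
// iteration.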
244 | 245 | // Safety: We call `prepare_wait` before dropping the mutex 246 | // guard, so the barrier is guaranteed to be valid for the 247 | // wait even if it was removed from the map. 248 | let barrier = unsafe { entry.get().0.as_ref() }; 249 | let waiter = barrier.prepare_waiting(); 250 | 251 | // Ensure that other threads will be able to use the mutex 252 | // while we wait for the value's writing to complete 253 | drop(writing); 254 | waiter.wait(); 255 | continue; 256 | } 257 | map::Entry::Vacant(entry) => { 258 | // We're the first ! Register our barrier so other can wait 259 | // on it. 260 | let key_ref = ValidPtr(NonNull::from(&key)); 261 | let barrier_ref = ValidPtr(NonNull::from(&barrier)); 262 | entry.insert(key_ref, barrier_ref); 263 | break; 264 | } 265 | } 266 | } 267 | 268 | // We know that are we know that we are the only one reaching this point 269 | // for this key 270 | 271 | // Now that our barrier is shared, some other thread might wait on it 272 | // even if it is removed from `self.waiters.tokens`, so we make sure 273 | // that we don't leave this function while someone still thinks the 274 | // barrier is alive. 275 | let _barrier_guard = BarrierGuard(&barrier); 276 | let guard = WaitersGuard { 277 | waiters: &self.waiters, 278 | key: &key, 279 | hash, 280 | }; 281 | 282 | // It is important not to hold any lock here 283 | let (value, ret) = on_vacant(data, &key)?; 284 | 285 | // Take this lock first to avoid deadlocks 286 | let mut map = self.map.write(); 287 | 288 | // We'll have to move the key to insert it in the map, which will 289 | // invalidate the pointer we put in `waiters`, so we remove it now. 290 | // 291 | // Note that the mutex guard will stay alive until the end of the 292 | // function, which is intentional. 293 | let mut writing = self.waiters.lock(); 294 | 295 | match writing.remove(hash, &key) { 296 | Some(b) => debug_assert!(core::ptr::eq(b.0.as_ptr(), &barrier)), 297 | None => debug_assert!(false), 298 | } 299 | 300 | // We have just done the cleanup manually 301 | core::mem::forget(guard); 302 | 303 | // We can finally insert the value in the map. 304 | match map.entry(hash, &key, hasher) { 305 | map::Entry::Vacant(entry) => { 306 | entry.insert(key, value); 307 | } 308 | map::Entry::Occupied(_) => panic!("re-entrant init"), 309 | } 310 | Ok(ret) 311 | 312 | // Leaving the function will wake up waiting threads. 313 | } 314 | 315 | pub fn contains_key(&self, hash: u64, key: &Q) -> bool 316 | where 317 | Q: Hash + Equivalent + ?Sized, 318 | { 319 | self.map.read().contains_key(hash, key) 320 | } 321 | 322 | pub fn remove_entry(&mut self, hash: u64, key: &Q) -> Option<(K, V)> 323 | where 324 | Q: Hash + Equivalent + ?Sized, 325 | { 326 | self.map.get_mut().remove_entry(hash, key) 327 | } 328 | } 329 | 330 | pub struct OnceMap { 331 | shards: Box<[Shard]>, 332 | hash_builder: S, 333 | } 334 | 335 | impl OnceMap { 336 | /// Creates an empty `OnceMap`. 337 | pub fn new() -> Self { 338 | Self::with_hasher(crate::RandomState::new()) 339 | } 340 | 341 | #[cfg(test)] 342 | pub(crate) fn with_single_shard() -> Self { 343 | let hash_builder = crate::RandomState::new(); 344 | let shards = Box::new([Shard::new()]); 345 | Self { 346 | shards, 347 | hash_builder, 348 | } 349 | } 350 | } 351 | 352 | impl OnceMap { 353 | /// Creates an empty `OnceMap` which will use the given hash builder to hash keys. 
354 | pub fn with_hasher(hash_builder: S) -> Self { 355 | let shards = (0..default_shards_amount()).map(|_| Shard::new()).collect(); 356 | Self { 357 | shards, 358 | hash_builder, 359 | } 360 | } 361 | } 362 | 363 | impl OnceMap { 364 | /// Removes all key-value pairs from the map, but keeps the allocated memory. 365 | pub fn clear(&mut self) { 366 | self.shards.iter_mut().for_each(|s| s.map.get_mut().clear()); 367 | } 368 | 369 | pub fn values_mut(&mut self) -> impl Iterator { 370 | self.shards 371 | .iter_mut() 372 | .flat_map(|s| s.map.get_mut().values_mut()) 373 | } 374 | 375 | pub fn iter_mut(&mut self) -> impl Iterator { 376 | self.shards 377 | .iter_mut() 378 | .flat_map(|s| s.map.get_mut().iter_mut()) 379 | } 380 | 381 | #[allow(clippy::should_implement_trait)] 382 | pub fn into_iter(self) -> impl Iterator { 383 | self.shards 384 | .into_vec() 385 | .into_iter() 386 | .flat_map(|s| s.map.into_inner().into_iter()) 387 | } 388 | 389 | /// Returns a reference to the map's [`BuildHasher`]. 390 | pub fn hasher(&self) -> &S { 391 | &self.hash_builder 392 | } 393 | 394 | /// Locks the whole map for reading. 395 | /// 396 | /// This enables more methods, such as iterating on the maps, but will cause 397 | /// a deadlock if trying to insert values in the map from the same thread. 398 | pub fn read_only_view(&self) -> ReadOnlyView { 399 | ReadOnlyView::new(self) 400 | } 401 | } 402 | 403 | #[cfg(feature = "rayon")] 404 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 405 | impl OnceMap 406 | where 407 | K: Send, 408 | V: Send, 409 | { 410 | pub fn into_par_iter(self) -> impl rayon::iter::ParallelIterator { 411 | self.shards 412 | .into_vec() 413 | .into_par_iter() 414 | .flat_map(|s| s.map.into_inner().into_par_iter()) 415 | } 416 | } 417 | 418 | impl OnceMap 419 | where 420 | K: Eq + Hash, 421 | S: BuildHasher, 422 | { 423 | fn hash_one(&self, key: &Q) -> u64 424 | where 425 | Q: Hash + Equivalent + ?Sized, 426 | { 427 | crate::hash_one(&self.hash_builder, key) 428 | } 429 | 430 | fn get_shard(&self, hash: u64) -> &Shard { 431 | let len = self.shards.len(); 432 | &self.shards[(len - 1) & (hash as usize)] 433 | } 434 | 435 | fn get_shard_mut(&mut self, hash: u64) -> &mut Shard { 436 | let len = self.shards.len(); 437 | &mut self.shards[(len - 1) & (hash as usize)] 438 | } 439 | 440 | /// Returns `true` if the map contains a value for the specified key. 441 | pub fn contains_key(&self, key: &Q) -> bool 442 | where 443 | Q: Hash + Equivalent + ?Sized, 444 | { 445 | let hash = self.hash_one(key); 446 | self.get_shard(hash).contains_key(hash, key) 447 | } 448 | 449 | pub fn remove(&mut self, key: &Q) -> Option 450 | where 451 | Q: Hash + Equivalent + ?Sized, 452 | { 453 | let hash = self.hash_one(key); 454 | let (_, v) = self.get_shard_mut(hash).remove_entry(hash, key)?; 455 | Some(v) 456 | } 457 | 458 | pub fn remove_entry(&mut self, key: &Q) -> Option<(K, V)> 459 | where 460 | Q: Hash + Equivalent + ?Sized, 461 | { 462 | let hash = self.hash_one(key); 463 | self.get_shard_mut(hash).remove_entry(hash, key) 464 | } 465 | } 466 | 467 | impl OnceMap 468 | where 469 | K: Eq + Hash, 470 | S: BuildHasher, 471 | V: StableDeref, 472 | { 473 | /// Returns a reference to the value corresponding to the key. 
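An aside on the `V: StableDeref` bound used just above: it is what allows `get` and `insert` below to hand out plain `&V::Target` references that stay valid while other threads keep inserting through a shared `&OnceMap`. A sketch with scoped threads (illustrative, not taken from the crate's documentation):

```rust
use once_map::OnceMap;

fn scoped_demo() {
    let map: OnceMap<u32, String> = OnceMap::new();

    std::thread::scope(|s| {
        // This `&str` points into the map and stays valid for the whole scope,
        // even though other threads insert concurrently through `&map`.
        let one: &str = map.insert(1, |n| n.to_string());

        for t in 2..6 {
            let map = &map;
            s.spawn(move || {
                map.insert(t, |n| n.to_string());
            });
        }

        assert_eq!(one, "1");
    });
}
```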
474 | pub fn get(&self, key: &Q) -> Option<&V::Target> 475 | where 476 | Q: Hash + Equivalent + ?Sized, 477 | { 478 | self.map_get(key, |_, v| unsafe { extend_lifetime(v) }) 479 | } 480 | 481 | /// Returns a reference to the value corresponding to the key or insert one 482 | /// with the given closure. 483 | pub fn insert(&self, key: K, make_val: impl FnOnce(&K) -> V) -> &V::Target { 484 | self.map_insert(key, make_val, |_, v| unsafe { extend_lifetime(v) }) 485 | } 486 | 487 | /// Same as `insert` but the closure is allowed to fail. 488 | /// 489 | /// If the closure is called and an error is returned, no value is stored in 490 | /// the map. 491 | pub fn try_insert( 492 | &self, 493 | key: K, 494 | make_val: impl FnOnce(&K) -> Result, 495 | ) -> Result<&V::Target, E> { 496 | self.map_try_insert(key, make_val, |_, v| unsafe { extend_lifetime(v) }) 497 | } 498 | } 499 | 500 | impl OnceMap 501 | where 502 | K: Eq + Hash, 503 | S: BuildHasher, 504 | V: Clone, 505 | { 506 | pub fn get_cloned(&self, key: &Q) -> Option 507 | where 508 | Q: Hash + Equivalent + ?Sized, 509 | { 510 | self.map_get(key, |_, v| v.clone()) 511 | } 512 | 513 | pub fn insert_cloned(&self, key: K, make_val: impl FnOnce(&K) -> V) -> V { 514 | self.map_insert(key, make_val, |_, v| v.clone()) 515 | } 516 | 517 | pub fn try_insert_cloned( 518 | &self, 519 | key: K, 520 | make_val: impl FnOnce(&K) -> Result, 521 | ) -> Result { 522 | self.map_try_insert(key, make_val, |_, v| v.clone()) 523 | } 524 | } 525 | 526 | impl OnceMap 527 | where 528 | K: Eq + Hash, 529 | S: BuildHasher, 530 | { 531 | pub fn map_get(&self, key: &Q, with_result: impl FnOnce(&K, &V) -> T) -> Option 532 | where 533 | Q: Hash + Equivalent + ?Sized, 534 | { 535 | let hash = self.hash_one(key); 536 | self.get_shard(hash).get(hash, key, with_result) 537 | } 538 | 539 | pub fn map_insert( 540 | &self, 541 | key: K, 542 | make_val: impl FnOnce(&K) -> V, 543 | with_result: impl FnOnce(&K, &V) -> T, 544 | ) -> T { 545 | self.map_try_insert(key, |k| Ok(make_val(k)), with_result) 546 | .unwrap_infallible() 547 | } 548 | 549 | pub fn map_insert_ref( 550 | &self, 551 | key: &Q, 552 | make_key: impl FnOnce(&Q) -> K, 553 | make_val: impl FnOnce(&K) -> V, 554 | with_result: impl FnOnce(&K, &V) -> T, 555 | ) -> T 556 | where 557 | Q: Hash + Equivalent + ?Sized, 558 | { 559 | self.map_try_insert_ref(key, make_key, |k| Ok(make_val(k)), with_result) 560 | .unwrap_infallible() 561 | } 562 | 563 | pub fn map_try_insert( 564 | &self, 565 | key: K, 566 | make_val: impl FnOnce(&K) -> Result, 567 | with_result: impl FnOnce(&K, &V) -> T, 568 | ) -> Result { 569 | self.get_or_try_insert( 570 | key, 571 | with_result, 572 | |with_result, k| { 573 | let v = make_val(k)?; 574 | let ret = with_result(k, &v); 575 | Ok((v, ret)) 576 | }, 577 | |with_result, k, v| with_result(k, v), 578 | ) 579 | } 580 | 581 | pub fn map_try_insert_ref( 582 | &self, 583 | key: &Q, 584 | make_key: impl FnOnce(&Q) -> K, 585 | make_val: impl FnOnce(&K) -> Result, 586 | with_result: impl FnOnce(&K, &V) -> T, 587 | ) -> Result 588 | where 589 | Q: Hash + Equivalent + ?Sized, 590 | { 591 | self.get_or_try_insert_ref( 592 | key, 593 | with_result, 594 | make_key, 595 | |with_result, k| { 596 | let v = make_val(k)?; 597 | let ret = with_result(k, &v); 598 | Ok((v, ret)) 599 | }, 600 | |with_result, k, v| with_result(k, v), 601 | ) 602 | } 603 | 604 | pub fn get_or_try_insert( 605 | &self, 606 | key: K, 607 | data: T, 608 | on_vacant: impl FnOnce(T, &K) -> Result<(V, U), E>, 609 | on_occupied: impl FnOnce(T, &K, 
&V) -> U, 610 | ) -> Result { 611 | let hash = self.hash_one(&key); 612 | let shard = self.get_shard(hash); 613 | 614 | match shard.try_get(hash, &key, data, on_occupied) { 615 | Ok(result) => Ok(result), 616 | Err((data, on_occupied)) => { 617 | shard.get_or_try_insert(hash, key, data, &self.hash_builder, on_vacant, on_occupied) 618 | } 619 | } 620 | } 621 | 622 | pub fn get_or_try_insert_ref( 623 | &self, 624 | key: &Q, 625 | data: T, 626 | make_key: impl FnOnce(&Q) -> K, 627 | on_vacant: impl FnOnce(T, &K) -> Result<(V, U), E>, 628 | on_occupied: impl FnOnce(T, &K, &V) -> U, 629 | ) -> Result 630 | where 631 | Q: Hash + Equivalent + ?Sized, 632 | { 633 | let hash = self.hash_one(key); 634 | let shard = self.get_shard(hash); 635 | 636 | // Try to get the value from the map as a fast-path, to avoid having to 637 | // compute an owned version of the key. 638 | shard.try_get(hash, key, data, on_occupied).or_else( 639 | #[cold] 640 | |(data, on_occupied)| { 641 | let owned_key = make_key(key); 642 | debug_assert_eq!(self.hash_one::(&owned_key), hash); 643 | debug_assert!(key.equivalent(&owned_key)); 644 | shard.get_or_try_insert( 645 | hash, 646 | owned_key, 647 | data, 648 | &self.hash_builder, 649 | on_vacant, 650 | on_occupied, 651 | ) 652 | }, 653 | ) 654 | } 655 | } 656 | 657 | impl Default for OnceMap { 658 | fn default() -> Self { 659 | Self::with_hasher(S::default()) 660 | } 661 | } 662 | 663 | impl Extend<(K, V)> for OnceMap 664 | where 665 | K: Eq + Hash, 666 | S: BuildHasher, 667 | { 668 | fn extend>(&mut self, iter: T) { 669 | iter.into_iter() 670 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())) 671 | } 672 | } 673 | 674 | impl Extend<(K, V)> for &'_ OnceMap 675 | where 676 | K: Eq + Hash, 677 | S: BuildHasher, 678 | { 679 | fn extend>(&mut self, iter: T) { 680 | iter.into_iter() 681 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())) 682 | } 683 | } 684 | 685 | impl FromIterator<(K, V)> for OnceMap 686 | where 687 | K: Eq + Hash, 688 | S: BuildHasher + Default, 689 | { 690 | fn from_iter>(iter: T) -> Self { 691 | let mut map = OnceMap::default(); 692 | map.extend(iter); 693 | map 694 | } 695 | } 696 | 697 | impl From<[(K, V); N]> for OnceMap 698 | where 699 | K: Eq + Hash, 700 | S: BuildHasher + Default, 701 | { 702 | fn from(array: [(K, V); N]) -> Self { 703 | Self::from_iter(array) 704 | } 705 | } 706 | 707 | #[cfg(feature = "rayon")] 708 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 709 | impl ParallelExtend<(K, V)> for OnceMap 710 | where 711 | K: Eq + Hash + Send + Sync, 712 | V: Send + Sync, 713 | S: BuildHasher + Sync, 714 | { 715 | fn par_extend(&mut self, par_iter: I) 716 | where 717 | I: IntoParallelIterator, 718 | { 719 | par_iter 720 | .into_par_iter() 721 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())); 722 | } 723 | } 724 | 725 | #[cfg(feature = "rayon")] 726 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 727 | impl ParallelExtend<(K, V)> for &'_ OnceMap 728 | where 729 | K: Eq + Hash + Send + Sync, 730 | V: Send + Sync, 731 | S: BuildHasher + Sync, 732 | { 733 | fn par_extend(&mut self, par_iter: I) 734 | where 735 | I: IntoParallelIterator, 736 | { 737 | par_iter 738 | .into_par_iter() 739 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())); 740 | } 741 | } 742 | 743 | #[cfg(feature = "rayon")] 744 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 745 | impl FromParallelIterator<(K, V)> for OnceMap 746 | where 747 | K: Eq + Hash + Send + Sync, 748 | V: Send + Sync, 749 | S: BuildHasher + Default + Sync, 750 | { 751 | 
fn from_par_iter(par_iter: I) -> Self 752 | where 753 | I: IntoParallelIterator, 754 | { 755 | let mut map = Self::default(); 756 | map.par_extend(par_iter); 757 | map 758 | } 759 | } 760 | 761 | #[cfg(feature = "serde")] 762 | #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] 763 | impl serde::Serialize for OnceMap 764 | where 765 | K: serde::Serialize, 766 | V: serde::Serialize, 767 | { 768 | fn serialize(&self, serializer: Ser) -> Result 769 | where 770 | Ser: serde::Serializer, 771 | { 772 | serializer.collect_map(self.read_only_view().iter()) 773 | } 774 | } 775 | 776 | #[cfg(feature = "serde")] 777 | #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] 778 | impl<'de, K, V, S> serde::Deserialize<'de> for OnceMap 779 | where 780 | K: Eq + Hash + serde::Deserialize<'de>, 781 | V: serde::Deserialize<'de>, 782 | S: BuildHasher + Default, 783 | { 784 | fn deserialize(deserializer: D) -> Result 785 | where 786 | D: serde::Deserializer<'de>, 787 | { 788 | struct OnceMapVisitor(OnceMap); 789 | 790 | impl<'de, K, V, S> serde::de::Visitor<'de> for OnceMapVisitor 791 | where 792 | K: Eq + Hash + serde::Deserialize<'de>, 793 | V: serde::Deserialize<'de>, 794 | S: BuildHasher, 795 | { 796 | type Value = OnceMap; 797 | 798 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 799 | formatter.write_str("a map") 800 | } 801 | 802 | fn visit_map(self, mut map: A) -> Result 803 | where 804 | A: serde::de::MapAccess<'de>, 805 | { 806 | while let Some((key, value)) = map.next_entry()? { 807 | self.0.map_insert(key, |_| value, |_, _| ()) 808 | } 809 | 810 | Ok(self.0) 811 | } 812 | } 813 | 814 | deserializer.deserialize_map(OnceMapVisitor(OnceMap::default())) 815 | } 816 | } 817 | 818 | impl fmt::Debug for OnceMap 819 | where 820 | K: fmt::Debug, 821 | V: fmt::Debug, 822 | { 823 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 824 | f.debug_map().entries(self.read_only_view().iter()).finish() 825 | } 826 | } 827 | 828 | #[repr(transparent)] 829 | struct LockedShard(Shard); 830 | 831 | unsafe impl Sync for LockedShard {} 832 | 833 | impl LockedShard { 834 | fn get(&self) -> &HashMap { 835 | unsafe { &*self.0.map.data_ptr() } 836 | } 837 | } 838 | 839 | pub struct ReadOnlyView<'a, K, V, S = crate::RandomState> { 840 | shards: &'a [LockedShard], 841 | hasher: &'a S, 842 | _marker: crate::PhantomUnsend, 843 | } 844 | 845 | impl<'a, K, V, S> ReadOnlyView<'a, K, V, S> { 846 | fn new(map: &'a OnceMap) -> Self { 847 | use parking_lot::lock_api::RawRwLockRecursive; 848 | 849 | for shard in map.shards.iter() { 850 | unsafe { 851 | shard.map.raw().lock_shared_recursive(); 852 | } 853 | } 854 | 855 | let shards = unsafe { &*(&*map.shards as *const [_] as *const [_]) }; 856 | 857 | Self { 858 | shards, 859 | hasher: &map.hash_builder, 860 | _marker: crate::PhantomUnsend(std::marker::PhantomData), 861 | } 862 | } 863 | 864 | #[inline] 865 | fn iter_shards(&self) -> impl ExactSizeIterator> { 866 | self.shards.iter().map(|shard| shard.get()) 867 | } 868 | 869 | pub fn len(&self) -> usize { 870 | self.iter_shards().map(|s| s.len()).sum() 871 | } 872 | 873 | pub fn is_empty(&self) -> bool { 874 | self.iter_shards().all(|s| s.is_empty()) 875 | } 876 | 877 | pub fn hasher(&self) -> &S { 878 | self.hasher 879 | } 880 | 881 | pub fn iter(&self) -> impl Iterator { 882 | self.iter_shards().flat_map(|shard| shard.iter()) 883 | } 884 | 885 | pub fn keys(&self) -> impl Iterator { 886 | self.iter_shards().flat_map(|shard| shard.keys()) 887 | } 888 | 889 | pub fn values(&self) -> impl Iterator { 890 | 
self.iter_shards().flat_map(|shard| shard.values()) 891 | } 892 | } 893 | 894 | impl ReadOnlyView<'_, K, V, S> 895 | where 896 | K: Eq + Hash, 897 | S: BuildHasher, 898 | { 899 | #[inline] 900 | fn hash_one(&self, key: &Q) -> u64 901 | where 902 | Q: Hash + Equivalent + ?Sized, 903 | { 904 | crate::hash_one(self.hasher, key) 905 | } 906 | 907 | fn get_shard(&self, hash: u64) -> &HashMap { 908 | let len = self.shards.len(); 909 | let shard = &self.shards[(len - 1) & (hash as usize)]; 910 | shard.get() 911 | } 912 | 913 | pub fn get(&self, key: &Q) -> Option<&V> 914 | where 915 | Q: Hash + Equivalent + ?Sized, 916 | { 917 | let hash = self.hash_one(key); 918 | self.get_shard(hash).get(hash, key) 919 | } 920 | 921 | pub fn get_key_value(&self, key: &Q) -> Option<(&K, &V)> 922 | where 923 | Q: Hash + Equivalent + ?Sized, 924 | { 925 | let hash = self.hash_one(key); 926 | self.get_shard(hash).get_key_value(hash, key) 927 | } 928 | 929 | pub fn contains_key(&self, key: &Q) -> bool 930 | where 931 | Q: Hash + Equivalent + ?Sized, 932 | { 933 | let hash = self.hash_one(key); 934 | self.get_shard(hash).contains_key(hash, key) 935 | } 936 | } 937 | 938 | impl Drop for ReadOnlyView<'_, K, V, S> { 939 | fn drop(&mut self) { 940 | for shard in self.shards.iter() { 941 | unsafe { shard.0.map.force_unlock_read() } 942 | } 943 | } 944 | } 945 | 946 | #[cfg(feature = "rayon")] 947 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 948 | impl ReadOnlyView<'_, K, V, S> 949 | where 950 | K: Sync, 951 | V: Sync, 952 | { 953 | #[inline] 954 | fn par_iter_shards(&self) -> impl rayon::iter::IndexedParallelIterator> { 955 | self.shards.par_iter().map(|shard| shard.get()) 956 | } 957 | 958 | pub fn par_iter(&self) -> impl rayon::iter::ParallelIterator { 959 | self.par_iter_shards().flat_map(|shard| shard.par_iter()) 960 | } 961 | 962 | pub fn par_keys(&self) -> impl rayon::iter::ParallelIterator { 963 | self.par_iter_shards().flat_map(|shard| shard.par_keys()) 964 | } 965 | 966 | pub fn par_values(&self) -> impl rayon::iter::ParallelIterator { 967 | self.par_iter_shards().flat_map(|shard| shard.par_values()) 968 | } 969 | } 970 | 971 | #[cfg(feature = "serde")] 972 | #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] 973 | impl serde::Serialize for ReadOnlyView<'_, K, V, S> 974 | where 975 | K: serde::Serialize, 976 | V: serde::Serialize, 977 | { 978 | fn serialize(&self, serializer: Ser) -> Result 979 | where 980 | Ser: serde::Serializer, 981 | { 982 | serializer.collect_map(self.iter()) 983 | } 984 | } 985 | 986 | impl fmt::Debug for ReadOnlyView<'_, K, V, S> 987 | where 988 | K: fmt::Debug, 989 | V: fmt::Debug, 990 | { 991 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 992 | f.debug_map().entries(self.iter()).finish() 993 | } 994 | } 995 | 996 | /// A map where values are automatically filled at access. 997 | /// 998 | /// This type can be shared across threads. 
999 | /// 1000 | /// ``` 1001 | /// let map = once_map::LazyMap::new(|x: &i32| x.to_string()); 1002 | /// 1003 | /// assert_eq!(&map[&3], "3"); 1004 | /// assert_eq!(map.get(&-67), "-67"); 1005 | /// ``` 1006 | pub struct LazyMap V> { 1007 | map: OnceMap, 1008 | init: F, 1009 | } 1010 | 1011 | impl LazyMap { 1012 | pub fn new(f: F) -> Self { 1013 | Self::with_hasher(crate::RandomState::new(), f) 1014 | } 1015 | } 1016 | 1017 | impl LazyMap { 1018 | pub fn with_hasher(hash_builder: S, f: F) -> Self { 1019 | Self { 1020 | map: OnceMap::with_hasher(hash_builder), 1021 | init: f, 1022 | } 1023 | } 1024 | 1025 | /// Removes all entries from the map. 1026 | pub fn clear(&mut self) { 1027 | self.map.clear(); 1028 | } 1029 | } 1030 | 1031 | impl LazyMap 1032 | where 1033 | K: Eq + Hash, 1034 | S: BuildHasher, 1035 | F: Fn(&K) -> V, 1036 | V: StableDeref, 1037 | { 1038 | pub fn get(&self, key: &Q) -> &V::Target 1039 | where 1040 | Q: Hash + ToOwnedEquivalent + ?Sized, 1041 | { 1042 | self.map_get(key, |_, v| unsafe { extend_lifetime(v) }) 1043 | } 1044 | } 1045 | 1046 | impl LazyMap 1047 | where 1048 | K: Eq + Hash, 1049 | S: BuildHasher, 1050 | F: Fn(&K) -> V, 1051 | V: Clone, 1052 | { 1053 | pub fn get_cloned(&self, key: &Q) -> V 1054 | where 1055 | Q: Hash + ToOwnedEquivalent + ?Sized, 1056 | { 1057 | self.map_get(key, |_, v| v.clone()) 1058 | } 1059 | } 1060 | 1061 | impl LazyMap 1062 | where 1063 | K: Eq + Hash, 1064 | S: BuildHasher, 1065 | F: Fn(&K) -> V, 1066 | { 1067 | pub fn map_get(&self, key: &Q, with_result: impl FnOnce(&K, &V) -> T) -> T 1068 | where 1069 | Q: Hash + ToOwnedEquivalent + ?Sized, 1070 | { 1071 | self.map 1072 | .map_insert_ref(key, Q::to_owned_equivalent, &self.init, with_result) 1073 | } 1074 | } 1075 | 1076 | impl LazyMap 1077 | where 1078 | K: Eq + Hash, 1079 | S: BuildHasher, 1080 | { 1081 | pub fn remove(&mut self, key: &Q) -> Option 1082 | where 1083 | Q: Hash + Equivalent + ?Sized, 1084 | { 1085 | self.map.remove(key) 1086 | } 1087 | } 1088 | 1089 | impl core::ops::Index<&Q> for LazyMap 1090 | where 1091 | K: Eq + Hash, 1092 | S: BuildHasher, 1093 | F: Fn(&K) -> V, 1094 | V: StableDeref, 1095 | Q: Hash + ToOwnedEquivalent + ?Sized, 1096 | { 1097 | type Output = V::Target; 1098 | 1099 | fn index(&self, key: &Q) -> &V::Target { 1100 | self.get(key) 1101 | } 1102 | } 1103 | 1104 | /// Creates a `LazyMap` that fills all values with `V::default()`. 
1105 | impl Default for LazyMap { 1106 | fn default() -> Self { 1107 | Self::with_hasher(S::default(), |_| V::default()) 1108 | } 1109 | } 1110 | 1111 | impl fmt::Debug for LazyMap 1112 | where 1113 | K: fmt::Debug, 1114 | V: fmt::Debug, 1115 | { 1116 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 1117 | f.debug_struct("LazyMap") 1118 | .field("values", &self.map) 1119 | .finish_non_exhaustive() 1120 | } 1121 | } 1122 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | use crate::*; 2 | use core::cell::Cell; 3 | use std::{thread, time}; 4 | 5 | #[test] 6 | fn smoke_test() { 7 | let store = OnceMap::new(); 8 | let val = store.insert(String::from("aaa"), |_| String::from("bbb")); 9 | assert_eq!(val, store.get("aaa").unwrap()); 10 | } 11 | 12 | #[test] 13 | #[cfg_attr(miri, ignore)] 14 | fn concurrent_init() { 15 | let store = OnceMap::new(); 16 | let count = parking_lot::Mutex::new(0); 17 | 18 | std::thread::scope(|s| { 19 | s.spawn(|| { 20 | thread::sleep(time::Duration::from_millis(50)); 21 | store.insert(String::from("aaa"), |_| { 22 | thread::sleep(time::Duration::from_millis(50)); 23 | *count.lock() += 1; 24 | String::from("bbb") 25 | }) 26 | }); 27 | 28 | s.spawn(|| { 29 | thread::sleep(time::Duration::from_millis(50)); 30 | store.insert(String::from("aaa"), |_| { 31 | thread::sleep(time::Duration::from_millis(50)); 32 | *count.lock() += 1; 33 | String::from("bbb") 34 | }) 35 | }); 36 | 37 | store 38 | .try_insert(String::from("aaa"), |_| { 39 | thread::sleep(time::Duration::from_millis(200)); 40 | *count.lock() += 2; 41 | Err(()) 42 | }) 43 | .unwrap_err(); 44 | }); 45 | 46 | assert_eq!(*count.lock(), 3); 47 | assert_eq!(store.get("aaa").unwrap(), "bbb"); 48 | } 49 | 50 | #[test] 51 | fn reentrant_init() { 52 | let store = OnceMap::with_single_shard(); 53 | 54 | let res = store.insert(String::from("aaa"), |_| { 55 | let x = store.insert_cloned(String::from("bbb"), |_| String::from("x")); 56 | let y = store.insert(String::from("ccc"), |_| String::from("y")); 57 | assert!(store.get("aaa").is_none()); 58 | x + y 59 | }); 60 | 61 | assert_eq!(res, "xy"); 62 | } 63 | 64 | #[test] 65 | fn panic_init() { 66 | let store = OnceMap::new(); 67 | 68 | let res = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { 69 | store.insert(0, |_| panic!()) 70 | })); 71 | assert!(res.is_err()); 72 | 73 | assert!(store.insert(0, |x| x.to_string()) == "0"); 74 | } 75 | 76 | #[test] 77 | fn lazy() { 78 | let init_count = Cell::new(0); 79 | let int_map = LazyMap::new(|n: &i32| { 80 | init_count.set(init_count.get() + 1); 81 | n.to_string() 82 | }); 83 | 84 | assert_eq!(&int_map[&3], "3"); 85 | assert_eq!(&int_map[&12], "12"); 86 | assert_eq!(&int_map[&3], "3"); 87 | assert_eq!(init_count.get(), 2) 88 | } 89 | 90 | #[cfg(feature = "rayon")] 91 | #[test] 92 | fn rayon() { 93 | use rayon::prelude::*; 94 | 95 | let map: OnceMap<_, _> = (0..1000) 96 | .into_par_iter() 97 | .map(|n| (n, n.to_string())) 98 | .collect(); 99 | 100 | let view = map.read_only_view(); 101 | 102 | assert_eq!( 103 | view.par_values().map(|s| s.len()).sum::(), 104 | view.values().map(|s| s.len()).sum() 105 | ); 106 | } 107 | 108 | /// https://github.com/a1phyr/once_map/issues/3 109 | #[test] 110 | fn issue_3() { 111 | use std::{ 112 | panic::{catch_unwind, AssertUnwindSafe}, 113 | sync::Mutex, 114 | }; 115 | 116 | #[derive(PartialEq, Eq, Debug)] 117 | struct H(u32); 118 | 119 | impl 
std::hash::Hash for H { 120 | fn hash(&self, state: &mut H) { 121 | if PANIC_ON.lock().unwrap().as_ref() == Some(&self.0) { 122 | panic!(); 123 | } 124 | 0_u32.hash(state); 125 | } 126 | } 127 | 128 | static PANIC_ON: Mutex> = Mutex::new(None); 129 | 130 | let mut map = crate::unsync::OnceMap::new(); 131 | for i in 1..=28 { 132 | map.insert(H(i), |k| { 133 | if *k == H(28) { 134 | String::from("Hello World!") 135 | } else { 136 | String::new() 137 | } 138 | }); 139 | } 140 | for i in 1..=27 { 141 | map.remove(&H(i)); 142 | } 143 | 144 | let hello_world = map.get(&H(28)).unwrap(); 145 | 146 | assert!(hello_world == "Hello World!"); 147 | 148 | let _ = catch_unwind(AssertUnwindSafe(|| { 149 | *PANIC_ON.lock().unwrap() = Some(28); 150 | map.insert(H(1), |_| String::new()); 151 | })); 152 | 153 | assert!(hello_world == "Hello World!"); 154 | } 155 | -------------------------------------------------------------------------------- /src/unsync.rs: -------------------------------------------------------------------------------- 1 | use crate::{map, map::HashMap, Equivalent, InfallibleResult, ToOwnedEquivalent}; 2 | use core::{ 3 | cell::RefCell, 4 | fmt, 5 | hash::{BuildHasher, Hash}, 6 | }; 7 | use stable_deref_trait::StableDeref; 8 | 9 | unsafe fn extend_lifetime<'a, T: StableDeref>(ptr: &T) -> &'a T::Target { 10 | &*(&**ptr as *const T::Target) 11 | } 12 | 13 | pub struct OnceMap { 14 | map: RefCell>, 15 | hash_builder: S, 16 | } 17 | 18 | impl OnceMap { 19 | /// Creates an empty `OnceMap`. 20 | pub fn new() -> Self { 21 | Self::with_hasher(crate::RandomState::new()) 22 | } 23 | } 24 | 25 | impl OnceMap { 26 | /// Creates an empty `OnceMap` which will use the given hash builder to hash keys. 27 | pub const fn with_hasher(hash_builder: S) -> Self { 28 | let map = RefCell::new(HashMap::new()); 29 | Self { map, hash_builder } 30 | } 31 | 32 | pub fn len(&self) -> usize { 33 | self.map.borrow().len() 34 | } 35 | 36 | pub fn is_empty(&self) -> bool { 37 | self.map.borrow().is_empty() 38 | } 39 | 40 | /// Returns a reference to the map's [`BuildHasher`]. 41 | pub fn hasher(&self) -> &S { 42 | &self.hash_builder 43 | } 44 | 45 | /// Locks the whole map for reading. 46 | /// 47 | /// This enables more methods, such as iterating on the maps, but will cause 48 | /// a panic if trying to insert values in the map while the view is live. 49 | pub fn read_only_view(&self) -> ReadOnlyView { 50 | ReadOnlyView::new(self) 51 | } 52 | 53 | /// Removes all key-value pairs from the map, but keeps the allocated memory. 54 | pub fn clear(&mut self) { 55 | self.map.get_mut().clear(); 56 | } 57 | 58 | pub fn values_mut(&mut self) -> impl Iterator { 59 | self.map.get_mut().values_mut() 60 | } 61 | 62 | pub fn iter_mut(&mut self) -> impl Iterator { 63 | self.map.get_mut().iter_mut() 64 | } 65 | 66 | #[allow(clippy::should_implement_trait)] 67 | pub fn into_iter(self) -> impl Iterator { 68 | self.map.into_inner().into_iter() 69 | } 70 | } 71 | 72 | #[cfg(feature = "rayon")] 73 | #[cfg_attr(docsrs, doc(cfg(feature = "rayon")))] 74 | impl OnceMap 75 | where 76 | K: Send, 77 | V: Send, 78 | { 79 | pub fn into_par_iter(self) -> impl rayon::iter::ParallelIterator { 80 | self.map.into_inner().into_par_iter() 81 | } 82 | } 83 | 84 | impl OnceMap 85 | where 86 | K: Eq + Hash, 87 | S: BuildHasher, 88 | { 89 | fn hash_one(&self, key: &Q) -> u64 90 | where 91 | Q: Hash + Equivalent + ?Sized, 92 | { 93 | crate::hash_one(&self.hash_builder, key) 94 | } 95 | 96 | /// Returns `true` if the map contains a value for the specified key. 
97 | pub fn contains_key(&self, key: &Q) -> bool 98 | where 99 | Q: Hash + Equivalent + ?Sized, 100 | { 101 | let hash = self.hash_one(key); 102 | self.map.borrow().contains_key(hash, key) 103 | } 104 | 105 | pub fn remove(&mut self, key: &Q) -> Option 106 | where 107 | Q: Hash + Equivalent + ?Sized, 108 | { 109 | let hash = self.hash_one(key); 110 | self.map.get_mut().remove(hash, key) 111 | } 112 | 113 | pub fn remove_entry(&mut self, key: &Q) -> Option<(K, V)> 114 | where 115 | Q: Hash + Equivalent + ?Sized, 116 | { 117 | let hash = self.hash_one(key); 118 | self.map.get_mut().remove_entry(hash, key) 119 | } 120 | } 121 | 122 | impl OnceMap 123 | where 124 | K: Eq + Hash, 125 | S: BuildHasher, 126 | V: StableDeref, 127 | { 128 | /// Returns a reference to the value corresponding to the key. 129 | pub fn get(&self, key: &Q) -> Option<&V::Target> 130 | where 131 | Q: Hash + Equivalent + ?Sized, 132 | { 133 | self.map_get(key, |_, v| unsafe { extend_lifetime(v) }) 134 | } 135 | 136 | /// Returns a reference to the value corresponding to the key or insert one 137 | /// with the given closure. 138 | pub fn insert(&self, key: K, make_val: impl FnOnce(&K) -> V) -> &V::Target { 139 | self.map_insert(key, make_val, |_, v| unsafe { extend_lifetime(v) }) 140 | } 141 | 142 | /// Same as `insert` but the closure is allowed to fail. 143 | /// 144 | /// If the closure is called and an error is returned, no value is stored in 145 | /// the map. 146 | pub fn try_insert( 147 | &self, 148 | key: K, 149 | make_val: impl FnOnce(&K) -> Result, 150 | ) -> Result<&V::Target, E> { 151 | self.map_try_insert(key, make_val, |_, v| unsafe { extend_lifetime(v) }) 152 | } 153 | } 154 | 155 | impl OnceMap 156 | where 157 | K: Eq + Hash, 158 | S: BuildHasher, 159 | V: Clone, 160 | { 161 | pub fn get_cloned(&self, key: &Q) -> Option 162 | where 163 | Q: Hash + Equivalent + ?Sized, 164 | { 165 | self.map_get(key, |_, v| v.clone()) 166 | } 167 | 168 | pub fn insert_cloned(&self, key: K, make_val: impl FnOnce(&K) -> V) -> V { 169 | self.map_insert(key, make_val, |_, v| v.clone()) 170 | } 171 | 172 | pub fn try_insert_cloned( 173 | &self, 174 | key: K, 175 | make_val: impl FnOnce(&K) -> Result, 176 | ) -> Result { 177 | self.map_try_insert(key, make_val, |_, v| v.clone()) 178 | } 179 | } 180 | 181 | impl OnceMap 182 | where 183 | K: Eq + Hash, 184 | S: BuildHasher, 185 | { 186 | pub fn map_get(&self, key: &Q, with_result: impl FnOnce(&K, &V) -> T) -> Option 187 | where 188 | Q: Hash + Equivalent + ?Sized, 189 | { 190 | let map = self.map.borrow(); 191 | let hash = self.hash_one(key); 192 | let (key, value) = map.get_key_value(hash, key)?; 193 | Some(with_result(key, value)) 194 | } 195 | 196 | pub fn map_insert( 197 | &self, 198 | key: K, 199 | make_val: impl FnOnce(&K) -> V, 200 | with_result: impl FnOnce(&K, &V) -> T, 201 | ) -> T { 202 | self.map_try_insert(key, |k| Ok(make_val(k)), with_result) 203 | .unwrap_infallible() 204 | } 205 | 206 | pub fn map_insert_ref( 207 | &self, 208 | key: &Q, 209 | make_key: impl FnOnce(&Q) -> K, 210 | make_val: impl FnOnce(&K) -> V, 211 | with_result: impl FnOnce(&K, &V) -> T, 212 | ) -> T 213 | where 214 | Q: Hash + Equivalent + ?Sized, 215 | { 216 | self.map_try_insert_ref(key, make_key, |k| Ok(make_val(k)), with_result) 217 | .unwrap_infallible() 218 | } 219 | 220 | pub fn map_try_insert( 221 | &self, 222 | key: K, 223 | make_val: impl FnOnce(&K) -> Result, 224 | with_result: impl FnOnce(&K, &V) -> T, 225 | ) -> Result { 226 | self.get_or_try_insert( 227 | key, 228 | with_result, 229 
| |with_result, k| { 230 | let v = make_val(k)?; 231 | let ret = with_result(k, &v); 232 | Ok((v, ret)) 233 | }, 234 | |with_result, k, v| with_result(k, v), 235 | ) 236 | } 237 | 238 | pub fn map_try_insert_ref( 239 | &self, 240 | key: &Q, 241 | make_key: impl FnOnce(&Q) -> K, 242 | make_val: impl FnOnce(&K) -> Result, 243 | with_result: impl FnOnce(&K, &V) -> T, 244 | ) -> Result 245 | where 246 | Q: Hash + Equivalent + ?Sized, 247 | { 248 | self.get_or_try_insert_ref( 249 | key, 250 | with_result, 251 | make_key, 252 | |with_result, k| { 253 | let v = make_val(k)?; 254 | let ret = with_result(k, &v); 255 | Ok((v, ret)) 256 | }, 257 | |with_result, k, v| with_result(k, v), 258 | ) 259 | } 260 | 261 | pub fn get_or_try_insert( 262 | &self, 263 | key: K, 264 | data: T, 265 | on_vacant: impl FnOnce(T, &K) -> Result<(V, U), E>, 266 | on_occupied: impl FnOnce(T, &K, &V) -> U, 267 | ) -> Result { 268 | let map = self.map.borrow(); 269 | let hash = self.hash_one(&key); 270 | 271 | if let Some((key, value)) = map.get_key_value(hash, &key) { 272 | return Ok(on_occupied(data, key, value)); 273 | } 274 | drop(map); 275 | 276 | // We must not borrow `self.map` here 277 | let (value, ret) = on_vacant(data, &key)?; 278 | 279 | self.raw_insert(hash, key, value); 280 | Ok(ret) 281 | } 282 | 283 | pub fn get_or_try_insert_ref( 284 | &self, 285 | key: &Q, 286 | data: T, 287 | make_key: impl FnOnce(&Q) -> K, 288 | on_vacant: impl FnOnce(T, &K) -> Result<(V, U), E>, 289 | on_occupied: impl FnOnce(T, &K, &V) -> U, 290 | ) -> Result 291 | where 292 | Q: Hash + Equivalent + ?Sized, 293 | { 294 | let map = self.map.borrow(); 295 | let hash = self.hash_one(key); 296 | 297 | if let Some((key, value)) = map.get_key_value(hash, key) { 298 | return Ok(on_occupied(data, key, value)); 299 | } 300 | drop(map); 301 | 302 | // We must not borrow `self.map` here 303 | let owned_key = make_key(key); 304 | debug_assert!(key.equivalent(&owned_key)); 305 | let (value, ret) = on_vacant(data, &owned_key)?; 306 | 307 | self.raw_insert(hash, owned_key, value); 308 | Ok(ret) 309 | } 310 | 311 | fn raw_insert(&self, hash: u64, key: K, value: V) { 312 | let mut map = self.map.borrow_mut(); 313 | match map.entry(hash, &key, &self.hash_builder) { 314 | map::Entry::Vacant(entry) => { 315 | entry.insert(key, value); 316 | } 317 | map::Entry::Occupied(_) => panic!("re-entrant init"), 318 | } 319 | } 320 | } 321 | 322 | impl Default for OnceMap { 323 | fn default() -> Self { 324 | Self::with_hasher(S::default()) 325 | } 326 | } 327 | 328 | impl Extend<(K, V)> for OnceMap 329 | where 330 | K: Eq + Hash, 331 | S: BuildHasher, 332 | { 333 | fn extend>(&mut self, iter: T) { 334 | iter.into_iter() 335 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())) 336 | } 337 | } 338 | 339 | impl Extend<(K, V)> for &'_ OnceMap 340 | where 341 | K: Eq + Hash, 342 | S: BuildHasher, 343 | { 344 | fn extend>(&mut self, iter: T) { 345 | iter.into_iter() 346 | .for_each(|(k, v)| self.map_insert(k, |_| v, |_, _| ())) 347 | } 348 | } 349 | 350 | impl FromIterator<(K, V)> for OnceMap 351 | where 352 | K: Eq + Hash, 353 | S: BuildHasher + Default, 354 | { 355 | fn from_iter>(iter: T) -> Self { 356 | let mut map = OnceMap::default(); 357 | map.extend(iter); 358 | map 359 | } 360 | } 361 | 362 | impl From<[(K, V); N]> for OnceMap 363 | where 364 | K: Eq + Hash, 365 | S: BuildHasher + Default, 366 | { 367 | fn from(array: [(K, V); N]) -> Self { 368 | Self::from_iter(array) 369 | } 370 | } 371 | 372 | #[cfg(feature = "serde")] 373 | #[cfg_attr(docsrs, 
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<K, V, S> serde::Serialize for OnceMap<K, V, S>
where
    K: serde::Serialize,
    V: serde::Serialize,
{
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: serde::Serializer,
    {
        serializer.collect_map(self.read_only_view().iter())
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, K, V, S> serde::Deserialize<'de> for OnceMap<K, V, S>
where
    K: Eq + Hash + serde::Deserialize<'de>,
    V: serde::Deserialize<'de>,
    S: BuildHasher + Default,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct OnceMapVisitor<K, V, S>(OnceMap<K, V, S>);

        impl<'de, K, V, S> serde::de::Visitor<'de> for OnceMapVisitor<K, V, S>
        where
            K: Eq + Hash + serde::Deserialize<'de>,
            V: serde::Deserialize<'de>,
            S: BuildHasher,
        {
            type Value = OnceMap<K, V, S>;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a map")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::MapAccess<'de>,
            {
                while let Some((key, value)) = map.next_entry()? {
                    self.0.map_insert(key, |_| value, |_, _| ())
                }

                Ok(self.0)
            }
        }

        deserializer.deserialize_map(OnceMapVisitor(OnceMap::default()))
    }
}

impl<K, V, S> fmt::Debug for OnceMap<K, V, S>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.map.borrow().fmt(f)
    }
}

pub struct ReadOnlyView<'a, K, V, S = crate::RandomState> {
    map: core::cell::Ref<'a, HashMap<K, V>>,
    hash_builder: &'a S,
}

impl<'a, K, V, S> ReadOnlyView<'a, K, V, S> {
    fn new(map: &'a OnceMap<K, V, S>) -> Self {
        Self {
            map: map.map.borrow(),
            hash_builder: &map.hash_builder,
        }
    }

    pub fn len(&self) -> usize {
        self.map.len()
    }

    pub fn is_empty(&self) -> bool {
        self.map.is_empty()
    }

    pub fn hasher(&self) -> &S {
        self.hash_builder
    }

    pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
        self.map.iter()
    }

    pub fn keys(&self) -> impl Iterator<Item = &K> + '_ {
        self.map.keys()
    }

    pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
        self.map.values()
    }
}

impl<K, V, S> ReadOnlyView<'_, K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    fn hash_one<Q>(&self, key: &Q) -> u64
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        crate::hash_one(self.hash_builder, key)
    }

    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        let hash = self.hash_one(key);
        self.map.get(hash, key)
    }

    pub fn get_key_value<Q>(&self, key: &Q) -> Option<(&K, &V)>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        let hash = self.hash_one(key);
        self.map.get_key_value(hash, key)
    }

    pub fn contains_key<Q>(&self, key: &Q) -> bool
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        let hash = self.hash_one(key);
        self.map.contains_key(hash, key)
    }
}
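// Illustrative sketch, not part of the upstream file: `ReadOnlyView` holds the
// inner `RefCell` borrow for its whole lifetime, so it can hand out plain
// iterators and lookups. Assumes the default features and the
// `read_only_view` constructor used by the `Serialize` impl above.
#[cfg(test)]
mod read_only_view_sketch {
    use super::OnceMap;

    #[test]
    fn iterate_snapshot() {
        let map = OnceMap::new();
        map.insert(1, |_| "one".to_string());
        map.insert(2, |_| "two".to_string());

        let view = map.read_only_view();
        assert_eq!(view.len(), 2);
        assert!(view.contains_key(&1));

        let mut keys: Vec<i32> = view.keys().copied().collect();
        keys.sort_unstable();
        assert_eq!(keys, [1, 2]);

        // The borrow held by the view must be released before inserting again,
        // otherwise the internal `borrow_mut` would panic.
        drop(view);
        map.insert(3, |_| "three".to_string());
    }
}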
#[cfg(feature = "rayon")]
#[cfg_attr(docsrs, doc(cfg(feature = "rayon")))]
impl<K, V, S> ReadOnlyView<'_, K, V, S>
where
    K: Sync,
    V: Sync,
{
    pub fn par_iter(&self) -> impl rayon::iter::ParallelIterator<Item = (&K, &V)> + '_ {
        self.map.par_iter()
    }

    pub fn par_keys(&self) -> impl rayon::iter::ParallelIterator<Item = &K> + '_ {
        self.map.par_keys()
    }

    pub fn par_values(&self) -> impl rayon::iter::ParallelIterator<Item = &V> + '_ {
        self.map.par_values()
    }
}

#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<K, V, S> serde::Serialize for ReadOnlyView<'_, K, V, S>
where
    K: serde::Serialize,
    V: serde::Serialize,
{
    fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
    where
        Ser: serde::Serializer,
    {
        serializer.collect_map(self.iter())
    }
}

impl<K, V, S> fmt::Debug for ReadOnlyView<'_, K, V, S>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ReadOnlyView")
            .field("map", &self.map)
            .finish()
    }
}

/// A map where values are automatically filled at access.
///
/// This type has less overhead than [`crate::sync::LazyMap`] but it cannot be
/// shared across threads.
///
/// ```
/// let map = once_map::unsync::LazyMap::new(|x: &i32| x.to_string());
///
/// assert_eq!(&map[&3], "3");
/// assert_eq!(map.get(&-67), "-67");
/// ```
pub struct LazyMap<K, V, S = crate::RandomState, F = fn(&K) -> V> {
    map: OnceMap<K, V, S>,
    init: F,
}

impl<K, V, F> LazyMap<K, V, crate::RandomState, F> {
    pub fn new(f: F) -> Self {
        Self::with_hasher(crate::RandomState::new(), f)
    }
}

impl<K, V, S, F> LazyMap<K, V, S, F> {
    pub const fn with_hasher(hash_builder: S, f: F) -> Self {
        Self {
            map: OnceMap::with_hasher(hash_builder),
            init: f,
        }
    }

    /// Removes all entries from the map.
    pub fn clear(&mut self) {
        self.map.clear();
    }
}

impl<K, V, S, F> LazyMap<K, V, S, F>
where
    K: Eq + Hash,
    S: BuildHasher,
    F: Fn(&K) -> V,
    V: StableDeref,
{
    pub fn get<Q>(&self, key: &Q) -> &V::Target
    where
        Q: Hash + ToOwnedEquivalent<K> + ?Sized,
    {
        self.map_get(key, |_, v| unsafe { extend_lifetime(v) })
    }
}

impl<K, V, S, F> LazyMap<K, V, S, F>
where
    K: Eq + Hash,
    S: BuildHasher,
    F: Fn(&K) -> V,
    V: Clone,
{
    pub fn get_cloned<Q>(&self, key: &Q) -> V
    where
        Q: Hash + ToOwnedEquivalent<K> + ?Sized,
    {
        self.map_get(key, |_, v| v.clone())
    }
}

impl<K, V, S, F> LazyMap<K, V, S, F>
where
    K: Eq + Hash,
    S: BuildHasher,
    F: Fn(&K) -> V,
{
    pub fn map_get<Q, T>(&self, key: &Q, with_result: impl FnOnce(&K, &V) -> T) -> T
    where
        Q: Hash + ToOwnedEquivalent<K> + ?Sized,
    {
        self.map
            .map_insert_ref(key, Q::to_owned_equivalent, &self.init, with_result)
    }
}

impl<K, V, S, F> LazyMap<K, V, S, F>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        Q: Hash + Equivalent<K> + ?Sized,
    {
        self.map.remove(key)
    }
}
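// Illustrative sketch, not part of the upstream file: `LazyMap::get` goes
// through `map_insert_ref`, so a `LazyMap<String, _>` can be queried with a
// plain `&str` and the owned key is only built when the entry is missing.
// Assumes the default `std` + `ahash` features so `LazyMap::new` is available.
#[cfg(test)]
mod lazy_map_sketch {
    use super::LazyMap;

    #[test]
    fn lookup_with_borrowed_keys() {
        // The init closure runs at most once per key.
        let cache = LazyMap::new(|key: &String| key.to_uppercase());

        // Lookups take `&str`; the returned reference borrows the map itself.
        assert_eq!(cache.get("hello"), "HELLO");

        // `get_cloned` only needs `V: Clone`.
        assert_eq!(cache.get_cloned("abc"), "ABC".to_string());
    }
}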
/// Creates a `LazyMap` that fills all values with `V::default()`.
impl<K, V: Default, S: Default> Default for LazyMap<K, V, S> {
    fn default() -> Self {
        Self::with_hasher(S::default(), |_| V::default())
    }
}

impl<K, V, S, F, Q> core::ops::Index<&Q> for LazyMap<K, V, S, F>
where
    K: Eq + Hash,
    S: BuildHasher,
    F: Fn(&K) -> V,
    V: StableDeref,
    Q: Hash + ToOwnedEquivalent<K> + ?Sized,
{
    type Output = V::Target;

    fn index(&self, key: &Q) -> &V::Target {
        self.get(key)
    }
}

impl<K, V, S, F> fmt::Debug for LazyMap<K, V, S, F>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("LazyMap")
            .field("values", &self.map)
            .finish_non_exhaustive()
    }
}
--------------------------------------------------------------------------------
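A minimal usage sketch (not taken from the repository) of the `Default` and `Index` impls above: a defaulted `LazyMap` fills every missing entry with `V::default()`, and indexing goes through `get`. This assumes the default `std`/`ahash` features.

```rust
let map = once_map::unsync::LazyMap::<u32, String>::default();

// Missing entries are filled with `String::default()`, i.e. "".
assert_eq!(&map[&7], "");
assert_eq!(map.get(&7), "");
```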