├── .gitignore
├── .rustfmt.toml
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── etc
│   └── rfc.md
└── src
    ├── alloc.rs
    ├── collection.rs
    ├── collection
    │   ├── concurrent_vec.rs
    │   ├── linked_list.rs
    │   ├── skip_list.rs
    │   ├── store_box.rs
    │   ├── store_vec.rs
    │   └── utils.rs
    ├── extension.rs
    ├── extension
    │   ├── typed.rs
    │   ├── typed_metadata.rs
    │   ├── typed_single.rs
    │   ├── unique.rs
    │   └── unique_single.rs
    ├── interface.rs
    ├── lib.rs
    ├── store.rs
    └── store
        ├── allocator_store.rs
        ├── inline_bump_store.rs
        ├── inline_single_store.rs
        └── stack_bump_store.rs

/.gitignore:
--------------------------------------------------------------------------------
1 | .cargo
2 | target
3 | Cargo.lock
--------------------------------------------------------------------------------
/.rustfmt.toml:
--------------------------------------------------------------------------------
1 | max_width = 120
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
 1 | [package]
 2 | name = "storage"
 3 | version = "0.1.0"
 4 | authors = ["Matthieu M. "]
 5 | edition = "2021"
 6 | description = "An exploration of Storage, instead of Allocators, for collections"
 7 | repository = "https://github.com/matthieu-m/storage"
 8 | license = "MIT OR Apache-2.0"
 9 | keywords = ["allocator", "collection", "experimental", "nightly", "storage"]
10 | categories = ["no-std"]
11 | 
12 | [features]
13 | default = []
14 | # Enables integration with the alloc crate.
15 | alloc = []
16 | # Enables CoerceUnsized for Box, by using a placeholder implementation.
17 | coercible-metadata = []
18 | 
19 | [dependencies]
20 | 
21 | # For Skip List.
22 | oorandom = "11.1.3"
--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
 1 |                               Apache License
 2 |                         Version 2.0, January 2004
 3 |                      http://www.apache.org/licenses/
 4 | 
 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 6 | 
 7 | 1. Definitions.
 8 | 
 9 |    "License" shall mean the terms and conditions for use, reproduction,
10 |    and distribution as defined by Sections 1 through 9 of this document.
11 | 
12 |    "Licensor" shall mean the copyright owner or entity authorized by
13 |    the copyright owner that is granting the License.
14 | 
15 |    "Legal Entity" shall mean the union of the acting entity and all
16 |    other entities that control, are controlled by, or are under common
17 |    control with that entity. For the purposes of this definition,
18 |    "control" means (i) the power, direct or indirect, to cause the
19 |    direction or management of such entity, whether by contract or
20 |    otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 |    outstanding shares, or (iii) beneficial ownership of such entity.
22 | 
23 |    "You" (or "Your") shall mean an individual or Legal Entity
24 |    exercising permissions granted by this License.
25 | 
26 |    "Source" form shall mean the preferred form for making modifications,
27 |    including but not limited to software source code, documentation
28 |    source, and configuration files.
29 | 
30 |    "Object" form shall mean any form resulting from mechanical
31 |    transformation or translation of a Source form, including but
32 |    not limited to compiled object code, generated documentation,
33 |    and conversions to other media types.
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2020 matthieu-m 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
 1 | Copyright 2023 matthieu-m
 2 | 
 3 | Permission is hereby granted, free of charge, to any
 4 | person obtaining a copy of this software and associated
 5 | documentation files (the "Software"), to deal in the
 6 | Software without restriction, including without
 7 | limitation the rights to use, copy, modify, merge,
 8 | publish, distribute, sublicense, and/or sell copies of
 9 | the Software, and to permit persons to whom the Software
10 | is furnished to do so, subject to the following
11 | conditions:
12 | 
13 | The above copyright notice and this permission notice
14 | shall be included in all copies or substantial portions
15 | of the Software.
16 | 
17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25 | DEALINGS IN THE SOFTWARE.
26 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | Yet another iteration on the Storage API.
 2 | 
 3 | # Goals
 4 | 
 5 | This is an advanced Proof-of-Concept aiming at:
 6 | 
 7 | - Demonstrating the technical feasibility.
 8 | - Showcasing the flexibility of storages.
 9 | - Streamlining the API of previous PoCs.
10 | 
11 | This experiment does not (yet?) intend to provide production-ready collections.
12 | 
13 | 
14 | # Why Storage over Allocator?
15 | 
16 | `Storage` does NOT intend to displace `Allocator`: `Allocator` is the right level of abstraction for a wide variety of
17 | situations, and in those situations it should be used.
18 | 
19 | `Storage`, instead, aims at offering a more flexible, lower-level API than `Allocator` for those cases where
20 | `Allocator` does not cut it.
21 | 
22 | When should you favor `Storage` over `Allocator`?
23 | 
24 | - To avoid a pointer indirection: an inline storage enables storing the "allocated" item in the same cache line as
25 |   the storage object, hence one less level of indirection.
26 | - To avoid memory allocation while retaining a `'static` item: an inline storage enables in-situ allocation without
27 |   imposing any restriction on the duration of the storage; the resulting containers can be stored in long-lived
28 |   collections, sent across threads, etc...
29 | - To allow complex values to be stored in ROM: rustc is currently unable to place items with allocated memory in ROM
30 |   -- although it is theoretically possible -- in which case an inline storage nicely works around this limitation,
31 |   allowing `static` variables of type `Vec`, `BTreeMap`, etc... as long as `const fn` support is good enough to
32 |   calculate them.
33 | - To allow complex values to be stored in shared memory, although restrictions will remain -- traits are out, for
34 |   example.
35 | 
36 | The `Storage` API achieves this by returning an abstract `Handle`, rather than a pointer, and offering a way to
37 | _temporarily_ derive a pointer from this `Handle`. Since the `Handle` is what is stored, it can be a ZST, it can be an
38 | offset, etc..., allowing it to fit where a pointer doesn't always.
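To make the handle idea concrete, here is a deliberately tiny sketch -- illustrative only: the names (`ToyInlineStorage`, `ToyHandle`) are invented for this note, and this is *not* the crate's actual interface, which lives in `src/interface.rs`:

```rust
use std::cell::UnsafeCell;
use std::mem::MaybeUninit;
use std::ptr::NonNull;

/// A storage owning a single inline block, sized and aligned for one `u32`.
struct ToyInlineStorage {
    memory: UnsafeCell<MaybeUninit<u32>>,
}

/// The handle is a ZST: with a single block, there is nothing to remember.
/// A multi-block inline storage could use a small integer offset instead.
#[derive(Clone, Copy)]
struct ToyHandle;

impl ToyInlineStorage {
    fn allocate(&self) -> ToyHandle {
        ToyHandle
    }

    /// Derives a pointer _temporarily_: moving the storage moves the block,
    /// which invalidates previously derived pointers, but not the handle.
    fn resolve(&self, _handle: ToyHandle) -> NonNull<u8> {
        NonNull::new(self.memory.get().cast::<u8>()).unwrap()
    }
}

fn main() {
    let storage = ToyInlineStorage { memory: UnsafeCell::new(MaybeUninit::uninit()) };

    let handle = storage.allocate();

    // Safety: the block is suitably sized and aligned for a `u32`.
    unsafe { storage.resolve(handle).cast::<u32>().as_ptr().write(42) };

    // The pointer is re-derived from the handle on every access.
    assert_eq!(42, unsafe { storage.resolve(handle).cast::<u32>().as_ptr().read() });
}
```

Because the handle, rather than a pointer, is what gets stored, a container built atop such a storage can be moved freely or placed in a `static`; the pointer is freshly derived from `&self` on each access.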
39 | 
40 | 
41 | # How to navigate this repository?
42 | 
43 | The repository contains 3 parts:
44 | 
45 | - `interface` sketches out the `Storage` trait, and its two companion traits.
46 | - `storage` contains a number of storages, including an adapter to turn any `Allocator` into a `Storage`.
47 | - `collection` contains a variety of collections, demonstrating the viability of `Storage` for those use cases.
48 | 
49 | 
50 | # Can we replace the `std` collections tomorrow?
51 | 
52 | Most collections are replaceable; `Box`... is a tad more complicated.
53 | 
54 | The main issue for `Box` is that `CoerceUnsized` and `Unsize` are pretty locked down. Even though it is possible
55 | to implement a `coerce` method, it is not possible to implement `CoerceUnsized` because `T::Metadata` is not coercible
56 | to `U::Metadata`.
57 | 
58 | There are various solutions, of course, including a special compiler-blessed solution, etc...; which to pick is still
59 | up in the air.
60 | 
61 | 
62 | # History
63 | 
64 | The idea of a Storage API dates back a wee bit -- to 2021 -- and this is yet another iteration:
65 | 
66 | - First iteration: https://github.com/matthieu-m/storage-poc
67 | - Second iteration: https://github.com/CAD97/storages-api
68 | 
69 | The first iteration started with the concept of a very strongly typed API. Since a Storage could require allocating
70 | different types, this led to requiring GATs for handles, and a proliferation of traits.
71 | 
72 | @CAD97 had the insight that eliminating typed handles would allow streamlining the API, and thus the second iteration
73 | was born. It was _much_ simpler: no GATs, fewer traits and methods, ... all around better!
74 | 
75 | Coming back to the second iteration after a few months, though, I felt that it was not as simple as it could be, and
76 | that a number of decisions -- requiring `Layout` in `resolve`, and taking `&mut` -- were unfortunate, as they reduced
77 | flexibility. See https://github.com/CAD97/storages-api/issues/6 for my remarks.
78 | 
79 | Thus the idea for a third iteration was born:
80 | 
81 | - Eliminating `Layout` as an argument to `resolve`, to support `ThinPointer`, skip lists, etc...
82 | - Taking `&self` rather than `&mut self`, to support concurrent collections.
83 | 
84 | And taking the opportunity to streamline the API further: fewer methods, fewer traits. Or in the words of Saint-Exupéry:
85 | 
86 | > Simplicity is achieved not when there is nothing to add, but when there is nothing to remove.
87 | 
88 | 
89 | # That's all folks!
90 | 
91 | And thanks for reading so far.
92 | 
--------------------------------------------------------------------------------
/src/alloc.rs:
--------------------------------------------------------------------------------
 1 | //! A polyfill over some alloc crate pieces of functionality.
 2 | 
 3 | #[cfg(feature = "alloc")]
 4 | pub use alloc::alloc::handle_alloc_error;
 5 | 
 6 | #[cfg(not(feature = "alloc"))]
 7 | pub use polyfill::handle_alloc_error;
 8 | 
 9 | #[cfg(not(feature = "alloc"))]
10 | mod polyfill {
11 |     use core::alloc::Layout;
12 | 
13 |     pub const fn handle_alloc_error(_layout: Layout) -> ! {
14 |         panic!("allocation failed")
15 |     }
16 | } // mod polyfill
17 | 
--------------------------------------------------------------------------------
/src/collection.rs:
--------------------------------------------------------------------------------
 1 | //! A variety of collections implemented in terms of `Store`.
 2 | //!
 3 | //!
The collections may have a rather minimal interface, as the emphasis is put on demonstrating the flexibility of the 4 | //! `Store` trait, rather than providing fully implemented collections -- for now. 5 | 6 | mod concurrent_vec; 7 | mod linked_list; 8 | mod skip_list; 9 | mod store_box; 10 | mod store_vec; 11 | 12 | #[cfg(test)] 13 | mod utils; 14 | 15 | pub use concurrent_vec::ConcurrentVec; 16 | pub use linked_list::LinkedList; 17 | pub use skip_list::SkipList; 18 | pub use store_box::StoreBox; 19 | pub use store_vec::StoreVec; 20 | -------------------------------------------------------------------------------- /src/collection/concurrent_vec.rs: -------------------------------------------------------------------------------- 1 | //! Proof of concept concurrent access vector. 2 | //! 3 | //! For simplification, the capacity is fixed at creation, and elements cannot be removed. 4 | 5 | use core::{ 6 | alloc::Layout, 7 | fmt, hint, 8 | mem::{self, ManuallyDrop, MaybeUninit}, 9 | ops, 10 | ptr::{self, NonNull}, 11 | sync::atomic::{AtomicIsize, Ordering}, 12 | }; 13 | 14 | use crate::{extension::unique::UniqueHandle, interface::Store}; 15 | 16 | /// A fixed-capacity vector which can be modified concurrently. 17 | pub struct ConcurrentVec { 18 | // Invariants: 19 | // - `length` is negative if a thread is appending a new element. 20 | // - `length.abs() - 1 <= self.store.capacity`. 21 | // - Elements in 0..(length.abs() - 1) are initialized. 22 | length: AtomicIsize, 23 | store: Inner, 24 | } 25 | 26 | impl ConcurrentVec { 27 | /// Creates a vector with a given capacity and a default store. 28 | /// 29 | /// Since the vector cannot be resized later, pick well! 30 | pub fn new(capacity: usize) -> Self 31 | where 32 | S: Default, 33 | { 34 | Self::with_store(capacity, S::default()) 35 | } 36 | 37 | /// Creates a vector with a given capacity and store. 38 | /// 39 | /// Since the vector cannot be resized later, pick well! 40 | pub fn with_store(capacity: usize, store: S) -> Self { 41 | let length = AtomicIsize::new(1); 42 | let store = Inner::with_store(capacity, store); 43 | 44 | Self { length, store } 45 | } 46 | 47 | /// Returns whether the vector is empty. 48 | pub fn is_empty(&self) -> bool { 49 | self.len() == 0 50 | } 51 | 52 | /// Returns the length of the vector. 53 | pub fn len(&self) -> usize { 54 | (self.length.load(Ordering::Acquire).abs() - 1) as usize 55 | } 56 | 57 | /// Returns the capacity of the vector. 58 | pub fn capacity(&self) -> usize { 59 | self.store.capacity() 60 | } 61 | 62 | /// Returns a reference to the slice of initialized elements. 63 | pub fn as_slice(&self) -> &[T] { 64 | let initialized = self.initialized(); 65 | 66 | // Safety: 67 | // - `initialized` covers a valid area of memory. 68 | // - `initialized` covers a readable area of memory. 69 | // - `initialized` is accessible in shared mode, as `self` is borrowed immutably for the duration. 70 | // - The lifetime of the resulting slice will not exceed that of `self.store`. 71 | unsafe { initialized.as_ref() } 72 | } 73 | 74 | /// Returns a mutable reference to the slice of initialized elements. 75 | pub fn as_slice_mut(&mut self) -> &mut [T] { 76 | let mut initialized = self.initialized(); 77 | 78 | // Safety: 79 | // - `initialized` covers a valid area of memory. 80 | // - `initialized` covers a readable and writeable area of memory. 81 | // - `initialized` is accessible in exclusive mode, as `self` is borrowed mutably for the duration. 
82 | // - The lifetime of the resulting slice will not exceed that of `self.store`. 83 | unsafe { initialized.as_mut() } 84 | } 85 | 86 | /// Returns a reference to the element at `index`. 87 | /// 88 | /// # Safety 89 | /// 90 | /// - `index` must be strictly less than `self.len()`. 91 | pub unsafe fn get_unchecked(&self, index: usize) -> &T { 92 | // Safety: 93 | // - `index <= self.len()`, as per method invariant. 94 | let initialized = unsafe { self.initialized_unchecked(index + 1) }; 95 | 96 | // Safety: 97 | // - `index < index + 1`. 98 | let element = unsafe { initialized.get_unchecked_mut(index) }; 99 | 100 | // Safety: 101 | // - `initialized` covers a valid area of memory. 102 | // - `initialized` covers a readable area of memory. 103 | // - `initialized` is accessible in shared mode, as `self` is borrowed immutably for the duration. 104 | // - The lifetime of the resulting slice will not exceed that of `self.store`. 105 | unsafe { element.as_ref() } 106 | } 107 | 108 | /// Returns a mutable reference to the element at `index`. 109 | /// 110 | /// # Safety 111 | /// 112 | /// - `index` must be strictly less than `self.len()`. 113 | pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { 114 | // Safety: 115 | // - `index <= self.len()`, as per method invariant. 116 | let initialized = unsafe { self.initialized_unchecked(index + 1) }; 117 | 118 | // Safety: 119 | // - `index < index + 1`. 120 | let mut element = unsafe { initialized.get_unchecked_mut(index) }; 121 | 122 | // Safety: 123 | // - `initialized` covers a valid area of memory. 124 | // - `initialized` covers a readable and writeable area of memory. 125 | // - `initialized` is accessible in exclusive mode, as `self` is borrowed mutably for the duration. 126 | // - The lifetime of the resulting slice will not exceed that of `self.store`. 127 | unsafe { element.as_mut() } 128 | } 129 | 130 | /// Attempts to push a new element into the vector. 131 | /// 132 | /// The vector is locked for writes for the duration of the operation. 133 | /// 134 | /// Returns an error if the vector is full, that is, if `self.len() == self.capacity()`. 135 | pub fn push(&self, element: T) -> Result<(), T> { 136 | let mut length = self.length.load(Ordering::Acquire); 137 | 138 | loop { 139 | if length.unsigned_abs() > self.store.capacity() { 140 | return Err(element); 141 | } 142 | 143 | if length < 0 { 144 | hint::spin_loop(); 145 | 146 | length = self.length.load(Ordering::Acquire); 147 | continue; 148 | } 149 | 150 | debug_assert!(length > 0); 151 | 152 | let result = self 153 | .length 154 | .compare_exchange_weak(length, -length, Ordering::Acquire, Ordering::Relaxed); 155 | 156 | if let Err(prev) = result { 157 | hint::spin_loop(); 158 | 159 | length = prev; 160 | continue; 161 | } 162 | 163 | break; 164 | } 165 | 166 | // The slot at `length - 1` is ours! 167 | debug_assert!(length > 0, "{length}"); 168 | debug_assert!( 169 | length.unsigned_abs() <= self.store.capacity(), 170 | "{length} > {}", 171 | self.store.capacity() 172 | ); 173 | 174 | let slots = self.store.slots(); 175 | 176 | // Safety: 177 | // - `length - 1 < self.store.capacity()`, since `length > 0` and `length <= self.store.capacity()`. 178 | let slot = unsafe { slots.get_unchecked_mut(length as usize - 1) }; 179 | 180 | // Safety: 181 | // - `slot` points to a valid area of memory. 182 | // - `slot` points to a writeable area of memory. 183 | // - `slot` is accessible in exclusive mode, as per the lock on `self.length`. 
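            // [Editorial aside -- a concrete trace of the locking scheme, under the
            // `len == length.abs() - 1` encoding, with a capacity of 2:
            //
            //   length ==  1   empty, unlocked
            //   length == -1   empty, locked for a push
            //   length ==  2   one element, unlocked
            //   length == -2   one element, locked for a push
            //   length ==  3   two elements, unlocked; full, further pushes fail
            //
            // The successful `compare_exchange_weak(length, -length, ..)` above is
            // the lock acquisition; the `store(length + 1, Release)` just after the
            // write below both unlocks and publishes the freshly written slot to
            // the `Acquire` loads in `len` and `push`.]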
184 | unsafe { ptr::write(slot.as_ptr(), element) }; 185 | 186 | self.length.store(length + 1, Ordering::Release); 187 | 188 | Ok(()) 189 | } 190 | } 191 | 192 | impl Clone for ConcurrentVec 193 | where 194 | T: Clone, 195 | S: Store + Clone, 196 | { 197 | fn clone(&self) -> Self { 198 | let clone = Self::with_store(self.store.capacity(), self.store.store.clone()); 199 | 200 | let elements = self.as_slice(); 201 | let slots = clone.store.slots(); 202 | 203 | debug_assert!(elements.len() <= slots.len()); 204 | 205 | // Safety: 206 | // - `elements.len() <= slots.len()`. 207 | let slots = unsafe { slots.get_unchecked_mut(..elements.len()) }; 208 | 209 | // Safety: 210 | // - `slots` is valid for reads and writes of `slots.len()`, since the allocation succeeded and we have 211 | // exlusive access for now. 212 | // - `slots.len()` is no larger than `isize::MAX`, since the allocation succeeded. 213 | // - The result `slots` will not outlive this function call. 214 | let slots = unsafe { slots.as_uninit_slice_mut() }; 215 | 216 | MaybeUninit::write_slice_cloned(slots, elements); 217 | 218 | clone.length.store(elements.len() as isize + 1, Ordering::Release); 219 | 220 | clone 221 | } 222 | } 223 | 224 | impl Drop for ConcurrentVec { 225 | fn drop(&mut self) { 226 | if !mem::needs_drop::() { 227 | return; 228 | } 229 | 230 | let initialized = self.initialized(); 231 | 232 | for index in 0..initialized.len() { 233 | // Safety: 234 | // - `index <= initialized.len()`. 235 | let element = unsafe { initialized.get_unchecked_mut(index) }; 236 | 237 | // Safety: 238 | // - `element` is valid for both reads and writes. 239 | // - `element` is properly aligned. 240 | // - There are no specific invariant to uphold for `element`. 241 | unsafe { ptr::drop_in_place(element.as_ptr()) }; 242 | } 243 | } 244 | } 245 | 246 | impl fmt::Debug for ConcurrentVec 247 | where 248 | T: fmt::Debug, 249 | { 250 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { 251 | write!(f, "{:?}", self.as_slice()) 252 | } 253 | } 254 | 255 | impl ops::Deref for ConcurrentVec { 256 | type Target = [T]; 257 | 258 | fn deref(&self) -> &Self::Target { 259 | self.as_slice() 260 | } 261 | } 262 | 263 | impl ops::DerefMut for ConcurrentVec { 264 | fn deref_mut(&mut self) -> &mut Self::Target { 265 | self.as_slice_mut() 266 | } 267 | } 268 | 269 | // Safety: 270 | // - Same as `Vec`. 271 | unsafe impl Send for ConcurrentVec 272 | where 273 | T: Send, 274 | S: Store + Send, 275 | { 276 | } 277 | 278 | // Safety: 279 | // - Same as `Vec`. 280 | unsafe impl Sync for ConcurrentVec 281 | where 282 | T: Sync, 283 | S: Store + Sync, 284 | { 285 | } 286 | 287 | // 288 | // Implementation 289 | // 290 | 291 | impl ConcurrentVec { 292 | // Returns a pointer to the slice of initialized elements. 293 | fn initialized(&self) -> NonNull<[T]> { 294 | // Safety: 295 | // - `self.len() <= self.len()`. 296 | unsafe { self.initialized_unchecked(self.len()) } 297 | } 298 | 299 | // Returns a pointer to the slice of initialized elements up to `index`, not included. 300 | // 301 | // # Safety 302 | // 303 | // - `index` must be less than or equal to `self.len()`. 304 | unsafe fn initialized_unchecked(&self, index: usize) -> NonNull<[T]> { 305 | debug_assert!(index <= self.len(), "{index} > {}", self.len()); 306 | 307 | let slots = self.store.slots(); 308 | 309 | debug_assert_eq!(slots.len(), self.capacity()); 310 | 311 | // Safety: 312 | // - `index` is within bounds, as per invariant `self.len() <= self.capacity()`. 
313 |         unsafe { slots.get_unchecked_mut(..index) }
314 |     }
315 | }
316 | 
317 | struct Inner<T, S: Store> {
318 |     store: S,
319 |     handle: ManuallyDrop<UniqueHandle<[T], S::Handle>>,
320 | }
321 | 
322 | impl<T, S: Store> Inner<T, S> {
323 |     // Creates an `Inner` with the given capacity, taking ownership of the given store.
324 |     fn with_store(capacity: usize, store: S) -> Self {
325 |         let layout = Layout::array::<T>(capacity).expect("Small enough capacity");
326 | 
327 |         let (handle, _) = store.allocate(layout).expect("Successful allocation");
328 | 
329 |         // Safety:
330 |         // - `handle` is associated to a block of memory which fits `[T; capacity]`.
331 |         // - `handle` is the unique handle associated to this block of memory.
332 |         // - `capacity` is the suitable metadata for this block of memory.
333 |         let handle = unsafe { UniqueHandle::from_raw_parts(handle, capacity.into()) };
334 | 
335 |         let handle = ManuallyDrop::new(handle);
336 | 
337 |         Self { store, handle }
338 |     }
339 | 
340 |     // Returns the capacity of the store, in number of elements.
341 |     fn capacity(&self) -> usize {
342 |         self.handle.len()
343 |     }
344 | 
345 |     // Retrieves the slots of the store.
346 |     //
347 |     // The slice is only valid as long as `self` is live.
348 |     fn slots(&self) -> NonNull<[T]> {
349 |         // Safety:
350 |         // - `self.handle` has been allocated by `self.store`.
351 |         // - `self.handle` is still valid, since no operation other than `resolve` occurred.
352 |         // - The block of memory associated to the handle will only be used as long as `self.handle` is valid.
353 |         unsafe { self.handle.resolve_raw(&self.store) }
354 |     }
355 | }
356 | 
357 | impl<T, S: Store> Drop for Inner<T, S> {
358 |     fn drop(&mut self) {
359 |         // Safety:
360 |         // - `self.handle` will no longer be used.
361 |         let handle = unsafe { ManuallyDrop::take(&mut self.handle) };
362 | 
363 |         // Safety:
364 |         // - `handle` has been allocated by `self.store`.
365 |         // - `handle` is still valid, since no operation other than `resolve` occurred.
366 | unsafe { handle.deallocate(&self.store) } 367 | } 368 | } 369 | 370 | #[cfg(test)] 371 | mod tests { 372 | use std::{sync::Arc, thread}; 373 | 374 | use crate::collection::utils::Global; 375 | 376 | use super::*; 377 | 378 | type GlobalVec = ConcurrentVec; 379 | 380 | #[test] 381 | fn empty() { 382 | let empty = GlobalVec::new(42); 383 | 384 | assert!(empty.is_empty()); 385 | assert_eq!(0, empty.len()); 386 | assert_eq!(42, empty.capacity()); 387 | } 388 | 389 | #[test] 390 | fn brush() { 391 | let vec = GlobalVec::new(42); 392 | 393 | for i in 0..3 { 394 | vec.push(i.to_string()).unwrap(); 395 | } 396 | 397 | assert_eq!(&["0", "1", "2"][..], vec.as_slice()); 398 | } 399 | 400 | #[test] 401 | fn overflow() { 402 | const CAP: usize = 5; 403 | 404 | let vec = GlobalVec::new(CAP); 405 | 406 | for i in 0..CAP { 407 | vec.push(i.to_string()).unwrap(); 408 | } 409 | 410 | let result = vec.push(CAP.to_string()); 411 | assert_eq!(Err(CAP.to_string()), result); 412 | } 413 | 414 | #[test] 415 | fn multithreaded() { 416 | const THREADS: usize = 4; 417 | const ELEMENTS: usize = 4; 418 | 419 | let vec = Arc::new(GlobalVec::new(THREADS * ELEMENTS)); 420 | 421 | let handles: Vec<_> = (0..THREADS) 422 | .map(|i| { 423 | let vec = vec.clone(); 424 | 425 | thread::spawn(move || { 426 | for k in 0..ELEMENTS { 427 | vec.push((i * ELEMENTS + k).to_string()).unwrap(); 428 | } 429 | }) 430 | }) 431 | .collect(); 432 | 433 | for h in handles { 434 | h.join().unwrap(); 435 | } 436 | 437 | assert_eq!(THREADS * ELEMENTS, vec.len()); 438 | 439 | let mut elements: Vec = vec.as_slice().iter().map(|n| n.parse().unwrap()).collect(); 440 | elements.sort(); 441 | 442 | assert_eq!(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15][..], &elements); 443 | } 444 | } // mod tests 445 | -------------------------------------------------------------------------------- /src/collection/skip_list.rs: -------------------------------------------------------------------------------- 1 | //! An example implementation of a Skip List. 2 | //! 3 | //! The implementation is incomplete, only intended to demonstrate why thin pointers matter. 4 | 5 | use core::{ 6 | alloc::Layout, 7 | cmp, 8 | marker::PhantomData, 9 | mem, 10 | ptr::{self, NonNull}, 11 | slice, 12 | }; 13 | 14 | use oorandom::Rand32; 15 | 16 | use crate::{ 17 | extension::{typed::TypedHandle, typed_metadata::TypedMetadata}, 18 | interface::{Store, StoreStable}, 19 | }; 20 | 21 | /// A Skip List, with minimal memory usage. 22 | pub struct SkipList { 23 | // Invariant: `length == 0` => `head` is a dangling handle. 24 | length: usize, 25 | head: NodeHandle, 26 | store: S, 27 | prng: Rand32, 28 | } 29 | 30 | impl SkipList { 31 | /// Creates a new, empty, instance. 32 | pub fn new() -> Self 33 | where 34 | S: Default, 35 | { 36 | Self::with_store(S::default()) 37 | } 38 | 39 | /// Creates a new, empty, instance with the given store. 40 | pub fn with_store(store: S) -> Self { 41 | let length = 0; 42 | let head = NodeHandle::dangling(&store); 43 | 44 | // 0 is not particularly good; on the allocation of the first node it'll be switched with its address instead. 45 | let prng = Rand32::new(0); 46 | 47 | Self { 48 | length, 49 | head, 50 | store, 51 | prng, 52 | } 53 | } 54 | 55 | /// Returns whether the list is empty. 56 | pub fn is_empty(&self) -> bool { 57 | self.length == 0 58 | } 59 | 60 | /// Returns the number of nodes in the list. 61 | pub fn len(&self) -> usize { 62 | self.length 63 | } 64 | 65 | /// Clears the list, destroying any node. 
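    // [Editorial aside -- the shape being traversed, for readers new to skip
    // lists; each node stores its key, its value, and an inline array of
    // `number_links` forward handles, one per level it participates in:
    //
    //   level 2:  head ------------------------> D
    //   level 1:  head --------> B ------------> D
    //   level 0:  head -> A -> B -> C -> D -> E      (E: last node, 0 links)
    //
    // Searches start from the highest level of `head` and drop one level
    // whenever the next node would overshoot, for O(log n) expected steps.]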
 66 |     ///
 67 |     /// Afterwards, the list is empty.
 68 |     pub fn clear(&mut self) {
 69 |         if self.length == 0 {
 70 |             return;
 71 |         }
 72 | 
 73 |         // When `length == 0`, `head` is a dangling handle.
 74 |         //
 75 |         // Hence, if a panic occurs during this method, no further attempt at using the handles will occur. This is
 76 |         // safe, at the cost of leaking the existing handles.
 77 |         let length = mem::replace(&mut self.length, 0);
 78 |         let mut handle = self.head;
 79 | 
 80 |         for _ in 0..(length - 1) {
 81 |             let next_handle = {
 82 |                 // Safety:
 83 |                 // - `handle` has been allocated by `self.store`.
 84 |                 // - `handle` is valid, since `length` nodes exist.
 85 |                 // - No other reference to the block of memory of `handle` exists, since `self` is borrowed mutably.
 86 |                 let node = unsafe { handle.resolve_mut(&self.store) };
 87 | 
 88 |                 let links = node.links();
 89 | 
 90 |                 // Safety:
 91 |                 // - All nodes but the last have at least one link, and this is never the last node.
 92 |                 unsafe { *links.get_unchecked(0) }
 93 |             };
 94 | 
 95 |             // Safety:
 96 |             // - `handle` has been allocated by `self.store`.
 97 |             // - `handle` is valid, since `length` nodes exist.
 98 |             // - No other reference to the block of memory of `handle` exists, since `self` is borrowed mutably.
 99 |             unsafe { NodeHeader::<K, V, S::Handle>::deallocate(handle, &self.store) };
100 | 
101 |             handle = next_handle;
102 |         }
103 | 
104 |         // Safety:
105 |         // - `handle` has been allocated by `self.store`.
106 |         // - `handle` is valid, since `length` nodes exist.
107 |         // - No other reference to the block of memory of `handle` exists, since `self` is borrowed mutably.
108 |         unsafe { NodeHeader::<K, V, S::Handle>::deallocate(handle, &self.store) };
109 |     }
110 | }
111 | 
112 | impl<K, V, S: Store> SkipList<K, V, S>
113 | where
114 |     K: Ord,
115 | {
116 |     /// Gets the value associated to a `key`, if it exists.
117 |     pub fn get(&self, key: &K) -> Option<&V> {
118 |         Self::get_impl(key, self.length, self.head, &self.store).map(|pointer| {
119 |             // Safety:
120 |             // - `pointer` points to a valid instance of `V`.
121 |             // - No mutable reference to `V` is active, since `self` is borrowed immutably.
122 |             // - The lifetime of the result is tied to that of `self`.
123 |             unsafe { pointer.as_ref() }
124 |         })
125 |     }
126 | 
127 |     /// Gets a mutable reference to the value associated to a `key`, if it exists.
128 |     pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
129 |         Self::get_impl(key, self.length, self.head, &self.store).map(|mut pointer| {
130 |             // Safety:
131 |             // - `pointer` points to a valid instance of `V`.
132 |             // - No other reference to `V` is active, since `self` is borrowed mutably.
133 |             // - The lifetime of the result is tied to that of `self`.
134 |             unsafe { pointer.as_mut() }
135 |         })
136 |     }
137 | 
138 |     /// Inserts a new key and value in the list.
139 |     ///
140 |     /// If a key comparing equal is already in the list, the previous key and value are returned.
141 |     pub fn insert(&mut self, key: K, value: V) -> Option<(K, V)> {
142 |         if self.length == 0 {
143 |             self.head = NodeHeader::new(key, value, 0, &self.store).0;
144 |             self.length = 1;
145 | 
146 |             // Safety:
147 |             // - `self.head` was allocated by `self.store`.
148 |             // - `self.head` is still valid.
149 |             let pointer = unsafe { self.head.resolve_raw(&self.store) };
150 | 
151 |             let seed = pointer.as_ptr() as usize as u64;
152 | 
153 |             self.prng = Rand32::new(seed);
154 | 
155 |             return None;
156 |         }
157 | 
158 |         let target_links = self.determine_number_links();
159 | 
160 |         // There are already elements, so we need to figure out:
161 |         // - Whether a node with an equal key exists, and if so replace its key and value.
162 |         // - Otherwise, find the pair of nodes between which to place this particular node, and link it in.
163 |         // - And as a further complication, if the PRNG decides to use more links for this node than the head node
164 |         //   currently has, we need to reallocate the first node with more handles.
165 | 
166 |         // Safety:
167 |         // - `self.head` was allocated by `self.store`.
168 |         // - `self.head` is still valid, notably it is not dangling per invariant, since `self.length > 0`.
169 |         // - No other reference to the block of memory exists, since `self` is borrowed mutably.
170 |         let mut node = unsafe { self.head.resolve_mut(&self.store) };
171 |         let head_links = node.number_links as usize;
172 | 
173 |         // Well, that'll avoid having to reallocate `head`!
174 |         if key < node.key {
175 |             let target_links = cmp::max(target_links, head_links);
176 | 
177 |             let (node, links) = NodeHeader::new(key, value, target_links, &self.store);
178 | 
179 |             links.iter_mut().for_each(|link| *link = self.head);
180 | 
181 |             self.head = node;
182 |             self.length += 1;
183 | 
184 |             return None;
185 |         }
186 | 
187 |         // And what if the right node is just in front of our eyes?
188 |         if key == node.key {
189 |             let key = mem::replace(&mut node.key, key);
190 |             let value = mem::replace(&mut node.value, value);
191 | 
192 |             return Some((key, value));
193 |         }
194 | 
195 |         debug_assert!(key > node.key);
196 | 
197 |         // Buffer of handles:
198 |         // - For each level in `0..head_links`, a pointer to the handle in the node preceding the new node, and
199 |         //   pointing to the node following the new node (or dangling).
200 |         // - This handle will need to be replaced _if_ the new node is tall enough.
201 |         //
202 |         // IMPORTANT: if the last node should precede the new node, they are swapped instead.
203 |         #[allow(clippy::type_complexity)]
204 |         let mut handles: [Option<NonNull<NodeHandle<K, V, S::Handle>>>; MAX_NUMBER_LINKS] = [None; MAX_NUMBER_LINKS];
205 | 
206 |         let mut last = (head_links == 0).then_some(self.head);
207 | 
208 |         for level in (0..head_links).rev() {
209 |             // Advance as far as possible in this level.
210 |             loop {
211 |                 let Some(next) = node.links_mut().get_mut(level) else {
212 |                     break;
213 |                 };
214 | 
215 |                 // Safety:
216 |                 // - `next` was allocated by `self.store`.
217 |                 // - `next` is still valid, since apart from `self.head`, only valid handles are kept.
218 |                 // - No other reference to the block of memory exists, since `self` is borrowed mutably.
219 |                 let next_node = unsafe { next.resolve_mut(&self.store) };
220 | 
221 |                 if key > next_node.key {
222 |                     if next_node.number_links == 0 {
223 |                         last = Some(*next);
224 |                         break;
225 |                     }
226 | 
227 |                     node = next_node;
228 |                     continue;
229 |                 }
230 | 
231 |                 if key == next_node.key {
232 |                     let key = mem::replace(&mut next_node.key, key);
233 |                     let value = mem::replace(&mut next_node.value, value);
234 | 
235 |                     return Some((key, value));
236 |                 }
237 | 
238 |                 debug_assert!(key < next_node.key);
239 | 
240 |                 break;
241 |             }
242 | 
243 |             debug_assert!(key > node.key);
244 | 
245 |             handles[level] = Some(NonNull::from(&mut node.links_mut()[level]));
246 |         }
247 | 
248 |         // `handles` is now filled, and a new node needs to be introduced.
249 |         let (mut handle, links) = NodeHeader::new(key, value, target_links, &self.store);
250 | 
251 |         // Splice in the new node, at each level it participates in.
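        // [Editorial aside -- what the loop below does, pictorially. For each level
        // the new node participates in, `handles[level]` points at the link that
        // currently jumps over the insertion point:
        //
        //   before:  prev.links[level] --------------------> next
        //   after:   prev.links[level] -> new.links[level] -> next
        //
        // The `mem::replace` redirects the predecessor's link to the new node and
        // moves the old target into the new node's link, one level at a time.]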
252 | for (prev_handle, dangling_handle) in handles.iter_mut().take(head_links).zip(links.iter_mut()) { 253 | let Some(prev_handle) = prev_handle else { continue }; 254 | 255 | // Safety: 256 | // - `prev_handle` points to a readable and writeable block of memory. 257 | // - `prev_handle` points to an initialized handle. 258 | // - No other reference to `prev_handle` is active, since `self` is borrow mutably. 259 | let prev_handle = unsafe { prev_handle.as_mut() }; 260 | 261 | let prev_handle = mem::replace(prev_handle, handle); 262 | *dangling_handle = prev_handle; 263 | } 264 | 265 | // Exchange with last, if it goes beyond last. 266 | if let Some(mut last) = last { 267 | // Safety: 268 | // - `next` was allocated by `self.store.` 269 | // - `next` is still valid, since apart from `self.head`, only valid handles are kept. 270 | // - No other reference to the block of memory exist, since `self` is borrowed mutably. 271 | let last_node = unsafe { last.resolve_mut(&self.store) }; 272 | 273 | // Safety: 274 | // - `handle` was allocated by `self.store`. 275 | // - `handle` is still valid. 276 | // - No other active reference to the block of memory pointed to by `handle` exists. 277 | let new_node = unsafe { handle.resolve_mut(&self.store) }; 278 | 279 | mem::swap(&mut last_node.key, &mut new_node.key); 280 | mem::swap(&mut last_node.value, &mut new_node.value); 281 | 282 | links.iter_mut().for_each(|link| *link = last); 283 | } 284 | 285 | // Last is head. 286 | if head_links == 0 { 287 | debug_assert!(last.is_some()); 288 | 289 | self.head = handle; 290 | self.length += 1; 291 | 292 | return None; 293 | } 294 | 295 | // Reallocate head, if necessary. 296 | if target_links > head_links { 297 | // Safety: 298 | // - `self.head` was allocated by `self.store`. 299 | // - `self.head` is still valid. 300 | // - No other reference to the block of memory associated with `self.head` is active, since `self` is 301 | // borrowed mutably. 302 | // - `head_links` is the number of links of `self.head`. 303 | // - `target_links > head_links`. 304 | self.head = 305 | unsafe { NodeHeader::::grow(self.head, handle, head_links, target_links, &self.store) }; 306 | } 307 | 308 | self.length += 1; 309 | 310 | None 311 | } 312 | } 313 | 314 | impl Drop for SkipList { 315 | fn drop(&mut self) { 316 | self.clear(); 317 | } 318 | } 319 | 320 | impl Default for SkipList 321 | where 322 | S: Store + Default, 323 | { 324 | fn default() -> Self { 325 | Self::new() 326 | } 327 | } 328 | 329 | // 330 | // Implementation 331 | // 332 | 333 | const MAX_NUMBER_LINKS: usize = 32; 334 | 335 | impl SkipList { 336 | // Returns the number of links a (new) node should have. 337 | fn determine_number_links(&mut self) -> usize { 338 | (self.prng.rand_u32() | 1).trailing_ones() as usize 339 | } 340 | 341 | // # Safety 342 | // 343 | // - `handle` must have been allocated by `store`. 344 | // - `handle` must still be valid. 345 | unsafe fn resolve_value(handle: NodeHandle, store: &S) -> NonNull { 346 | // Safety: 347 | // - `handle` has been allocated by `store`, as per pre-conditions. 348 | // - `handle` is still valid, as per pre-conditions. 349 | let pointer = unsafe { handle.resolve_raw(store) }; 350 | 351 | let offset = mem::offset_of!(NodeHeader, value); 352 | 353 | // Safety: 354 | // - `pointer` points to a valid `NodeHeader`. 355 | // - `offset` is an offset within the allocation of `NodeHeader`. 356 | let pointer = unsafe { pointer.as_ptr().add(offset) }; 357 | 358 | // Safety: 359 | // - `pointer` is not null. 
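        // [Editorial aside -- this helper illustrates the README's motivation for
        // dropping `Layout` from `resolve`: the value's address is recovered from
        // a handle plus a constant `offset_of!` computation, without first having
        // to read the node's `number_links` to reconstruct the allocation layout.]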
360 | unsafe { NonNull::new_unchecked(pointer).cast() } 361 | } 362 | } 363 | 364 | impl SkipList 365 | where 366 | K: Ord, 367 | { 368 | fn get_impl(key: &K, length: usize, head: NodeHandle, store: &S) -> Option> { 369 | if length == 0 { 370 | return None; 371 | } 372 | 373 | // Safety: 374 | // - `head` was allocated by `store.` 375 | // - `head` is still valid, notably it is not dangling per invariant, since `length > 0`. 376 | // - `head` is associated to block of memory containing a live instance of `NodeHeader`. 377 | let mut node = unsafe { head.resolve(store) }; 378 | let number_links = node.number_links as usize; 379 | 380 | if *key < node.key { 381 | return None; 382 | } 383 | 384 | if *key == node.key { 385 | // Safety: 386 | // - `head` was allocated by `store`. 387 | // - `head` is still valid. 388 | let value = unsafe { Self::resolve_value(head, store) }; 389 | 390 | return Some(value); 391 | } 392 | 393 | for level in (0..number_links).rev() { 394 | // Advance as far as possible in this level. 395 | loop { 396 | let Some(next) = node.links().get(level) else { break }; 397 | 398 | // Safety: 399 | // - `next` was allocated by `store.` 400 | // - `next` is still valid, since apart from `head`, only valid handles are kept. 401 | // - `next` is associated to block of memory containing a live instance of `NodeHeader`. 402 | let next_node = unsafe { next.resolve(store) }; 403 | 404 | if *key > next_node.key { 405 | node = next_node; 406 | continue; 407 | } 408 | 409 | if *key == next_node.key { 410 | // Safety: 411 | // - `next` was allocated by `store`. 412 | // - `next` is still valid. 413 | let value = unsafe { Self::resolve_value(*next, store) }; 414 | 415 | return Some(value); 416 | } 417 | 418 | debug_assert!(*key < next_node.key); 419 | 420 | break; 421 | } 422 | } 423 | 424 | None 425 | } 426 | } 427 | 428 | type NodeHandle = TypedHandle, H>; 429 | 430 | struct NodeHeader { 431 | key: K, 432 | value: V, 433 | // A node always has at least 1 link, with the exception of the last node, which always has 0 links. 434 | number_links: u8, 435 | _marker: PhantomData, 436 | } 437 | 438 | impl NodeHeader 439 | where 440 | H: Copy, 441 | { 442 | // Returns the layout of a node with the given number of links, and the offset of the array of links. 443 | fn layout(number_links: usize) -> (Layout, usize) { 444 | let layout = Layout::new::(); 445 | let links = Layout::array::(number_links).expect("Sufficiently small number of links"); 446 | 447 | layout.extend(links).expect("Sufficiently small number of links") 448 | } 449 | 450 | // Creates a node with `number_links` links, returning a handle to the node and an array of dangling links. 451 | #[allow(clippy::new_ret_no_self, clippy::type_complexity)] 452 | fn new(key: K, value: V, number_links: usize, store: &S) -> (NodeHandle, &mut [NodeHandle]) 453 | where 454 | S: Store, 455 | { 456 | let (layout, offset) = Self::layout(number_links); 457 | 458 | let (handle, _) = store.allocate(layout).expect("Allocation to succeed."); 459 | 460 | // Safety: 461 | // - `handle` was allocated by `store`, and is still valid. 462 | let pointer = unsafe { store.resolve(handle) }; 463 | 464 | { 465 | let number_links: u8 = number_links.try_into().expect("number_links to be sufficiently small"); 466 | let _marker = PhantomData; 467 | 468 | let header = Self { 469 | key, 470 | value, 471 | number_links, 472 | _marker, 473 | }; 474 | 475 | // Safety: 476 | // - `pointer` is valid for writes. 477 | // - `pointer` is properly aligned. 
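        // [Editorial aside -- a worked example of `Self::layout` above, on a
        // typical 64-bit target: with `K = V = i32` and an 8-byte, 8-aligned
        // handle `H`, `Layout::new::<Self>()` is (size 12, align 4); extending
        // it with `[H; 2]` (size 16, align 8) rounds the offset up to 16 and
        // yields (size 32, align 8). The links array thus starts `offset == 16`
        // bytes into the node, which is what `pointer.add(offset)` below uses.]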
478 | unsafe { ptr::write(pointer.as_ptr() as *mut _, header) }; 479 | } 480 | 481 | // Safety: 482 | // - `offset + index * size` is within bounds, since the calculation of the layout succeeded. 483 | let pointer = unsafe { pointer.as_ptr().add(offset) as *mut NodeHandle }; 484 | 485 | for index in 0..number_links { 486 | // Safety: 487 | // - `offset + index * size` is within bounds, since the calculation of the layout succeeded. 488 | let link = unsafe { pointer.add(index) }; 489 | 490 | let dangling = NodeHandle::dangling(store); 491 | 492 | // Safety: 493 | // - `link` is valid for writes. 494 | // - `link` is properly aligned. 495 | unsafe { ptr::write(link, dangling) }; 496 | } 497 | 498 | // Safety: 499 | // - `pointer` is valid for both reads and writes for `number_links` elements. 500 | // - Access to `links` is exclusive, as the memory is freshly allocated. 501 | let links = unsafe { slice::from_raw_parts_mut(pointer, number_links) }; 502 | 503 | let handle = NodeHandle::from_raw_parts(handle, TypedMetadata::default()); 504 | 505 | (handle, links) 506 | } 507 | 508 | // # Safety 509 | // 510 | // - `handle` must have been allocated by `store`. 511 | // - `handle` must still be valid. 512 | // - No other reference to its block of memory is active. 513 | // - `old_number_links` must match the previous number of links. 514 | // - `new_number_links` must be strictly greater than `old_number_links`. 515 | unsafe fn grow( 516 | handle: NodeHandle, 517 | with: NodeHandle, 518 | old_number_links: usize, 519 | new_number_links: usize, 520 | store: &S, 521 | ) -> NodeHandle 522 | where 523 | S: Store, 524 | { 525 | let (old_layout, offset) = Self::layout(old_number_links); 526 | let (new_layout, _) = Self::layout(new_number_links); 527 | 528 | // Safety: 529 | // - `handle` has been allocated by `store`. 530 | // - `handle` is still valid. 531 | // - No other reference to its block of memory is active. 532 | // - `old_layout` fits the block of memory associated with `handle`. 533 | // - `new_layout` is greater than `old_layout`. 534 | let (handle, _) = unsafe { 535 | store 536 | .grow(handle.to_raw_parts().0, old_layout, new_layout) 537 | .expect("Allocation to succeed") 538 | }; 539 | 540 | // Safety: 541 | // - `handle` was allocated by `store`, and is still valid. 542 | let pointer = unsafe { store.resolve(handle) }; 543 | 544 | { 545 | // Safety: 546 | // - `pointer` points to a readable and writeable area of memory. 547 | // - `pointer` points to an initialized area of memory of `Self` type. 548 | // - No other reference to this area of memory is active. 549 | let this: &mut Self = unsafe { pointer.cast().as_mut() }; 550 | 551 | this.number_links = new_number_links 552 | .try_into() 553 | .expect("new_number_links to be sufficiently small"); 554 | } 555 | 556 | // Safety: 557 | // - `offset + index * size` is within bounds, since the calculation of the layout succeeded. 558 | let pointer = unsafe { pointer.as_ptr().add(offset) as *mut NodeHandle }; 559 | 560 | for index in old_number_links..new_number_links { 561 | // Safety: 562 | // - `offset + index * size` is within bounds, since the calculation of the layout succeeded. 563 | let link = unsafe { pointer.add(index) }; 564 | 565 | // Safety: 566 | // - `link` is valid for writes. 567 | // - `link` is properly aligned. 
568 | unsafe { ptr::write(link, with) }; 569 | } 570 | 571 | NodeHandle::from_raw_parts(handle, TypedMetadata::default()) 572 | } 573 | 574 | // # Safety 575 | // 576 | // - `handle` must have been allocated by `store`. 577 | // - `handle` must still be valid. 578 | // - `handle` must be associated to a block of memory containing a live instance of `NodeHeader`. 579 | // - No other reference to its block of memory is active. 580 | unsafe fn deallocate(mut handle: NodeHandle, store: &S) -> (K, V) 581 | where 582 | S: Store, 583 | { 584 | // Safety: 585 | // - `handle` was allocated by `store`, and is still valid, as per pre-conditions. 586 | // - `handle` is associated to a block of memory containing a live instance of `NodeHeader`, as per 587 | // pre-conditions. 588 | // - No other reference to its block of memory is active, as per pre-conditions. 589 | let this = unsafe { handle.resolve_mut(store) }; 590 | 591 | // Safety: 592 | // - `this.key` and `this.value` are valid for reads. 593 | // - `this.key` and `this.value` are properly aligned. 594 | // - The values are initialized, and will no longer be used. 595 | let key = unsafe { ptr::read(&this.key) }; 596 | let value = unsafe { ptr::read(&this.value) }; 597 | let number_links: usize = this.number_links.into(); 598 | 599 | let (layout, _) = Self::layout(number_links); 600 | 601 | // Safety: 602 | // - `handle` was allocated by `store`. 603 | // - `handle` is still valid. 604 | // - `layout` fits the block of memory. 605 | unsafe { store.deallocate(handle.to_raw_parts().0, layout) }; 606 | 607 | (key, value) 608 | } 609 | 610 | fn links(&self) -> &[NodeHandle] { 611 | let number_links: usize = self.number_links.into(); 612 | 613 | if number_links == 0 { 614 | return &[]; 615 | } 616 | 617 | let (_, offset) = Self::layout(number_links); 618 | 619 | // Safety: 620 | // - `offset` is within bounds, since the node was allocated. 621 | let first = unsafe { (self as *const Self as *const u8).add(offset) }; 622 | 623 | // Safety: 624 | // - The pointer is properly aligned. 625 | // - The pointer is dereferenceable. 626 | // - The pointer points to an initialized instance of `[NodeHandle]`. 627 | // - The slice is accessible in shared mode, since `self` is, and its lifetime is bound to `self`. 628 | unsafe { slice::from_raw_parts(first as *const NodeHandle, number_links) } 629 | } 630 | 631 | fn links_mut(&mut self) -> &mut [NodeHandle] { 632 | let number_links: usize = self.number_links.into(); 633 | 634 | if number_links == 0 { 635 | return &mut []; 636 | } 637 | 638 | let (_, offset) = Self::layout(number_links); 639 | 640 | // Safety: 641 | // - `offset` is within bounds, since the node was allocated. 642 | let first = unsafe { (self as *mut Self as *mut u8).add(offset) }; 643 | 644 | // Safety: 645 | // - The pointer is properly aligned. 646 | // - The pointer is dereferenceable. 647 | // - The pointer points to an initialized instance of `[NodeHandle]`. 648 | // - The slice is accessible in exclusive mode, since `self` is, and its lifetime is bound to `self`. 
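        // [Editorial aside -- a caveat applying to both link accessors: the slice
        // pointer below is derived from `self`, a reference to the header alone,
        // so under a strict provenance model it does not obviously carry access
        // rights to the trailing links array; this is why some insertion tests
        // below are marked `#[cfg_attr(miri, ignore)]`.]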
649 | unsafe { slice::from_raw_parts_mut(first as *mut NodeHandle, number_links) } 650 | } 651 | } 652 | 653 | #[cfg(test)] 654 | mod tests { 655 | use super::*; 656 | 657 | use crate::collection::utils::Global; 658 | 659 | type GlobalList = SkipList; 660 | 661 | #[test] 662 | fn empty() { 663 | let list = GlobalList::default(); 664 | 665 | assert!(list.is_empty()); 666 | assert_eq!(0, list.len()); 667 | assert_eq!(None, list.get(&0)); 668 | } 669 | 670 | #[test] 671 | fn insert_single() { 672 | let mut list = GlobalList::default(); 673 | 674 | list.insert(0, String::from("0")); 675 | 676 | assert!(!list.is_empty()); 677 | assert_eq!(1, list.len()); 678 | 679 | assert_eq!(None, list.get(&-1)); 680 | assert_eq!(Some(&String::from("0")), list.get(&0)); 681 | assert_eq!(None, list.get(&1)); 682 | 683 | let Some(v) = list.get_mut(&0) else { unreachable!() }; 684 | 685 | v.push('0'); 686 | 687 | assert_eq!(Some(&String::from("00")), list.get(&0)); 688 | } 689 | 690 | // MIRI does not like the idea of borrowing the "tail" links from the header, due to the original borrow of the 691 | // header not encompassing the tail. 692 | #[cfg_attr(miri, ignore)] 693 | #[test] 694 | fn insert_front() { 695 | let mut list = GlobalList::default(); 696 | 697 | list.insert(1, String::from("1")); 698 | 699 | assert_eq!(1, list.len()); 700 | 701 | list.insert(0, String::from("0")); 702 | 703 | assert_eq!(2, list.len()); 704 | 705 | assert_eq!(None, list.get(&-1)); 706 | assert_eq!(Some(&String::from("0")), list.get(&0)); 707 | assert_eq!(Some(&String::from("1")), list.get(&1)); 708 | assert_eq!(None, list.get(&2)); 709 | } 710 | 711 | // MIRI does not like the idea of borrowing the "tail" links from the header, due to the original borrow of the 712 | // header not encompassing the tail. 713 | #[cfg_attr(miri, ignore)] 714 | #[test] 715 | fn insert_back() { 716 | let mut list = GlobalList::default(); 717 | 718 | list.insert(0, String::from("0")); 719 | 720 | assert_eq!(1, list.len()); 721 | 722 | list.insert(1, String::from("1")); 723 | 724 | assert_eq!(2, list.len()); 725 | 726 | assert_eq!(None, list.get(&-1)); 727 | assert_eq!(Some(&String::from("0")), list.get(&0)); 728 | assert_eq!(Some(&String::from("1")), list.get(&1)); 729 | assert_eq!(None, list.get(&2)); 730 | } 731 | } // mod tests 732 | -------------------------------------------------------------------------------- /src/collection/store_box.rs: -------------------------------------------------------------------------------- 1 | //! Proof-of-Concept implementation of a `Box` atop a `StoreSingle`. 2 | 3 | use core::{ 4 | alloc::AllocError, 5 | fmt, 6 | marker::Unsize, 7 | mem::{self, ManuallyDrop}, 8 | ops, ptr, 9 | }; 10 | 11 | #[cfg(feature = "coercible-metadata")] 12 | use core::ops::CoerceUnsized; 13 | 14 | use crate::{extension::unique_single::UniqueSingleHandle, interface::StoreSingle}; 15 | 16 | /// A `Box` atop a `StoreSingle`. 17 | pub struct StoreBox { 18 | store: ManuallyDrop, 19 | handle: UniqueSingleHandle, 20 | } 21 | 22 | impl StoreBox { 23 | /// Creates a new instance. 24 | pub fn new(value: T) -> Self { 25 | Self::new_in(value, S::default()) 26 | } 27 | } 28 | 29 | impl StoreBox { 30 | /// Creates a new instance. 31 | pub fn new_in(value: T, mut store: S) -> Self { 32 | let handle = UniqueSingleHandle::new(value, &mut store); 33 | let store = ManuallyDrop::new(store); 34 | 35 | Self { store, handle } 36 | } 37 | 38 | /// Attempts to create a new instance. 
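    // [Editorial aside -- on the `ManuallyDrop<S>` field of `StoreBox`: `Drop`
    // must run in a precise order -- drop the boxed value, deallocate the handle
    // *through* the store, and only then drop the store itself -- so the store's
    // own drop glue is suppressed and invoked by hand via `ManuallyDrop::take`
    // in the `Drop` implementation below.]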
21 | 
22 | impl<T, S: StoreSingle + Default> StoreBox<T, S> {
23 |     /// Creates a new instance.
24 |     pub fn new(value: T) -> Self {
25 |         Self::new_in(value, S::default())
26 |     }
27 | }
28 | 
29 | impl<T, S: StoreSingle> StoreBox<T, S> {
30 |     /// Creates a new instance.
31 |     pub fn new_in(value: T, mut store: S) -> Self {
32 |         let handle = UniqueSingleHandle::new(value, &mut store);
33 |         let store = ManuallyDrop::new(store);
34 | 
35 |         Self { store, handle }
36 |     }
37 | 
38 |     /// Attempts to create a new instance.
39 |     pub fn try_new_in(value: T, mut store: S) -> Result<Self, AllocError> {
40 |         let handle = UniqueSingleHandle::try_new(value, &mut store)?;
41 |         let store = ManuallyDrop::new(store);
42 | 
43 |         Ok(Self { store, handle })
44 |     }
45 | }
46 | 
47 | impl<T: Clone, S: StoreSingle + Default> Clone for StoreBox<T, S> {
48 |     fn clone(&self) -> Self {
49 |         let value: &T = self;
50 | 
51 |         Self::new(value.clone())
52 |     }
53 | 
54 |     fn clone_from(&mut self, source: &StoreBox<T, S>) {
55 |         let dest: &mut T = self;
56 |         let source: &T = source;
57 | 
58 |         dest.clone_from(source);
59 |     }
60 | }
61 | 
62 | impl<T: ?Sized, S: StoreSingle> Drop for StoreBox<T, S> {
63 |     fn drop(&mut self) {
64 |         let value: &mut T = &mut *self;
65 | 
66 |         // Safety:
67 |         // - The instance is live.
68 |         unsafe { ptr::drop_in_place(value) };
69 | 
70 |         // Safety:
71 |         // - `self.handle` is valid.
72 |         // - `self.handle` will not be used after this point.
73 |         let handle = unsafe { ptr::read(&self.handle) };
74 | 
75 |         // Safety:
76 |         // - `self.store` will never be used ever again.
77 |         let mut store = unsafe { ManuallyDrop::take(&mut self.store) };
78 | 
79 |         // Safety:
80 |         // - `handle` was allocated by `store`.
81 |         // - `handle` is still valid.
82 |         unsafe { handle.deallocate(&mut store) };
83 |     }
84 | }
85 | 
86 | impl<T: ?Sized, S: StoreSingle> StoreBox<T, S> {
87 |     /// Coerces to another `StoreBox`.
88 |     ///
89 |     /// A poor man's `CoerceUnsized`, since that trait unfortunately cannot be implemented.
90 |     pub fn coerce<U: ?Sized>(mut self) -> StoreBox<U, S>
91 |     where
92 |         T: Unsize<U>,
93 |     {
94 |         // Safety:
95 |         // - `self.handle` is valid.
96 |         // - `self.handle` will not be used after this point.
97 |         let handle = unsafe { ptr::read(&self.handle) };
98 | 
99 |         // Safety:
100 |         // - `self.store` will never be used ever again.
101 |         let store = unsafe { ManuallyDrop::take(&mut self.store) };
102 | 
103 |         mem::forget(self);
104 | 
105 |         let handle = handle.coerce();
106 | 
107 |         let store = ManuallyDrop::new(store);
108 | 
109 |         StoreBox { store, handle }
110 |     }
111 | }
112 | 
113 | impl<T: ?Sized, S: StoreSingle> ops::Deref for StoreBox<T, S> {
114 |     type Target = T;
115 | 
116 |     fn deref(&self) -> &T {
117 |         // Safety:
118 |         // - `self.handle` was allocated by `self.store`.
119 |         // - `self.handle` is still valid.
120 |         // - `handle` is associated to a block of memory containing a live instance of T.
121 |         unsafe { self.handle.resolve(&*self.store) }
122 |     }
123 | }
124 | 
125 | impl<T: ?Sized, S: StoreSingle> ops::DerefMut for StoreBox<T, S> {
126 |     fn deref_mut(&mut self) -> &mut T {
127 |         // Safety:
128 |         // - `self.handle` was allocated by `self.store`.
129 |         // - `self.handle` is still valid.
130 |         // - `handle` is associated to a block of memory containing a live instance of T.
131 |         unsafe { self.handle.resolve_mut(&mut *self.store) }
132 |     }
133 | }
134 | 
135 | impl<T: ?Sized, S: StoreSingle> fmt::Debug for StoreBox<T, S>
136 | where
137 |     T: fmt::Debug,
138 | {
139 |     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
140 |         let value: &T = self;
141 | 
142 |         write!(f, "StoreBox({value:?})")
143 |     }
144 | }
145 | 
146 | #[cfg(feature = "coercible-metadata")]
147 | impl<T: ?Sized, U: ?Sized, S: StoreSingle> CoerceUnsized<StoreBox<U, S>> for StoreBox<T, S> where T: Unsize<U> {}
148 | 
149 | #[cfg(test)]
150 | mod test_inline {
151 |     use crate::store::InlineSingleStore;
152 | 
153 |     use super::*;
154 | 
155 |     #[test]
156 |     fn sized_store() {
157 |         let store = InlineSingleStore::<u8>::default();
158 |         let mut boxed = StoreBox::new_in(1u8, store);
159 | 
160 |         assert_eq!(1u8, *boxed);
161 | 
162 |         *boxed = 2;
163 | 
164 |         assert_eq!(2u8, *boxed);
165 | 
166 |         let mut clone = boxed.clone();
167 | 
168 |         *clone = 3;
169 | 
170 |         assert_eq!(2u8, *boxed);
171 |         assert_eq!(3u8, *clone);
172 |     }
173 | 
174 |     #[test]
175 |     fn slice_store() {
176 |         let store = InlineSingleStore::<[u8; 4]>::default();
177 |         let boxed = StoreBox::new_in([1u8, 2, 3], store);
178 |         let mut boxed: StoreBox<[u8], _> = StoreBox::coerce(boxed);
179 | 
180 |         assert_eq!([1u8, 2, 3], &*boxed);
181 | 
182 |         boxed[2] = 4;
183 | 
184 |         assert_eq!([1u8, 2, 4], &*boxed);
185 |     }
186 | 
187 |     #[cfg(feature = "coercible-metadata")]
188 |     #[test]
189 |     fn slice_coercion() {
190 |         let store = InlineSingleStore::<[u8; 4]>::default();
191 |         let boxed = StoreBox::new_in([1u8, 2, 3], store);
192 |         let mut boxed: StoreBox<[u8], _> = boxed;
193 | 
194 |         assert_eq!([1u8, 2, 3], &*boxed);
195 | 
196 |         boxed[2] = 4;
197 | 
198 |         assert_eq!([1u8, 2, 4], &*boxed);
199 |     }
200 | 
201 |     #[test]
202 |     fn trait_store() {
203 |         let store = InlineSingleStore::<[u8; 4]>::default();
204 |         let boxed = StoreBox::new_in([1u8, 2, 3], store);
205 |         let boxed: StoreBox<dyn fmt::Debug, _> = StoreBox::coerce(boxed);
206 | 
207 |         assert_eq!("StoreBox([1, 2, 3])", format!("{:?}", boxed));
208 |     }
209 | 
210 |     #[cfg(feature = "coercible-metadata")]
211 |     #[test]
212 |     fn trait_coercion() {
213 |         let store = InlineSingleStore::<[u8; 4]>::default();
214 |         let boxed = StoreBox::new_in([1u8, 2, 3], store);
215 |         let boxed: StoreBox<dyn fmt::Debug, _> = boxed;
216 | 
217 |         assert_eq!("StoreBox([1, 2, 3])", format!("{:?}", boxed));
218 |     }
219 | } // mod test_inline
220 | 
221 | #[cfg(test)]
222 | mod test_allocator {
223 |     use std::alloc::System;
224 | 
225 |     use crate::collection::utils::NonAllocator;
226 | 
227 |     use super::*;
228 | 
229 |     #[test]
230 |     fn sized_failure() {
231 |         StoreBox::try_new_in(1, NonAllocator).unwrap_err();
232 |     }
233 | 
234 |     #[test]
235 |     fn sized_allocated() {
236 |         let mut boxed = StoreBox::new_in(1, System);
237 | 
238 |         assert_eq!(1u32, *boxed);
239 | 
240 |         *boxed = 2;
241 | 
242 |         assert_eq!(2u32, *boxed);
243 | 
244 |         let mut clone = boxed.clone();
245 | 
246 |         *clone = 3;
247 | 
248 |         assert_eq!(2u32, *boxed);
249 |         assert_eq!(3u32, *clone);
250 |     }
251 | 
252 |     #[test]
253 |     fn slice_failure() {
254 |         StoreBox::try_new_in([1u8, 2, 3], NonAllocator).unwrap_err();
255 |     }
256 | 
257 |     #[test]
258 |     fn slice_allocated() {
259 |         let boxed = StoreBox::new_in([1u8, 2, 3], System);
260 |         let mut boxed: StoreBox<[u8], _> = StoreBox::coerce(boxed);
261 | 
262 |         assert_eq!([1u8, 2, 3], &*boxed);
263 | 
264 |         boxed[2] = 4;
265 | 
266 |         assert_eq!([1u8, 2, 4], &*boxed);
267 |     }
268 | 
269 |     #[cfg(feature = "coercible-metadata")]
270 |     #[test]
271 |     fn slice_coercion() {
272 |         let boxed = StoreBox::new_in([1u8, 2, 3], System);
273 |         let mut boxed: StoreBox<[u8], _> = boxed;
274 | 
275 |         assert_eq!([1u8, 2, 3], &*boxed);
276 | 
277 |         boxed[2] = 4;
278 | 
279 |         assert_eq!([1u8, 2, 4], &*boxed);
280 |     }
281 | 
282 |     #[test]
283 |     fn trait_failure() {
284 |         StoreBox::try_new_in([1u8, 2, 3], NonAllocator).unwrap_err();
285 |     }
286 | 
287 |     #[test]
288 |     fn trait_allocated() {
289 |         let boxed = StoreBox::new_in([1u8, 2, 3], System);
290 |         let boxed: StoreBox<dyn fmt::Debug, _> = StoreBox::coerce(boxed);
291 | 
292 |         assert_eq!("StoreBox([1, 2, 3])", format!("{:?}", boxed));
293 |     }
294 | 
295 |     #[cfg(feature = "coercible-metadata")]
296 |     #[test]
297 |     fn trait_coercion() {
298 |         let boxed = StoreBox::new_in([1u8, 2, 3], System);
299 |         let boxed: StoreBox<dyn fmt::Debug, _> = boxed;
300 | 
301 |         assert_eq!("StoreBox([1, 2, 3])", format!("{:?}", boxed));
302 |     }
303 | } // mod test_allocator
304 | 
--------------------------------------------------------------------------------
/src/collection/store_vec.rs:
--------------------------------------------------------------------------------
1 | //! A Dynamic Array.
2 | //!
3 | //! This implementation is solely meant to demonstrate the use of `StoreSingle`; it is incomplete, and may be buggy.
4 | 
5 | use core::{
6 |     mem::{self, MaybeUninit},
7 |     ops::Range,
8 |     ptr::{self, NonNull},
9 | };
10 | 
11 | use crate::{
12 |     extension::unique_single::UniqueSingleHandle,
13 |     interface::{StoreDangling, StoreSingle},
14 | };
15 | 
16 | /// A dynamic array.
17 | pub struct StoreVec<T, S: StoreSingle> {
18 |     // Type invariant:
19 |     // - `self.length <= self.array.capacity()`.
20 |     // - Slots in `0..self.length` are initialized.
21 |     // - Slots in `self.length..` may be uninitialized.
22 |     length: usize,
23 |     array: UniqueArray<T, S>,
24 | }
25 | 
26 | impl<T, S: StoreSingle + Default> StoreVec<T, S> {
27 |     /// Creates a new, empty, instance.
28 |     pub fn new() -> Self {
29 |         Self::new_in(S::default())
30 |     }
31 | 
32 |     /// Creates a new, empty, instance with at least the specified capacity.
33 |     pub fn with_capacity(capacity: usize) -> Self {
34 |         Self::with_capacity_in(capacity, S::default())
35 |     }
36 | }
37 | 
38 | impl<T, S: StoreSingle> StoreVec<T, S> {
39 |     /// Creates a new, empty, instance.
40 |     pub const fn new_in(store: S) -> Self
41 |     where
42 |         S: ~const StoreDangling,
43 |     {
44 |         let length = 0;
45 |         let array = UniqueArray::new_in(store);
46 | 
47 |         Self { length, array }
48 |     }
49 | 
50 |     /// Creates a new, empty, instance with at least the specified capacity.
51 |     pub const fn with_capacity_in(capacity: usize, store: S) -> Self
52 |     where
53 |         S: ~const StoreSingle + ~const StoreDangling,
54 |     {
55 |         let length = 0;
56 |         let array = UniqueArray::with_capacity_in(capacity, store);
57 | 
58 |         Self { length, array }
59 |     }
60 | }
61 | 
62 | impl<T, S: StoreSingle> StoreVec<T, S> {
63 |     /// Returns whether the vector is empty.
64 |     pub const fn is_empty(&self) -> bool {
65 |         self.length == 0
66 |     }
67 | 
68 |     /// Returns the number of elements in the vector.
69 |     pub const fn len(&self) -> usize {
70 |         self.length
71 |     }
72 | 
73 |     /// Returns the capacity of the vector.
74 |     pub const fn capacity(&self) -> usize {
75 |         self.array.capacity()
76 |     }
77 | 
78 |     /// Forces the length of the vector to `new_len`.
79 |     ///
80 |     /// # Safety
81 |     ///
82 |     /// - `new_len` must be less than or equal to `self.capacity()`.
83 |     /// - The elements in `self.len()..new_len` must be initialized.
84 |     pub const unsafe fn set_len(&mut self, new_len: usize) {
85 |         self.length = new_len;
86 |     }
87 | }
88 | 
89 | impl<T, S: StoreSingle> StoreVec<T, S> {
90 |     /// Returns a raw pointer to the vector’s buffer.
91 |     ///
92 |     /// If the vector didn't allocate yet, that is, if its capacity is 0, this pointer is dangling, and valid for zero
93 |     /// sized reads.
94 |     pub const fn as_ptr(&self) -> *const T
95 |     where
96 |         S: ~const StoreSingle,
97 |     {
98 |         self.array.as_slice().as_mut_ptr() as *const T
99 |     }
100 | 
101 |     /// Returns a raw pointer to the vector’s buffer.
102 |     ///
103 |     /// If the vector didn't allocate yet, that is, if its capacity is 0, this pointer is dangling, and valid for zero
104 |     /// sized reads.
105 |     pub const fn as_mut_ptr(&mut self) -> *mut T
106 |     where
107 |         S: ~const StoreSingle,
108 |     {
109 |         self.array.as_mut_slice().as_mut_ptr()
110 |     }
111 | 
112 |     /// Returns a slice of the elements of the vector.
113 |     pub const fn as_slice(&self) -> &[T]
114 |     where
115 |         S: ~const StoreSingle,
116 |     {
117 |         debug_assert!(self.length <= self.capacity());
118 | 
119 |         // Safety:
120 |         // - `0 <= self.length`, as `self.length` is unsigned.
121 |         // - `self.length <= self.capacity()`, as per type invariant.
122 |         let slice = unsafe { self.array.as_sub_slice_unchecked(0..self.length) };
123 | 
124 |         // Safety:
125 |         // - Slots in `0..self.length` are initialized, as per type invariant.
126 |         // - `self` is borrowed immutably for the lifetime of the result.
127 |         unsafe { slice.as_ref() }
128 |     }
129 | 
130 |     /// Returns a mutable slice of the elements of the vector.
131 |     pub const fn as_mut_slice(&mut self) -> &mut [T]
132 |     where
133 |         S: ~const StoreSingle,
134 |     {
135 |         debug_assert!(self.length <= self.capacity());
136 | 
137 |         // Safety:
138 |         // - `0 <= self.length`, as `self.length` is unsigned.
139 |         // - `self.length <= self.capacity()`, as per type invariant.
140 |         let mut slice = unsafe { self.array.as_mut_sub_slice_unchecked(0..self.length) };
141 | 
142 |         // Safety:
143 |         // - Slots in `0..self.length` are initialized, as per type invariant.
144 |         // - `self` is borrowed mutably for the lifetime of the result.
145 |         unsafe { slice.as_mut() }
146 |     }
147 | 
148 |     /// Returns the remaining spare capacity of the vector as a slice of `MaybeUninit<T>`.
149 |     pub const fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>]
150 |     where
151 |         S: ~const StoreSingle,
152 |     {
153 |         debug_assert!(self.length <= self.capacity());
154 | 
155 |         let capacity = self.capacity();
156 | 
157 |         // Safety:
158 |         // - `self.length <= self.capacity()`, as per type invariant.
159 |         // - `self.capacity() <= self.capacity()`, tautologically.
160 |         let slice = unsafe { self.array.as_mut_sub_slice_unchecked(self.length..capacity) };
161 | 
162 |         // Safety:
163 |         // - `self` is borrowed mutably for the lifetime of the result.
164 |         unsafe { slice.as_uninit_slice_mut() }
165 |     }
166 | }
167 | 
168 | impl<T, S: StoreSingle> StoreVec<T, S> {
169 |     /// Reserves capacity for at least `additional` more elements.
170 |     ///
171 |     /// # Panics
172 |     ///
173 |     /// Panics if the new capacity exceeds `isize::MAX` bytes.
174 |     pub const fn reserve(&mut self, additional: usize)
175 |     where
176 |         S: ~const StoreSingle + ~const StoreDangling,
177 |     {
178 |         if additional < self.capacity() && self.length <= self.capacity() - additional {
179 |             return;
180 |         }
181 | 
182 |         self.grow_for(additional)
183 |     }
184 | }
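// A sketch of how `reserve`, `spare_capacity_mut` and `set_len` compose, following
// the familiar `Vec` pattern; the concrete store type here is an assumption, any
// `StoreSingle` would do:
//
//     let mut v: StoreVec<u8, InlineSingleStore<[u8; 8]>> = StoreVec::new();
//     v.reserve(2);
//     let spare = v.spare_capacity_mut();
//     spare[0].write(1);
//     spare[1].write(2);
//     // Safety: 2 <= capacity, and the first 2 slots were just initialized.
//     unsafe { v.set_len(2) };
//     assert_eq!([1, 2], v.as_slice());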
185 | 
186 | impl<T, S: StoreSingle> StoreVec<T, S> {
187 |     /// Returns a reference to the element at index `n`, if any.
188 |     pub const fn get(&self, n: usize) -> Option<&T>
189 |     where
190 |         S: ~const StoreSingle,
191 |     {
192 |         debug_assert!(self.length <= self.capacity());
193 | 
194 |         if n >= self.length {
195 |             return None;
196 |         }
197 | 
198 |         // Safety:
199 |         // - `n < self.length`, as per the condition above.
200 |         // - `self.length <= self.capacity()`, as per type invariant.
201 |         let slice = unsafe { self.array.as_sub_slice_unchecked(n..self.length) };
202 | 
203 |         let slot = slice.as_mut_ptr() as *const T;
204 | 
205 |         // Safety:
206 |         // - Slots in `0..self.length` are initialized, as per type invariant.
207 |         // - `self` is borrowed immutably for the lifetime of the result.
208 |         unsafe { Some(&*slot) }
209 |     }
210 | 
211 |     /// Returns a mutable reference to the element at index `n`, if any.
212 |     pub const fn get_mut(&mut self, n: usize) -> Option<&mut T>
213 |     where
214 |         S: ~const StoreSingle,
215 |     {
216 |         debug_assert!(self.length <= self.capacity());
217 | 
218 |         if n >= self.length {
219 |             return None;
220 |         }
221 | 
222 |         // Safety:
223 |         // - `n < self.length`, as per the condition above.
224 |         // - `self.length <= self.capacity()`, as per type invariant.
225 |         let slice = unsafe { self.array.as_mut_sub_slice_unchecked(n..self.length) };
226 | 
227 |         let slot = slice.as_mut_ptr();
228 | 
229 |         // Safety:
230 |         // - Slots in `0..self.length` are initialized, as per type invariant.
231 |         // - `self` is borrowed mutably for the lifetime of the result.
232 |         unsafe { Some(&mut *slot) }
233 |     }
234 | }
235 | 
236 | impl<T, S: StoreSingle> StoreVec<T, S> {
237 |     /// Clears the vector, removing all values.
238 |     pub fn clear(&mut self) {
239 |         debug_assert!(self.length <= self.capacity());
240 | 
241 |         let length = mem::replace(&mut self.length, 0);
242 | 
243 |         // Safety:
244 |         // - `0 <= length`, as `length` is unsigned.
245 |         // - `length <= self.capacity()`, as per type invariant.
246 |         let slice = unsafe { self.array.as_mut_sub_slice_unchecked(0..length) };
247 | 
248 |         let pointer: *mut [T] = slice.as_ptr();
249 | 
250 |         // Safety:
251 |         // - `pointer` is properly aligned.
252 |         // - `pointer` is non-null.
253 |         // - `pointer` is valid for both reads and writes.
254 |         // - `pointer` points to a slice of initialized elements.
255 |         unsafe { ptr::drop_in_place(pointer) };
256 |     }
257 | 
258 |     /// Appends an element at the back of the vector.
259 |     pub const fn push(&mut self, value: T)
260 |     where
261 |         S: ~const StoreSingle + ~const StoreDangling,
262 |     {
263 |         if self.length == self.capacity() {
264 |             self.grow_for(1);
265 |         }
266 | 
267 |         let spare = self.spare_capacity_mut();
268 |         debug_assert!(!spare.is_empty());
269 | 
270 |         let slot = spare.as_mut_ptr() as *mut T;
271 | 
272 |         // Safety:
273 |         // - `slot` is well aligned.
274 |         // - `slot` is valid for writes of size `T`, since `spare` is not empty after growth.
275 |         unsafe { ptr::write(slot, value) };
276 | 
277 |         self.length += 1;
278 |     }
279 | 
280 |     /// Removes the last element from this vector and returns it, if any.
281 |     pub const fn pop(&mut self) -> Option<T>
282 |     where
283 |         S: ~const StoreSingle,
284 |     {
285 |         debug_assert!(self.length <= self.capacity());
286 | 
287 |         if self.is_empty() {
288 |             return None;
289 |         }
290 | 
291 |         self.length -= 1;
292 | 
293 |         // Safety:
294 |         // - `0 <= self.length`, as `self.length` is unsigned.
295 |         // - `self.length <= self.capacity()`, as per type invariant.
296 |         let slice = unsafe { self.array.as_mut_sub_slice_unchecked(self.length..self.capacity()) };
297 | 
298 |         let slot = slice.as_mut_ptr() as *const T;
299 | 
300 |         // Safety:
301 |         // - `slot` is well-aligned.
302 |         // - `slot` is valid for reads of size `T`.
303 |         // - `slot` is initialized, as per type invariant.
304 |         let element = unsafe { ptr::read(slot) };
305 | 
306 |         Some(element)
307 |     }
308 | }
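// A quick usage sketch of the API above, patterned on the `tests_inline` module at
// the bottom of this file (the store type is, again, an assumption):
//
//     let mut v: StoreVec<i32, InlineSingleStore<[i32; 4]>> = StoreVec::new();
//     v.push(1);
//     v.push(2);
//     assert_eq!(Some(&2), v.get(1));
//     assert_eq!(Some(2), v.pop());
//     assert_eq!(1, v.len());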
309 | 
310 | impl<T, S: StoreSingle + Default> Default for StoreVec<T, S> {
311 |     fn default() -> Self {
312 |         Self::new()
313 |     }
314 | }
315 | 
316 | impl<T, S: StoreSingle> Drop for StoreVec<T, S> {
317 |     fn drop(&mut self) {
318 |         self.clear();
319 |     }
320 | }
321 | 
322 | //
323 | // Implementation
324 | //
325 | 
326 | impl<T, S: StoreSingle> StoreVec<T, S> {
327 |     #[inline(never)]
328 |     const fn grow_for(&mut self, additional: usize)
329 |     where
330 |         S: ~const StoreSingle + ~const StoreDangling,
331 |     {
332 |         let Some(target_capacity) = self.length.checked_add(additional) else {
333 |             UniqueArray::<T, S>::capacity_exceeded()
334 |         };
335 | 
336 |         // Nothing to do if the current capacity is already sufficient; the caller needn't have called.
337 |         if target_capacity <= self.capacity() {
338 |             return;
339 |         }
340 | 
341 |         let target_capacity = UniqueArray::<T, S>::round_up_capacity(target_capacity);
342 | 
343 |         // Safety:
344 |         // - `target_capacity` is greater than or equal to `self.array.capacity()`.
345 |         unsafe { self.array.grow_to(target_capacity) };
346 |     }
347 | }
348 | 
349 | struct UniqueArray<T, S: StoreSingle> {
350 |     handle: UniqueSingleHandle<[T], S::Handle>,
351 |     store: S,
352 | }
353 | 
354 | impl<T, S: StoreSingle> UniqueArray<T, S> {
355 |     const fn new_in(store: S) -> Self
356 |     where
357 |         S: ~const StoreDangling,
358 |     {
359 |         let handle = UniqueSingleHandle::dangling_slice(&store);
360 | 
361 |         Self { handle, store }
362 |     }
363 | 
364 |     const fn with_capacity_in(capacity: usize, mut store: S) -> Self
365 |     where
366 |         S: ~const StoreSingle + ~const StoreDangling,
367 |     {
368 |         let handle = UniqueSingleHandle::allocate_slice(capacity, &mut store);
369 | 
370 |         Self { handle, store }
371 |     }
372 | 
373 |     const fn capacity(&self) -> usize {
374 |         self.handle.len()
375 |     }
376 | 
377 |     const fn as_slice(&self) -> NonNull<[T]>
378 |     where
379 |         S: ~const StoreSingle,
380 |     {
381 |         // Safety:
382 |         // - `self.handle` is a valid or dangling handle.
383 |         // - `self.handle` was obtained from `self.store` in either case.
384 |         unsafe { self.handle.resolve_raw(&self.store) }
385 |     }
386 | 
387 |     const fn as_mut_slice(&mut self) -> NonNull<[T]>
388 |     where
389 |         S: ~const StoreSingle,
390 |     {
391 |         // Safety:
392 |         // - `self.handle` is a valid or dangling handle.
393 |         // - `self.handle` was obtained from `self.store` in either case.
394 |         unsafe { self.handle.resolve_raw_mut(&mut self.store) }
395 |     }
396 | 
397 |     // # Safety
398 |     //
399 |     // - `range.start <= range.end`.
400 |     // - `range.end <= self.capacity()`.
401 |     const unsafe fn as_sub_slice_unchecked(&self, range: Range<usize>) -> NonNull<[T]>
402 |     where
403 |         S: ~const StoreSingle,
404 |     {
405 |         debug_assert!(range.start <= range.end);
406 |         debug_assert!(range.end <= self.handle.len());
407 | 
408 |         let slice = self.as_slice();
409 | 
410 |         let pointer = slice.as_mut_ptr();
411 | 
412 |         // Safety:
413 |         // - `pointer` is correctly aligned.
414 |         // - `range.start <= slice.len()`.
415 |         let pointer = unsafe { pointer.add(range.start) };
416 | 
417 |         // Safety:
418 |         // - `pointer` is non-null, since it comes from a `NonNull`, and was not decremented.
419 |         let pointer = unsafe { NonNull::new_unchecked(pointer) };
420 | 
421 |         NonNull::slice_from_raw_parts(pointer, range.end - range.start)
422 |     }
423 | 
424 |     // # Safety
425 |     //
426 |     // - `range.start <= range.end`.
427 |     // - `range.end <= self.capacity()`.
428 |     const unsafe fn as_mut_sub_slice_unchecked(&mut self, range: Range<usize>) -> NonNull<[T]>
429 |     where
430 |         S: ~const StoreSingle,
431 |     {
432 |         debug_assert!(range.start <= range.end);
433 |         debug_assert!(range.end <= self.handle.len());
434 | 
435 |         let slice = self.as_mut_slice();
436 | 
437 |         let pointer = slice.as_mut_ptr();
438 | 
439 |         // Safety:
440 |         // - `pointer` is correctly aligned.
441 |         // - `range.start <= slice.len()`.
442 |         let pointer = unsafe { pointer.add(range.start) };
443 | 
444 |         // Safety:
445 |         // - `pointer` is non-null, since it comes from a `NonNull`, and was not decremented.
446 |         let pointer = unsafe { NonNull::new_unchecked(pointer) };
447 | 
448 |         NonNull::slice_from_raw_parts(pointer, range.end - range.start)
449 |     }
450 | }
451 | 
452 | impl<T, S: StoreSingle> UniqueArray<T, S> {
453 |     #[cold]
454 |     #[inline(never)]
455 |     const fn capacity_exceeded() -> ! {
456 |         panic!("New capacity exceeds isize::MAX bytes")
457 |     }
458 | 
459 |     const fn round_up_capacity(min_capacity: usize) -> usize {
460 |         if min_capacity <= 1 || min_capacity.count_ones() == 1 {
461 |             return min_capacity;
462 |         }
463 | 
464 |         if min_capacity >= 1 << (usize::BITS - 1) {
465 |             Self::capacity_exceeded()
466 |         }
467 | 
468 |         let shift = usize::BITS - (min_capacity - 1).leading_zeros();
469 | 
470 |         1 << shift
471 |     }
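// A worked example of the rounding above, for `min_capacity = 5`: neither early-out
// applies (5 is greater than 1 and has two bits set), `5 - 1 = 4 = 0b100`, so
// `leading_zeros` returns `usize::BITS - 3`, `shift = 3`, and the result is
// `1 << 3 = 8`, the next power of two. Exact powers of two (and 0 or 1) return
// unchanged through the early-out, and anything at or above `1 << (usize::BITS - 1)`
// panics rather than overflowing the shift.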
472 | 
473 |     // # Safety
474 |     //
475 |     // - `target_capacity` must be greater than or equal to `self.capacity()`.
476 |     //
477 |     // # Panics
478 |     //
479 |     // If the new capacity exceeds `isize::MAX` bytes.
480 |     const unsafe fn grow_to(&mut self, target_capacity: usize)
481 |     where
482 |         S: ~const StoreSingle + ~const StoreDangling,
483 |     {
484 |         const MAX_BYTES: usize = isize::MAX as usize;
485 | 
486 |         let Some(target_bytes) = target_capacity.checked_mul(mem::size_of::<T>()) else {
487 |             Self::capacity_exceeded()
488 |         };
489 | 
490 |         if target_bytes > MAX_BYTES {
491 |             Self::capacity_exceeded()
492 |         }
493 | 
494 |         if self.handle.is_empty() {
495 |             self.handle = UniqueSingleHandle::allocate_slice(target_capacity, &mut self.store);
496 |         } else {
497 |             // Safety:
498 |             // - `self.handle` was allocated by `self.store`.
499 |             // - `self.handle` is still valid.
500 |             // - `target_capacity` is greater than or equal to `self.handle.len()`.
501 |             unsafe { self.handle.grow(target_capacity, &mut self.store) };
502 |         }
503 |     }
504 | }
505 | 
506 | impl<T, S: StoreSingle> Drop for UniqueArray<T, S> {
507 |     fn drop(&mut self) {
508 |         if self.handle.is_empty() {
509 |             return;
510 |         }
511 | 
512 |         // Safety:
513 |         // - `self.handle` is valid.
514 |         // - `self.handle` will not be used after this point.
515 |         let handle = unsafe { ptr::read(&self.handle) };
516 | 
517 |         // Safety:
518 |         // - `handle` is still valid, notably it is not dangling since its length is non-zero.
519 |         // - `handle` was allocated by `self.store`.
520 |         unsafe { handle.deallocate(&mut self.store) };
521 |     }
522 | }
523 | 
524 | #[cfg(test)]
525 | mod tests_inline {
526 |     use crate::store::InlineSingleStore;
527 | 
528 |     use super::*;
529 | 
530 |     type InlineVec<T, const N: usize> = StoreVec<T, InlineSingleStore<[T; N]>>;
531 | 
532 |     #[test]
533 |     fn const_inline_vec() {
534 |         const fn fib<const N: usize>() -> InlineVec<u32, N> {
535 |             let mut v = InlineVec::new_in(InlineSingleStore::new());
536 | 
537 |             if N > 0 {
538 |                 v.push(0);
539 |             }
540 | 
541 |             if N > 1 {
542 |                 v.push(1);
543 |             }
544 | 
545 |             let mut n_2 = 0;
546 |             let mut n_1 = 1;
547 | 
548 |             while v.len() < N {
549 |                 let n = n_1 + n_2;
550 |                 n_2 = n_1;
551 |                 n_1 = n;
552 | 
553 |                 v.push(n);
554 |             }
555 | 
556 |             v
557 |         }
558 | 
559 |         static FIB: InlineVec<u32, 10> = fib::<10>();
560 | 
561 |         assert_eq!(&[0, 1, 1, 2, 3, 5, 8, 13, 21, 34][..], FIB.as_slice());
562 |     }
563 | 
564 |     #[test]
565 |     fn send_sync() {
566 |         fn require_send<T: Send>() {}
567 |         fn require_sync<T: Sync>() {}
568 | 
569 |         require_send::<InlineVec<String, 12>>();
570 |         require_sync::<InlineVec<String, 12>>();
571 |     }
572 | 
573 |     #[test]
574 |     fn brush() {
575 |         let mut v = InlineVec::<String, 12>::new();
576 | 
577 |         assert_eq!(0, v.len());
578 |         assert_eq!(0, v.capacity());
579 |         assert_eq!(None, v.pop());
580 | 
581 |         v.push(String::from("0"));
582 | 
583 |         assert_eq!(1, v.len());
584 |         assert_eq!(12, v.capacity());
585 | 
586 |         v.push(String::from("2"));
587 | 
588 |         assert_eq!(Some("2"), v.pop().as_deref());
589 | 
590 |         v.push(String::from("2"));
591 |         v.push(String::from("2"));
592 | 
593 |         let s = v.get_mut(1).unwrap();
594 |         s.clear();
595 |         s.push('1');
596 | 
597 |         assert_eq!(["0", "1", "2"], v.as_slice());
598 |     }
599 | } // mod tests_inline
600 | 
601 | #[cfg(test)]
602 | mod tests_stack {
603 |     use crate::store::{StackBumpBlock, StackBumpStore};
604 | 
605 |     use super::*;
606 | 
607 |     type StackVec<'a, T> = StoreVec<T, StackBumpStore<'a>>;
608 | 
609 |     #[test]
610 |     fn brush() {
611 |         let block = StackBumpBlock::<[String; 12]>::new();
612 | 
613 |         let mut v = StackVec::<'_, String>::new_in(block.create_store());
614 | 
615 |         assert_eq!(0, v.len());
616 |         assert_eq!(0, v.capacity());
617 |         assert_eq!(None, v.pop());
618 | 
619 |         v.push(String::from("0"));
620 | 
621 |         assert_eq!(1, v.len());
622 |         assert_eq!(1, v.capacity());
623 | 
624 |         v.push(String::from("2"));
625 | 
626 |         assert_eq!(Some("2"), v.pop().as_deref());
627 | 
628 |         v.push(String::from("2"));
629 |         v.push(String::from("2"));
630 | 
631 |         let s = v.get_mut(1).unwrap();
632 |         s.clear();
633 |         s.push('1');
634 | 
635 |         assert_eq!(["0", "1", "2"], v.as_slice());
636 |     }
637 | } // mod tests_stack
638 | 
--------------------------------------------------------------------------------
/src/collection/utils.rs:
--------------------------------------------------------------------------------
1 | use core::{
2 |     alloc::{AllocError, Allocator, Layout},
3 |     ptr::NonNull,
4 | };
5 | 
6 | pub(crate) use std::alloc::Global;
7 | 
8 | #[derive(Debug, Default)]
9 | pub(crate) struct NonAllocator;
10 | 
11 | unsafe impl Allocator for NonAllocator {
12 |     fn allocate(&self, _layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
13 |         Err(AllocError)
14 |     }
15 | 
16 |     unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
17 |         panic!("NonAllocator::deallocate called!")
18 |     }
19 | }
20 | 
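// `NonAllocator` unconditionally refuses to allocate, which the collection tests use
// to exercise the error paths of the `try_*` constructors, e.g.
// `StoreBox::try_new_in(1u8, NonAllocator).unwrap_err()`; `deallocate` may panic
// freely, since no allocation can ever have succeeded.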
--------------------------------------------------------------------------------
/src/extension.rs:
--------------------------------------------------------------------------------
1 | //! Possible extensions to be built around `Store`.
2 | 
3 | pub mod typed;
4 | pub mod typed_metadata;
5 | pub mod unique;
6 | 
7 | pub mod typed_single;
8 | pub mod unique_single;
--------------------------------------------------------------------------------
/src/extension/typed.rs:
--------------------------------------------------------------------------------
1 | //! Typed handle, for bonus type safety.
2 | 
3 | use core::{
4 |     alloc::{AllocError, Layout},
5 |     marker::Unsize,
6 |     mem,
7 |     ptr::{self, Alignment, NonNull},
8 | };
9 | 
10 | #[cfg(feature = "coercible-metadata")]
11 | use core::ops::CoerceUnsized;
12 | 
13 | use crate::{
14 |     alloc,
15 |     extension::typed_metadata::TypedMetadata,
16 |     interface::{Store, StoreDangling},
17 | };
18 | 
19 | /// Arbitrary typed handle, for type safety, and coercion.
20 | ///
21 | /// A typed handle may be dangling, or may be invalid. It is the responsibility of the user to ensure that the typed
22 | /// handle is valid when necessary.
23 | pub struct TypedHandle<T: ?Sized, H> {
24 |     handle: H,
25 |     metadata: TypedMetadata<T>,
26 | }
27 | 
28 | impl<T, H: Copy> TypedHandle<T, H> {
29 |     /// Creates a dangling handle.
30 |     ///
31 |     /// Calls `handle_alloc_error` if the creation of the handle fails.
32 |     #[inline(always)]
33 |     pub const fn dangling<S>(store: &S) -> Self
34 |     where
35 |         S: ~const StoreDangling<Handle = H>,
36 |     {
37 |         let Ok(this) = Self::try_dangling(store) else {
38 |             alloc::handle_alloc_error(Layout::new::<T>())
39 |         };
40 | 
41 |         this
42 |     }
43 | 
44 |     /// Attempts to create a dangling handle.
45 |     ///
46 |     /// Returns `AllocError` on failure.
47 |     #[inline(always)]
48 |     pub const fn try_dangling<S>(store: &S) -> Result<Self, AllocError>
49 |     where
50 |         S: ~const StoreDangling<Handle = H>,
51 |     {
52 |         let Ok(handle) = store.dangling(Alignment::of::<T>()) else {
53 |             return Err(AllocError);
54 |         };
55 | 
56 |         let metadata = TypedMetadata::new();
57 | 
58 |         Ok(Self { handle, metadata })
59 |     }
60 | 
61 |     /// Creates a new handle, pointing to a `T`.
62 |     #[inline(always)]
63 |     pub fn new<S>(value: T, store: &S) -> Self
64 |     where
65 |         S: Store<Handle = H>,
66 |     {
67 |         let Ok(this) = Self::try_new(value, store) else {
68 |             alloc::handle_alloc_error(Layout::new::<T>())
69 |         };
70 | 
71 |         this
72 |     }
73 | 
74 |     /// Attempts to create a new handle, pointing to a `T`.
75 |     #[inline(always)]
76 |     pub fn try_new<S>(value: T, store: &S) -> Result<Self, AllocError>
77 |     where
78 |         S: Store<Handle = H>,
79 |     {
80 |         let (handle, _) = store.allocate(Layout::new::<T>())?;
81 | 
82 |         // Safety:
83 |         // - `handle` was just allocated by `store`.
84 |         // - `handle` is still valid, as no other operation occurred on `store`.
85 |         let pointer = unsafe { store.resolve(handle) };
86 | 
87 |         // Safety:
88 |         // - `pointer` points to a writeable memory area.
89 |         // - `pointer` points to a sufficiently aligned and sized memory area.
90 |         // - `pointer` has exclusive access to the memory area it points to.
91 |         unsafe { ptr::write(pointer.cast().as_ptr(), value) };
92 | 
93 |         let metadata = TypedMetadata::new();
94 | 
95 |         Ok(Self { handle, metadata })
96 |     }
97 | 
98 |     /// Allocates a new handle, with enough space for `T`.
99 |     ///
100 |     /// The allocated memory is left uninitialized.
101 |     #[inline(always)]
102 |     pub const fn allocate<S>(store: &S) -> Self
103 |     where
104 |         S: ~const Store<Handle = H>,
105 |     {
106 |         let Ok(this) = Self::try_allocate(store) else {
107 |             alloc::handle_alloc_error(Layout::new::<T>())
108 |         };
109 | 
110 |         this
111 |     }
112 | 
113 |     /// Attempts to allocate a new handle, with enough space for `T`.
114 |     ///
115 |     /// The allocated memory is left uninitialized.
116 |     #[inline(always)]
117 |     pub const fn try_allocate<S>(store: &S) -> Result<Self, AllocError>
118 |     where
119 |         S: ~const Store<Handle = H>,
120 |     {
121 |         let Ok((handle, _)) = store.allocate(Layout::new::<T>()) else {
122 |             return Err(AllocError);
123 |         };
124 | 
125 |         let metadata = TypedMetadata::new();
126 | 
127 |         Ok(Self { handle, metadata })
128 |     }
129 | 
130 |     /// Allocates a new handle, with enough space for `T`.
131 |     ///
132 |     /// The allocated memory is zeroed out.
133 |     #[inline(always)]
134 |     pub const fn allocate_zeroed<S>(store: &S) -> Self
135 |     where
136 |         S: ~const Store<Handle = H>,
137 |     {
138 |         let Ok(this) = Self::try_allocate_zeroed(store) else {
139 |             alloc::handle_alloc_error(Layout::new::<T>())
140 |         };
141 | 
142 |         this
143 |     }
144 | 
145 |     /// Attempts to allocate a new handle, with enough space for `T`.
146 |     ///
147 |     /// The allocated memory is zeroed out.
148 |     #[inline(always)]
149 |     pub const fn try_allocate_zeroed<S>(store: &S) -> Result<Self, AllocError>
150 |     where
151 |         S: ~const Store<Handle = H>,
152 |     {
153 |         let Ok((handle, _)) = store.allocate_zeroed(Layout::new::<T>()) else {
154 |             return Err(AllocError);
155 |         };
156 | 
157 |         let metadata = TypedMetadata::new();
158 | 
159 |         Ok(Self { handle, metadata })
160 |     }
161 | }
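// A sketch of the full lifecycle of a `TypedHandle` against a generic `Store`
// (error handling elided; the function itself is hypothetical):
//
//     fn roundtrip<S: Store>(store: &S) -> u32 {
//         let handle = TypedHandle::new(42u32, store);
//         // Safety: allocated by `store`, still valid, contains a live `u32`.
//         let value = unsafe { *handle.resolve(store) };
//         // Safety: allocated by `store`, still valid; invalidates all copies.
//         unsafe { handle.deallocate(store) };
//         value
//     }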
162 | 
163 | impl<T: ?Sized, H: Copy> TypedHandle<T, H> {
164 |     /// Creates a handle from raw parts.
165 |     ///
166 |     /// - If `handle` is valid, and associated to a block of memory which fits an instance of `T`, then the resulting
167 |     ///   typed handle is valid.
168 |     /// - If `handle` is invalid, then the resulting typed handle is invalid.
169 |     /// - If `handle` is valid and `metadata` does not fit the block of memory associated with it, then the resulting
170 |     ///   typed handle is invalid.
171 |     pub const fn from_raw_parts(handle: H, metadata: TypedMetadata<T>) -> Self {
172 |         Self { handle, metadata }
173 |     }
174 | 
175 |     /// Decomposes the (possibly wide) typed handle into its (raw) handle and metadata components.
176 |     pub const fn to_raw_parts(self) -> (H, TypedMetadata<T>) {
177 |         (self.handle, self.metadata)
178 |     }
179 | 
180 |     /// Deallocates the memory associated with the handle.
181 |     ///
182 |     /// # Safety
183 |     ///
184 |     /// - `self` must have been allocated by `store`.
185 |     /// - `self` must still be valid.
186 |     /// - `self` is invalidated alongside any copy of it.
187 |     #[inline(always)]
188 |     pub const unsafe fn deallocate<S>(&self, store: &S)
189 |     where
190 |         S: ~const Store<Handle = H>,
191 |     {
192 |         // Safety:
193 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
194 |         // - `self.handle` is still valid, as per pre-conditions.
195 |         let pointer = unsafe { self.resolve_raw(store) };
196 | 
197 |         // Safety:
198 |         // - `pointer` has valid metadata for `T`.
199 |         let layout = unsafe { Layout::for_value_raw(pointer.as_ptr() as *const T) };
200 | 
201 |         // Safety:
202 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
203 |         // - `self.handle` is still valid, as per pre-conditions.
204 |         // - `layout` fits the block of memory associated with `self.handle`.
205 |         unsafe { store.deallocate(self.handle, layout) };
206 |     }
207 | 
208 |     /// Resolves the handle to a reference.
209 |     ///
210 |     /// # Safety
211 |     ///
212 |     /// - `self` must have been allocated by `store`.
213 |     /// - `self` must still be valid.
214 |     /// - `self` must be associated to a block of memory containing a valid instance of `T`.
215 |     /// - No access through a mutable reference to this instance of `T` must overlap with accesses through the result.
216 |     /// - The reference is only guaranteed to be valid as long as `self` is valid.
217 |     /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated.
218 |     ///   Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other
219 |     ///   `resolve` calls, may invalidate the reference.
220 |     #[inline(always)]
221 |     pub const unsafe fn resolve<'a, S>(&self, store: &'a S) -> &'a T
222 |     where
223 |         S: ~const Store<Handle = H>,
224 |     {
225 |         // Safety:
226 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
227 |         // - `self.handle` is still valid, as per pre-conditions.
228 |         let pointer = unsafe { self.resolve_raw(store) };
229 | 
230 |         // Safety:
231 |         // - `pointer` points to a live instance of `T`, as per type-invariant.
232 |         // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving
233 |         //   or destroying store, though it may still be invalidated by allocating.
234 |         unsafe { pointer.as_ref() }
235 |     }
236 | 
237 |     /// Resolves the handle to a mutable reference.
238 |     ///
239 |     /// # Safety
240 |     ///
241 |     /// - `self` must have been allocated by `store`.
242 |     /// - `self` must still be valid.
243 |     /// - `self` must be associated to a block of memory containing a valid instance of `T`.
244 |     /// - No access through any reference to this instance of `T` must overlap with accesses through the result.
245 |     /// - The reference is only guaranteed to be valid as long as `self` is valid.
246 |     /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated.
247 |     ///   Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other
248 |     ///   `resolve` calls, may invalidate the reference.
249 |     #[inline(always)]
250 |     #[allow(clippy::mut_from_ref)]
251 |     pub const unsafe fn resolve_mut<'a, S>(&mut self, store: &'a S) -> &'a mut T
252 |     where
253 |         S: ~const Store<Handle = H>,
254 |     {
255 |         // Safety:
256 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
257 |         // - `self.handle` is still valid, as per pre-conditions.
258 |         let mut pointer = unsafe { self.resolve_raw(store) };
259 | 
260 |         // Safety:
261 |         // - `pointer` points to a live instance of `T`, as per type-invariant.
262 |         // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving
263 |         //   or destroying store, though it may still be invalidated by allocating.
264 |         unsafe { pointer.as_mut() }
265 |     }
266 | 
267 |     /// Resolves the handle to a non-null pointer.
268 |     ///
269 |     /// # Safety
270 |     ///
271 |     /// - `self` must have been allocated by `store`.
272 |     /// - `self` must still be valid.
273 |     /// - The pointer is only guaranteed to be valid as long as `self` is valid.
274 |     /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated.
275 |     ///   Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other
276 |     ///   `resolve` calls, may invalidate the pointer.
277 |     #[inline(always)]
278 |     pub const unsafe fn resolve_raw<S>(&self, store: &S) -> NonNull<T>
279 |     where
280 |         S: ~const Store<Handle = H>,
281 |     {
282 |         // Safety:
283 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
284 |         // - `self.handle` is still valid, as per pre-conditions.
285 |         let pointer = unsafe { store.resolve(self.handle) };
286 | 
287 |         NonNull::from_raw_parts(pointer.cast(), self.metadata.get())
288 |     }
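// The invalidation caveat above is the crux of the `Store` model; a sketch of the
// hazard, assuming a store whose memory may move when it allocates (an inline bump
// store, for instance):
//
//     let a = TypedHandle::new(1u8, &store);
//     let r = unsafe { a.resolve(&store) }; // reference into the store...
//     let b = TypedHandle::new(2u8, &store); // ...which this call may invalidate.
//     // `r` must not be used past this point unless the store is `StoreStable`.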
289 | 
290 |     /// Coerces the handle into another.
291 |     ///
292 |     /// If `self` is valid, the resulting typed handle is valid; otherwise it is invalid.
293 |     #[inline(always)]
294 |     pub const fn coerce<U: ?Sized>(&self) -> TypedHandle<U, H>
295 |     where
296 |         T: Unsize<U>,
297 |     {
298 |         let metadata = self.metadata.coerce();
299 | 
300 |         TypedHandle {
301 |             handle: self.handle,
302 |             metadata,
303 |         }
304 |     }
305 | }
306 | 
307 | impl<T, H: Copy> TypedHandle<[T], H> {
308 |     /// Creates a dangling handle.
309 |     ///
310 |     /// Calls `handle_alloc_error` if the creation of the handle fails.
311 |     #[inline(always)]
312 |     pub const fn dangling_slice<S>(store: &S) -> Self
313 |     where
314 |         S: ~const StoreDangling<Handle = H>,
315 |     {
316 |         let Ok(this) = Self::try_dangling_slice(store) else {
317 |             alloc::handle_alloc_error(Layout::new::<T>())
318 |         };
319 | 
320 |         this
321 |     }
322 | 
323 |     /// Attempts to create a dangling handle.
324 |     ///
325 |     /// Returns `AllocError` on failure.
326 |     #[inline(always)]
327 |     pub const fn try_dangling_slice<S>(store: &S) -> Result<Self, AllocError>
328 |     where
329 |         S: ~const StoreDangling<Handle = H>,
330 |     {
331 |         let Ok(handle) = store.dangling(Alignment::of::<T>()) else {
332 |             return Err(AllocError);
333 |         };
334 | 
335 |         let metadata = TypedMetadata::from_metadata(0);
336 | 
337 |         Ok(Self { handle, metadata })
338 |     }
339 | 
340 |     /// Allocates a new handle, with enough space for `size` elements of `T`.
341 |     ///
342 |     /// The allocated memory is left uninitialized.
343 |     #[inline(always)]
344 |     pub const fn allocate_slice<S>(size: usize, store: &S) -> Self
345 |     where
346 |         S: ~const Store<Handle = H> + ~const StoreDangling,
347 |     {
348 |         let Ok(this) = Self::try_allocate_slice(size, store) else {
349 |             alloc::handle_alloc_error(Layout::new::<T>())
350 |         };
351 | 
352 |         this
353 |     }
354 | 
355 |     /// Attempts to allocate a new handle, with enough space for `size` elements of `T`.
356 |     ///
357 |     /// The allocated memory is left uninitialized.
358 |     #[inline(always)]
359 |     pub const fn try_allocate_slice<S>(size: usize, store: &S) -> Result<Self, AllocError>
360 |     where
361 |         S: ~const Store<Handle = H> + ~const StoreDangling,
362 |     {
363 |         if mem::size_of::<T>() == 0 {
364 |             let Ok(mut this) = Self::try_dangling_slice(store) else {
365 |                 alloc::handle_alloc_error(Layout::new::<T>())
366 |             };
367 | 
368 |             this.metadata = TypedMetadata::from_metadata(usize::MAX);
369 | 
370 |             return Ok(this);
371 |         }
372 | 
373 |         let Ok(layout) = Self::layout(size) else {
374 |             return Err(AllocError);
375 |         };
376 | 
377 |         let Ok((handle, bytes)) = store.allocate(layout) else {
378 |             return Err(AllocError);
379 |         };
380 | 
381 |         debug_assert!(bytes >= layout.size());
382 | 
383 |         let metadata = TypedMetadata::from_metadata(bytes / mem::size_of::<T>());
384 | 
385 |         Ok(Self { handle, metadata })
386 |     }
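// Note that the zero-sized-element branch above never touches the store: for ZSTs a
// dangling handle with a length of `usize::MAX` is returned, since arbitrarily many
// zero-sized values "fit" in no storage at all.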
387 | 
388 |     /// Allocates a new handle, with enough space for `size` elements of `T`.
389 |     ///
390 |     /// The allocated memory is zeroed out.
391 |     #[inline(always)]
392 |     pub const fn allocate_zeroed_slice<S>(size: usize, store: &S) -> Self
393 |     where
394 |         S: ~const Store<Handle = H> + ~const StoreDangling,
395 |     {
396 |         let Ok(this) = Self::try_allocate_zeroed_slice(size, store) else {
397 |             alloc::handle_alloc_error(Layout::new::<T>())
398 |         };
399 | 
400 |         this
401 |     }
402 | 
403 |     /// Attempts to allocate a new handle, with enough space for `size` elements of `T`.
404 |     ///
405 |     /// The allocated memory is zeroed out.
406 |     #[inline(always)]
407 |     pub const fn try_allocate_zeroed_slice<S>(size: usize, store: &S) -> Result<Self, AllocError>
408 |     where
409 |         S: ~const Store<Handle = H> + ~const StoreDangling,
410 |     {
411 |         if mem::size_of::<T>() == 0 {
412 |             let Ok(mut this) = Self::try_dangling_slice(store) else {
413 |                 alloc::handle_alloc_error(Layout::new::<T>())
414 |             };
415 | 
416 |             this.metadata = TypedMetadata::from_metadata(usize::MAX);
417 | 
418 |             return Ok(this);
419 |         }
420 | 
421 |         let Ok(layout) = Self::layout(size) else {
422 |             return Err(AllocError);
423 |         };
424 | 
425 |         let Ok((handle, bytes)) = store.allocate_zeroed(layout) else {
426 |             return Err(AllocError);
427 |         };
428 | 
429 |         debug_assert!(bytes >= layout.size());
430 | 
431 |         let metadata = TypedMetadata::from_metadata(bytes / mem::size_of::<T>());
432 | 
433 |         Ok(Self { handle, metadata })
434 |     }
435 | 
436 |     /// Returns whether the memory area associated to `self` may not contain any element.
437 |     pub const fn is_empty(&self) -> bool {
438 |         self.metadata.get() == 0
439 |     }
440 | 
441 |     /// Returns the number of elements the memory area associated to `self` may contain.
442 |     pub const fn len(&self) -> usize {
443 |         self.metadata.get()
444 |     }
445 | 
446 |     /// Grows the block of memory associated with the handle.
447 |     ///
448 |     /// On success, all the copies of the handle are invalidated, and the extra memory is left uninitialized. On
449 |     /// failure, `handle_alloc_error` is called.
450 |     ///
451 |     /// # Safety
452 |     ///
453 |     /// - `self` must have been allocated by `store`.
454 |     /// - `self` must still be valid.
455 |     /// - `new_size` must be greater than or equal to `self.len()`.
456 |     pub const unsafe fn grow<S>(&mut self, new_size: usize, store: &S)
457 |     where
458 |         S: ~const Store<Handle = H>,
459 |     {
460 |         // Safety:
461 |         // - `self` has been allocated by `store`, as per pre-conditions.
462 |         // - `self` is still valid, as per pre-conditions.
463 |         // - `new_size` must be greater than or equal to `self.len()`, as per pre-conditions.
464 |         let result = unsafe { self.try_grow(new_size, store) };
465 | 
466 |         if result.is_err() {
467 |             alloc::handle_alloc_error(Layout::new::<T>())
468 |         }
469 |     }
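// A sketch of the growth contract (same assumptions as the lifecycle sketch
// earlier); the handle is updated in place, and the reported length may exceed the
// requested size whenever the store over-allocates:
//
//     let mut handle = TypedHandle::<[u8], _>::allocate_slice(4, &store);
//     let doubled = handle.len() * 2;
//     // Safety: allocated by `store`, still valid, and `doubled >= self.len()`.
//     unsafe { handle.grow(doubled, &store) };
//     assert!(handle.len() >= doubled);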
470 | 
471 |     /// Attempts to grow the block of memory associated with the handle.
472 |     ///
473 |     /// On success, all the copies of the handle are invalidated, and the extra memory is left uninitialized. On
474 |     /// failure, an error is returned.
475 |     ///
476 |     /// # Safety
477 |     ///
478 |     /// - `self` must have been allocated by `store`.
479 |     /// - `self` must still be valid.
480 |     /// - `new_size` must be greater than or equal to `self.len()`.
481 |     pub const unsafe fn try_grow<S>(&mut self, new_size: usize, store: &S) -> Result<(), AllocError>
482 |     where
483 |         S: ~const Store<Handle = H>,
484 |     {
485 |         debug_assert!(new_size >= self.len());
486 | 
487 |         let Ok(old_layout) = Self::layout(self.len()) else {
488 |             return Err(AllocError);
489 |         };
490 | 
491 |         let Ok(new_layout) = Self::layout(new_size) else {
492 |             return Err(AllocError);
493 |         };
494 | 
495 |         // Safety:
496 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
497 |         // - `self.handle` is still valid, as per pre-conditions.
498 |         // - `old_layout` fits the block of memory associated to `self.handle`, by construction.
499 |         // - `new_layout`'s size is greater than or equal to the size of `old_layout`, as per pre-conditions.
500 |         let result = unsafe { store.grow(self.handle, old_layout, new_layout) };
501 | 
502 |         let Ok((handle, bytes)) = result else {
503 |             return Err(AllocError);
504 |         };
505 | 
506 |         debug_assert!(bytes >= new_layout.size());
507 | 
508 |         self.handle = handle;
509 |         self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::<T>());
510 | 
511 |         Ok(())
512 |     }
513 | 
514 |     /// Grows the block of memory associated with the handle.
515 |     ///
516 |     /// On success, all the copies of the handle are invalidated, and the extra memory is zeroed. On failure,
517 |     /// `handle_alloc_error` is called.
518 |     ///
519 |     /// # Safety
520 |     ///
521 |     /// - `self` must have been allocated by `store`.
522 |     /// - `self` must still be valid.
523 |     /// - `new_size` must be greater than or equal to `self.len()`.
524 |     pub const unsafe fn grow_zeroed<S>(&mut self, new_size: usize, store: &S)
525 |     where
526 |         S: ~const Store<Handle = H>,
527 |     {
528 |         // Safety:
529 |         // - `self` has been allocated by `store`, as per pre-conditions.
530 |         // - `self` is still valid, as per pre-conditions.
531 |         // - `new_size` must be greater than or equal to `self.len()`, as per pre-conditions.
532 |         let result = unsafe { self.try_grow_zeroed(new_size, store) };
533 | 
534 |         if result.is_err() {
535 |             alloc::handle_alloc_error(Layout::new::<T>())
536 |         }
537 |     }
538 | 
539 |     /// Attempts to grow the block of memory associated with the handle.
540 |     ///
541 |     /// On success, all the copies of the handle are invalidated, and the extra memory is zeroed. On failure, an error
542 |     /// is returned.
543 |     ///
544 |     /// # Safety
545 |     ///
546 |     /// - `self` must have been allocated by `store`.
547 |     /// - `self` must still be valid.
548 |     /// - `new_size` must be greater than or equal to `self.len()`.
549 |     pub const unsafe fn try_grow_zeroed<S>(&mut self, new_size: usize, store: &S) -> Result<(), AllocError>
550 |     where
551 |         S: ~const Store<Handle = H>,
552 |     {
553 |         debug_assert!(new_size >= self.len());
554 | 
555 |         let Ok(old_layout) = Self::layout(self.len()) else {
556 |             return Err(AllocError);
557 |         };
558 | 
559 |         let Ok(new_layout) = Self::layout(new_size) else {
560 |             return Err(AllocError);
561 |         };
562 | 
563 |         // Safety:
564 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
565 |         // - `self.handle` is still valid, as per pre-conditions.
566 |         // - `old_layout` fits the block of memory associated to `self.handle`, by construction.
567 |         // - `new_layout`'s size is greater than or equal to the size of `old_layout`, as per pre-conditions.
568 |         let result = unsafe { store.grow_zeroed(self.handle, old_layout, new_layout) };
569 | 
570 |         let Ok((handle, bytes)) = result else {
571 |             return Err(AllocError);
572 |         };
573 | 
574 |         debug_assert!(bytes >= new_layout.size());
575 | 
576 |         self.handle = handle;
577 |         self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::<T>());
578 | 
579 |         Ok(())
580 |     }
581 | 
582 |     /// Shrinks the block of memory associated with the handle.
583 |     ///
584 |     /// On success, all the copies of the handle are invalidated. On failure, `handle_alloc_error` is called.
585 |     ///
586 |     /// # Safety
587 |     ///
588 |     /// - `self` must have been allocated by `store`.
589 |     /// - `self` must still be valid.
590 |     /// - `new_size` must be less than or equal to `self.len()`.
591 |     pub const unsafe fn shrink<S>(&mut self, new_size: usize, store: &S)
592 |     where
593 |         S: ~const Store<Handle = H>,
594 |     {
595 |         // Safety:
596 |         // - `self` has been allocated by `store`, as per pre-conditions.
597 |         // - `self` is still valid, as per pre-conditions.
598 |         // - `new_size` must be less than or equal to `self.len()`, as per pre-conditions.
599 |         let result = unsafe { self.try_shrink(new_size, store) };
600 | 
601 |         if result.is_err() {
602 |             alloc::handle_alloc_error(Layout::new::<T>())
603 |         }
604 |     }
605 | 
606 |     /// Attempts to shrink the block of memory associated with the handle.
607 |     ///
608 |     /// On success, all the copies of the handle are invalidated. On failure, an error is returned.
609 |     ///
610 |     /// # Safety
611 |     ///
612 |     /// - `self` must have been allocated by `store`.
613 |     /// - `self` must still be valid.
614 |     /// - `new_size` must be less than or equal to `self.len()`.
615 |     pub const unsafe fn try_shrink<S>(&mut self, new_size: usize, store: &S) -> Result<(), AllocError>
616 |     where
617 |         S: ~const Store<Handle = H>,
618 |     {
619 |         debug_assert!(new_size <= self.len());
620 | 
621 |         if mem::size_of::<T>() == 0 {
622 |             return Ok(());
623 |         }
624 | 
625 |         let Ok(old_layout) = Self::layout(self.len()) else {
626 |             return Err(AllocError);
627 |         };
628 | 
629 |         let Ok(new_layout) = Self::layout(new_size) else {
630 |             return Err(AllocError);
631 |         };
632 | 
633 |         // Safety:
634 |         // - `self.handle` was allocated by `store`, as per pre-conditions.
635 |         // - `self.handle` is still valid, as per pre-conditions.
636 |         // - `old_layout` fits the block of memory associated to `self.handle`, by construction.
637 |         // - `new_layout`'s size is less than or equal to the size of `old_layout`, as per pre-conditions.
638 |         let result = unsafe { store.shrink(self.handle, old_layout, new_layout) };
639 | 
640 |         let Ok((handle, bytes)) = result else {
641 |             return Err(AllocError);
642 |         };
643 | 
644 |         debug_assert!(bytes >= new_layout.size());
645 | 
646 |         self.handle = handle;
647 |         self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::<T>());
648 | 
649 |         Ok(())
650 |     }
651 | }
652 | 
653 | impl<T: ?Sized, H: Copy> Clone for TypedHandle<T, H> {
654 |     fn clone(&self) -> Self {
655 |         *self
656 |     }
657 | }
658 | 
659 | impl<T: ?Sized, H: Copy> Copy for TypedHandle<T, H> {}
660 | 
661 | #[cfg(feature = "coercible-metadata")]
662 | impl<T: ?Sized, U: ?Sized, H: Copy> CoerceUnsized<TypedHandle<U, H>> for TypedHandle<T, H> where T: Unsize<U> {}
663 | 
664 | //
665 | // Implementation
666 | //
667 | 
668 | impl<T, H> TypedHandle<[T], H> {
669 |     const fn layout(size: usize) -> Result<Layout, AllocError> {
670 |         let Some(size) = mem::size_of::<T>().checked_mul(size) else {
671 |             return Err(AllocError);
672 |         };
673 | 
674 |         let align = mem::align_of::<T>();
675 | 
676 |         let Ok(layout) = Layout::from_size_align(size, align) else {
677 |             return Err(AllocError);
678 |         };
679 | 
680 |         Ok(layout)
681 |     }
682 | }
683 | 
--------------------------------------------------------------------------------
/src/extension/typed_metadata.rs:
--------------------------------------------------------------------------------
1 | //! Typed Metadata, for coercion purposes.
2 | 
3 | use core::fmt;
4 | 
5 | pub use implementation::TypedMetadata;
6 | 
7 | #[cfg(not(feature = "coercible-metadata"))]
8 | mod implementation {
9 |     use core::{
10 |         marker::Unsize,
11 |         ptr::{self, Pointee},
12 |     };
13 | 
14 |     /// Typed Metadata, for type-safe APIs.
15 |     pub struct TypedMetadata<T: ?Sized> {
16 |         metadata: <T as Pointee>::Metadata,
17 |         // Work-around for https://github.com/rust-lang/rust/issues/111821.
18 |         //
19 |         // rustc fails to realize that `Pointee::Metadata` is always `Sized`, which in case of cycles may lead it to
20 |         // erroneously reject a program due to use of a possible `!Sized` type for a non-last field.
21 |         _self_is_always_sized: (),
22 |     }
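// For reference, `<T as Pointee>::Metadata` is `()` for `Sized` types, `usize` (the
// length) for slices and `str`, and `DynMetadata<dyn Trait>` (a vtable pointer) for
// trait objects; hence, for instance:
//
//     let _: TypedMetadata<u32> = TypedMetadata::from_metadata(());
//     let _: TypedMetadata<[u32]> = TypedMetadata::from_metadata(4); // 4 elements.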
23 | 
24 |     impl<T: ?Sized> TypedMetadata<T> {
25 |         /// Creates a new instance from the given metadata.
26 |         pub const fn from_metadata(metadata: <T as Pointee>::Metadata) -> Self {
27 |             #[allow(clippy::let_unit_value)]
28 |             let _self_is_always_sized = ();
29 | 
30 |             Self {
31 |                 metadata,
32 |                 _self_is_always_sized,
33 |             }
34 |         }
35 | 
36 |         /// Retrieves the metadata.
37 |         pub const fn get(&self) -> <T as Pointee>::Metadata {
38 |             self.metadata
39 |         }
40 | 
41 |         /// Coerces the metadata into another.
42 |         pub const fn coerce<U: ?Sized>(&self) -> TypedMetadata<U>
43 |         where
44 |             T: Unsize<U>,
45 |         {
46 |             let pointer: *const T = ptr::from_raw_parts(ptr::null(), self.metadata);
47 |             let pointer: *const U = pointer as *const _;
48 |             let (_, metadata) = pointer.to_raw_parts();
49 | 
50 |             #[allow(clippy::let_unit_value)]
51 |             let _self_is_always_sized = ();
52 | 
53 |             TypedMetadata {
54 |                 metadata,
55 |                 _self_is_always_sized,
56 |             }
57 |         }
58 |     }
59 | } // mod implementation
60 | 
61 | #[cfg(feature = "coercible-metadata")]
62 | mod implementation {
63 |     use core::{
64 |         marker::Unsize,
65 |         ops::CoerceUnsized,
66 |         ptr::{NonNull, Pointee},
67 |     };
68 | 
69 |     /// Typed Metadata, for type-safe APIs.
70 |     pub struct TypedMetadata<T: ?Sized>(NonNull<T>);
71 | 
72 |     impl<T: ?Sized> TypedMetadata<T> {
73 |         /// Creates a new Typed metadata.
74 |         pub const fn from_metadata(metadata: <T as Pointee>::Metadata) -> Self {
75 |             Self(NonNull::from_raw_parts(NonNull::dangling(), metadata))
76 |         }
77 | 
78 |         /// Retrieves the metadata.
79 |         pub const fn get(&self) -> <T as Pointee>::Metadata {
80 |             self.0.to_raw_parts().1
81 |         }
82 | 
83 |         /// Coerces the metadata into another.
84 |         pub const fn coerce<U: ?Sized>(&self) -> TypedMetadata<U>
85 |         where
86 |             T: Unsize<U>,
87 |         {
88 |             *self
89 |         }
90 |     }
91 | 
92 |     #[cfg(feature = "coercible-metadata")]
93 |     impl<T: ?Sized, U: ?Sized> CoerceUnsized<TypedMetadata<U>> for TypedMetadata<T> where T: Unsize<U> {}
94 | } // mod implementation
95 | 
96 | impl<T> TypedMetadata<T> {
97 |     /// Creates a new instance.
98 |     pub const fn new() -> Self {
99 |         Self::from_metadata(())
100 |     }
101 | }
102 | 
103 | impl<T: ?Sized> Clone for TypedMetadata<T> {
104 |     fn clone(&self) -> Self {
105 |         *self
106 |     }
107 | }
108 | 
109 | impl<T: ?Sized> Copy for TypedMetadata<T> {}
110 | 
111 | impl<T: ?Sized> fmt::Debug for TypedMetadata<T> {
112 |     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
113 |         write!(f, "TypedMetadata")
114 |     }
115 | }
116 | 
117 | impl<T> Default for TypedMetadata<T> {
118 |     fn default() -> Self {
119 |         Self::from_metadata(())
120 |     }
121 | }
122 | 
123 | impl<T> From<usize> for TypedMetadata<[T]> {
124 |     fn from(value: usize) -> Self {
125 |         Self::from_metadata(value)
126 |     }
127 | }
128 | 
--------------------------------------------------------------------------------
/src/extension/typed_single.rs:
--------------------------------------------------------------------------------
1 | //! Typed handle, for bonus type safety.
2 | 
3 | use core::{
4 |     alloc::{AllocError, Layout},
5 |     marker::Unsize,
6 |     mem,
7 |     ptr::{self, Alignment, NonNull},
8 | };
9 | 
10 | #[cfg(feature = "coercible-metadata")]
11 | use core::ops::CoerceUnsized;
12 | 
13 | use crate::{
14 |     alloc,
15 |     extension::typed_metadata::TypedMetadata,
16 |     interface::{StoreDangling, StoreSingle},
17 | };
18 | 
19 | /// Arbitrary typed handle, for type safety, and coercion.
20 | ///
21 | /// A typed handle may be dangling, or may be invalid. It is the responsibility of the user to ensure that the typed
22 | /// handle is valid when necessary.
23 | pub struct TypedSingleHandle<T: ?Sized, H> {
24 |     handle: H,
25 |     metadata: TypedMetadata<T>,
26 | }
27 | 
28 | impl<T, H: Copy> TypedSingleHandle<T, H> {
29 |     /// Creates a dangling handle.
30 |     ///
31 |     /// Calls `handle_alloc_error` if the creation of the handle fails.
32 |     #[inline(always)]
33 |     pub const fn dangling<S>(store: &S) -> Self
34 |     where
35 |         S: ~const StoreDangling<Handle = H>,
36 |     {
37 |         let Ok(this) = Self::try_dangling(store) else {
38 |             alloc::handle_alloc_error(Layout::new::<T>())
39 |         };
40 | 
41 |         this
42 |     }
43 | 
44 |     /// Attempts to create a dangling handle.
45 |     ///
46 |     /// Returns `AllocError` on failure.
47 |     #[inline(always)]
48 |     pub const fn try_dangling<S>(store: &S) -> Result<Self, AllocError>
49 |     where
50 |         S: ~const StoreDangling<Handle = H>,
51 |     {
52 |         let Ok(handle) = store.dangling(Alignment::of::<T>()) else {
53 |             return Err(AllocError);
54 |         };
55 | 
56 |         let metadata = TypedMetadata::new();
57 | 
58 |         Ok(Self { handle, metadata })
59 |     }
60 | 
61 |     /// Creates a new handle, pointing to a `T`.
62 |     #[inline(always)]
63 |     pub fn new<S>(value: T, store: &mut S) -> Self
64 |     where
65 |         S: StoreSingle<Handle = H>,
66 |     {
67 |         let Ok(this) = Self::try_new(value, store) else {
68 |             alloc::handle_alloc_error(Layout::new::<T>())
69 |         };
70 | 
71 |         this
72 |     }
73 | 
74 |     /// Attempts to create a new handle, pointing to a `T`.
75 |     #[inline(always)]
76 |     pub fn try_new<S>(value: T, store: &mut S) -> Result<Self, AllocError>
77 |     where
78 |         S: StoreSingle<Handle = H>,
79 |     {
80 |         let (handle, _) = store.allocate(Layout::new::<T>())?;
81 | 
82 |         // Safety:
83 |         // - `handle` was just allocated by `store`.
84 |         // - `handle` is still valid, as no other operation occurred on `store`.
85 |         let pointer = unsafe { store.resolve_mut(handle) };
86 | 
87 |         // Safety:
88 |         // - `pointer` points to a writeable memory area.
89 |         // - `pointer` points to a sufficiently aligned and sized memory area.
90 |         // - `pointer` has exclusive access to the memory area it points to.
91 |         unsafe { ptr::write(pointer.cast().as_ptr(), value) };
92 | 
93 |         let metadata = TypedMetadata::new();
94 | 
95 |         Ok(Self { handle, metadata })
96 |     }
97 | 
98 |     /// Allocates a new handle, with enough space for `T`.
99 |     ///
100 |     /// The allocated memory is left uninitialized.
101 |     #[inline(always)]
102 |     pub const fn allocate<S>(store: &mut S) -> Self
103 |     where
104 |         S: ~const StoreSingle<Handle = H>,
105 |     {
106 |         let Ok(this) = Self::try_allocate(store) else {
107 |             alloc::handle_alloc_error(Layout::new::<T>())
108 |         };
109 | 
110 |         this
111 |     }
112 | 
113 |     /// Attempts to allocate a new handle, with enough space for `T`.
114 |     ///
115 |     /// The allocated memory is left uninitialized.
116 |     #[inline(always)]
117 |     pub const fn try_allocate<S>(store: &mut S) -> Result<Self, AllocError>
118 |     where
119 |         S: ~const StoreSingle<Handle = H>,
120 |     {
121 |         let Ok((handle, _)) = store.allocate(Layout::new::<T>()) else {
122 |             return Err(AllocError);
123 |         };
124 | 
125 |         let metadata = TypedMetadata::new();
126 | 
127 |         Ok(Self { handle, metadata })
128 |     }
129 | 
130 |     /// Allocates a new handle, with enough space for `T`.
131 |     ///
132 |     /// The allocated memory is zeroed out.
133 |     #[inline(always)]
134 |     pub const fn allocate_zeroed<S>(store: &mut S) -> Self
135 |     where
136 |         S: ~const StoreSingle<Handle = H>,
137 |     {
138 |         let Ok(this) = Self::try_allocate_zeroed(store) else {
139 |             alloc::handle_alloc_error(Layout::new::<T>())
140 |         };
141 | 
142 |         this
143 |     }
144 | 
145 |     /// Attempts to allocate a new handle, with enough space for `T`.
146 |     ///
147 |     /// The allocated memory is zeroed out.
148 |     #[inline(always)]
149 |     pub const fn try_allocate_zeroed<S>(store: &mut S) -> Result<Self, AllocError>
150 |     where
151 |         S: ~const StoreSingle<Handle = H>,
152 |     {
153 |         let Ok((handle, _)) = store.allocate_zeroed(Layout::new::<T>()) else {
154 |             return Err(AllocError);
155 |         };
156 | 
157 |         let metadata = TypedMetadata::new();
158 | 
159 |         Ok(Self { handle, metadata })
160 |     }
161 | }
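// The key difference from `TypedHandle` in typed.rs: `StoreSingle` hands the store
// out by `&mut S` for allocation and mutable resolution, statically enforcing
// exclusive access rather than relying on aliasing discipline. A sketch, assuming
// the inline store used by the tests elsewhere in this crate:
//
//     let mut store = InlineSingleStore::<u32>::default();
//     let handle = TypedSingleHandle::new(7u32, &mut store);
//     assert_eq!(7, unsafe { *handle.resolve(&store) });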
148 | #[inline(always)] 149 | pub const fn try_allocate_zeroed(store: &mut S) -> Result 150 | where 151 | S: ~const StoreSingle, 152 | { 153 | let Ok((handle, _)) = store.allocate_zeroed(Layout::new::()) else { 154 | return Err(AllocError); 155 | }; 156 | 157 | let metadata = TypedMetadata::new(); 158 | 159 | Ok(Self { handle, metadata }) 160 | } 161 | } 162 | 163 | impl TypedSingleHandle { 164 | /// Creates a handle from raw parts. 165 | /// 166 | /// - If `handle` is valid, and associated to a block of memory which fits an instance of `T`, then the resulting 167 | /// typed handle is valid. 168 | /// - If `handle` is invalid, then the resulting typed handle is invalid. 169 | /// - If `handle` is valid and `metadata` does not fit the block of memory associated with it, then the resulting 170 | /// typed handle is invalid. 171 | pub const fn from_raw_parts(handle: H, metadata: TypedMetadata) -> Self { 172 | Self { handle, metadata } 173 | } 174 | 175 | /// Decomposes a (possibly wide) pointer into its (raw) handle and metadata components. 176 | pub const fn to_raw_parts(self) -> (H, TypedMetadata) { 177 | (self.handle, self.metadata) 178 | } 179 | 180 | /// Deallocates the memory associated with the handle. 181 | /// 182 | /// # Safety 183 | /// 184 | /// - `self` must have been allocated by `store`. 185 | /// - `self` must still be valid. 186 | /// - `self` is invalidated alongside any copy of it. 187 | #[inline(always)] 188 | pub const unsafe fn deallocate(&self, store: &mut S) 189 | where 190 | S: ~const StoreSingle, 191 | { 192 | // Safety: 193 | // - `self.handle` was allocated by `store`, as per pre-conditions. 194 | // - `self.handle` is still valid, as per pre-conditions. 195 | let pointer = unsafe { self.resolve_raw_mut(store) }; 196 | 197 | // Safety: 198 | // - `pointer` has valid metadata for `T`. 199 | let layout = unsafe { Layout::for_value_raw(pointer.as_ptr() as *const T) }; 200 | 201 | // Safety: 202 | // - `self.handle` was allocated by `store`, as per pre-conditions. 203 | // - `self.handle` is still valid, as per pre-conditions. 204 | // - `layout` fits the block of memory associated with `self.handle`. 205 | unsafe { store.deallocate(self.handle, layout) }; 206 | } 207 | 208 | /// Resolves the handle to a reference. 209 | /// 210 | /// # Safety 211 | /// 212 | /// - `self` must have been allocated by `store`. 213 | /// - `self` must still be valid. 214 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 215 | /// - No access through a mutable reference to this instance of `T` must overlap with accesses through the result. 216 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 217 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 218 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 219 | /// `resolve` calls, may invalidate the reference. 220 | #[inline(always)] 221 | pub const unsafe fn resolve<'a, S>(&self, store: &'a S) -> &'a T 222 | where 223 | S: ~const StoreSingle, 224 | { 225 | // Safety: 226 | // - `self.handle` was allocated by `store`, as per pre-conditions. 227 | // - `self.handle` is still valid, as per pre-conditions. 228 | let pointer = unsafe { self.resolve_raw(store) }; 229 | 230 | // Safety: 231 | // - `pointer` points to a live instance of `T`, as per type-invariant. 
232 | // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving 233 | // or destroying store, though it may still be invalidated by allocating. 234 | unsafe { pointer.as_ref() } 235 | } 236 | 237 | /// Resolves the handle to a mutable reference. 238 | /// 239 | /// # Safety 240 | /// 241 | /// - `self` must have been allocated by `store`. 242 | /// - `self` must still be valid. 243 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 244 | /// - No access through any reference to this instance of `T` must overlap with accesses through the result. 245 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 246 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 247 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 248 | /// `resolve` calls, may invalidate the reference. 249 | #[inline(always)] 250 | #[allow(clippy::mut_from_ref)] 251 | pub const unsafe fn resolve_mut<'a, S>(&mut self, store: &'a mut S) -> &'a mut T 252 | where 253 | S: ~const StoreSingle<Handle = H>, 254 | { 255 | // Safety: 256 | // - `self.handle` was allocated by `store`, as per pre-conditions. 257 | // - `self.handle` is still valid, as per pre-conditions. 258 | let mut pointer = unsafe { self.resolve_raw_mut(store) }; 259 | 260 | // Safety: 261 | // - `pointer` points to a live instance of `T`, as per type-invariant. 262 | // - The resulting reference borrows `store` mutably, guaranteeing it won't be invalidated by moving 263 | // or destroying store, nor by any other method call on `store` for its duration. 264 | unsafe { pointer.as_mut() } 265 | } 266 | 267 | /// Resolves the handle to a non-null pointer. 268 | /// 269 | /// # Safety 270 | /// 271 | /// - `self` must have been allocated by `store`. 272 | /// - `self` must still be valid. 273 | /// - The pointer is only guaranteed to be dereferenceable to a shared reference. 274 | /// - The pointer is only guaranteed to be valid as long as `self` is valid. 275 | /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 276 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 277 | /// `resolve` calls, may invalidate the pointer. 278 | #[inline(always)] 279 | pub const unsafe fn resolve_raw<S>(&self, store: &S) -> NonNull<T> 280 | where 281 | S: ~const StoreSingle<Handle = H>, 282 | { 283 | // Safety: 284 | // - `self.handle` was allocated by `store`, as per pre-conditions. 285 | // - `self.handle` is still valid, as per pre-conditions. 286 | let pointer = unsafe { store.resolve(self.handle) }; 287 | 288 | NonNull::from_raw_parts(pointer.cast(), self.metadata.get()) 289 | } 290 | 291 | /// Resolves the handle to a non-null pointer. 292 | /// 293 | /// # Safety 294 | /// 295 | /// - `self` must have been allocated by `store`. 296 | /// - `self` must still be valid. 297 | /// - The pointer is only guaranteed to be valid as long as `self` is valid. 298 | /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 299 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 300 | /// `resolve` calls, may invalidate the pointer.
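// --- [Editorial example] ----------------------------------------------------
// A sketch of the `resolve_mut`/`resolve` contract above: mutation holds `&mut`
// on both handle and store, so overlapping accesses cannot be formed. Assumes
// nightly `allocator_api` and the blanket `StoreSingle` impl for allocators.
//
//     fn mutate_through_handle() {
//         use std::alloc::Global;
//
//         let mut store = Global;
//         let mut handle = TypedSingleHandle::<i32, _>::new(1, &mut store);
//
//         // Safety: allocated by `store`, still valid, holds a live `i32`.
//         let value = unsafe { handle.resolve_mut(&mut store) };
//         *value += 41;
//
//         // Safety: same pre-conditions, shared access this time.
//         let value: &i32 = unsafe { handle.resolve(&store) };
//         assert_eq!(42, *value);
//
//         // Safety: `handle` was allocated by `store` and is still valid.
//         unsafe { handle.deallocate(&mut store) };
//     }
// ----------------------------------------------------------------------------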
301 | #[inline(always)] 302 | pub const unsafe fn resolve_raw_mut(&self, store: &mut S) -> NonNull 303 | where 304 | S: ~const StoreSingle, 305 | { 306 | // Safety: 307 | // - `self.handle` was allocated by `store`, as per pre-conditions. 308 | // - `self.handle` is still valid, as per pre-conditions. 309 | let pointer = unsafe { store.resolve_mut(self.handle) }; 310 | 311 | NonNull::from_raw_parts(pointer.cast(), self.metadata.get()) 312 | } 313 | 314 | /// Coerces the handle into another. 315 | /// 316 | /// If `self` is valid, the resulting typed handle is valid; otherwise it is invalid. 317 | #[inline(always)] 318 | pub const fn coerce(&self) -> TypedSingleHandle 319 | where 320 | T: Unsize, 321 | { 322 | let metadata = self.metadata.coerce(); 323 | 324 | TypedSingleHandle { 325 | handle: self.handle, 326 | metadata, 327 | } 328 | } 329 | } 330 | 331 | impl TypedSingleHandle<[T], H> { 332 | /// Creates a dangling handle. 333 | /// 334 | /// Calls `handle_alloc_error` if the creation of the handle fails. 335 | #[inline(always)] 336 | pub const fn dangling_slice(store: &S) -> Self 337 | where 338 | S: ~const StoreDangling, 339 | { 340 | let Ok(this) = Self::try_dangling_slice(store) else { 341 | alloc::handle_alloc_error(Layout::new::()) 342 | }; 343 | 344 | this 345 | } 346 | 347 | /// Attempts to create a dangling handle. 348 | /// 349 | /// Returns `AllocError` on failure. 350 | #[inline(always)] 351 | pub const fn try_dangling_slice(store: &S) -> Result 352 | where 353 | S: ~const StoreDangling, 354 | { 355 | let Ok(handle) = store.dangling(Alignment::of::()) else { 356 | return Err(AllocError); 357 | }; 358 | 359 | let metadata = TypedMetadata::from_metadata(0); 360 | 361 | Ok(Self { handle, metadata }) 362 | } 363 | 364 | /// Allocates a new handle, with enough space for `size` elements `T`. 365 | /// 366 | /// The allocated memory is left uninitialized. 367 | #[inline(always)] 368 | pub const fn allocate_slice(size: usize, store: &mut S) -> Self 369 | where 370 | S: ~const StoreSingle + ~const StoreDangling, 371 | { 372 | let Ok(this) = Self::try_allocate_slice(size, store) else { 373 | alloc::handle_alloc_error(Layout::new::()) 374 | }; 375 | 376 | this 377 | } 378 | 379 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 380 | /// 381 | /// The allocated memory is left uninitialized. 382 | #[inline(always)] 383 | pub const fn try_allocate_slice(size: usize, store: &mut S) -> Result 384 | where 385 | S: ~const StoreSingle + ~const StoreDangling, 386 | { 387 | if mem::size_of::() == 0 { 388 | let Ok(mut this) = Self::try_dangling_slice(store) else { 389 | alloc::handle_alloc_error(Layout::new::()) 390 | }; 391 | 392 | this.metadata = TypedMetadata::from_metadata(usize::MAX); 393 | 394 | return Ok(this); 395 | } 396 | 397 | let Ok(layout) = Self::layout(size) else { 398 | return Err(AllocError); 399 | }; 400 | 401 | let Ok((handle, bytes)) = store.allocate(layout) else { 402 | return Err(AllocError); 403 | }; 404 | 405 | debug_assert!(bytes >= layout.size()); 406 | 407 | let metadata = TypedMetadata::from_metadata(bytes / mem::size_of::()); 408 | 409 | Ok(Self { handle, metadata }) 410 | } 411 | 412 | /// Allocates a new handle, with enough space for `size` elements `T`. 413 | /// 414 | /// The allocated memory is zeroed out. 
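// --- [Editorial example] ----------------------------------------------------
// A hedged sketch of slice allocation as implemented in `try_allocate_slice`
// above: `len` is derived from the bytes the store actually returned, so the
// handle may report more capacity than requested, never less. Assumes nightly
// `allocator_api` and the allocator-as-store adapter.
//
//     fn slice_capacity() {
//         use std::alloc::Global;
//
//         let mut store = Global;
//         let handle = TypedSingleHandle::<[u64], _>::allocate_slice(3, &mut store);
//
//         // The store may round the request up, never down.
//         assert!(handle.len() >= 3);
//
//         // Safety: `handle` was allocated by `store` and is still valid.
//         unsafe { handle.deallocate(&mut store) };
//     }
// ----------------------------------------------------------------------------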
415 | #[inline(always)] 416 | pub const fn allocate_zeroed_slice(size: usize, store: &mut S) -> Self 417 | where 418 | S: ~const StoreSingle + ~const StoreDangling, 419 | { 420 | let Ok(this) = Self::try_allocate_zeroed_slice(size, store) else { 421 | alloc::handle_alloc_error(Layout::new::()) 422 | }; 423 | 424 | this 425 | } 426 | 427 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 428 | /// 429 | /// The allocated memory is zeroed out. 430 | #[inline(always)] 431 | pub const fn try_allocate_zeroed_slice(size: usize, store: &mut S) -> Result 432 | where 433 | S: ~const StoreSingle + ~const StoreDangling, 434 | { 435 | if mem::size_of::() == 0 { 436 | let Ok(mut this) = Self::try_dangling_slice(store) else { 437 | alloc::handle_alloc_error(Layout::new::()) 438 | }; 439 | 440 | this.metadata = TypedMetadata::from_metadata(usize::MAX); 441 | 442 | return Ok(this); 443 | } 444 | 445 | let Ok(layout) = Self::layout(size) else { 446 | return Err(AllocError); 447 | }; 448 | 449 | let Ok((handle, bytes)) = store.allocate_zeroed(layout) else { 450 | return Err(AllocError); 451 | }; 452 | 453 | debug_assert!(bytes >= layout.size()); 454 | 455 | let metadata = TypedMetadata::from_metadata(bytes / mem::size_of::()); 456 | 457 | Ok(Self { handle, metadata }) 458 | } 459 | 460 | /// Returns whether the memory area associated to `self` may not contain any element. 461 | pub const fn is_empty(&self) -> bool { 462 | self.metadata.get() == 0 463 | } 464 | 465 | /// Returns the number of elements the memory area associated to `self` may contain. 466 | pub const fn len(&self) -> usize { 467 | self.metadata.get() 468 | } 469 | 470 | /// Grows the block of memory associated with the handle. 471 | /// 472 | /// On success, all the copies of the handle are invalidated, and the extra memory is left uninitialized. On 473 | /// failure, an error is returned. 474 | /// 475 | /// # Safety 476 | /// 477 | /// - `self` must have been allocated by `store`. 478 | /// - `self` must still be valid. 479 | /// - `new_size` must be greater than or equal to `self.len()`. 480 | pub const unsafe fn grow(&mut self, new_size: usize, store: &mut S) 481 | where 482 | S: ~const StoreSingle, 483 | { 484 | // Safety: 485 | // - `self` has been allocated by `store`, as per pre-conditions. 486 | // - `self` is still valid, as per pre-conditions. 487 | // - `new_size` must be greater than or equal to `self.len()`, as per pre-conditions. 488 | let result = unsafe { self.try_grow(new_size, store) }; 489 | 490 | if result.is_err() { 491 | alloc::handle_alloc_error(Layout::new::()) 492 | } 493 | } 494 | 495 | /// Attempts to grow the block of memory associated with the handle. 496 | /// 497 | /// On success, all the copies of the handle are invalidated, and the extra memory is left uninitialized. On 498 | /// failure, an error is returned. 499 | /// 500 | /// # Safety 501 | /// 502 | /// - `self` must have been allocated by `store`. 503 | /// - `self` must still be valid. 504 | /// - `new_size` must be greater than or equal to `self.len()`. 
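// --- [Editorial example] ----------------------------------------------------
// A sketch of the zero-sized branch in the slice constructors above: no store
// memory is touched, and the dangling handle advertises unbounded capacity.
//
//     fn zero_sized_elements() {
//         use std::alloc::Global;
//
//         let mut store = Global;
//         let handle = TypedSingleHandle::<[()], _>::allocate_slice(8, &mut store);
//
//         // No memory backs zero-sized elements, so capacity is unbounded.
//         assert_eq!(usize::MAX, handle.len());
//
//         // Nothing was allocated, so there is nothing to deallocate; a
//         // dangling handle must never be passed to `deallocate`.
//     }
// ----------------------------------------------------------------------------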
505 | pub const unsafe fn try_grow(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 506 | where 507 | S: ~const StoreSingle, 508 | { 509 | debug_assert!(new_size >= self.len()); 510 | 511 | let Ok(old_layout) = Self::layout(self.len()) else { 512 | return Err(AllocError); 513 | }; 514 | 515 | let Ok(new_layout) = Self::layout(new_size) else { 516 | return Err(AllocError); 517 | }; 518 | 519 | // Safety: 520 | // - `self.handle` was allocated by `store`, as per pre-conditions. 521 | // - `self.handle` is still valid, as per pre-conditions. 522 | // - `old_layout` fits the block of memory associated to `self.handle`, by construction. 523 | // - `new_layout`'s size is greater than or equal to the size of `old_layout`, as per pre-conditions. 524 | let result = unsafe { store.grow(self.handle, old_layout, new_layout) }; 525 | 526 | let Ok((handle, bytes)) = result else { 527 | return Err(AllocError); 528 | }; 529 | 530 | debug_assert!(bytes >= new_layout.size()); 531 | 532 | self.handle = handle; 533 | self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::()); 534 | 535 | Ok(()) 536 | } 537 | 538 | /// Grows the block of memory associated with the handle. 539 | /// 540 | /// On success, all the copies of the handle are invalidated, and the extra memory is zeroed. On failure, an error 541 | /// is returned. 542 | /// 543 | /// # Safety 544 | /// 545 | /// - `self` must have been allocated by `store`. 546 | /// - `self` must still be valid. 547 | /// - `new_size` must be greater than or equal to `self.len()`. 548 | pub const unsafe fn grow_zeroed(&mut self, new_size: usize, store: &mut S) 549 | where 550 | S: ~const StoreSingle, 551 | { 552 | // Safety: 553 | // - `self` has been allocated by `store`, as per pre-conditions. 554 | // - `self` is still valid, as per pre-conditions. 555 | // - `new_size` must be greater than or equal to `self.len()`, as per pre-conditions. 556 | let result = unsafe { self.try_grow_zeroed(new_size, store) }; 557 | 558 | if result.is_err() { 559 | alloc::handle_alloc_error(Layout::new::()) 560 | } 561 | } 562 | 563 | /// Attempts to grow the block of memory associated with the handle. 564 | /// 565 | /// On success, all the copies of the handle are invalidated, and the extra memory is zeroed. On failure, an error 566 | /// is returned. 567 | /// 568 | /// # Safety 569 | /// 570 | /// - `self` must have been allocated by `store`. 571 | /// - `self` must still be valid. 572 | /// - `new_size` must be greater than or equal to `self.len()`. 573 | pub const unsafe fn try_grow_zeroed(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 574 | where 575 | S: ~const StoreSingle, 576 | { 577 | debug_assert!(new_size >= self.len()); 578 | 579 | let Ok(old_layout) = Self::layout(self.len()) else { 580 | return Err(AllocError); 581 | }; 582 | 583 | let Ok(new_layout) = Self::layout(new_size) else { 584 | return Err(AllocError); 585 | }; 586 | 587 | // Safety: 588 | // - `self.handle` was allocated by `store`, as per pre-conditions. 589 | // - `self.handle` is still valid, as per pre-conditions. 590 | // - `old_layout` fits the block of memory associated to `self.handle`, by construction. 591 | // - `new_layout`'s size is greater than or equal to the size of `old_layout`, as per pre-conditions. 
592 | let result = unsafe { store.grow_zeroed(self.handle, old_layout, new_layout) }; 593 | 594 | let Ok((handle, bytes)) = result else { 595 | return Err(AllocError); 596 | }; 597 | 598 | debug_assert!(bytes >= new_layout.size()); 599 | 600 | self.handle = handle; 601 | self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::()); 602 | 603 | Ok(()) 604 | } 605 | 606 | /// Shrinks the block of memory associated with the handle. 607 | /// 608 | /// On success, all the copies of the handle are invalidated. On failure, an error is returned. 609 | /// 610 | /// # Safety 611 | /// 612 | /// - `self` must have been allocated by `store`. 613 | /// - `self` must still be valid. 614 | /// - `new_size` must be less than or equal to `self.len()`. 615 | pub const unsafe fn shrink(&mut self, new_size: usize, store: &mut S) 616 | where 617 | S: ~const StoreSingle, 618 | { 619 | // Safety: 620 | // - `self` has been allocated by `store`, as per pre-conditions. 621 | // - `self` is still valid, as per pre-conditions. 622 | // - `new_size` must be less than or equal to `self.len()`, as per pre-conditions. 623 | let result = unsafe { self.try_shrink(new_size, store) }; 624 | 625 | if result.is_err() { 626 | alloc::handle_alloc_error(Layout::new::()) 627 | } 628 | } 629 | 630 | /// Attempts to shrink the block of memory associated with the handle. 631 | /// 632 | /// On success, all the copies of the handle are invalidated. On failure, an error is returned. 633 | /// 634 | /// # Safety 635 | /// 636 | /// - `self` must have been allocated by `store`. 637 | /// - `self` must still be valid. 638 | /// - `new_size` must be less than or equal to `self.len()`. 639 | pub const unsafe fn try_shrink(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 640 | where 641 | S: ~const StoreSingle, 642 | { 643 | debug_assert!(new_size <= self.len()); 644 | 645 | if mem::size_of::() == 0 { 646 | return Ok(()); 647 | } 648 | 649 | let Ok(old_layout) = Self::layout(self.len()) else { 650 | return Err(AllocError); 651 | }; 652 | 653 | let Ok(new_layout) = Self::layout(new_size) else { 654 | return Err(AllocError); 655 | }; 656 | 657 | // Safety: 658 | // - `self.handle` was allocated by `store`, as per pre-conditions. 659 | // - `self.handle` is still valid, as per pre-conditions. 660 | // - `old_layout` fits the block of memory associated to `self.handle`, by construction. 661 | // - `new_layout`'s size is less than or equal to the size of `old_layout`, as per pre-conditions. 
662 | let result = unsafe { store.shrink(self.handle, old_layout, new_layout) }; 663 | 664 | let Ok((handle, bytes)) = result else { 665 | return Err(AllocError); 666 | }; 667 | 668 | debug_assert!(bytes >= new_layout.size()); 669 | 670 | self.handle = handle; 671 | self.metadata = TypedMetadata::from_metadata(bytes / mem::size_of::()); 672 | 673 | Ok(()) 674 | } 675 | } 676 | 677 | impl Clone for TypedSingleHandle { 678 | fn clone(&self) -> Self { 679 | *self 680 | } 681 | } 682 | 683 | impl Copy for TypedSingleHandle {} 684 | 685 | #[cfg(feature = "coercible-metadata")] 686 | impl CoerceUnsized> for TypedSingleHandle where T: Unsize {} 687 | 688 | // 689 | // Implementation 690 | // 691 | 692 | impl TypedSingleHandle<[T], H> { 693 | const fn layout(size: usize) -> Result { 694 | let Some(size) = mem::size_of::().checked_mul(size) else { 695 | return Err(AllocError); 696 | }; 697 | 698 | let align = mem::align_of::(); 699 | 700 | let Ok(layout) = Layout::from_size_align(size, align) else { 701 | return Err(AllocError); 702 | }; 703 | 704 | Ok(layout) 705 | } 706 | } 707 | -------------------------------------------------------------------------------- /src/extension/unique.rs: -------------------------------------------------------------------------------- 1 | //! A typed, unique handle. 2 | 3 | use core::{alloc::AllocError, marker::Unsize, ptr::NonNull}; 4 | 5 | #[cfg(feature = "coercible-metadata")] 6 | use core::ops::CoerceUnsized; 7 | 8 | use crate::{ 9 | extension::{typed::TypedHandle, typed_metadata::TypedMetadata}, 10 | interface::{Store, StoreDangling}, 11 | }; 12 | 13 | /// A typed, unique handle. 14 | pub struct UniqueHandle(TypedHandle); 15 | 16 | impl UniqueHandle { 17 | /// Creates a dangling handle. 18 | /// 19 | /// Calls `handle_alloc_error` on allocation failure. 20 | #[inline(always)] 21 | pub const fn dangling(store: &S) -> Self 22 | where 23 | S: ~const StoreDangling, 24 | { 25 | Self(TypedHandle::dangling(store)) 26 | } 27 | 28 | /// Attempts to create a dangling handle. 29 | /// 30 | /// Returns an error on allocation failure. 31 | #[inline(always)] 32 | pub const fn try_dangling(store: &S) -> Result 33 | where 34 | S: ~const StoreDangling, 35 | { 36 | let Ok(handle) = TypedHandle::try_dangling(store) else { 37 | return Err(AllocError); 38 | }; 39 | 40 | Ok(Self(handle)) 41 | } 42 | 43 | /// Creates a new handle, pointing to a `T`. 44 | #[inline(always)] 45 | pub fn new(value: T, store: &S) -> Self 46 | where 47 | S: Store, 48 | { 49 | Self(TypedHandle::new(value, store)) 50 | } 51 | 52 | /// Attempts to create a new handle, pointing to a `T`. 53 | #[inline(always)] 54 | pub fn try_new(value: T, store: &S) -> Result 55 | where 56 | S: Store, 57 | { 58 | TypedHandle::try_new(value, store).map(Self) 59 | } 60 | 61 | /// Allocates a new handle, with enough space for `T`. 62 | /// 63 | /// The allocated memory is left uninitialized. 64 | #[inline(always)] 65 | pub const fn allocate(store: &S) -> Self 66 | where 67 | S: ~const Store, 68 | { 69 | Self(TypedHandle::allocate(store)) 70 | } 71 | 72 | /// Attempts to allocate a new handle, with enough space for `T`. 73 | /// 74 | /// The allocated memory is left uninitialized. 75 | #[inline(always)] 76 | pub const fn try_allocate(store: &S) -> Result 77 | where 78 | S: ~const Store, 79 | { 80 | let Ok(handle) = TypedHandle::try_allocate(store) else { 81 | return Err(AllocError); 82 | }; 83 | 84 | Ok(Self(handle)) 85 | } 86 | 87 | /// Allocates a new handle, with enough space for `T`. 
88 | /// 89 | /// The allocated memory is zeroed out. 90 | #[inline(always)] 91 | pub const fn allocate_zeroed(store: &S) -> Self 92 | where 93 | S: ~const Store, 94 | { 95 | Self(TypedHandle::allocate_zeroed(store)) 96 | } 97 | 98 | /// Attempts to allocate a new handle, with enough space for `T`. 99 | /// 100 | /// The allocated memory is zeroed out. 101 | #[inline(always)] 102 | pub const fn try_allocate_zeroed(store: &S) -> Result 103 | where 104 | S: ~const Store, 105 | { 106 | let Ok(handle) = TypedHandle::try_allocate_zeroed(store) else { 107 | return Err(AllocError); 108 | }; 109 | 110 | Ok(Self(handle)) 111 | } 112 | } 113 | 114 | impl UniqueHandle { 115 | /// Creates a handle from raw parts. 116 | /// 117 | /// - If `handle` is valid, and associated to a block of memory which fits an instance of `T`, then the resulting 118 | /// typed handle is valid. 119 | /// - If `handle` is invalid, then the resulting typed handle is invalid. 120 | /// - If `handle` is valid and `metadata` does not fit the block of memory associated with it, then the resulting 121 | /// typed handle is invalid. 122 | /// 123 | /// # Safety 124 | /// 125 | /// - No copy of `handle` must be used henceforth. 126 | pub const unsafe fn from_raw_parts(handle: H, metadata: TypedMetadata) -> Self { 127 | Self(TypedHandle::from_raw_parts(handle, metadata)) 128 | } 129 | 130 | /// Decomposes a (possibly wide) pointer into its handle and metadata components. 131 | pub const fn to_raw_parts(self) -> (H, TypedMetadata) { 132 | self.0.to_raw_parts() 133 | } 134 | 135 | /// Deallocates the memory associated with the handle. 136 | /// 137 | /// # Safety 138 | /// 139 | /// - `self` must have been allocated by `store`. 140 | /// - `self` must still be valid. 141 | #[inline(always)] 142 | pub const unsafe fn deallocate(self, store: &S) 143 | where 144 | S: ~const Store, 145 | { 146 | // Safety: 147 | // - `self.0` has been allocated by `store`, as per pre-conditions. 148 | // - `self.0` is valid, as per pre-conditions. 149 | unsafe { self.0.deallocate(store) } 150 | } 151 | 152 | /// Resolves the handle to a reference, borrowing the handle. 153 | /// 154 | /// # Safety 155 | /// 156 | /// - `self` must have been allocated by `store`. 157 | /// - `self` must still be valid. 158 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 159 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 160 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 161 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 162 | /// `resolve` calls, may invalidate the reference. 163 | #[inline(always)] 164 | pub const unsafe fn resolve<'a, S>(&'a self, store: &'a S) -> &'a T 165 | where 166 | S: ~const Store, 167 | { 168 | // Safety: 169 | // - `self.handle` was allocated by `store`, as per pre-conditions. 170 | // - `self.handle` is still valid, as per pre-conditions. 171 | // - `self.handle` is associated with a block of memory containing a live instance of `T`, as per 172 | // pre-conditions. 173 | // - The resulting reference borrows `self` immutably, guaranteeing that no mutable reference exist, nor can 174 | // be creating during its lifetime. 175 | // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving 176 | // or destroying store, though it may still be invalidated by allocating. 
177 | unsafe { self.0.resolve(store) } 178 | } 179 | 180 | /// Resolves the handle to a reference, borrowing the handle. 181 | /// 182 | /// # Safety 183 | /// 184 | /// - `self` must have been allocated by `store`. 185 | /// - `self` must still be valid. 186 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 187 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 188 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 189 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 190 | /// `resolve` calls, may invalidate the reference. 191 | #[inline(always)] 192 | pub const unsafe fn resolve_mut<'a, S>(&'a mut self, store: &'a S) -> &'a mut T 193 | where 194 | S: ~const Store, 195 | { 196 | // Safety: 197 | // - `self.handle` was allocated by `store`, as per pre-conditions. 198 | // - `self.handle` is still valid, as per pre-conditions. 199 | // - `self.handle` is associated with a block of memory containing a live instance of `T`, as per 200 | // pre-conditions. 201 | // - The resulting reference borrows `self` mutably, guaranteeing that no reference exist, nor can be 202 | // created during its lifetime. 203 | // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving 204 | // or destroying store, though it may still be invalidated by allocating. 205 | unsafe { self.0.resolve_mut(store) } 206 | } 207 | 208 | /// Resolves the handle to a reference, borrowing the handle. 209 | /// 210 | /// # Safety 211 | /// 212 | /// - `self` must have been allocated by `store`. 213 | /// - `self` must still be valid. 214 | /// - The pointer is only guaranteed to be valid as long as `self` is valid. 215 | /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 216 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 217 | /// `resolve` calls, may invalidate the pointer. 218 | #[inline(always)] 219 | pub const unsafe fn resolve_raw(&self, store: &S) -> NonNull 220 | where 221 | S: ~const Store, 222 | { 223 | // Safety: 224 | // - `self.handle` was allocated by `store`, as per pre-conditions. 225 | // - `self.handle` is still valid, as per pre-conditions. 226 | unsafe { self.0.resolve_raw(store) } 227 | } 228 | 229 | /// Coerces the handle into another. 230 | #[inline(always)] 231 | pub const fn coerce(self) -> UniqueHandle 232 | where 233 | T: Unsize, 234 | { 235 | UniqueHandle(self.0.coerce()) 236 | } 237 | } 238 | 239 | impl UniqueHandle<[T], H> { 240 | /// Creates a dangling handle. 241 | /// 242 | /// Calls `handle_alloc_error` on allocation failure. 243 | #[inline(always)] 244 | pub const fn dangling_slice(store: &S) -> Self 245 | where 246 | S: ~const StoreDangling, 247 | { 248 | Self(TypedHandle::dangling_slice(store)) 249 | } 250 | 251 | /// Attempts to create a dangling handle. 252 | /// 253 | /// Returns an error on allocation failure. 254 | #[inline(always)] 255 | pub const fn try_dangling_slice(store: &S) -> Result 256 | where 257 | S: ~const StoreDangling, 258 | { 259 | let Ok(handle) = TypedHandle::try_dangling_slice(store) else { 260 | return Err(AllocError); 261 | }; 262 | 263 | Ok(Self(handle)) 264 | } 265 | 266 | /// Allocates a new handle, with enough space for `size` elements `T`. 267 | /// 268 | /// The allocated memory is left uninitialized. 
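// --- [Editorial example] ----------------------------------------------------
// What "unique" buys, sketched under the same nightly assumptions as the other
// editorial examples: `UniqueHandle::resolve` above borrows the handle itself,
// so the no-overlapping-access clauses become compiler-checked rather than
// prose-only, unlike the raw `TypedHandle` it wraps.
//
//     fn unique_borrows() {
//         use std::alloc::Global;
//
//         let store = Global;
//         let handle = UniqueHandle::<i32, _>::new(7, &store);
//
//         // Safety: allocated by `store`, valid, and contains a live `i32`.
//         let shared = unsafe { handle.resolve(&store) };
//         assert_eq!(7, *shared);
//
//         // While `shared` lives, `handle.resolve_mut(&store)` would be a
//         // compile-time error: `resolve` holds a borrow of `handle`.
//
//         // Safety: still valid; `deallocate` consumes the handle by value.
//         unsafe { handle.deallocate(&store) };
//     }
// ----------------------------------------------------------------------------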
269 | #[inline(always)] 270 | pub const fn allocate_slice(size: usize, store: &S) -> Self 271 | where 272 | S: ~const Store + ~const StoreDangling, 273 | { 274 | Self(TypedHandle::allocate_slice(size, store)) 275 | } 276 | 277 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 278 | /// 279 | /// The allocated memory is left uninitialized. 280 | #[inline(always)] 281 | pub const fn try_allocate_slice(size: usize, store: &S) -> Result 282 | where 283 | S: ~const Store + ~const StoreDangling, 284 | { 285 | let Ok(handle) = TypedHandle::try_allocate_slice(size, store) else { 286 | return Err(AllocError); 287 | }; 288 | 289 | Ok(Self(handle)) 290 | } 291 | 292 | /// Allocates a new handle, with enough space for `size` elements `T`. 293 | /// 294 | /// The allocated memory is zeroed out. 295 | #[inline(always)] 296 | pub const fn allocate_zeroed_slice(size: usize, store: &S) -> Self 297 | where 298 | S: ~const Store + ~const StoreDangling, 299 | { 300 | Self(TypedHandle::allocate_zeroed_slice(size, store)) 301 | } 302 | 303 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 304 | /// 305 | /// The allocated memory is zeroed out. 306 | #[inline(always)] 307 | pub const fn try_allocate_zeroed_slice(size: usize, store: &S) -> Result 308 | where 309 | S: ~const Store + ~const StoreDangling, 310 | { 311 | let Ok(handle) = TypedHandle::try_allocate_zeroed_slice(size, store) else { 312 | return Err(AllocError); 313 | }; 314 | 315 | Ok(Self(handle)) 316 | } 317 | 318 | /// Returns whether the memory area associated to `self` may not contain any element. 319 | pub const fn is_empty(&self) -> bool { 320 | self.0.is_empty() 321 | } 322 | 323 | /// Returns the number of elements the memory area associated to `self` may contain. 324 | pub const fn len(&self) -> usize { 325 | self.0.len() 326 | } 327 | 328 | /// Grows the block of memory associated with the handle. 329 | /// 330 | /// On success, the extra memory is left uninitialized. On failure, an error is returned. 331 | /// 332 | /// # Safety 333 | /// 334 | /// - `self` must have been allocated by `store`. 335 | /// - `self` must still be valid. 336 | /// - `new_size` must be greater than or equal to `self.len()`. 337 | pub const unsafe fn grow(&mut self, new_size: usize, store: &S) 338 | where 339 | S: ~const Store, 340 | { 341 | // Safety: 342 | // - `self.0` has been allocated by `store`, as per pre-conditions. 343 | // - `self.0` is still valid, as per pre-conditions. 344 | // - `new_size` is greater than or equal to `self.0.len()`. 345 | unsafe { self.0.grow(new_size, store) } 346 | } 347 | 348 | /// Attempts to grow the block of memory associated with the handle. 349 | /// 350 | /// On success, the extra memory is left uninitialized. On failure, an error is returned. 351 | /// 352 | /// # Safety 353 | /// 354 | /// - `self` must have been allocated by `store`. 355 | /// - `self` must still be valid. 356 | /// - `new_size` must be greater than or equal to `self.len()`. 357 | pub const unsafe fn try_grow(&mut self, new_size: usize, store: &S) -> Result<(), AllocError> 358 | where 359 | S: ~const Store, 360 | { 361 | // Safety: 362 | // - `self.0` has been allocated by `store`, as per pre-conditions. 363 | // - `self.0` is still valid, as per pre-conditions. 364 | // - `new_size` is greater than or equal to `self.0.len()`. 365 | unsafe { self.0.try_grow(new_size, store) } 366 | } 367 | 368 | /// Grows the block of memory associated with the handle. 
369 | /// 370 | /// On success, the extra memory is zeroed. On failure, an error is returned. 371 | /// 372 | /// # Safety 373 | /// 374 | /// - `self` must have been allocated by `store`. 375 | /// - `self` must still be valid. 376 | /// - `new_size` must be greater than or equal to `self.len()`. 377 | pub const unsafe fn grow_zeroed(&mut self, new_size: usize, store: &S) 378 | where 379 | S: ~const Store, 380 | { 381 | // Safety: 382 | // - `self.0` has been allocated by `store`, as per pre-conditions. 383 | // - `self.0` is still valid, as per pre-conditions. 384 | // - `new_size` is greater than or equal to `self.0.len()`. 385 | unsafe { self.0.grow_zeroed(new_size, store) } 386 | } 387 | 388 | /// Attempts to grow the block of memory associated with the handle. 389 | /// 390 | /// On success, the extra memory is zeroed. On failure, an error is returned. 391 | /// 392 | /// # Safety 393 | /// 394 | /// - `self` must have been allocated by `store`. 395 | /// - `self` must still be valid. 396 | /// - `new_size` must be greater than or equal to `self.len()`. 397 | pub const unsafe fn try_grow_zeroed(&mut self, new_size: usize, store: &S) -> Result<(), AllocError> 398 | where 399 | S: ~const Store, 400 | { 401 | // Safety: 402 | // - `self.0` has been allocated by `store`, as per pre-conditions. 403 | // - `self.0` is still valid, as per pre-conditions. 404 | // - `new_size` is greater than or equal to `self.0.len()`. 405 | unsafe { self.0.try_grow_zeroed(new_size, store) } 406 | } 407 | 408 | /// Shrinks the block of memory associated with the handle. 409 | /// 410 | /// On failure, an error is returned. 411 | /// 412 | /// # Safety 413 | /// 414 | /// - `self` must have been allocated by `store`. 415 | /// - `self` must still be valid. 416 | /// - `new_size` must be less than or equal to `self.len()`. 417 | pub const unsafe fn shrink(&mut self, new_size: usize, store: &S) 418 | where 419 | S: ~const Store, 420 | { 421 | // Safety: 422 | // - `self.0` has been allocated by `store`, as per pre-conditions. 423 | // - `self.0` is still valid, as per pre-conditions. 424 | // - `new_size` is less than or equal to `self.0.len()`. 425 | unsafe { self.0.shrink(new_size, store) } 426 | } 427 | 428 | /// Shrinks the block of memory associated with the handle. 429 | /// 430 | /// On failure, an error is returned. 431 | /// 432 | /// # Safety 433 | /// 434 | /// - `self` must have been allocated by `store`. 435 | /// - `self` must still be valid. 436 | /// - `new_size` must be less than or equal to `self.len()`. 437 | pub const unsafe fn try_shrink(&mut self, new_size: usize, store: &S) -> Result<(), AllocError> 438 | where 439 | S: ~const Store, 440 | { 441 | // Safety: 442 | // - `self.0` has been allocated by `store`, as per pre-conditions. 443 | // - `self.0` is still valid, as per pre-conditions. 444 | // - `new_size` is less than or equal to `self.0.len()`. 445 | unsafe { self.0.try_shrink(new_size, store) } 446 | } 447 | } 448 | 449 | #[cfg(feature = "coercible-metadata")] 450 | impl CoerceUnsized> for UniqueHandle where T: Unsize {} 451 | -------------------------------------------------------------------------------- /src/extension/unique_single.rs: -------------------------------------------------------------------------------- 1 | //! A typed, unique handle. 
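// --- [Editorial example] ----------------------------------------------------
// This file mirrors unique.rs, but over `StoreSingle`: every allocating or
// mutating call threads `&mut S` instead of `&S`. A hedged sketch, assuming
// nightly `allocator_api` and the blanket `StoreSingle` impl for allocators:
//
//     fn exclusive_store_access() {
//         use std::alloc::Global;
//
//         let mut store = Global;
//         let mut handle = UniqueSingleHandle::<i32, _>::new(3, &mut store);
//
//         // Mutation needs the store exclusively, unlike `UniqueHandle`.
//         let value = unsafe { handle.resolve_mut(&mut store) };
//         *value *= 2;
//
//         let doubled: &i32 = unsafe { handle.resolve(&store) };
//         assert_eq!(6, *doubled);
//
//         unsafe { handle.deallocate(&mut store) };
//     }
// ----------------------------------------------------------------------------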
2 | 3 | use core::{alloc::AllocError, marker::Unsize, ptr::NonNull}; 4 | 5 | #[cfg(feature = "coercible-metadata")] 6 | use core::ops::CoerceUnsized; 7 | 8 | use crate::{ 9 | extension::{typed_metadata::TypedMetadata, typed_single::TypedSingleHandle}, 10 | interface::{StoreDangling, StoreSingle}, 11 | }; 12 | 13 | /// A typed, unique handle. 14 | pub struct UniqueSingleHandle(TypedSingleHandle); 15 | 16 | impl UniqueSingleHandle { 17 | /// Creates a dangling handle. 18 | /// 19 | /// Calls `handle_alloc_error` on allocation failure. 20 | #[inline(always)] 21 | pub const fn dangling(store: &S) -> Self 22 | where 23 | S: ~const StoreDangling, 24 | { 25 | Self(TypedSingleHandle::dangling(store)) 26 | } 27 | 28 | /// Attempts to create a dangling handle. 29 | /// 30 | /// Returns an error on allocation failure. 31 | #[inline(always)] 32 | pub const fn try_dangling(store: &S) -> Result 33 | where 34 | S: ~const StoreDangling, 35 | { 36 | let Ok(handle) = TypedSingleHandle::try_dangling(store) else { 37 | return Err(AllocError); 38 | }; 39 | 40 | Ok(Self(handle)) 41 | } 42 | 43 | /// Creates a new handle, pointing to a `T`. 44 | #[inline(always)] 45 | pub fn new(value: T, store: &mut S) -> Self 46 | where 47 | S: StoreSingle, 48 | { 49 | Self(TypedSingleHandle::new(value, store)) 50 | } 51 | 52 | /// Attempts to create a new handle, pointing to a `T`. 53 | #[inline(always)] 54 | pub fn try_new(value: T, store: &mut S) -> Result 55 | where 56 | S: StoreSingle, 57 | { 58 | TypedSingleHandle::try_new(value, store).map(Self) 59 | } 60 | 61 | /// Allocates a new handle, with enough space for `T`. 62 | /// 63 | /// The allocated memory is left uninitialized. 64 | #[inline(always)] 65 | pub const fn allocate(store: &mut S) -> Self 66 | where 67 | S: ~const StoreSingle, 68 | { 69 | Self(TypedSingleHandle::allocate(store)) 70 | } 71 | 72 | /// Attempts to allocate a new handle, with enough space for `T`. 73 | /// 74 | /// The allocated memory is left uninitialized. 75 | #[inline(always)] 76 | pub const fn try_allocate(store: &mut S) -> Result 77 | where 78 | S: ~const StoreSingle, 79 | { 80 | let Ok(handle) = TypedSingleHandle::try_allocate(store) else { 81 | return Err(AllocError); 82 | }; 83 | 84 | Ok(Self(handle)) 85 | } 86 | 87 | /// Allocates a new handle, with enough space for `T`. 88 | /// 89 | /// The allocated memory is zeroed out. 90 | #[inline(always)] 91 | pub const fn allocate_zeroed(store: &mut S) -> Self 92 | where 93 | S: ~const StoreSingle, 94 | { 95 | Self(TypedSingleHandle::allocate_zeroed(store)) 96 | } 97 | 98 | /// Attempts to allocate a new handle, with enough space for `T`. 99 | /// 100 | /// The allocated memory is zeroed out. 101 | #[inline(always)] 102 | pub const fn try_allocate_zeroed(store: &mut S) -> Result 103 | where 104 | S: ~const StoreSingle, 105 | { 106 | let Ok(handle) = TypedSingleHandle::try_allocate_zeroed(store) else { 107 | return Err(AllocError); 108 | }; 109 | 110 | Ok(Self(handle)) 111 | } 112 | } 113 | 114 | impl UniqueSingleHandle { 115 | /// Creates a handle from raw parts. 116 | /// 117 | /// - If `handle` is valid, and associated to a block of memory which fits an instance of `T`, then the resulting 118 | /// typed handle is valid. 119 | /// - If `handle` is invalid, then the resulting typed handle is invalid. 120 | /// - If `handle` is valid and `metadata` does not fit the block of memory associated with it, then the resulting 121 | /// typed handle is invalid. 
122 | /// 123 | /// # Safety 124 | /// 125 | /// - No copy of `handle` must be used henceforth. 126 | pub const unsafe fn from_raw_parts(handle: H, metadata: TypedMetadata) -> Self { 127 | Self(TypedSingleHandle::from_raw_parts(handle, metadata)) 128 | } 129 | 130 | /// Decomposes a (possibly wide) pointer into its handle and metadata components. 131 | pub const fn to_raw_parts(self) -> (H, TypedMetadata) { 132 | self.0.to_raw_parts() 133 | } 134 | 135 | /// Deallocates the memory associated with the handle. 136 | /// 137 | /// # Safety 138 | /// 139 | /// - `self` must have been allocated by `store`. 140 | /// - `self` must still be valid. 141 | #[inline(always)] 142 | pub const unsafe fn deallocate(self, store: &mut S) 143 | where 144 | S: ~const StoreSingle, 145 | { 146 | // Safety: 147 | // - `self.0` has been allocated by `store`, as per pre-conditions. 148 | // - `self.0` is valid, as per pre-conditions. 149 | unsafe { self.0.deallocate(store) } 150 | } 151 | 152 | /// Resolves the handle to a reference, borrowing the handle. 153 | /// 154 | /// # Safety 155 | /// 156 | /// - `self` must have been allocated by `store`. 157 | /// - `self` must still be valid. 158 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 159 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 160 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 161 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 162 | /// `resolve` calls, may invalidate the reference. 163 | #[inline(always)] 164 | pub const unsafe fn resolve<'a, S>(&'a self, store: &'a S) -> &'a T 165 | where 166 | S: ~const StoreSingle, 167 | { 168 | // Safety: 169 | // - `self.handle` was allocated by `store`, as per pre-conditions. 170 | // - `self.handle` is still valid, as per pre-conditions. 171 | // - `self.handle` is associated with a block of memory containing a live instance of `T`, as per 172 | // pre-conditions. 173 | // - The resulting reference borrows `self` immutably, guaranteeing that no mutable reference exist, nor can 174 | // be creating during its lifetime. 175 | // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving 176 | // or destroying store, though it may still be invalidated by allocating. 177 | unsafe { self.0.resolve(store) } 178 | } 179 | 180 | /// Resolves the handle to a reference, borrowing the handle. 181 | /// 182 | /// # Safety 183 | /// 184 | /// - `self` must have been allocated by `store`. 185 | /// - `self` must still be valid. 186 | /// - `self` must be associated to a block of memory containing a valid instance of `T`. 187 | /// - The reference is only guaranteed to be valid as long as `self` is valid. 188 | /// - The reference is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 189 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 190 | /// `resolve` calls, may invalidate the reference. 191 | #[inline(always)] 192 | pub const unsafe fn resolve_mut<'a, S>(&'a mut self, store: &'a mut S) -> &'a mut T 193 | where 194 | S: ~const StoreSingle, 195 | { 196 | // Safety: 197 | // - `self.handle` was allocated by `store`, as per pre-conditions. 198 | // - `self.handle` is still valid, as per pre-conditions. 
199 | // - `self.handle` is associated with a block of memory containing a live instance of `T`, as per 200 | // pre-conditions. 201 | // - The resulting reference borrows `self` mutably, guaranteeing that no reference exist, nor can be 202 | // created during its lifetime. 203 | // - The resulting reference borrows `store` immutably, guaranteeing it won't be invalidated by moving 204 | // or destroying store, though it may still be invalidated by allocating. 205 | unsafe { self.0.resolve_mut(store) } 206 | } 207 | 208 | /// Resolves the handle to a reference, borrowing the handle. 209 | /// 210 | /// # Safety 211 | /// 212 | /// - `self` must have been allocated by `store`. 213 | /// - `self` must still be valid. 214 | /// - The pointer is only guaranteed to be dereferenceable into a shared reference. 215 | /// - The pointer is only guaranteed to be valid as long as `self` is valid. 216 | /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 217 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 218 | /// `resolve` calls, may invalidate the pointer. 219 | #[inline(always)] 220 | pub const unsafe fn resolve_raw(&self, store: &S) -> NonNull 221 | where 222 | S: ~const StoreSingle, 223 | { 224 | // Safety: 225 | // - `self.handle` was allocated by `store`, as per pre-conditions. 226 | // - `self.handle` is still valid, as per pre-conditions. 227 | unsafe { self.0.resolve_raw(store) } 228 | } 229 | 230 | /// Resolves the handle to a reference, borrowing the handle. 231 | /// 232 | /// # Safety 233 | /// 234 | /// - `self` must have been allocated by `store`. 235 | /// - `self` must still be valid. 236 | /// - The pointer is only guaranteed to be valid as long as `self` is valid. 237 | /// - The pointer is only guaranteed to be valid as long as pointers resolved from `self` are not invalidated. 238 | /// Most notably, unless `store` implements `StoreStable`, any method call on `store`, including other 239 | /// `resolve` calls, may invalidate the pointer. 240 | #[inline(always)] 241 | pub const unsafe fn resolve_raw_mut(&self, store: &mut S) -> NonNull 242 | where 243 | S: ~const StoreSingle, 244 | { 245 | // Safety: 246 | // - `self.handle` was allocated by `store`, as per pre-conditions. 247 | // - `self.handle` is still valid, as per pre-conditions. 248 | unsafe { self.0.resolve_raw_mut(store) } 249 | } 250 | 251 | /// Coerces the handle into another. 252 | #[inline(always)] 253 | pub const fn coerce(self) -> UniqueSingleHandle 254 | where 255 | T: Unsize, 256 | { 257 | UniqueSingleHandle(self.0.coerce()) 258 | } 259 | } 260 | 261 | impl UniqueSingleHandle<[T], H> { 262 | /// Creates a dangling handle. 263 | /// 264 | /// Calls `handle_alloc_error` on allocation failure. 265 | #[inline(always)] 266 | pub const fn dangling_slice(store: &S) -> Self 267 | where 268 | S: ~const StoreDangling, 269 | { 270 | Self(TypedSingleHandle::dangling_slice(store)) 271 | } 272 | 273 | /// Attempts to create a dangling handle. 274 | /// 275 | /// Returns an error on allocation failure. 276 | #[inline(always)] 277 | pub const fn try_dangling_slice(store: &S) -> Result 278 | where 279 | S: ~const StoreDangling, 280 | { 281 | let Ok(handle) = TypedSingleHandle::try_dangling_slice(store) else { 282 | return Err(AllocError); 283 | }; 284 | 285 | Ok(Self(handle)) 286 | } 287 | 288 | /// Allocates a new handle, with enough space for `size` elements `T`. 
289 | /// 290 | /// The allocated memory is left uninitialized. 291 | #[inline(always)] 292 | pub const fn allocate_slice(size: usize, store: &mut S) -> Self 293 | where 294 | S: ~const StoreSingle + ~const StoreDangling, 295 | { 296 | Self(TypedSingleHandle::allocate_slice(size, store)) 297 | } 298 | 299 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 300 | /// 301 | /// The allocated memory is left uninitialized. 302 | #[inline(always)] 303 | pub const fn try_allocate_slice(size: usize, store: &mut S) -> Result 304 | where 305 | S: ~const StoreSingle + ~const StoreDangling, 306 | { 307 | let Ok(handle) = TypedSingleHandle::try_allocate_slice(size, store) else { 308 | return Err(AllocError); 309 | }; 310 | 311 | Ok(Self(handle)) 312 | } 313 | 314 | /// Allocates a new handle, with enough space for `size` elements `T`. 315 | /// 316 | /// The allocated memory is zeroed out. 317 | #[inline(always)] 318 | pub const fn allocate_zeroed_slice(size: usize, store: &mut S) -> Self 319 | where 320 | S: ~const StoreSingle + ~const StoreDangling, 321 | { 322 | Self(TypedSingleHandle::allocate_zeroed_slice(size, store)) 323 | } 324 | 325 | /// Attempts to allocate a new handle, with enough space for `size` elements `T`. 326 | /// 327 | /// The allocated memory is zeroed out. 328 | #[inline(always)] 329 | pub const fn try_allocate_zeroed_slice(size: usize, store: &mut S) -> Result 330 | where 331 | S: ~const StoreSingle + ~const StoreDangling, 332 | { 333 | let Ok(handle) = TypedSingleHandle::try_allocate_zeroed_slice(size, store) else { 334 | return Err(AllocError); 335 | }; 336 | 337 | Ok(Self(handle)) 338 | } 339 | 340 | /// Returns whether the memory area associated to `self` may not contain any element. 341 | pub const fn is_empty(&self) -> bool { 342 | self.0.is_empty() 343 | } 344 | 345 | /// Returns the number of elements the memory area associated to `self` may contain. 346 | pub const fn len(&self) -> usize { 347 | self.0.len() 348 | } 349 | 350 | /// Grows the block of memory associated with the handle. 351 | /// 352 | /// On success, the extra memory is left uninitialized. On failure, an error is returned. 353 | /// 354 | /// # Safety 355 | /// 356 | /// - `self` must have been allocated by `store`. 357 | /// - `self` must still be valid. 358 | /// - `new_size` must be greater than or equal to `self.len()`. 359 | pub const unsafe fn grow(&mut self, new_size: usize, store: &mut S) 360 | where 361 | S: ~const StoreSingle, 362 | { 363 | // Safety: 364 | // - `self.0` has been allocated by `store`, as per pre-conditions. 365 | // - `self.0` is still valid, as per pre-conditions. 366 | // - `new_size` is greater than or equal to `self.0.len()`. 367 | unsafe { self.0.grow(new_size, store) } 368 | } 369 | 370 | /// Attempts to grow the block of memory associated with the handle. 371 | /// 372 | /// On success, the extra memory is left uninitialized. On failure, an error is returned. 373 | /// 374 | /// # Safety 375 | /// 376 | /// - `self` must have been allocated by `store`. 377 | /// - `self` must still be valid. 378 | /// - `new_size` must be greater than or equal to `self.len()`. 379 | pub const unsafe fn try_grow(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 380 | where 381 | S: ~const StoreSingle, 382 | { 383 | // Safety: 384 | // - `self.0` has been allocated by `store`, as per pre-conditions. 385 | // - `self.0` is still valid, as per pre-conditions. 386 | // - `new_size` is greater than or equal to `self.0.len()`. 
387 | unsafe { self.0.try_grow(new_size, store) } 388 | } 389 | 390 | /// Grows the block of memory associated with the handle. 391 | /// 392 | /// On success, the extra memory is zeroed. On failure, an error is returned. 393 | /// 394 | /// # Safety 395 | /// 396 | /// - `self` must have been allocated by `store`. 397 | /// - `self` must still be valid. 398 | /// - `new_size` must be greater than or equal to `self.len()`. 399 | pub const unsafe fn grow_zeroed(&mut self, new_size: usize, store: &mut S) 400 | where 401 | S: ~const StoreSingle, 402 | { 403 | // Safety: 404 | // - `self.0` has been allocated by `store`, as per pre-conditions. 405 | // - `self.0` is still valid, as per pre-conditions. 406 | // - `new_size` is greater than or equal to `self.0.len()`. 407 | unsafe { self.0.grow_zeroed(new_size, store) } 408 | } 409 | 410 | /// Attempts to grow the block of memory associated with the handle. 411 | /// 412 | /// On success, the extra memory is zeroed. On failure, an error is returned. 413 | /// 414 | /// # Safety 415 | /// 416 | /// - `self` must have been allocated by `store`. 417 | /// - `self` must still be valid. 418 | /// - `new_size` must be greater than or equal to `self.len()`. 419 | pub const unsafe fn try_grow_zeroed(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 420 | where 421 | S: ~const StoreSingle, 422 | { 423 | // Safety: 424 | // - `self.0` has been allocated by `store`, as per pre-conditions. 425 | // - `self.0` is still valid, as per pre-conditions. 426 | // - `new_size` is greater than or equal to `self.0.len()`. 427 | unsafe { self.0.try_grow_zeroed(new_size, store) } 428 | } 429 | 430 | /// Shrinks the block of memory associated with the handle. 431 | /// 432 | /// On failure, an error is returned. 433 | /// 434 | /// # Safety 435 | /// 436 | /// - `self` must have been allocated by `store`. 437 | /// - `self` must still be valid. 438 | /// - `new_size` must be less than or equal to `self.len()`. 439 | pub const unsafe fn shrink(&mut self, new_size: usize, store: &mut S) 440 | where 441 | S: ~const StoreSingle, 442 | { 443 | // Safety: 444 | // - `self.0` has been allocated by `store`, as per pre-conditions. 445 | // - `self.0` is still valid, as per pre-conditions. 446 | // - `new_size` is less than or equal to `self.0.len()`. 447 | unsafe { self.0.shrink(new_size, store) } 448 | } 449 | 450 | /// Shrinks the block of memory associated with the handle. 451 | /// 452 | /// On failure, an error is returned. 453 | /// 454 | /// # Safety 455 | /// 456 | /// - `self` must have been allocated by `store`. 457 | /// - `self` must still be valid. 458 | /// - `new_size` must be less than or equal to `self.len()`. 459 | pub const unsafe fn try_shrink(&mut self, new_size: usize, store: &mut S) -> Result<(), AllocError> 460 | where 461 | S: ~const StoreSingle, 462 | { 463 | // Safety: 464 | // - `self.0` has been allocated by `store`, as per pre-conditions. 465 | // - `self.0` is still valid, as per pre-conditions. 466 | // - `new_size` is less than or equal to `self.0.len()`. 467 | unsafe { self.0.try_shrink(new_size, store) } 468 | } 469 | } 470 | 471 | #[cfg(feature = "coercible-metadata")] 472 | impl CoerceUnsized> for UniqueSingleHandle where T: Unsize {} 473 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Store API, for greater flexibility. 2 | //! 3 | //! 
This project aims at exploring the possibility of a different API for allocation, providing greater flexibility 4 | //! than `Allocator`. 5 | //! 6 | //! This project does NOT aim at displacing `Allocator`, but rather at providing a lower-level abstraction layer for 7 | //! when greater flexibility is required. Zero-Cost compatibility with `Allocator` is desired, so that collections can 8 | //! be implemented in terms of `Store`, but used with an `Allocator` easily. 9 | 10 | #![cfg_attr(not(test), no_std)] 11 | // Features 12 | #![feature(allocator_api)] 13 | #![feature(alloc_layout_extra)] 14 | #![feature(coerce_unsized)] 15 | #![feature(const_alloc_layout)] 16 | #![feature(const_maybe_uninit_as_mut_ptr)] 17 | #![feature(const_mut_refs)] 18 | #![feature(const_ptr_as_ref)] 19 | #![feature(const_refs_to_cell)] 20 | #![feature(const_slice_from_raw_parts_mut)] 21 | #![feature(const_trait_impl)] 22 | #![feature(const_try)] 23 | #![feature(const_ptr_write)] 24 | #![feature(hasher_prefixfree_extras)] 25 | #![feature(layout_for_ptr)] 26 | #![feature(maybe_uninit_write_slice)] 27 | #![feature(offset_of)] 28 | #![feature(never_type)] 29 | #![feature(ptr_alignment_type)] 30 | #![feature(ptr_as_uninit)] 31 | #![feature(ptr_metadata)] 32 | #![feature(slice_ptr_get)] 33 | #![feature(specialization)] 34 | #![feature(strict_provenance)] 35 | #![feature(unsize)] 36 | #![feature(unwrap_infallible)] 37 | // Lints 38 | #![deny(missing_docs)] 39 | #![deny(unsafe_op_in_unsafe_fn)] 40 | #![allow(incomplete_features)] // For specialization. 41 | 42 | #[cfg(feature = "alloc")] 43 | extern crate alloc; 44 | 45 | mod alloc; 46 | pub mod collection; 47 | pub mod extension; 48 | pub mod interface; 49 | pub mod store; 50 | -------------------------------------------------------------------------------- /src/store.rs: -------------------------------------------------------------------------------- 1 | //! Provides implementations of multiple stores or store adapters. 2 | 3 | mod allocator_store; 4 | mod inline_bump_store; 5 | mod inline_single_store; 6 | mod stack_bump_store; 7 | 8 | pub use inline_bump_store::InlineBumpStore; 9 | pub use inline_single_store::InlineSingleStore; 10 | pub use stack_bump_store::{StackBumpBlock, StackBumpStore}; 11 | -------------------------------------------------------------------------------- /src/store/allocator_store.rs: -------------------------------------------------------------------------------- 1 | //! Wraps an allocator to provide a `Store` API. 
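// --- [Editorial example] ----------------------------------------------------
// The adapter below implements the `Store` family for every `Allocator`, with
// a thin `NonNull<u8>` newtype as the handle, so resolution is the identity.
// A sketch of driving an allocator through the `Store` vocabulary; the
// fully-qualified calls disambiguate from the `Allocator` methods of the same
// name. Assumes nightly `allocator_api`.
//
//     fn allocator_through_store() {
//         use core::alloc::Layout;
//         use std::alloc::Global;
//         use storage::interface::Store;
//
//         let store = Global;
//         let layout = Layout::new::<u64>();
//
//         let (handle, size) = Store::allocate(&store, layout).expect("allocation to succeed");
//         assert!(size >= layout.size());
//
//         // Safety: `handle` was just allocated by `store` with `layout`.
//         unsafe { Store::deallocate(&store, handle, layout) };
//     }
// ----------------------------------------------------------------------------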
2 | 3 | use core::{ 4 | alloc::{AllocError, Allocator, Layout}, 5 | ptr::{self, Alignment, NonNull}, 6 | }; 7 | 8 | #[cfg(feature = "alloc")] 9 | use alloc::alloc::Global; 10 | 11 | use crate::interface::{Store, StoreDangling, StorePinning, StoreSingle, StoreStable}; 12 | 13 | #[cfg(feature = "alloc")] 14 | use crate::interface::StoreSharing; 15 | 16 | #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] 17 | pub struct AllocatorHandle(NonNull<u8>); 18 | 19 | unsafe impl Send for AllocatorHandle {} 20 | unsafe impl Sync for AllocatorHandle {} 21 | 22 | impl From<NonNull<u8>> for AllocatorHandle { 23 | fn from(value: NonNull<u8>) -> Self { 24 | Self(value) 25 | } 26 | } 27 | 28 | impl From<AllocatorHandle> for NonNull<u8> { 29 | fn from(value: AllocatorHandle) -> Self { 30 | value.0 31 | } 32 | } 33 | 34 | unsafe impl<A> const StoreDangling for A 35 | where 36 | A: Allocator, 37 | { 38 | type Handle = AllocatorHandle; 39 | 40 | fn dangling(&self, alignment: Alignment) -> Result<Self::Handle, AllocError> { 41 | let pointer = ptr::invalid_mut(alignment.as_usize()); 42 | 43 | // Safety: 44 | // - Non-null, since `alignment` is non-zero. 45 | let pointer = unsafe { NonNull::new_unchecked(pointer) }; 46 | 47 | Ok(AllocatorHandle(pointer)) 48 | } 49 | } 50 | 51 | unsafe impl<A> Store for A 52 | where 53 | A: Allocator, 54 | { 55 | unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> { 56 | handle.into() 57 | } 58 | 59 | fn allocate(&self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> { 60 | Allocator::allocate(self, layout).map(|slice| (slice.as_non_null_ptr().into(), slice.len())) 61 | } 62 | 63 | unsafe fn deallocate(&self, handle: Self::Handle, layout: Layout) { 64 | // Safety: 65 | // - `handle` is valid, as per the pre-conditions of `deallocate`. 66 | // - `layout` fits, as per the pre-conditions of `deallocate`. 67 | unsafe { Allocator::deallocate(self, handle.into(), layout) }; 68 | } 69 | 70 | unsafe fn grow( 71 | &self, 72 | handle: Self::Handle, 73 | old_layout: Layout, 74 | new_layout: Layout, 75 | ) -> Result<(Self::Handle, usize), AllocError> { 76 | // Safety: 77 | // - `handle` is valid, as per the pre-conditions of `grow`. 78 | // - `old_layout` fits, as per the pre-conditions of `grow`. 79 | // - `new_layout.size()` is greater than or equal to `old_layout.size()`, as per the pre-conditions of 80 | // `grow`. 81 | let result = unsafe { Allocator::grow(self, handle.into(), old_layout, new_layout) }; 82 | 83 | result.map(|slice| (slice.as_non_null_ptr().into(), slice.len())) 84 | } 85 | 86 | unsafe fn shrink( 87 | &self, 88 | handle: Self::Handle, 89 | old_layout: Layout, 90 | new_layout: Layout, 91 | ) -> Result<(Self::Handle, usize), AllocError> { 92 | // Safety: 93 | // - `handle` is valid, as per the pre-conditions of `shrink`. 94 | // - `old_layout` fits, as per the pre-conditions of `shrink`. 95 | // - `new_layout.size()` is smaller than or equal to `old_layout.size()`, as per the pre-conditions of 96 | // `shrink`.

unsafe impl<A> StoreSingle for A
where
    A: Allocator,
{
    unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> {
        handle.into()
    }

    unsafe fn resolve_mut(&mut self, handle: Self::Handle) -> NonNull<u8> {
        handle.into()
    }

    fn allocate(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        Allocator::allocate(self, layout).map(|slice| (slice.as_non_null_ptr().into(), slice.len()))
    }

    unsafe fn deallocate(&mut self, handle: Self::Handle, layout: Layout) {
        // Safety:
        // - `handle` is valid, as per the pre-conditions of `deallocate`.
        // - `layout` fits, as per the pre-conditions of `deallocate`.
        unsafe { Allocator::deallocate(self, handle.into(), layout) };
    }

    unsafe fn grow(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - `handle` is valid, as per the pre-conditions of `grow`.
        // - `old_layout` fits, as per the pre-conditions of `grow`.
        // - `new_layout.size()` is greater than or equal to `old_layout.size()`, as per the pre-conditions of
        //   `grow`.
        let result = unsafe { Allocator::grow(self, handle.into(), old_layout, new_layout) };

        result.map(|slice| (slice.as_non_null_ptr().into(), slice.len()))
    }

    unsafe fn shrink(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - `handle` is valid, as per the pre-conditions of `shrink`.
        // - `old_layout` fits, as per the pre-conditions of `shrink`.
        // - `new_layout.size()` is smaller than or equal to `old_layout.size()`, as per the pre-conditions of
        //   `shrink`.
        let result = unsafe { Allocator::shrink(self, handle.into(), old_layout, new_layout) };

        result.map(|slice| (slice.as_non_null_ptr().into(), slice.len()))
    }

    fn allocate_zeroed(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        Allocator::allocate_zeroed(self, layout).map(|slice| (slice.as_non_null_ptr().into(), slice.len()))
    }

    unsafe fn grow_zeroed(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - `handle` is valid, as per the pre-conditions of `grow_zeroed`.
        // - `old_layout` fits, as per the pre-conditions of `grow_zeroed`.
        // - `new_layout.size()` is greater than or equal to `old_layout.size()`, as per the pre-conditions of
        //   `grow_zeroed`.
        let result = unsafe { Allocator::grow_zeroed(self, handle.into(), old_layout, new_layout) };

        result.map(|slice| (slice.as_non_null_ptr().into(), slice.len()))
    }
}

// Safety:
// - `Allocator` allocations are pinned.
unsafe impl<A> StoreStable for A where A: Allocator {}

// Safety:
// - `Allocator` allocations are pinned.
unsafe impl<A> StorePinning for A where A: Allocator {}

// Safety:
// - `Allocator`s are always sharing, today.
#[cfg(feature = "alloc")]
unsafe impl StoreSharing for Global {
    type SharingError = !;

    fn is_sharing_with(&self, _other: &Self) -> bool {
        true
    }

    fn share(&self) -> Result<Self, Self::SharingError> {
        Ok(*self)
    }
}
--------------------------------------------------------------------------------
/src/store/inline_bump_store.rs:
--------------------------------------------------------------------------------
//! A dead simple "bump allocator" Store.
//!
//! This store is suitable for most containers -- such as `Box`, `BTreeMap`, `HashMap`, `List`, and `Vec` -- although
//! in some cases not all of the container's operations may be available: for example, `List::split` and
//! `List::append` will not be.
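//!
//! For illustration (not part of the original file), a minimal sketch with hypothetical handle and block types:
//!
//! ```ignore
//! #![feature(allocator_api)]
//!
//! use core::alloc::Layout;
//!
//! use storage::interface::Store;
//! use storage::store::InlineBumpStore;
//!
//! // `u8` handles; the block is sized and aligned as `[usize; 8]`.
//! let store: InlineBumpStore<u8, [usize; 8]> = InlineBumpStore::default();
//!
//! let (first, _) = Store::allocate(&store, Layout::new::<usize>()).expect("an in-bounds allocation");
//! let (second, _) = Store::allocate(&store, Layout::new::<usize>()).expect("an in-bounds allocation");
//!
//! // A bump store never reuses memory: the handles are distinct offsets into the same block.
//! assert_ne!(first, second);
//! ```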

use core::{
    alloc::{AllocError, Layout},
    cell::{Cell, UnsafeCell},
    fmt,
    mem::MaybeUninit,
    ptr::{self, Alignment, NonNull},
};

use crate::interface::{Store, StoreDangling, StoreSingle, StoreStable};

/// An implementation of `Store` providing a single, inline, block of memory.
///
/// Generic parameters:
///
/// - `H` is the handle type, it must be convertible to and from `usize`.
/// - The block of memory is aligned and sized as per `T`.
pub struct InlineBumpStore<H, T> {
    watermark: Cell<H>,
    memory: UnsafeCell<MaybeUninit<T>>,
}

impl<H, T> InlineBumpStore<H, T>
where
    H: TryFrom<usize>,
{
    fn new() -> Result<Self, AllocError> {
        // Checks, up-front, that the maximum offset -- the size of the block -- is representable by `H`.
        let _ = Self::from_offset(Self::memory_layout().size())?;

        let watermark = Cell::new(Self::from_offset(0)?);
        let memory = UnsafeCell::new(MaybeUninit::uninit());

        Ok(Self { watermark, memory })
    }
}

impl<H, T> Default for InlineBumpStore<H, T>
where
    H: TryFrom<usize>,
{
    fn default() -> Self {
        Self::new().expect("Size of `T` to be representable by `H`")
    }
}

// Cannot be const, because `TryFrom` is not marked #[const_trait].
unsafe impl<H, T> StoreDangling for InlineBumpStore<H, T>
where
    H: Copy + TryFrom<usize>,
{
    type Handle = H;

    fn dangling(&self, alignment: Alignment) -> Result<Self::Handle, AllocError> {
        let layout = Self::memory_layout();

        if alignment.as_usize() > layout.align() {
            return Err(AllocError);
        }

        Self::from_offset(alignment.as_usize())
    }
}

unsafe impl<H, T> Store for InlineBumpStore<H, T>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    fn allocate(&self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        let (result, new_watermark) = Self::compute_offset(self.watermark.get(), layout)?;
        self.watermark.set(new_watermark);

        Ok((result, layout.size()))
    }

    #[inline(always)]
    unsafe fn deallocate(&self, _handle: Self::Handle, _layout: Layout) {}

    #[inline(always)]
    unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> {
        debug_assert!(Self::into_offset(handle) <= Self::memory_layout().size());

        let offset = Self::into_offset(handle);
        let pointer = self.memory.get() as *mut u8;

        // Safety:
        // - `offset` is within bounds of `self.memory`, as `handle` was allocated by `self` as per pre-conditions.
        let pointer = unsafe { pointer.add(offset) };

        // Safety:
        // - `pointer` is non null as `self` is non null.
        unsafe { NonNull::new_unchecked(pointer) }
    }

    unsafe fn grow(
        &self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "{new_layout:?} must have a size greater than or equal to that of {old_layout:?}"
        );

        // As an optimization, if `handle` points to the last allocation, growth may actually occur _in place_.
        {
            let offset = Self::into_offset(handle);
            let watermark = Self::into_offset(self.watermark.get());

            if offset + old_layout.size() == watermark
                && new_layout.align() <= old_layout.align()
                && offset + new_layout.size() <= Self::memory_layout().size()
            {
                let new_watermark = Self::from_offset(watermark - old_layout.size() + new_layout.size())?;
                self.watermark.set(new_watermark);

                return Ok((handle, new_layout.size()));
            }
        }

        self.grow_by_relocation(handle, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn shrink(
        &self,
        handle: Self::Handle,
        old_layout: Layout,
        _new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            _new_layout.size() <= old_layout.size(),
            "{_new_layout:?} must have a size smaller than or equal to that of {old_layout:?}"
        );

        Ok((handle, old_layout.size()))
    }
}
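
// For illustration (not part of the original file): a minimal test sketch exercising the in-place growth
// optimization above. The handle and block types are hypothetical, and this module is not in the original crate.
#[cfg(test)]
mod grow_in_place_sketch {
    use super::*;

    #[test]
    fn grows_last_allocation_in_place() {
        // `u8` handles, a block sized and aligned as `[usize; 8]`.
        let store: InlineBumpStore<u8, [usize; 8]> = InlineBumpStore::default();

        let old_layout = Layout::new::<usize>();
        let new_layout = Layout::array::<usize>(2).expect("a small, valid layout");

        let (handle, _) = Store::allocate(&store, old_layout).expect("an in-bounds allocation");

        // Safety:
        // - `handle` is valid, `old_layout` fits, and `new_layout` is no smaller than `old_layout`.
        let (grown, size) = unsafe { Store::grow(&store, handle, old_layout, new_layout) }.expect("room to grow");

        // The allocation was the last one, so it grew in place: the handle -- an offset -- is unchanged.
        assert_eq!(handle, grown);
        assert_eq!(new_layout.size(), size);
    }
}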

unsafe impl<H, T> StoreSingle for InlineBumpStore<H, T>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    #[inline(always)]
    unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::resolve(self, handle) }
    }

    #[inline(always)]
    unsafe fn resolve_mut(&mut self, handle: Self::Handle) -> NonNull<u8> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::resolve(self, handle) }
    }

    fn allocate(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        <Self as Store>::allocate(self, layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&mut self, _handle: Self::Handle, _layout: Layout) {}

    unsafe fn grow(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::grow(self, handle, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn shrink(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::shrink(self, handle, old_layout, new_layout) }
    }
}

// Safety:
// - `self.resolve(handle)` always returns the same address, as long as `self` doesn't move.
unsafe impl<H, T> StoreStable for InlineBumpStore<H, T> where H: Copy + TryFrom<usize> + TryInto<usize> {}

impl<H, T> fmt::Debug for InlineBumpStore<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let layout = Layout::new::<T>();

        f.debug_struct("InlineBumpStore")
            .field("size", &layout.size())
            .field("align", &layout.align())
            .finish()
    }
}

//
// Implementation
//

impl<H, T> InlineBumpStore<H, T> {
    #[inline(always)]
    const fn memory_layout() -> Layout {
        Layout::new::<T>()
    }
}

impl<H, T> InlineBumpStore<H, T>
where
    H: TryFrom<usize>,
{
    #[inline(always)]
    fn from_offset(offset: usize) -> Result<H, AllocError> {
        debug_assert!(offset <= Self::memory_layout().size());

        offset.try_into().map_err(|_| AllocError)
    }
}

impl<H, T> InlineBumpStore<H, T>
where
    H: TryInto<usize>,
{
    #[inline(always)]
    fn into_offset(handle: H) -> usize {
        let offset = handle.try_into();

        debug_assert!(offset.is_ok());

        // Safety:
        // - `handle` was created from `usize`, hence converting back always succeeds.
        unsafe { offset.unwrap_unchecked() }
    }
}

impl<H, T> InlineBumpStore<H, T>
where
    H: TryFrom<usize> + TryInto<usize>,
{
    // Returns the offset and new watermark of the newly allocated memory block.
    fn compute_offset(watermark: H, layout: Layout) -> Result<(H, H), AllocError> {
        let watermark = Self::into_offset(watermark);
        let memory = Self::memory_layout();

        if layout.align() > memory.align() {
            // Even if the memory block was aligned for the current address of `self.memory`, moving `self` would
            // risk breaking this alignment.

            return Err(AllocError);
        }

        let aligned = {
            // Since `layout.align()` is always a power of 2, aligning to the next multiple of `layout.align()` can
            // be done with this one simple trick.
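            // For instance (illustrative values, not from the original): with `watermark == 5` and
            // `layout.align() == 4`, `alignment_mask == 3`, and `(5 + 3) & !3 == 8`, the next multiple of 4.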
            let alignment_mask = layout.align() - 1;

            (watermark + alignment_mask) & !alignment_mask
        };

        let new_watermark = aligned + layout.size();

        if new_watermark > memory.size() {
            return Err(AllocError);
        }

        let aligned = Self::from_offset(aligned)?;
        let new_watermark = Self::from_offset(new_watermark)?;

        Ok((aligned, new_watermark))
    }
}

impl<H, T> InlineBumpStore<H, T>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    // Slow part of `grow`.
    #[inline(never)]
    fn grow_by_relocation(&self, handle: H, old_layout: Layout, new_layout: Layout) -> Result<(H, usize), AllocError> {
        let (result, new_watermark) = Self::compute_offset(self.watermark.get(), new_layout)?;
        self.watermark.set(new_watermark);

        // Safety:
        // - `handle` is valid, as per pre-conditions.
        // - `result` is valid, since newly allocated.
        let (new, old) = unsafe { (Store::resolve(self, result), Store::resolve(self, handle)) };

        // Safety:
        // - `old` is valid for `old_layout.size()` bytes, as per pre-conditions.
        // - `new` is valid for `old_layout.size()` bytes, since it is valid for `new_layout.size()` bytes and as per
        //   pre-conditions `new_layout.size() >= old_layout.size()`.
        // - `old` and `new` are at least 1-byte aligned.
        // - `old` and `new` point to non-overlapping areas, since `old` points to a memory area prior to the
        //   watermark and `new` points to a memory area past the watermark (as of the beginning of this function),
        //   since `old_layout` fits `old` as per pre-conditions.
        unsafe { ptr::copy_nonoverlapping(old.as_ptr(), new.as_ptr(), old_layout.size()) };

        Ok((result, new_layout.size()))
    }
}
--------------------------------------------------------------------------------
/src/store/inline_single_store.rs:
--------------------------------------------------------------------------------
//! An implementation of `Store` providing a single, inline, block of memory.
//!
//! This store is suitable for `Box`, `Vec`, or `VecDeque`, for example.
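//!
//! For illustration (not part of the original file), a minimal sketch of a single round-trip:
//!
//! ```ignore
//! #![feature(allocator_api)]
//!
//! use core::alloc::Layout;
//!
//! use storage::interface::StoreSingle;
//! use storage::store::InlineSingleStore;
//!
//! let mut store = InlineSingleStore::<u64>::new();
//!
//! // The handle type is `()`: there is only a single block to hand out.
//! let ((), _size) = StoreSingle::allocate(&mut store, Layout::new::<u64>()).expect("layout to fit `u64`");
//!
//! // Safety: the handle was just allocated, with a layout fitting the block.
//! unsafe {
//!     let pointer = StoreSingle::resolve_mut(&mut store, ()).cast::<u64>();
//!     pointer.as_ptr().write(7);
//!     assert_eq!(7, pointer.as_ptr().read());
//! }
//! ```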

use core::{
    alloc::{AllocError, Layout},
    fmt,
    mem::{self, MaybeUninit},
    ptr::{self, Alignment, NonNull},
};

use crate::interface::{StoreDangling, StoreSingle, StoreStable};

/// An implementation of `Store` providing a single, inline, block of memory.
///
/// The block of memory is aligned and sized as per `T`.
pub struct InlineSingleStore<T>(MaybeUninit<T>);

impl<T> InlineSingleStore<T> {
    /// Creates a new instance.
    pub const fn new() -> Self {
        Self(MaybeUninit::uninit())
    }
}

impl<T> Default for InlineSingleStore<T> {
    fn default() -> Self {
        Self::new()
    }
}

unsafe impl<T> const StoreDangling for InlineSingleStore<T> {
    type Handle = ();

    fn dangling(&self, alignment: Alignment) -> Result<Self::Handle, AllocError> {
        if alignment.as_usize() <= Alignment::of::<T>().as_usize() {
            Ok(())
        } else {
            Err(AllocError)
        }
    }
}

unsafe impl<T> const StoreSingle for InlineSingleStore<T> {
    unsafe fn resolve(&self, _handle: Self::Handle) -> NonNull<u8> {
        let pointer = self.0.as_ptr() as *mut T;

        // Safety:
        // - `self` is non null.
        unsafe { NonNull::new_unchecked(pointer) }.cast()
    }

    unsafe fn resolve_mut(&mut self, _handle: Self::Handle) -> NonNull<u8> {
        let pointer = self.0.as_mut_ptr();

        // Safety:
        // - `self` is non null.
        unsafe { NonNull::new_unchecked(pointer) }.cast()
    }

    fn allocate(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        if Self::validate_layout(layout).is_err() {
            return Err(AllocError);
        }

        Ok(((), mem::size_of::<T>()))
    }

    unsafe fn deallocate(&mut self, _handle: Self::Handle, _layout: Layout) {}

    unsafe fn grow(
        &mut self,
        _handle: Self::Handle,
        _old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            new_layout.size() >= _old_layout.size(),
            "`new_layout` must have a size greater than or equal to that of `_old_layout`"
        );

        if Self::validate_layout(new_layout).is_err() {
            return Err(AllocError);
        }

        Ok(((), mem::size_of::<T>()))
    }

    unsafe fn shrink(
        &mut self,
        _handle: Self::Handle,
        _old_layout: Layout,
        _new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            _new_layout.size() <= _old_layout.size(),
            "`_new_layout` must have a size smaller than or equal to that of `_old_layout`"
        );

        Ok(((), mem::size_of::<T>()))
    }

    fn allocate_zeroed(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        if Self::validate_layout(layout).is_err() {
            return Err(AllocError);
        }

        let pointer = self.0.as_mut_ptr() as *mut u8;

        // Safety:
        // - `pointer` is valid, since `self` is valid.
        // - `pointer` points to an area of at least `mem::size_of::<T>()` bytes.
        // - Access to the next `mem::size_of::<T>()` bytes is exclusive.
        unsafe { ptr::write_bytes(pointer, 0, mem::size_of::<T>()) };

        Ok(((), mem::size_of::<T>()))
    }

    unsafe fn grow_zeroed(
        &mut self,
        _handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout` must have a size greater than or equal to that of `old_layout`"
        );

        if Self::validate_layout(new_layout).is_err() {
            return Err(AllocError);
        }

        let pointer = self.0.as_mut_ptr() as *mut u8;

        // Safety:
        // - Both starting and resulting pointers are in bounds of the same allocated object as `old_layout` fits
        //   `pointer`, as per the pre-conditions of `grow_zeroed`.
        // - The offset does not overflow `isize` as `old_layout.size()` does not.
        let pointer = unsafe { pointer.add(old_layout.size()) };

        // Safety:
        // - `pointer` is valid, since `self` is valid.
        // - `pointer` points to an area of at least `mem::size_of::<T>() - old_layout.size()` bytes.
        // - Access to the next `mem::size_of::<T>() - old_layout.size()` bytes is exclusive.
        unsafe { ptr::write_bytes(pointer, 0, mem::size_of::<T>() - old_layout.size()) };

        Ok(((), mem::size_of::<T>()))
    }
}

// Safety:
// - `self.resolve(handle)` always returns the same address, as long as `self` doesn't move.
unsafe impl<T> StoreStable for InlineSingleStore<T> {}

impl<T> fmt::Debug for InlineSingleStore<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let layout = Layout::new::<T>();

        f.debug_struct("InlineSingleStore")
            .field("size", &layout.size())
            .field("align", &layout.align())
            .finish()
    }
}

// Safety:
// - Self-contained, so can be sent across threads safely.
unsafe impl<T> Send for InlineSingleStore<T> {}

// Safety:
// - Immutable (by itself), so can be shared across threads safely.
unsafe impl<T> Sync for InlineSingleStore<T> {}

//
// Implementation
//

impl<T> InlineSingleStore<T> {
    const fn validate_layout(layout: Layout) -> Result<(), AllocError> {
        let own = Layout::new::<T>();

        if layout.align() <= own.align() && layout.size() <= own.size() {
            Ok(())
        } else {
            Err(AllocError)
        }
    }
}
--------------------------------------------------------------------------------
/src/store/stack_bump_store.rs:
--------------------------------------------------------------------------------
//! A dead simple "bump allocator" Store.
//!
//! A store which references a stack-allocated or statically allocated fixed-size block of memory. Multiple instances
//! may reference the same block, and all instances referencing the same block are fungible.
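//!
//! For illustration (not part of the original file), a minimal sketch with hypothetical handle and block types:
//!
//! ```ignore
//! #![feature(allocator_api)]
//!
//! use core::alloc::Layout;
//!
//! use storage::interface::{Store, StoreSharing};
//! use storage::store::StackBumpBlock;
//!
//! // A stack-allocated block, sized and aligned as `[usize; 32]`.
//! let block = StackBumpBlock::<[usize; 32]>::new();
//!
//! // Two stores referencing the same block: they are fungible.
//! let first = block.create_store::<u16>();
//! let second = block.create_store::<u16>();
//!
//! assert!(first.is_sharing_with(&second));
//!
//! // Either store draws from the shared watermark, and either can resolve the handle.
//! let (handle, _) = Store::allocate(&first, Layout::new::<usize>()).expect("an in-bounds allocation");
//!
//! // Safety: `handle` was allocated from the same block that `second` references.
//! let _pointer = unsafe { Store::resolve(&second, handle) };
//! ```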

use core::{
    alloc::{AllocError, Layout},
    cell::{Cell, UnsafeCell},
    fmt,
    marker::PhantomData,
    mem::{self, MaybeUninit},
    ptr::{self, Alignment, NonNull},
};

use crate::interface::{Store, StoreDangling, StorePinning, StoreSharing, StoreSingle, StoreStable};

/// The backing block of memory for the store.
///
/// Generic parameters:
///
/// - The block of memory is aligned and sized as per `T`.
pub struct StackBumpBlock<T> {
    watermark: Cell<usize>,
    memory: UnsafeCell<MaybeUninit<T>>,
}

impl<T> StackBumpBlock<T> {
    /// Creates a new, empty, block.
    pub fn new() -> Self {
        let watermark = Cell::new(0);
        let memory = UnsafeCell::new(MaybeUninit::uninit());

        Self { watermark, memory }
    }

    /// Creates a new store referencing this block.
    pub fn create_store<H>(&self) -> StackBumpStore<'_, H> {
        let watermark = &self.watermark;

        let memory = {
            let length = mem::size_of::<T>();
            let address = NonNull::from(&self.memory).cast();

            NonNull::slice_from_raw_parts(address, length)
        };

        let _marker = PhantomData;

        StackBumpStore {
            watermark,
            memory,
            _marker,
        }
    }
}

impl<T> Default for StackBumpBlock<T> {
    fn default() -> Self {
        Self::new()
    }
}

/// A store instance referencing its block.
///
/// Generic parameters:
///
/// - `H` is the handle type, it must be convertible to and from `usize`.
pub struct StackBumpStore<'a, H> {
    watermark: &'a Cell<usize>,
    memory: NonNull<[u8]>,
    _marker: PhantomData<fn(H) -> H>,
}

// Cannot be const, because `TryFrom` is not marked #[const_trait].
unsafe impl<'a, H> StoreDangling for StackBumpStore<'a, H>
where
    H: Copy + TryFrom<usize>,
{
    type Handle = H;

    fn dangling(&self, alignment: Alignment) -> Result<Self::Handle, AllocError> {
        Self::from_offset(alignment.as_usize())
    }
}

unsafe impl<'a, H> Store for StackBumpStore<'a, H>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    fn allocate(&self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        let (result, new_watermark) = self.compute_offset(layout)?;
        self.watermark.set(new_watermark);

        Ok((result, layout.size()))
    }

    #[inline(always)]
    unsafe fn deallocate(&self, _handle: Self::Handle, _layout: Layout) {}

    #[inline(always)]
    unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> {
        debug_assert!(Self::into_offset(handle) <= self.memory.len());

        let offset = Self::into_offset(handle);
        let pointer = self.memory.as_mut_ptr();

        // Safety:
        // - `offset` is within bounds of `self.memory`, as `handle` was allocated by `self` as per pre-conditions.
        let pointer = unsafe { pointer.add(offset) };

        // Safety:
        // - `pointer` is non null as `self` is non null.
        unsafe { NonNull::new_unchecked(pointer) }
    }

    unsafe fn grow(
        &self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "{new_layout:?} must have a size greater than or equal to that of {old_layout:?}"
        );

        // As an optimization, if `handle` points to the last allocation, growth may actually occur _in place_.
        {
            let offset = Self::into_offset(handle);
            let watermark = self.watermark.get();

            if offset + old_layout.size() == watermark
                && new_layout.align() <= old_layout.align()
                && offset + new_layout.size() <= self.memory.len()
            {
                let new_watermark = watermark - old_layout.size() + new_layout.size();
                self.watermark.set(new_watermark);

                return Ok((handle, new_layout.size()));
            }
        }

        self.grow_by_relocation(handle, old_layout, new_layout)
    }

    #[inline(always)]
    unsafe fn shrink(
        &self,
        handle: Self::Handle,
        old_layout: Layout,
        _new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        debug_assert!(
            _new_layout.size() <= old_layout.size(),
            "{_new_layout:?} must have a size smaller than or equal to that of {old_layout:?}"
        );

        Ok((handle, old_layout.size()))
    }
}

unsafe impl<'a, H> StoreSingle for StackBumpStore<'a, H>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    #[inline(always)]
    unsafe fn resolve(&self, handle: Self::Handle) -> NonNull<u8> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::resolve(self, handle) }
    }

    #[inline(always)]
    unsafe fn resolve_mut(&mut self, handle: Self::Handle) -> NonNull<u8> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::resolve(self, handle) }
    }

    fn allocate(&mut self, layout: Layout) -> Result<(Self::Handle, usize), AllocError> {
        <Self as Store>::allocate(self, layout)
    }

    #[inline(always)]
    unsafe fn deallocate(&mut self, _handle: Self::Handle, _layout: Layout) {}

    unsafe fn grow(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::grow(self, handle, old_layout, new_layout) }
    }

    #[inline(always)]
    unsafe fn shrink(
        &mut self,
        handle: Self::Handle,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<(Self::Handle, usize), AllocError> {
        // Safety:
        // - As per pre-conditions.
        unsafe { <Self as Store>::shrink(self, handle, old_layout, new_layout) }
    }
}

// Safety:
// - `self.resolve(handle)` always returns the same address.
unsafe impl<'a, H> StoreStable for StackBumpStore<'a, H> where H: Copy + TryFrom<usize> + TryInto<usize> {}

// Safety:
// - `self.resolve(handle)` always returns the same address.
unsafe impl<'a, H> StorePinning for StackBumpStore<'a, H> where H: Copy + TryFrom<usize> + TryInto<usize> {}

// Safety:
// - All instances referencing the same `StackBumpBlock` are fungible.
unsafe impl<'a, H> StoreSharing for StackBumpStore<'a, H>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    type SharingError = !;

    fn is_sharing_with(&self, other: &Self) -> bool {
        self.memory == other.memory
    }

    fn share(&self) -> Result<Self, Self::SharingError>
    where
        Self: Sized,
    {
        let watermark = self.watermark;
        let memory = self.memory;
        let _marker = PhantomData;

        Ok(Self {
            watermark,
            memory,
            _marker,
        })
    }
}

impl<'a, H> fmt::Debug for StackBumpStore<'a, H> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.debug_struct("StackBumpStore")
            .field("watermark", &self.watermark)
            .field("memory", &self.memory.len())
            .finish()
    }
}

//
// Implementation
//

impl<'a, H> StackBumpStore<'a, H>
where
    H: TryFrom<usize>,
{
    #[inline(always)]
    fn from_offset(offset: usize) -> Result<H, AllocError> {
        offset.try_into().map_err(|_| AllocError)
    }
}

impl<'a, H> StackBumpStore<'a, H>
where
    H: TryInto<usize>,
{
    #[inline(always)]
    fn into_offset(handle: H) -> usize {
        let offset = handle.try_into();

        debug_assert!(offset.is_ok());

        // Safety:
        // - `handle` was created from `usize`, hence converting back always succeeds.
        unsafe { offset.unwrap_unchecked() }
    }
}

impl<'a, H> StackBumpStore<'a, H>
where
    H: TryFrom<usize> + TryInto<usize>,
{
    // Returns the offset and new watermark of the newly allocated memory block.
    fn compute_offset(&self, layout: Layout) -> Result<(H, usize), AllocError> {
        let watermark = self.watermark.get();

        let aligned = {
            // Since `layout.align()` is always a power of 2, aligning to the next multiple of `layout.align()` can
            // be done with this one simple trick.
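            // For instance (illustrative values, not from the original): with `watermark == 9` and
            // `layout.align() == 8`, `alignment_mask == 7`, and `(9 + 7) & !7 == 16`, the next multiple of 8.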
            let alignment_mask = layout.align() - 1;

            (watermark + alignment_mask) & !alignment_mask
        };

        let new_watermark = aligned + layout.size();

        if new_watermark > self.memory.len() {
            return Err(AllocError);
        }

        let aligned = Self::from_offset(aligned)?;

        Ok((aligned, new_watermark))
    }
}

impl<'a, H> StackBumpStore<'a, H>
where
    H: Copy + TryFrom<usize> + TryInto<usize>,
{
    // Slow part of `grow`.
    #[inline(never)]
    fn grow_by_relocation(&self, handle: H, old_layout: Layout, new_layout: Layout) -> Result<(H, usize), AllocError> {
        let (result, new_watermark) = self.compute_offset(new_layout)?;
        self.watermark.set(new_watermark);

        // Safety:
        // - `handle` is valid, as per pre-conditions.
        // - `result` is valid, since newly allocated.
        let (new, old) = unsafe { (Store::resolve(self, result), Store::resolve(self, handle)) };

        // Safety:
        // - `old` is valid for `old_layout.size()` bytes, as per pre-conditions.
        // - `new` is valid for `old_layout.size()` bytes, since it is valid for `new_layout.size()` bytes and as per
        //   pre-conditions `new_layout.size() >= old_layout.size()`.
        // - `old` and `new` are at least 1-byte aligned.
        // - `old` and `new` point to non-overlapping areas, since `old` points to a memory area prior to the
        //   watermark and `new` points to a memory area past the watermark (as of the beginning of this function),
        //   since `old_layout` fits `old` as per pre-conditions.
        unsafe { ptr::copy_nonoverlapping(old.as_ptr(), new.as_ptr(), old_layout.size()) };

        Ok((result, new_layout.size()))
    }
}
--------------------------------------------------------------------------------