├── .github
│   └── workflows
│       └── actions.yml
├── .gitignore
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── benches
│   └── bench.rs
├── emplacable
│   ├── Cargo.toml
│   ├── README.md
│   ├── src
│   │   └── lib.rs
│   └── tests
│       └── test.rs
├── examples
│   └── rust-book-17-02.rs
├── src
│   ├── bin
│   │   └── stress.rs
│   ├── helper.rs
│   ├── helper
│   │   ├── valid_align.rs
│   │   └── valid_size.rs
│   ├── inner.rs
│   ├── inner
│   │   ├── aligned.rs
│   │   ├── sized.rs
│   │   └── unaligned.rs
│   ├── lib.rs
│   └── marker.rs
└── tests
    └── test.rs

--------------------------------------------------------------------------------
/.github/workflows/actions.yml:
--------------------------------------------------------------------------------
name: Run tests

on:
  push:
    branches: ["master"]
  pull_request:
    branches: ["master"]
  schedule:
    - cron: "25 22 * * 1"

jobs:
  test:
    runs-on: ubuntu-latest

    env:
      CARGO_INCREMENTAL: 0
      CARGO_NET_GIT_FETCH_WITH_CLI: true
      CARGO_NET_RETRY: 10
      CARGO_TERM_COLOR: always
      RUST_BACKTRACE: 1
      RUSTFLAGS: -D warnings
      RUSTDOCFLAGS: -D warnings
      RUSTUP_MAX_RETRIES: 10
      ASAN_OPTIONS: detect_leaks=1

    steps:
      - uses: actions/checkout@v4

      - name: Install Rust toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          components: clippy, miri, rustfmt
          target: x86_64-unknown-linux-gnu
          toolchain: nightly

      - name: Check formatting
        run: cargo fmt --all --check

      - name: Run clippy
        run: cargo clippy --all --all-targets

      - name: Check docs
        run: cargo doc --all

      - name: Run tests in release mode
        run: cargo test --all --all-targets --release

      - name: Run tests with miri
        run: cargo miri test -p emplacable

      - name: Install cargo careful
        run: cargo install cargo-careful

      - name: Run tests with careful
        run: cargo careful test --all --all-targets -Zcareful-sanitizer

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/target
Cargo.lock

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "unsized-vec"
version = "0.0.2-alpha.13"
edition = "2024"
authors = ["Jules Bertholet "]
description = "Like Vec, but for unsized values"
repository = "https://github.com/Jules-Bertholet/unsized-vec/"
license = "MIT OR Apache-2.0"
keywords = ["vec", "vector", "unsized"]
categories = ["data-structures", "no-std"]

[workspace]
members = ["emplacable"]

[workspace.lints.rust]
missing_docs = "warn"
rust_2018_idioms = { level = "warn", priority = -1 }
unsafe_op_in_unsafe_fn = "forbid"

[workspace.lints.clippy]
semicolon_if_nothing_returned = "warn"
undocumented_unsafe_blocks = "warn"

[dependencies]
emplacable = { path = "emplacable", version = "0.1.0-alpha.16", default-features = false, features = [
    "alloc",
] }
serde = { version = "1.0.193", optional = true, default-features = false }

[profile.bench]
codegen-units = 1
debug = true
lto = "fat"

[package.metadata.docs.rs]
all-features = true

[badges]
maintenance = { status = "experimental" }

--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.
   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2022 Jules Bertholet

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# `unsized-vec`

[![docs.rs](https://img.shields.io/docsrs/unsized-vec)](https://docs.rs/unsized-vec/) [![Crates.io](https://img.shields.io/crates/v/unsized-vec)](https://crates.io/crates/unsized-vec)

Say goodbye to `Vec<Box<T>>`! Cut down on your heap allocations.
`UnsizedVec<T>` is like [`Vec<T>`](https://doc.rust-lang.org/alloc/vec/struct.Vec.html), but `T` can be `?Sized`.

## Features

- Familiar `Vec` API.
- Same time complexity as `alloc::vec::Vec` for major operations (indexing, push, pop, insert, remove).
  - When `T`'s alignment is not known at compile time (e.g. `T` is a trait object), this rule has one exception,
    explained in the crate docs.
- For `T: Sized`, `UnsizedVec<T>` compiles to a newtype around `alloc::vec::Vec<T>`, and can be trivially
  converted to/from it (see the sketch just after this list).
- For unsized `T`, there are two heap allocations: one for the elements, and one for the pointer metadata.
- `#[no_std]` (but requires `alloc`).
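
For example, here is what the sized round trip looks like (a minimal sketch; it assumes the `From` impls in both directions that the conversion bullet above implies):

```rust
use unsized_vec::UnsizedVec;

// For sized element types, `UnsizedVec<u32>` is a newtype around
// `Vec<u32>`, so these conversions are trivial and don't reallocate.
let sized: Vec<u32> = vec![1, 2, 3];
let unsized_vec: UnsizedVec<u32> = sized.into();
let round_tripped: Vec<u32> = unsized_vec.into();
assert_eq!(round_tripped, [1, 2, 3]);
```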

## Drawbacks

- Invariant in `T`.
- Experimental, nightly-only.

## Example

```rust
#![allow(internal_features)] // for `unsized_fn_params`
#![feature(unsized_fn_params)]

use core::fmt::Debug;

use emplacable::box_new_with;
use unsized_vec::{unsize_vec, UnsizedVec};

fn main() {
    let mut vec: UnsizedVec<dyn Debug> = unsize_vec![27.53_f32, "oh the places we'll go", Some(())];

    for traitobj in &vec {
        dbg!(traitobj);
    }

    assert_eq!(vec.len(), 3);

    let maybe_popped: Option<Box<dyn Debug>> = vec.pop_into().map(box_new_with);
    let popped = maybe_popped.unwrap();
    dbg!(&*popped);

    assert_eq!(vec.len(), 2);
}
```

## License

`unsized-vec` is distributed under the terms of both the MIT license and the Apache License (Version 2.0).

See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details.

--------------------------------------------------------------------------------
/benches/bench.rs:
--------------------------------------------------------------------------------
#![feature(test)]

extern crate test;

use std::hint::black_box;

use test::bench::Bencher;
use unsized_vec::UnsizedVec;

const ARR0: [i32; 0] = [];
const ARR1: [i32; 1] = [23];
const ARR2: [i32; 2] = [-4, 27];
const ARR3: [i32; 3] = [-4, 27, 31];

const ARR4: [i32; 4] = [-4, 27, 31, 42];

const ARR13: [i32; 13] = [
    -4, 27, 31, 42, 43, 342, 2342, -324, 234, 234, 65, 123, 32465532,
];

#[bench]
fn test_push_arrays(b: &mut Bencher) {
    b.iter(|| {
        let mut v: UnsizedVec<[i32]> = UnsizedVec::new();

        for _ in 0..255 {
            v.push_unsize(black_box(ARR0));
            v.push_unsize(black_box(ARR1));
            v.push_unsize(black_box(ARR2));
            v.push_unsize(black_box(ARR3));
            v.push_unsize(black_box(ARR4));
            v.push_unsize(black_box(ARR13));
        }

        black_box(v);
    });
}

#[bench]
fn test_push_arrays_boxed(b: &mut Bencher) {
    b.iter(|| {
        let mut v: Vec<Box<[i32]>> = Vec::new();

        for _ in 0..255 {
            v.push(Box::new(black_box(ARR0)));
            v.push(Box::new(black_box(ARR1)));
            v.push(Box::new(black_box(ARR2)));
            v.push(Box::new(black_box(ARR3)));
            v.push(Box::new(black_box(ARR4)));
            v.push(Box::new(black_box(ARR13)));
        }

        black_box(v);
    });
}

#[bench]
fn test_push_arrays_preallocated(b: &mut Bencher) {
    b.iter(|| {
        let mut v: UnsizedVec<[i32]> =
            UnsizedVec::with_capacity_bytes(256 * 6, 256 * 4 * (13 + 4 + 3 + 2 + 1));

        for _ in 0..255 {
            v.push_unsize(black_box(ARR0));
            v.push_unsize(black_box(ARR1));
            v.push_unsize(black_box(ARR2));
            v.push_unsize(black_box(ARR3));
            v.push_unsize(black_box(ARR4));
            v.push_unsize(black_box(ARR13));
        }

        black_box(v);
    });
}

#[bench]
fn test_push_arrays_boxed_preallocated(b: &mut Bencher) {
    b.iter(|| {
        let mut v: Vec<Box<[i32]>> = Vec::with_capacity(6 * 256);
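
        // Six boxes are pushed per loop iteration below; reserving all
        // 6 * 256 slots up front keeps `Vec` reallocation out of the
        // measured loop.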
        for _ in 0..255 {
            v.push(Box::new(black_box(ARR0)));
            v.push(Box::new(black_box(ARR1)));
            v.push(Box::new(black_box(ARR2)));
            v.push(Box::new(black_box(ARR3)));
            v.push(Box::new(black_box(ARR4)));
            v.push(Box::new(black_box(ARR13)));
        }

        black_box(v);
    });
}

--------------------------------------------------------------------------------
/emplacable/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "emplacable"
version = "0.1.0-alpha.16"
edition = "2021"
authors = ["Jules Bertholet "]
description = "Return unsized values from functions"
repository = "https://github.com/Jules-Bertholet/unsized-vec/"
license = "MIT OR Apache-2.0"
keywords = ["unsized", "placement-new"]
categories = ["memory-management", "rust-patterns", "no-std"]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = ["std"]
alloc = []
std = ["alloc"]

[badges]
maintenance = { status = "experimental" }

--------------------------------------------------------------------------------
/emplacable/README.md:
--------------------------------------------------------------------------------
# `emplacable`

[![docs.rs](https://img.shields.io/docsrs/emplacable)](https://docs.rs/emplacable/) [![Crates.io](https://img.shields.io/crates/v/emplacable)](https://crates.io/crates/emplacable)

Return values of unsized types, like `[i32]` or `dyn Any`, from functions,
with a mechanism similar to placement new.

Written to support the [`unsized-vec`](https://crates.io/crates/unsized-vec) crate, but is independent of it.
Experimental, and requires nightly Rust.
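
A small, illustrative sketch of the core workflow (based on the conversions exercised in `tests/test.rs`):

```rust
use emplacable::{Emplacable, box_new_with};

fn main() {
    // An `Emplacable<str, _>` is a recipe for writing a `str` into place;
    // `box_new_with` allocates a `Box<str>` of the right size and runs
    // the recipe directly against the new allocation.
    let recipe: Emplacable<str, _> = "hello world".into();
    let boxed: Box<str> = box_new_with(recipe);
    assert_eq!(&*boxed, "hello world");
}
```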
--------------------------------------------------------------------------------
/emplacable/tests/test.rs:
--------------------------------------------------------------------------------
use std::{
    ffi::{CStr, OsStr, OsString},
    path::{Path, PathBuf},
};

use emplacable::*;

#[test]
fn into_impls() {
    let a: &[Box<i32>] = &[Box::new(1), Box::new(2), Box::new(3), Box::new(4)];
    let _: Box<[Box<i32>]> = box_new_with(a.into());

    let a: &str = "iiiiii";
    let _: Box<str> = box_new_with(a.into());

    let a: &CStr = c"hiii";
    let _: Box<CStr> = box_new_with(a.into());

    let a: &OsStr = &OsString::from("a");
    let _: Box<OsStr> = box_new_with(a.into());

    let a: &Path = &PathBuf::from("a");
    let _: Box<Path> = box_new_with(a.into());

    let a: &str = "iiiiii";
    let e: Emplacable<str, _> = a.into();
    let b: Box<[u8]> = box_new_with(e.into());

    let _: Box<[u8]> = box_new_with(b.into());

    let v: Vec<Box<i32>> = vec![Box::new(1), Box::new(2), Box::new(3), Box::new(4)];
    let _: Box<[Box<i32>]> = box_new_with(v.into());

    let a: [Box<i32>; 4] = [Box::new(1), Box::new(2), Box::new(3), Box::new(4)];
    let e: Emplacable<[Box<i32>; 4], _> = a.into();
    let _: Box<[Box<i32>]> = box_new_with(e.into());

    let a: [Emplacable<i32, _>; 4] = [
        Box::new(1).into(),
        Box::new(2).into(),
        Box::new(3).into(),
        Box::new(4).into(),
    ];

    let _: Box<[i32; 4]> = box_new_with(a.into());
}

--------------------------------------------------------------------------------
/examples/rust-book-17-02.rs:
--------------------------------------------------------------------------------
//! The example from [section 17.2 of the Rust book][0],
//! modified to use `UnsizedVec` instead of `Vec<Box<dyn Draw>>`.
//!
//! [0]: https://doc.rust-lang.org/book/ch17-02-trait-objects.html

#![allow(
    dead_code,
    internal_features, // for `unsized_fn_params`
)]
#![feature(allocator_api, ptr_metadata, unsized_fn_params)]

use emplacable::by_value_str;
use unsized_vec::{UnsizedVec, unsize_vec, unsized_vec};

mod gui {
    //! lib.rs

    use unsized_vec::UnsizedVec;

    pub trait Draw {
        fn draw(&self);
    }

    pub struct Screen {
        pub components: UnsizedVec<dyn Draw>,
    }

    impl Screen {
        pub fn run(&self) {
            for component in self.components.iter() {
                component.draw();
            }
        }
    }

    pub struct Button {
        pub width: u32,
        pub height: u32,
        pub label: Box<str>,
    }

    impl Draw for Button {
        fn draw(&self) {
            // code to actually draw a button
        }
    }
}

// main.rs
use gui::Draw;

struct SelectBox {
    width: u32,
    height: u32,
    options: UnsizedVec<str>,
}

impl Draw for SelectBox {
    fn draw(&self) {
        // code to actually draw a select box
    }
}

use gui::{Button, Screen};

fn main() {
    let screen = Screen {
        components: unsize_vec![
            SelectBox {
                width: 75,
                height: 10,
                options: unsized_vec![
                    by_value_str!("Yes"),
                    by_value_str!("Maybe"),
                    by_value_str!("No"),
                ],
            },
            Button {
                width: 50,
                height: 10,
                label: Box::<str>::from("OK"),
            },
        ],
    };

    screen.run();
}

--------------------------------------------------------------------------------
/src/bin/stress.rs:
--------------------------------------------------------------------------------
use std::{hint::black_box, mem::size_of};

use unsized_vec::UnsizedVec;

const ARR0: [i32; 0] = [];
const ARR1: [i32; 1] = [23];
const ARR2: [i32; 2] = [-4, 27];
const ARR3: [i32; 3] = [-4, 27, 31];

const ARR4: [i32; 4] = [-4, 27, 31, 42];

const ARR13: [i32; 13] = [
    -4, 27, 31, 42, 43, 342, 2342, -324, 234, 234, 65, 123, 32465532,
];

fn main() {
    for _ in 0..100 {
        let mut v: UnsizedVec<[i32]> = UnsizedVec::with_capacity_bytes(
            256 * 6,
            size_of::<i32>() * 256 * (13 + 4 + 3 + 2 + 1),
        );

        for _ in 0..255 {
            v.push_unsize(black_box(ARR0));
            v.push_unsize(black_box(ARR1));
            v.push_unsize(black_box(ARR2));
            v.push_unsize(black_box(ARR3));
            v.push_unsize(black_box(ARR4));
            v.push_unsize(black_box(ARR13));
        }
        assert_eq!(
            v.byte_capacity(),
            size_of::<i32>() * 256 * (13 + 4 + 3 + 2 + 1)
        );

        black_box(v);
    }
}

--------------------------------------------------------------------------------
/src/helper.rs:
--------------------------------------------------------------------------------
pub(crate) mod valid_align;
pub(crate) mod valid_size;

use core::{alloc::Layout, hash::Hash, mem, ptr::Pointee};

use crate::marker::Aligned;

use self::{valid_align::ValidAlign, valid_size::ValidSizeUnaligned};

/// Used by `UnsizedVec` to recompute an element's pointer metadata
/// from its size, for element types where that is possible.
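/// For example, the metadata of a `[u16]` is its length in elements,
/// which is just its size in bytes divided by `mem::size_of::<u16>()`.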
pub(crate) trait MetadataFromSize: Aligned {
    fn from_size(size: ValidSizeUnaligned) -> <Self as Pointee>::Metadata;
}

impl<T> MetadataFromSize for T {
    fn from_size(_: ValidSizeUnaligned) -> <T as Pointee>::Metadata {}
}

impl<T> MetadataFromSize for [T] {
    fn from_size(size: ValidSizeUnaligned) -> <[T] as Pointee>::Metadata {
        debug_assert!(size.get() != 0 || size == ValidSizeUnaligned::ZERO);
        size.get().checked_div(mem::size_of::<T>()).unwrap_or(0)
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct FullMetadataRemainder<T>(T);

/// Used by `UnsizedVec` to only store offset and pointer metadata
/// when the latter can't be derived from the former.
pub(crate) trait MetadataRemainder<T: ?Sized>:
    Copy + Send + Sync + Ord + Hash + Unpin
{
    #[must_use]
    fn from_metadata(meta: <T as Pointee>::Metadata) -> Self;

    #[must_use]
    fn as_metadata(self, size: ValidSizeUnaligned) -> <T as Pointee>::Metadata;
}

impl<T: ?Sized> MetadataRemainder<T> for FullMetadataRemainder<<T as Pointee>::Metadata> {
    #[inline]
    fn from_metadata(meta: <T as Pointee>::Metadata) -> Self {
        FullMetadataRemainder(meta)
    }

    #[inline]
    fn as_metadata(self, _: ValidSizeUnaligned) -> <T as Pointee>::Metadata {
        self.0
    }
}

impl<T: ?Sized + MetadataFromSize> MetadataRemainder<T> for () {
    #[inline]
    fn from_metadata(_: <T as Pointee>::Metadata) -> Self {}

    #[inline]
    fn as_metadata(self, size: ValidSizeUnaligned) -> <T as Pointee>::Metadata {
        <T as MetadataFromSize>::from_size(size)
    }
}

pub(crate) trait SplitMetadata {
    type Remainder: MetadataRemainder<Self>;
}

impl<T: ?Sized> SplitMetadata for T {
    default type Remainder = FullMetadataRemainder<<T as Pointee>::Metadata>;
}

// `MetadataFromSize` implementations are always "always applicable",
// so this specialization should be safe.
impl<T: ?Sized + MetadataFromSize> SplitMetadata for T {
    type Remainder = ();
}

pub(crate) const fn decompose(layout: Layout) -> (ValidSizeUnaligned, ValidAlign) {
    // SAFETY: `Layout` can't return an invalid size/align
    unsafe {
        (
            ValidSizeUnaligned::new_unchecked(layout.size()),
            ValidAlign::new_unckecked(layout.align()),
        )
    }
}

--------------------------------------------------------------------------------
/src/helper/valid_align.rs:
--------------------------------------------------------------------------------
//! Copied and pasted from the Rust standard library:
//!
//!
//! All credit goes to the stdlib developers.

#![allow(clippy::enum_clike_unportable_variant)]

use core::{
    cmp, fmt, hash, mem,
    num::NonZeroUsize,
    ptr::{self, NonNull},
};

/// A type storing a `usize` which is a power of two, and thus
/// represents a possible alignment in the Rust abstract machine.
///
/// Note that particularly large alignments, while representable in this type,
/// are likely not to be supported by actual allocators and linkers.
#[derive(Clone, Copy)]
#[repr(transparent)]
pub(crate) struct ValidAlign(ValidAlignEnum);

// ValidAlign is `repr(usize)`, but via extra steps.
const _: () = assert!(mem::size_of::<ValidAlign>() == mem::size_of::<usize>());
const _: () = assert!(mem::align_of::<ValidAlign>() == mem::align_of::<usize>());
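// Encoding the alignment as an enum with one variant per power of two also
// gives `ValidAlign` a niche: zero is never a valid bit pattern, so
// `Option<ValidAlign>` is the same size as `usize`.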

impl ValidAlign {
    pub(crate) const ONE: Self = Self::new(1).unwrap();

    /// Creates a `ValidAlign` from a power-of-two `usize`.
    ///
    /// Returns `None` if `align` is not a power of two.
    #[must_use]
    #[inline]
    pub(crate) const fn new(align: usize) -> Option<Self> {
        if align.is_power_of_two() {
            Some(
                // SAFETY: By the check above, this must be a power of two, and
                // our variants encompass all possible powers of two.
                unsafe { Self::new_unckecked(align) },
            )
        } else {
            None
        }
    }

    /// Creates a `ValidAlign` from a power-of-two `usize`.
    ///
    /// # Safety
    ///
    /// `align` must be a power of two.
    ///
    /// Equivalently, it must be `1 << exp` for some `exp` in `0..usize::BITS`.
    /// It must *not* be zero.
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn new_unckecked(align: usize) -> Self {
        // SAFETY: By function preconditions, this must be a power of two, and
        // our variants encompass all possible powers of two.
        unsafe { mem::transmute::<usize, ValidAlign>(align) }
    }

    #[must_use]
    #[inline]
    #[allow(clippy::cast_possible_truncation)] // Clippy is not smart enough to realize this can't fail
    pub(crate) const fn get(self) -> usize {
        self.0 as usize
    }

    #[must_use]
    #[inline]
    pub(super) const fn minus_1(self) -> usize {
        // SAFETY: align is always >= 1
        unsafe { self.get().unchecked_sub(1) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn as_nonzero(self) -> NonZeroUsize {
        // SAFETY: All the discriminants are non-zero.
        unsafe { NonZeroUsize::new_unchecked(self.0 as usize) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn as_usize(self) -> usize {
        self.0 as usize
    }

    /// Returns the base-2 logarithm of the alignment.
    ///
    /// This is always exact, as `self` represents a power of two.
    #[must_use]
    #[inline]
    pub(crate) const fn log2(self) -> u32 {
        self.as_nonzero().trailing_zeros()
    }

    /// Returns a dangling pointer with a numeric value equal to this alignment.
    #[must_use]
    #[inline]
    pub(crate) const fn dangling_thin(self) -> NonNull<()> {
        // SAFETY: self != 0
        unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(self.get())) }
    }

    /// Returns the alignment of `val`.
    #[must_use]
    #[inline]
    pub(crate) const fn of_val<T: ?Sized>(val: &T) -> Self {
        // SAFETY: `align_of_val` returns valid alignments
        unsafe { Self::new_unckecked(mem::align_of_val(val)) }
    }
}

impl fmt::Debug for ValidAlign {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?} (1 << {:?})", self.as_nonzero(), self.log2())
    }
}

impl From<ValidAlign> for NonZeroUsize {
    #[inline]
    fn from(value: ValidAlign) -> Self {
        value.as_nonzero()
    }
}

impl From<ValidAlign> for usize {
    #[inline]
    fn from(value: ValidAlign) -> Self {
        value.get()
    }
}

impl PartialEq for ValidAlign {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.get() == other.get()
    }
}

impl Eq for ValidAlign {}

impl PartialOrd for ValidAlign {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for ValidAlign {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.get().cmp(&other.get())
    }
}

#[allow(clippy::derived_hash_with_manual_eq)]
impl hash::Hash for ValidAlign {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_nonzero().hash(state);
    }
}

#[cfg(target_pointer_width = "16")]
#[derive(Clone, Copy)]
#[repr(usize)]
enum ValidAlignEnum {
    _Align1Shl0 = 1 << 0,
    _Align1Shl1 = 1 << 1,
    _Align1Shl2 = 1 << 2,
    _Align1Shl3 = 1 << 3,
    _Align1Shl4 = 1 << 4,
    _Align1Shl5 = 1 << 5,
    _Align1Shl6 = 1 << 6,
    _Align1Shl7 = 1 << 7,
    _Align1Shl8 = 1 << 8,
    _Align1Shl9 = 1 << 9,
    _Align1Shl10 = 1 << 10,
    _Align1Shl11 = 1 << 11,
    _Align1Shl12 = 1 << 12,
    _Align1Shl13 = 1 << 13,
    _Align1Shl14 = 1 << 14,
    _Align1Shl15 = 1 << 15,
}

#[cfg(target_pointer_width = "32")]
#[derive(Clone, Copy)]
#[repr(usize)]
enum ValidAlignEnum {
    _Align1Shl0 = 1 << 0,
    _Align1Shl1 = 1 << 1,
    _Align1Shl2 = 1 << 2,
    _Align1Shl3 = 1 << 3,
    _Align1Shl4 = 1 << 4,
    _Align1Shl5 = 1 << 5,
    _Align1Shl6 = 1 << 6,
    _Align1Shl7 = 1 << 7,
    _Align1Shl8 = 1 << 8,
    _Align1Shl9 = 1 << 9,
    _Align1Shl10 = 1 << 10,
    _Align1Shl11 = 1 << 11,
    _Align1Shl12 = 1 << 12,
    _Align1Shl13 = 1 << 13,
    _Align1Shl14 = 1 << 14,
    _Align1Shl15 = 1 << 15,
    _Align1Shl16 = 1 << 16,
    _Align1Shl17 = 1 << 17,
    _Align1Shl18 = 1 << 18,
    _Align1Shl19 = 1 << 19,
    _Align1Shl20 = 1 << 20,
    _Align1Shl21 = 1 << 21,
    _Align1Shl22 = 1 << 22,
    _Align1Shl23 = 1 << 23,
    _Align1Shl24 = 1 << 24,
    _Align1Shl25 = 1 << 25,
    _Align1Shl26 = 1 << 26,
    _Align1Shl27 = 1 << 27,
    _Align1Shl28 = 1 << 28,
    _Align1Shl29 = 1 << 29,
    _Align1Shl30 = 1 << 30,
    _Align1Shl31 = 1 << 31,
}
#[cfg(target_pointer_width = "64")]
#[derive(Clone, Copy)]
#[repr(usize)]
enum ValidAlignEnum {
    _Align1Shl0 = 1 << 0,
    _Align1Shl1 = 1 << 1,
    _Align1Shl2 = 1 << 2,
    _Align1Shl3 = 1 << 3,
    _Align1Shl4 = 1 << 4,
    _Align1Shl5 = 1 << 5,
    _Align1Shl6 = 1 << 6,
    _Align1Shl7 = 1 << 7,
    _Align1Shl8 = 1 << 8,
    _Align1Shl9 = 1 << 9,
    _Align1Shl10 = 1 << 10,
    _Align1Shl11 = 1 << 11,
    _Align1Shl12 = 1 << 12,
    _Align1Shl13 = 1 << 13,
    _Align1Shl14 = 1 << 14,
    _Align1Shl15 = 1 << 15,
    _Align1Shl16 = 1 << 16,
    _Align1Shl17 = 1 << 17,
    _Align1Shl18 = 1 << 18,
    _Align1Shl19 = 1 << 19,
    _Align1Shl20 = 1 << 20,
    _Align1Shl21 = 1 << 21,
    _Align1Shl22 = 1 << 22,
    _Align1Shl23 = 1 << 23,
    _Align1Shl24 = 1 << 24,
    _Align1Shl25 = 1 << 25,
    _Align1Shl26 = 1 << 26,
    _Align1Shl27 = 1 << 27,
    _Align1Shl28 = 1 << 28,
    _Align1Shl29 = 1 << 29,
    _Align1Shl30 = 1 << 30,
    _Align1Shl31 = 1 << 31,
    _Align1Shl32 = 1 << 32,
    _Align1Shl33 = 1 << 33,
    _Align1Shl34 = 1 << 34,
    _Align1Shl35 = 1 << 35,
    _Align1Shl36 = 1 << 36,
    _Align1Shl37 = 1 << 37,
    _Align1Shl38 = 1 << 38,
    _Align1Shl39 = 1 << 39,
    _Align1Shl40 = 1 << 40,
    _Align1Shl41 = 1 << 41,
    _Align1Shl42 = 1 << 42,
    _Align1Shl43 = 1 << 43,
    _Align1Shl44 = 1 << 44,
    _Align1Shl45 = 1 << 45,
    _Align1Shl46 = 1 << 46,
    _Align1Shl47 = 1 << 47,
    _Align1Shl48 = 1 << 48,
    _Align1Shl49 = 1 << 49,
    _Align1Shl50 = 1 << 50,
    _Align1Shl51 = 1 << 51,
    _Align1Shl52 = 1 << 52,
    _Align1Shl53 = 1 << 53,
    _Align1Shl54 = 1 << 54,
    _Align1Shl55 = 1 << 55,
    _Align1Shl56 = 1 << 56,
    _Align1Shl57 = 1 << 57,
    _Align1Shl58 = 1 << 58,
    _Align1Shl59 = 1 << 59,
    _Align1Shl60 = 1 << 60,
    _Align1Shl61 = 1 << 61,
    _Align1Shl62 = 1 << 62,
    _Align1Shl63 = 1 << 63,
}

--------------------------------------------------------------------------------
/src/helper/valid_size.rs:
--------------------------------------------------------------------------------
use core::{alloc::Layout, fmt::Debug, hash::Hash, marker::PhantomData, mem};

use crate::marker::Aligned;

use super::valid_align::ValidAlign;

/// A type storing a `usize` that, when rounded up to the nearest multiple
/// of `T`'s alignment, is less than or equal to `isize::MAX`.
///
/// This mirrors the requirements of [`Layout`].
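/// For example, if `T`'s alignment is 8, the largest valid size is
/// `isize::MAX - 7`: any larger value would exceed `isize::MAX` once
/// rounded up.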
#[repr(transparent)]
pub(crate) struct ValidSize<T: ?Sized + Aligned>(usize, PhantomData<fn() -> T>);

impl<T: ?Sized + Aligned> Clone for ValidSize<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized + Aligned> Copy for ValidSize<T> {}

impl<T: ?Sized + Aligned> Debug for ValidSize<T> {
    #[inline]
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.0.fmt(f)
    }
}

impl<T: ?Sized + Aligned> PartialEq for ValidSize<T> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}

impl<T: ?Sized + Aligned> Eq for ValidSize<T> {}

impl<T: ?Sized + Aligned> Hash for ValidSize<T> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

impl<T: ?Sized + Aligned> PartialOrd for ValidSize<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: ?Sized + Aligned> Ord for ValidSize<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.0.cmp(&other.0)
    }
}

impl<T: ?Sized + Aligned> ValidSize<T> {
    pub(crate) const ZERO: Self = ValidSize::new(0).unwrap();

    pub(crate) const MAX: Self = ValidSize(
        isize::MAX as usize - (<T as Aligned>::ALIGN.as_usize() - 1),
        PhantomData,
    );

    /// Creates a `ValidSize` from a `usize` that, when rounded up
    /// to the nearest multiple of `T::ALIGN`, is less than
    /// or equal to `isize::MAX`.
    ///
    /// # Safety
    ///
    /// `size <= Self::MAX.get()` must hold.
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn new_unchecked(size: usize) -> Self {
        debug_assert!(size <= Self::MAX.get());

        ValidSize(size, PhantomData)
    }

    /// Creates a `ValidSize` from a `usize` that, when rounded up
    /// to the nearest multiple of `T::ALIGN`, is less than
    /// or equal to `isize::MAX`.
    ///
    /// Returns `None` if `size > Self::MAX.get()`.
    #[must_use]
    #[inline]
    pub(crate) const fn new(size: usize) -> Option<Self> {
        if size <= Self::MAX.get() {
            Some(ValidSize(size, PhantomData))
        } else {
            None
        }
    }

    /// Rounds down if it would overflow.
    #[must_use]
    #[inline]
    pub(crate) const fn new_squished(size: usize) -> Self {
        if size <= Self::MAX.get() {
            // SAFETY: ensured by the if guard
            unsafe { Self::new_unchecked(size) }
        } else {
            Self::MAX
        }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn get(self) -> usize {
        self.0
    }

    #[must_use]
    #[inline]
    pub(crate) const fn checked_add(self, rhs: Self) -> Option<Self> {
        if let Some(sum) = self.get().checked_add(rhs.get()) {
            ValidSize::new(sum)
        } else {
            None
        }
    }

    /// # Safety
    ///
    /// Must not overflow
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn unchecked_add(self, rhs: Self) -> Self {
        // SAFETY: precondition of function
        unsafe { ValidSize::new_unchecked(self.get().unchecked_add(rhs.get())) }
    }

    /// # Safety
    ///
    /// Must not underflow
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
        // SAFETY: precondition of function
        unsafe { ValidSize::new_unchecked(self.get().unchecked_sub(rhs.get())) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn as_layout(self) -> Layout {
        // SAFETY: `T::ALIGN` is a valid align, and conditions on `self` ensure it meets
        // the requirements of the call.
        unsafe { Layout::from_size_align_unchecked(self.get(), T::ALIGN.get()) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn as_unaligned(self) -> ValidSizeUnaligned {
        // SAFETY: Reducing alignment strictly increases the set of valid sizes
        unsafe { ValidSizeUnaligned::new_unchecked(self.get()) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn of_val(val: &T) -> Self {
        // SAFETY: `size_of_val` returns a valid size
        unsafe { Self::new_unchecked(mem::size_of_val(val)) }
    }
}

/// For when the alignment isn't known at compile time.
#[repr(transparent)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub(crate) struct ValidSizeUnaligned(ValidSize<()>);

impl ValidSizeUnaligned {
    pub(crate) const ZERO: Self = Self(ValidSize::ZERO);

    /// Creates a `ValidSize` from a `usize` that is less than
    /// or equal to `isize::MAX`.
    ///
    /// # Safety
    ///
    /// `size <= isize::MAX as usize` must hold.
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn new_unchecked(size: usize) -> Self {
        // SAFETY: precondition of function
        Self(unsafe { ValidSize::new_unchecked(size) })
    }

    /// Creates a `ValidSize` from a `usize` that is less than or equal to
    /// `isize::MAX`.
    ///
    /// Returns `None` if `size > isize::MAX as usize`.
    #[must_use]
    #[inline]
    const fn new(size: usize) -> Option<Self> {
        if let Some(size) = ValidSize::new(size) {
            Some(Self(size))
        } else {
            None
        }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn get(self) -> usize {
        self.0.get()
    }

    #[must_use]
    #[inline]
    pub(crate) const fn max_for_align(align: ValidAlign) -> Self {
        // SAFETY: `1 <= align` and `align - 1 <= isize::MAX`
        // for any possible `align`.
        unsafe { Self::new_unchecked((isize::MAX as usize).unchecked_sub(align.minus_1())) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn new_padded_to(size: usize, align: ValidAlign) -> Option<Self> {
        let align_m_1 = align.minus_1();

        let Some(sum) = size.checked_add(align_m_1) else {
            return None;
        };

        // Round up to the next multiple of `align` (a power of two):
        // add `align - 1`, then clear the low bits.
        let new = sum & !align_m_1;

        Self::new(new)
    }

    /// Rounds down if it would overflow.
    #[must_use]
    #[inline]
    pub(crate) const fn new_squished_to(size: usize, align: ValidAlign) -> Self {
        let max = Self::max_for_align(align);
        if size <= max.get() {
            // SAFETY: ensured by the if guard
            unsafe { Self::new_unchecked(size) }
        } else {
            max
        }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn checked_add(self, rhs: Self) -> Option<Self> {
        if let Some(sum) = self.0.checked_add(rhs.0) {
            Some(Self(sum))
        } else {
            None
        }
    }

    /// # Safety
    ///
    /// Must not overflow
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn unchecked_add(self, rhs: Self) -> Self {
        // SAFETY: precondition of function
        Self(unsafe { self.0.unchecked_add(rhs.0) })
    }

    /// # Safety
    ///
    /// Must not underflow
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn unchecked_sub(self, rhs: Self) -> Self {
        // SAFETY: precondition of function
        Self(unsafe { self.0.unchecked_sub(rhs.0) })
    }

    #[must_use]
    #[inline]
    pub(crate) const fn checked_pad_to(self, align: ValidAlign) -> Option<Self> {
        let align_m_1 = align.minus_1();

        // SAFETY: align - 1 <= isize::MAX, self <= isize::MAX, so no possibility of overflow
        let new = unsafe { self.get().unchecked_add(align_m_1) } & !align_m_1;

        // We are padded, so we just need to check against `isize::MAX`
        Self::new(new)
    }

    /// # Safety
    ///
    /// Must not overflow `isize`
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn unchecked_pad_to(self, align: ValidAlign) -> Self {
        let align_m_1 = align.minus_1();

        // SAFETY: align - 1 <= isize::MAX, self <= isize::MAX, so no possibility of overflow
        let new = unsafe { self.get().unchecked_add(align_m_1) } & !align_m_1;

        // SAFETY: preconditions of function
        unsafe { Self::new_unchecked(new) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn checked_add_pad(self, rhs: usize, align: ValidAlign) -> Option<Self> {
        if let Some(sum) = self.get().checked_add(rhs) {
            Self::new_padded_to(sum, align)
        } else {
            None
        }
    }

    /// # Safety
    ///
    /// Must meet the requirements of `Layout::from_size_align_unchecked`
    #[must_use]
    #[inline]
    pub(crate) const unsafe fn as_layout_with_align_unchecked(self, align: ValidAlign) -> Layout {
        // SAFETY: Preconditions of function
        unsafe { Layout::from_size_align_unchecked(self.get(), align.get()) }
    }

    #[must_use]
    #[inline]
    pub(crate) const fn of_val<T: ?Sized>(val: &T) -> Self {
        // SAFETY: the size of a value is always a multiple of its alignment,
        // and less than or equal to `isize::MAX`
        unsafe { Self::new_unchecked(mem::size_of_val(val)) }
    }
}

--------------------------------------------------------------------------------
/src/inner.rs:
--------------------------------------------------------------------------------
//! Defines the interface that the three implementations of
//! `UnsizedVec` (sized, unsized + aligned, unaligned)
//! implement.

use core::{
    hash::Hash,
    iter::FusedIterator,
    marker::Unsize,
    mem,
    panic::{RefUnwindSafe, UnwindSafe},
    ptr::NonNull,
};

use crate::{
    TryReserveError,
    helper::{
        valid_align::ValidAlign,
        valid_size::{ValidSize, ValidSizeUnaligned},
    },
    marker::Aligned,
};

use emplacable::{Emplacable, EmplacableFn, Emplacer};

mod aligned;
mod sized;
mod unaligned;

pub(super) trait Align:
    Copy + Send + Sync + Ord + Hash + Unpin + UnwindSafe + RefUnwindSafe
{
    #[must_use]
    fn new(align: usize) -> Option<Self>;
}

impl Align for ValidAlign {
    #[inline]
    fn new(align: usize) -> Option<Self> {
        ValidAlign::new(align)
    }
}

impl Align for () {
    #[inline]
    fn new(_: usize) -> Option<Self> {
        Some(())
    }
}

pub(super) trait Size<T: ?Sized>:
    Copy + Send + Sync + Ord + Hash + Unpin + UnwindSafe + RefUnwindSafe
{
    #[must_use]
    fn of_val(val: &T) -> Self;

    #[must_use]
    fn get(self) -> usize;
}

impl<T: ?Sized> Size<T> for ValidSizeUnaligned {
    #[inline]
    fn of_val(val: &T) -> Self {
        Self::of_val(val)
    }

    #[inline]
    fn get(self) -> usize {
        self.get()
    }
}

impl<T: ?Sized + Aligned> Size<T> for ValidSize<T> {
    #[inline]
    fn of_val(val: &T) -> Self {
        Self::of_val(val)
    }

    #[inline]
    fn get(self) -> usize {
        self.get()
    }
}

impl<T> Size<T> for () {
    #[inline]
    fn of_val(_: &T) -> Self {}

    #[inline]
    fn get(self) -> usize {
        mem::size_of::<T>()
    }
}
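
// The `()` impls of `Align` and `Size` above are used for sized `T`:
// size and alignment are then compile-time constants, so the stored
// "size" and "align" can themselves be zero-sized.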

/// Implementation of `UnsizedVec`.
/// There are different impls for `T: ?Aligned`, `T: Aligned`,
/// and `T: Sized`.
pub(super) trait UnsizedVecProvider<T: ?Sized> {
    type Align: Align;
    type Size: Size<T>;

    type Iter<'a>: Iterator<Item = &'a T> + DoubleEndedIterator + ExactSizeIterator + FusedIterator
    where
        T: 'a,
        Self: 'a;

    type IterMut<'a>: Iterator<Item = &'a mut T>
        + DoubleEndedIterator
        + ExactSizeIterator
        + FusedIterator
    where
        T: 'a,
        Self: 'a;

    const NEW_ALIGN_1: Self;
    const NEW_ALIGN_PTR: Self;

    #[must_use]
    fn capacity(&self) -> usize;

    #[must_use]
    fn byte_len(&self) -> usize;

    #[must_use]
    fn byte_capacity(&self) -> usize;

    #[must_use]
    fn align(&self) -> usize;

    fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>;

    fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError>;

    /// Try to reserve additional byte or align capacity,
    /// on top of existing unused capacity. Exact (doesn't overallocate).
    fn try_reserve_additional_bytes_align(
        &mut self,
        additional_bytes: usize,
        align: Self::Align,
    ) -> Result<(), TryReserveError>;

    fn shrink_capacity_bytes_align_to(
        &mut self,
        min_capacity: usize,
        min_byte_capacity: usize,
        min_align: Self::Align,
    );

    /// # Safety
    ///
    /// Does no capacity or bounds checks.
    /// Doesn't check for sufficient space for metadata, either.
    ///
    /// `size` must correspond to the actual
    /// size of `value`.
    unsafe fn insert_unchecked(&mut self, index: usize, value: T, size: Self::Size);

    /// # Safety
    ///
    /// Like `insert`, but does no bounds checks.
    unsafe fn insert_with_unchecked(
        &mut self,
        index: usize,
        value: Emplacable<T, impl EmplacableFn<T>>,
    );

    /// # Safety
    ///
    /// `index < self.len()` must hold.
    unsafe fn remove_into_unchecked(&mut self, index: usize, emplacer: &mut Emplacer<'_, T>);

    /// # Safety
    ///
    /// Like `push`, but does no capacity checks.
    /// Doesn't check for sufficient space for metadata, either.
    ///
    /// `size` must correspond to the actual
    /// size of `elem`.
    unsafe fn push_unchecked(&mut self, elem: T, size: Self::Size);

    fn push_with(&mut self, value: Emplacable<T, impl EmplacableFn<T>>);

    /// # Safety
    ///
    /// `!self.is_empty()` must hold.
    unsafe fn pop_into_unchecked(&mut self, emplacer: &mut Emplacer<'_, T>);

    #[must_use]
    fn len(&self) -> usize;

    /// # Safety
    ///
    /// `index` must be contained in `0..self.len()`
    #[must_use]
    unsafe fn get_unchecked_raw(&self, index: usize) -> NonNull<T>;

    #[must_use]
    fn iter(&self) -> Self::Iter<'_>;

    #[must_use]
    fn iter_mut(&mut self) -> Self::IterMut<'_>;

    #[must_use]
    fn from_sized<S>(vec: ::alloc::vec::Vec<S>) -> Self
    where
        S: Unsize<T>;
}

pub(super) trait UnsizedVecImpl {
    type Impl: UnsizedVecProvider<Self>;
}

pub(super) trait AlignedVecProvider<T: ?Sized + Aligned>: UnsizedVecProvider<T> {}

pub(super) trait AlignedVecImpl: Aligned {
    type Impl: AlignedVecProvider<Self>;
}

impl<T: ?Sized + AlignedVecImpl> UnsizedVecImpl for T {
    type Impl = <T as AlignedVecImpl>::Impl;
}

--------------------------------------------------------------------------------
/src/inner/aligned.rs:
--------------------------------------------------------------------------------
//! The implementation of `UnsizedVec` for `T: ?Sized + Aligned`.

use ::alloc::{alloc, collections::TryReserveErrorKind};
use core::{
    alloc::{Allocator, Layout},
    cmp,
    iter::FusedIterator,
    marker::{PhantomData, Unsize},
    mem::{self, ManuallyDrop},
    ptr::{self, NonNull, addr_of},
};

use emplacable::{Emplacable, EmplacableFn, Emplacer};

use crate::{
    helper::{MetadataRemainder, SplitMetadata, decompose, valid_size::ValidSize},
    marker::Aligned,
    unwrap_try_reserve_result,
};

use super::{AlignedVecImpl, AlignedVecProvider, TryReserveError, UnsizedVecProvider};

struct ElementInfo<T: ?Sized + Aligned> {
    /// The pointer metadata of the element.
    metadata: <T as SplitMetadata>::Remainder,
    /// The offset that the element following this one would be stored at.
    /// We use this encoding to store the sizes of the vec's elements
    /// because it allows for *O(1)* random access while only storing
    /// a single `usize`.
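    /// For example, three `[i32]` elements of lengths 1, 2, and 3 occupy
    /// 4, 8, and 12 bytes, so their stored end offsets are 4, 12, and 24;
    /// element `i` spans the bytes `end_offset[i - 1]..end_offset[i]`,
    /// with an implicit offset of 0 for the first element.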
    end_offset: ValidSize<T>,
}

impl<T: ?Sized + Aligned> Clone for ElementInfo<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized + Aligned> Copy for ElementInfo<T> {}

pub(in super::super) struct AlignedVecInner<T: ?Sized + Aligned> {
    ptr: NonNull<()>,
    byte_capacity: ValidSize<T>,
    elems_info: ManuallyDrop<::alloc::vec::Vec<ElementInfo<T>>>,
    _marker: PhantomData<T>,
}

impl<T: ?Sized + Aligned> AlignedVecInner<T> {
    /// The number of bytes this vec is currently using
    /// (the sum of the `size_of_val`s of all elements).
    #[inline]
    fn byte_len(&self) -> ValidSize<T> {
        self.elems_info
            .last()
            .map_or(ValidSize::ZERO, |last| last.end_offset)
    }

    /// Returns the offset of the start of this element in the vec.
    ///
    /// # Safety
    ///
    /// Does no bounds checks.
    #[inline]
    unsafe fn start_offset_of_unchecked(&self, index: usize) -> ValidSize<T> {
        index.checked_sub(1).map_or(ValidSize::ZERO, |index_m_1|
            // SAFETY: precondition of function
            unsafe {
                self.elems_info
                    .get_unchecked(index_m_1)
                    .end_offset
            })
    }
}

impl<T: ?Sized + Aligned> Drop for AlignedVecInner<T> {
    fn drop(&mut self) {
        let mut start_offset: ValidSize<T> = ValidSize::ZERO;

        // SAFETY: we are in `drop`, nobody will access the `ManuallyDrop` after us
        let elems_info = unsafe { ManuallyDrop::take(&mut self.elems_info) };

        // Drop remaining elements
        for ElementInfo {
            metadata,
            end_offset,
        } in elems_info
        {
            // SAFETY: end of element can't be smaller than start
            let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) };
            let metadata = metadata.as_metadata(size_of_val.as_unaligned());

            let start_of_alloc = self.ptr.as_ptr().cast::<u8>();
            // SAFETY: offset is within allocation
            let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) };
            let wide_ptr_to_elem: *mut T = ptr::from_raw_parts_mut(thin_ptr_to_elem, metadata);

            start_offset = end_offset;

            // SAFETY: nobody will access this after us
            unsafe { wide_ptr_to_elem.drop_in_place() }
        }

        // Drop allocation

        let alloc_layout = self.byte_capacity.as_layout();

        // SAFETY: capacity and align come from the vec.
        unsafe {
            alloc::Global.deallocate(self.ptr.cast(), alloc_layout);
        }
    }
}

impl<T: ?Sized + Aligned> UnsizedVecProvider<T> for AlignedVecInner<T> {
    type Align = ();
    type Size = ValidSize<T>;

    type Iter<'a>
        = AlignedIter<'a, T>
    where
        T: 'a;
    type IterMut<'a>
        = AlignedIterMut<'a, T>
    where
        T: 'a;

    const NEW_ALIGN_1: Self = AlignedVecInner {
        ptr: T::DANGLING_THIN,
        byte_capacity: ValidSize::ZERO,
        elems_info: ManuallyDrop::new(::alloc::vec::Vec::new()),
        _marker: PhantomData,
    };

    const NEW_ALIGN_PTR: Self = Self::NEW_ALIGN_1;

    #[inline]
    fn capacity(&self) -> usize {
        self.elems_info.capacity()
    }

    #[inline]
    fn byte_capacity(&self) -> usize {
        self.byte_capacity.get()
    }

    #[inline]
    fn byte_len(&self) -> usize {
        self.byte_len().get()
    }

    #[inline]
    fn align(&self) -> usize {
        T::ALIGN.get()
    }

    #[inline]
    fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        Ok(self.elems_info.try_reserve(additional)?)
159 |     }
160 | 
161 |     #[inline]
162 |     fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
163 |         Ok(self.elems_info.try_reserve_exact(additional)?)
164 |     }
165 | 
166 |     #[inline]
167 |     fn try_reserve_additional_bytes_align(
168 |         &mut self,
169 |         additional_bytes: usize,
170 |         _align: (),
171 |     ) -> Result<(), TryReserveError> {
172 |         let old_cap = self.byte_capacity;
173 | 
174 |         if additional_bytes > 0 {
175 |             let new_cap = self
176 |                 .byte_capacity()
177 |                 .checked_add(additional_bytes)
178 |                 .and_then(ValidSize::<T>::new)
179 |                 .ok_or(TryReserveError {
180 |                     kind: TryReserveErrorKind::CapacityOverflow,
181 |                 })?;
182 | 
183 |             let new_layout = new_cap.as_layout();
184 | 
185 |             let new_ptr: NonNull<[u8]> = if old_cap == ValidSize::ZERO {
186 |                 alloc::Global.allocate(new_layout)
187 |             } else {
188 |                 let old_layout = old_cap.as_layout();
189 | 
190 |                 // SAFETY: old layout comes from vec, checked above for `old_cap < new_cap`
191 |                 unsafe { alloc::Global.grow(self.ptr.cast(), old_layout, new_layout) }
192 |             }
193 |             .map_err(|_| TryReserveError {
194 |                 kind: TryReserveErrorKind::AllocError {
195 |                     layout: new_cap.as_layout(),
196 |                     non_exhaustive: (),
197 |                 },
198 |             })?;
199 | 
200 |             self.ptr = new_ptr.cast();
201 | 
202 |             self.byte_capacity = ValidSize::new_squished(new_ptr.len());
203 |         }
204 |         Ok(())
205 |     }
206 | 
207 |     #[inline]
208 |     fn shrink_capacity_bytes_align_to(
209 |         &mut self,
210 |         min_capacity: usize,
211 |         byte_capacity: usize,
212 |         _align: (),
213 |     ) {
214 |         self.elems_info.shrink_to(min_capacity);
215 | 
216 |         let old_cap = self.byte_capacity;
217 |         let new_cap = cmp::max(self.byte_len().get(), byte_capacity);
218 | 
219 |         if new_cap < old_cap.get() {
220 |             // SAFETY: `self.byte_capacity` is valid, so anything less than it is too
221 |             let new_cap = unsafe { ValidSize::<T>::new_unchecked(new_cap) };
222 | 
223 |             if new_cap == ValidSize::ZERO {
224 |                 // SAFETY: layout comes from the vec
225 |                 unsafe { alloc::Global.deallocate(self.ptr.cast(), old_cap.as_layout()) }
226 |                 self.ptr = <T>::DANGLING_THIN;
227 |                 self.byte_capacity = ValidSize::ZERO;
228 |             } else {
229 |                 // SAFETY: `old_layout` comes from the vec, if guard ensures `new_layout` is smaller
230 |                 if let Ok(new_ptr) = unsafe {
231 |                     alloc::Global.shrink(self.ptr.cast(), old_cap.as_layout(), new_cap.as_layout())
232 |                 } {
233 |                     self.ptr = new_ptr.cast();
234 |                     self.byte_capacity = ValidSize::new_squished(new_ptr.len());
235 |                 }
236 | 
237 |                 // if shrink fails, we just keep old allocation
238 |             }
239 |         }
240 |     }
241 | 
242 |     unsafe fn insert_unchecked(&mut self, index: usize, element: T, size_of_val: ValidSize<T>) {
243 |         debug_assert!(self.capacity() > self.len());
244 |         debug_assert!(self.byte_capacity() >= (self.byte_len().get() + size_of_val.get()));
245 | 
246 |         let metadata =
247 |             <T as SplitMetadata>::Remainder::from_metadata(core::ptr::metadata(&element));
248 | 
249 |         // SAFETY: preconditions of function
250 |         unsafe {
251 |             let start_offset = self.start_offset_of_unchecked(index);
252 |             let how_much_to_move = self.byte_len().unchecked_sub(start_offset);
253 | 
254 |             let start_ptr = self.ptr.cast::<u8>().as_ptr().add(start_offset.get());
255 | 
256 |             ptr::copy(
257 |                 start_ptr,
258 |                 start_ptr.add(size_of_val.get()),
259 |                 how_much_to_move.get(),
260 |             );
261 | 
262 |             ptr::copy_nonoverlapping(addr_of!(element).cast(), start_ptr, size_of_val.get());
263 | 
264 |             for ElementInfo { end_offset, .. } in self.elems_info.get_unchecked_mut(index..)
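// Illustrative sketch (editor's note, not part of the crate): the insertion
// above is the classic "open a gap, then write into it" memmove. With a plain
// byte buffer (hypothetical stand-in) the same move looks like:
//
//     fn open_gap(buf: &mut Vec<u8>, at: usize, gap: usize) {
//         // `copy_within` is the safe equivalent of the overlapping
//         // `ptr::copy` used above.
//         let old_len = buf.len();
//         buf.resize(old_len + gap, 0);
//         buf.copy_within(at..old_len, at + gap);
//     }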
{ 265 | *end_offset = end_offset.unchecked_add(size_of_val); 266 | } 267 | 268 | self.elems_info.insert_unchecked( 269 | index, 270 | ElementInfo { 271 | metadata, 272 | end_offset: start_offset.unchecked_add(size_of_val), 273 | }, 274 | (), 275 | ); 276 | } 277 | 278 | mem::forget_unsized(element); 279 | } 280 | 281 | unsafe fn insert_with_unchecked( 282 | &mut self, 283 | index: usize, 284 | value: Emplacable>, 285 | ) { 286 | /// Helper to ensure elements are moved back 287 | /// where they belong in case `inner_closure` 288 | /// panics. 289 | struct ElementShifterBacker { 290 | ptr_to_index: *mut u8, 291 | num_bytes_to_shift: ValidSize, 292 | shift_by_bytes: ValidSize, 293 | } 294 | 295 | impl Drop for ElementShifterBacker { 296 | #[inline] 297 | fn drop(&mut self) { 298 | // SAFETY: shifting elements back in case of drop 299 | unsafe { 300 | ptr::copy( 301 | self.ptr_to_index.add(self.shift_by_bytes.get()), 302 | self.ptr_to_index, 303 | self.num_bytes_to_shift.get(), 304 | ); 305 | } 306 | } 307 | } 308 | 309 | debug_assert!(index <= self.len()); 310 | 311 | let emplacable_closure = value.into_fn(); 312 | 313 | let emplacer_closure = 314 | &mut |layout: Layout, metadata, inner_closure: &mut dyn FnMut(*mut PhantomData)| { 315 | let (size_of_val, _) = decompose(layout); 316 | 317 | // SAFETY: by `Emplacer::new` preconditions 318 | let size_of_val = unsafe { ValidSize::::new_unchecked(size_of_val.get()) }; 319 | 320 | let reserve_result = self 321 | .try_reserve(1) 322 | .and_then(|()| self.try_reserve_additional_bytes_align(layout.size(), ())); 323 | unwrap_try_reserve_result(reserve_result); 324 | 325 | // SAFETY: precondition of function 326 | let start_offset = unsafe { self.start_offset_of_unchecked(index) }; 327 | 328 | // SAFETY: getting pointer to element 329 | let ptr_to_elem = unsafe { self.ptr.cast::().as_ptr().add(start_offset.get()) }; 330 | 331 | // SAFETY: by precondition of function 332 | let num_bytes_to_shift = unsafe { self.byte_len().unchecked_sub(start_offset) }; 333 | 334 | let shifter_backer = ElementShifterBacker { 335 | ptr_to_index: ptr_to_elem, 336 | num_bytes_to_shift, 337 | shift_by_bytes: size_of_val, 338 | }; 339 | 340 | // SAFETY: copying elements right to make room 341 | unsafe { 342 | ptr::copy( 343 | ptr_to_elem, 344 | ptr_to_elem.add(size_of_val.get()), 345 | num_bytes_to_shift.get(), 346 | ); 347 | } 348 | 349 | // If this unwinds, `shifter_backer` will be dropped 350 | // and the elements will be moved back where they belong. 351 | inner_closure(ptr_to_elem.cast()); 352 | 353 | // `inner_closure` succeeded, so don't want to move elements back now! 354 | mem::forget(shifter_backer); 355 | 356 | // SAFETY: by precondition of function 357 | let elems_to_move_back = unsafe { self.elems_info.get_unchecked_mut(index..) }; 358 | 359 | for ElementInfo { end_offset, .. 
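// Editor's note: `ElementShifterBacker` above is a drop guard, the standard
// pattern for exception safety around a user-supplied closure. Minimal generic
// sketch (illustrative, not part of the crate):
//
//     struct Undo<F: FnMut()>(F);
//     impl<F: FnMut()> Drop for Undo<F> {
//         fn drop(&mut self) {
//             (self.0)() // runs on unwind unless defused
//         }
//     }
//
//     // let guard = Undo(|| shift_elements_back());
//     // run_user_closure();          // may panic
//     // core::mem::forget(guard);    // success: defuse the undo action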
} in elems_to_move_back { 360 | // SAFETY: make the offsets correct again 361 | *end_offset = unsafe { end_offset.unchecked_add(size_of_val) }; 362 | } 363 | 364 | // SAFETY: reserved memory earlier 365 | unsafe { 366 | self.elems_info.insert_unchecked( 367 | index, 368 | ElementInfo { 369 | metadata: ::Remainder::from_metadata(metadata), 370 | end_offset: start_offset.unchecked_add(size_of_val), 371 | }, 372 | (), 373 | ); 374 | } 375 | }; 376 | 377 | // SAFETY: `emplacer_closure` runs the closure with a valid pointer to `index` 378 | let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) }; 379 | 380 | emplacable_closure(emplacer); 381 | } 382 | 383 | unsafe fn remove_into_unchecked(&mut self, index: usize, emplacer: &mut Emplacer<'_, T>) { 384 | debug_assert!(index < self.len()); 385 | 386 | // We can't remove the metadata yet, as `emplacer_closure` might unwind, 387 | // so we can't leave vec metadata in an invalid state. 388 | // SAFETY: by precondition of function 389 | let removed_elem_metadata = unsafe { self.elems_info.get_unchecked(index) }; 390 | 391 | let ElementInfo { 392 | metadata, 393 | end_offset, 394 | } = removed_elem_metadata; 395 | 396 | // SAFETY: precondition of function 397 | let start_offset = unsafe { self.start_offset_of_unchecked(index) }; 398 | 399 | // SAFETY: start_offset < end_offset 400 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 401 | 402 | let metadata = metadata.as_metadata(size_of_val.as_unaligned()); 403 | 404 | // Get pointer to the element we are popping out of the vec 405 | // SAFETY: offset comes from vec 406 | let thin_ptr_to_elem = unsafe { 407 | self.ptr 408 | .as_ptr() 409 | .cast_const() 410 | .cast::() 411 | .add(start_offset.get()) 412 | }; 413 | 414 | // Copy element into the place 415 | 416 | // SAFETY: we call the closure right after we unwrap it 417 | let emplacer_closure = unsafe { emplacer.into_fn() }; 418 | 419 | // The emplacer can choose never to run the inner closure at all! In this case, the removed value 420 | // is simply forgotten. 421 | emplacer_closure(size_of_val.as_layout(), metadata, &mut |out_ptr| { 422 | if !out_ptr.is_null() { 423 | // SAFETY: we are allowed to copy `size_of_val` bytes into `out_ptr`, 424 | // by the preconditions of `Emplacer::new` 425 | unsafe { 426 | ptr::copy_nonoverlapping( 427 | thin_ptr_to_elem, 428 | out_ptr.cast::(), 429 | size_of_val.get(), 430 | ); 431 | } 432 | } else { 433 | let wide_ptr: *mut T = 434 | ptr::from_raw_parts_mut(thin_ptr_to_elem.cast_mut(), metadata); 435 | 436 | // SAFETY: We forget the element right after by copying over it and adjusting vec metadata 437 | unsafe { wide_ptr.drop_in_place() } 438 | } 439 | }); 440 | 441 | // Now that `emplacer_closure` has run successfuly, we don't need to worry 442 | // about exception safety anymore. 443 | // FIXME elide bounds check 444 | self.elems_info.remove(index); 445 | 446 | for ElementInfo { end_offset, .. } in 447 | // SAFETY: `index` in range by preconditions of function. 448 | unsafe { self.elems_info.get_unchecked_mut(index..) 
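// Editor's note on the emplacer calling convention, as this file uses it: the
// emplacer closure receives `(layout, metadata, write)`, and the vec's inner
// `write` closure accepts either a real destination pointer or null:
//
//     // emplacer(layout, metadata, &mut |out_ptr| {
//     //     if out_ptr.is_null() {
//     //         /* vec drops the element in place */
//     //     } else {
//     //         /* vec memcpys `layout.size()` bytes to `out_ptr` */
//     //     }
//     // });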
}
449 |         {
450 |             // SAFETY: `end_offset >= size_of_val` for elements following something
451 |             // of size `size_of_val`
452 |             unsafe {
453 |                 *end_offset = end_offset.unchecked_sub(size_of_val);
454 |             }
455 |         }
456 | 
457 |         // SAFETY: new end of vec can't be to the left of old start of elem at `index`
458 |         let how_much_to_move = unsafe { self.byte_len().unchecked_sub(start_offset) };
459 | 
460 |         // SAFETY: copying elements back where they belong
461 |         unsafe {
462 |             ptr::copy(
463 |                 thin_ptr_to_elem.add(size_of_val.get()),
464 |                 thin_ptr_to_elem.cast_mut(),
465 |                 how_much_to_move.get(),
466 |             );
467 |         }
468 |     }
469 | 
470 |     unsafe fn push_unchecked(&mut self, value: T, size_of_val: ValidSize<T>) {
471 |         debug_assert!(self.capacity() - self.len() > 0);
472 |         debug_assert!(self.byte_capacity() >= (self.byte_len().get() + size_of_val.get()));
473 | 
474 |         let metadata = <T as SplitMetadata>::Remainder::from_metadata(core::ptr::metadata(&value));
475 |         let start_offset = self.byte_len();
476 |         // SAFETY: preconditions of function
477 |         unsafe {
478 |             ptr::copy_nonoverlapping(
479 |                 addr_of!(value).cast(),
480 |                 self.ptr.as_ptr().cast::<u8>().add(start_offset.get()),
481 |                 size_of_val.get(),
482 |             );
483 | 
484 |             self.elems_info.push_unchecked(
485 |                 ElementInfo {
486 |                     metadata,
487 |                     end_offset: start_offset.unchecked_add(size_of_val),
488 |                 },
489 |                 (),
490 |             );
491 |         }
492 | 
493 |         mem::forget_unsized(value);
494 |     }
495 | 
496 |     fn push_with(&mut self, value: Emplacable<T, impl EmplacableFn<T>>) {
497 |         let emplacable_closure = value.into_fn();
498 | 
499 |         let emplacer_closure =
500 |             &mut |layout: Layout, metadata, inner_closure: &mut dyn FnMut(*mut PhantomData<T>)| {
501 |                 let (size_of_val, _) = decompose(layout);
502 | 
503 |                 // SAFETY: by `Emplacer::new` preconditions
504 |                 let size_of_val = unsafe { ValidSize::<T>::new_unchecked(size_of_val.get()) };
505 | 
506 |                 let reserve_result = self
507 |                     .try_reserve(1)
508 |                     .and_then(|()| self.try_reserve_additional_bytes_align(layout.size(), ()));
509 |                 unwrap_try_reserve_result(reserve_result);
510 | 
511 |                 let start_offset = self.byte_len();
512 | 
513 |                 // SAFETY: getting pointer to end of allocation
514 |                 let ptr_to_elem = unsafe { self.ptr.cast::<u8>().as_ptr().add(start_offset.get()) };
515 | 
516 |                 inner_closure(ptr_to_elem.cast());
517 | 
518 |                 let elem_info: ElementInfo<T> = ElementInfo {
519 |                     metadata: <T as SplitMetadata>::Remainder::from_metadata(metadata),
520 |                     // SAFETY: neither operand can overflow `isize`, so sum
521 |                     // can't overflow `usize`
522 |                     end_offset: unsafe { start_offset.unchecked_add(size_of_val) },
523 |                 };
524 | 
525 |                 // SAFETY: `emplacable` wrote new element at end of vec,
526 |                 // and we have reserved the needed space
527 |                 unsafe { self.elems_info.push_unchecked(elem_info, ()) };
528 |             };
529 | 
530 |         // SAFETY: `emplacer_closure` runs the closure with a valid pointer to the end of the vec
531 |         let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) };
532 | 
533 |         emplacable_closure(emplacer);
534 |     }
535 | 
536 |     unsafe fn pop_into_unchecked(&mut self, emplacer: &mut Emplacer<'_, T>) {
537 |         debug_assert!(!self.elems_info.is_empty());
538 | 
539 |         // SAFETY: precondition of function
540 |         let last_elem_metadata = unsafe { self.elems_info.pop().unwrap_unchecked() };
541 | 
542 |         let ElementInfo {
543 |             metadata,
544 |             end_offset,
545 |         } = last_elem_metadata;
546 | 
547 |         let start_offset = self.byte_len();
548 | 
549 |         // SAFETY: start_offset < end_offset
550 |         let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) };
551 | 
552 |         let metadata =
metadata.as_metadata(size_of_val.as_unaligned());
553 | 
554 |         // Get pointer to the element we are popping out of the vec
555 |         // SAFETY: offset comes from vec
556 |         let thin_ptr_to_elem = unsafe {
557 |             self.ptr
558 |                 .as_ptr()
559 |                 .cast_const()
560 |                 .cast::<u8>()
561 |                 .add(start_offset.get())
562 |         };
563 | 
564 |         // Copy element into the place
565 | 
566 |         // SAFETY: we call the closure right after we unwrap it
567 |         let emplacer_closure = unsafe { emplacer.into_fn() };
568 | 
569 |         emplacer_closure(size_of_val.as_layout(), metadata, &mut |out_ptr| {
570 |             if !out_ptr.is_null() {
571 |                 // SAFETY: we are allowed to copy `size_of_val` bytes into `out_ptr`,
572 |                 // by the preconditions of `Emplacer::new`
573 |                 unsafe {
574 |                     ptr::copy_nonoverlapping(
575 |                         thin_ptr_to_elem,
576 |                         out_ptr.cast::<u8>(),
577 |                         size_of_val.get(),
578 |                     );
579 |                 }
580 |             } else {
581 |                 let wide_ptr_to_elem: *mut T =
582 |                     ptr::from_raw_parts_mut(thin_ptr_to_elem.cast_mut(), metadata);
583 | 
584 |                 // SAFETY: we adjusted vec metadata earlier, so this won't be double-dropped
585 |                 unsafe { wide_ptr_to_elem.drop_in_place() }
586 |             }
587 |         });
588 |     }
589 | 
590 |     #[inline]
591 |     fn len(&self) -> usize {
592 |         self.elems_info.len()
593 |     }
594 | 
595 |     #[inline]
596 |     unsafe fn get_unchecked_raw(&self, index: usize) -> NonNull<T> {
597 |         debug_assert!(index < self.len());
598 | 
599 |         // SAFETY: see individual comments inside block
600 |         unsafe {
601 |             // SAFETY: precondition of method
602 |             let start_offset = self.start_offset_of_unchecked(index);
603 |             let &ElementInfo {
604 |                 end_offset,
605 |                 metadata,
606 |             } = self.elems_info.get_unchecked(index);
607 | 
608 |             // SAFETY: end >= start
609 |             let size_of_val = end_offset.unchecked_sub(start_offset);
610 |             let metadata = metadata.as_metadata(size_of_val.as_unaligned());
611 | 
612 |             // SAFETY: `start_offset` in range of allocation
613 |             NonNull::from_raw_parts(
614 |                 NonNull::new_unchecked(self.ptr.as_ptr().cast::<u8>().add(start_offset.get())),
615 |                 metadata,
616 |             )
617 |         }
618 |     }
619 | 
620 |     #[inline]
621 |     fn iter(&self) -> Self::Iter<'_> {
622 |         AlignedIter {
623 |             elems_info: self.elems_info.iter(),
624 |             ptr: self.ptr,
625 |             start_offset: ValidSize::ZERO,
626 |         }
627 |     }
628 | 
629 |     #[inline]
630 |     fn iter_mut(&mut self) -> Self::IterMut<'_> {
631 |         AlignedIterMut {
632 |             elems_info: self.elems_info.iter(),
633 |             ptr: self.ptr,
634 |             start_offset: ValidSize::ZERO,
635 |         }
636 |     }
637 | 
638 |     #[inline]
639 |     fn from_sized<S>(vec: ::alloc::vec::Vec<S>) -> Self
640 |     where
641 |         S: Unsize<T>,
642 |     {
643 |         let mut vec = ManuallyDrop::new(vec);
644 |         let len_elems = vec.len();
645 |         let cap_elems = vec.capacity();
646 |         let heap_ptr = vec.as_mut_ptr();
647 |         let heap_ptr_unsized: *mut T = heap_ptr;
648 |         let metadata =
649 |             <T as SplitMetadata>::Remainder::from_metadata(ptr::metadata(heap_ptr_unsized));
650 |         // SAFETY: ptr comes from vec, can't be null
651 |         let heap_ptr_thin: NonNull<()> = unsafe { NonNull::new_unchecked(heap_ptr_unsized.cast()) };
652 | 
653 |         // SAFETY: can't overflow, as otherwise allocation would be overflowing
654 |         let byte_capacity = unsafe { cap_elems.unchecked_mul(mem::size_of::<S>()) };
655 | 
656 |         // SAFETY: same as above
657 |         let byte_capacity = unsafe { ValidSize::new_unchecked(byte_capacity) };
658 | 
659 |         let elems_info = (0..len_elems)
660 |             .map(|index| ElementInfo {
661 |                 metadata,
662 |                 // SAFETY: can't overflow, as otherwise allocation would be overflowing
663 |                 end_offset: unsafe {
664 |                     ValidSize::new_unchecked(index.unchecked_add(1).unchecked_mul(mem::size_of::<S>()))
665 |                 },
666 |             })
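// Editor's note (worked example): for `len_elems` sized elements, the stored
// end offsets must be `size, 2*size, ..., len_elems*size`, i.e.
// `(index + 1) * size_of::<S>()` for the element at `index`. With
// `size_of::<S>() == 4` and 3 elements, the offsets are [4, 8, 12], so
// element 0 spans bytes 0..4 and element 2 spans bytes 8..12.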
667 | .collect(); 668 | 669 | let elems_info = ManuallyDrop::new(elems_info); 670 | 671 | Self { 672 | ptr: heap_ptr_thin, 673 | byte_capacity, 674 | elems_info, 675 | _marker: PhantomData, 676 | } 677 | } 678 | } 679 | 680 | impl AlignedVecProvider for AlignedVecInner {} 681 | 682 | impl AlignedVecImpl for T { 683 | default type Impl = AlignedVecInner; 684 | } 685 | 686 | macro_rules! iter_ref { 687 | ($iter_ty:ident, $from_raw_parts:ident $($muta:ident)?) => { 688 | pub(in super::super) struct $iter_ty<'a, T: ?Sized + Aligned> { 689 | elems_info: core::slice::Iter<'a, ElementInfo>, 690 | ptr: NonNull<()>, 691 | start_offset: ValidSize, 692 | } 693 | 694 | impl<'a, T: ?Sized + Aligned + 'a> Iterator for $iter_ty<'a, T> { 695 | type Item = &'a $($muta)? T; 696 | 697 | #[inline] 698 | fn next(&mut self) -> Option { 699 | let ElementInfo { 700 | metadata, 701 | end_offset, 702 | } = *self.elems_info.next()?; 703 | 704 | // SAFETY: end of element can't be smaller than start 705 | let size_of_val = unsafe { end_offset.unchecked_sub(self.start_offset) }; 706 | let metadata = metadata.as_metadata(size_of_val.as_unaligned()); 707 | 708 | let start_of_alloc = self.ptr.as_ptr().cast::(); 709 | // SAFETY: offset is within allocation 710 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(self.start_offset.get()) }; 711 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 712 | 713 | // SAFETY: pointer to element of vec 714 | let wide_ref = unsafe { & $($muta)? *wide_ptr }; 715 | 716 | self.start_offset = end_offset; 717 | 718 | Some(wide_ref) 719 | } 720 | 721 | #[inline] 722 | fn size_hint(&self) -> (usize, Option) { 723 | self.elems_info.size_hint() 724 | } 725 | 726 | #[inline] 727 | fn count(self) -> usize { 728 | self.elems_info.count() 729 | } 730 | 731 | #[inline] 732 | fn nth(&mut self, n: usize) -> Option { 733 | let start_offset = n 734 | .checked_sub(1) 735 | .and_then(|n| self.elems_info.nth(n)) 736 | .copied() 737 | .map_or(ValidSize::ZERO, |e_i| e_i.end_offset); 738 | 739 | let ElementInfo { 740 | metadata, 741 | end_offset, 742 | } = *self.elems_info.next()?; 743 | 744 | // SAFETY: end of element can't be smaller than start` 745 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 746 | let metadata = metadata.as_metadata(size_of_val.as_unaligned()); 747 | 748 | let start_of_alloc = self.ptr.as_ptr().cast::(); 749 | // SAFETY: offset is within allocation 750 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) }; 751 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 752 | 753 | // SAFETY: pointer to element of vec 754 | let wide_ref = unsafe { & $($muta)? *wide_ptr }; 755 | 756 | self.start_offset = end_offset; 757 | 758 | Some(wide_ref) 759 | } 760 | 761 | #[inline] 762 | fn last(mut self) -> Option { 763 | self.nth(self.elems_info.len().checked_sub(1)?) 
764 | } 765 | } 766 | 767 | impl<'a, T: ?Sized + Aligned + 'a> DoubleEndedIterator for $iter_ty<'a, T> { 768 | #[inline] 769 | fn next_back(&mut self) -> Option { 770 | let ElementInfo { 771 | metadata, 772 | end_offset, 773 | } = *self.elems_info.next_back()?; 774 | 775 | let start_offset = self 776 | .elems_info 777 | .as_slice() 778 | .last() 779 | .map_or(ValidSize::ZERO, |e_i| e_i.end_offset); 780 | 781 | // SAFETY: end of element can't be smaller than start 782 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 783 | let metadata = metadata.as_metadata(size_of_val.as_unaligned()); 784 | 785 | let start_of_alloc = self.ptr.as_ptr().cast::(); 786 | // SAFETY: offset is within allocation 787 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) }; 788 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 789 | 790 | // SAFETY: pointer to element of vec 791 | let wide_ref = unsafe { & $($muta)? *wide_ptr }; 792 | 793 | Some(wide_ref) 794 | } 795 | } 796 | 797 | impl<'a, T: ?Sized + Aligned + 'a> ExactSizeIterator for $iter_ty<'a, T> { 798 | #[inline] 799 | fn len(&self) -> usize { 800 | self.elems_info.len() 801 | } 802 | } 803 | 804 | impl<'a, T: ?Sized + Aligned + 'a> FusedIterator for $iter_ty<'a, T> {} 805 | }; 806 | } 807 | 808 | iter_ref!(AlignedIter, from_raw_parts); 809 | iter_ref!(AlignedIterMut, from_raw_parts_mut mut); 810 | -------------------------------------------------------------------------------- /src/inner/sized.rs: -------------------------------------------------------------------------------- 1 | //! The implementation of `UnsizedVec` for `T: Sized`. 2 | 3 | use core::{ 4 | alloc::Layout, 5 | cmp, 6 | marker::{PhantomData, Unsize}, 7 | mem, 8 | ptr::{self, NonNull}, 9 | slice::{Iter, IterMut}, 10 | }; 11 | 12 | use emplacable::{Emplacable, EmplacableFn, Emplacer}; 13 | 14 | use crate::unwrap_try_reserve_result; 15 | 16 | use super::{AlignedVecImpl, AlignedVecProvider, TryReserveError, UnsizedVecProvider}; 17 | 18 | impl UnsizedVecProvider for ::alloc::vec::Vec { 19 | type Align = (); 20 | type Size = (); 21 | 22 | type Iter<'a> 23 | = Iter<'a, T> 24 | where 25 | T: 'a; 26 | type IterMut<'a> 27 | = IterMut<'a, T> 28 | where 29 | T: 'a; 30 | 31 | const NEW_ALIGN_1: Self = ::alloc::vec::Vec::new(); 32 | const NEW_ALIGN_PTR: Self = Self::NEW_ALIGN_1; 33 | 34 | #[inline] 35 | fn capacity(&self) -> usize { 36 | self.capacity() 37 | } 38 | 39 | #[inline] 40 | fn byte_capacity(&self) -> usize { 41 | // SAFETY: capacity can't overflow `isize::MAX` bytes 42 | unsafe { self.capacity().unchecked_mul(mem::size_of::()) } 43 | } 44 | 45 | #[inline] 46 | fn byte_len(&self) -> usize { 47 | // SAFETY: capacity can't overflow `isize::MAX` bytes 48 | unsafe { self.len().unchecked_mul(mem::size_of::()) } 49 | } 50 | 51 | #[inline] 52 | fn align(&self) -> usize { 53 | mem::align_of::() 54 | } 55 | 56 | #[inline] 57 | fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { 58 | Ok(self.try_reserve(additional)?) 59 | } 60 | 61 | #[inline] 62 | fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { 63 | Ok(self.try_reserve_exact(additional)?) 
64 | } 65 | 66 | #[inline] 67 | fn try_reserve_additional_bytes_align( 68 | &mut self, 69 | additional_bytes: usize, 70 | _align: (), 71 | ) -> Result<(), TryReserveError> { 72 | // SAFETY: capacity >= len 73 | let free = unsafe { self.capacity().unchecked_sub(self.len()) }; 74 | let needed = additional_bytes 75 | .saturating_add(free) 76 | .div_ceil(mem::size_of::()); 77 | Ok(self.try_reserve_exact(needed.saturating_sub(free))?) 78 | } 79 | 80 | #[inline] 81 | fn shrink_capacity_bytes_align_to( 82 | &mut self, 83 | min_capacity: usize, 84 | min_byte_capacity: usize, 85 | _align: (), 86 | ) { 87 | let min_capacity = cmp::max( 88 | min_capacity, 89 | min_byte_capacity.div_ceil(mem::size_of::()), 90 | ); 91 | self.shrink_to(min_capacity); 92 | } 93 | 94 | #[inline] 95 | unsafe fn insert_unchecked(&mut self, index: usize, element: T, _size: ()) { 96 | debug_assert!(self.capacity() > self.len()); 97 | 98 | // SAFETY: precondition of the function 99 | unsafe { 100 | let how_much_to_move = self.len().unchecked_sub(index); 101 | let start_ptr = self.as_mut_ptr().add(index); 102 | // shift back elems to the right of pointer 103 | ptr::copy(start_ptr, start_ptr.add(1), how_much_to_move); 104 | start_ptr.write(element); 105 | self.set_len(self.len().unchecked_add(1)); 106 | } 107 | } 108 | 109 | unsafe fn insert_with_unchecked( 110 | &mut self, 111 | index: usize, 112 | value: Emplacable>, 113 | ) { 114 | /// Helper to ensure elements are moved back 115 | /// where they belong in case `inner_closure` 116 | /// panics. 117 | struct ElementShifterBacker { 118 | ptr_to_index: *mut T, 119 | num_elems_to_shift: usize, 120 | } 121 | 122 | impl Drop for ElementShifterBacker { 123 | #[inline] 124 | fn drop(&mut self) { 125 | // SAFETY: shifting elements back in case of drops 126 | unsafe { 127 | ptr::copy( 128 | self.ptr_to_index.add(1), 129 | self.ptr_to_index, 130 | self.num_elems_to_shift, 131 | ); 132 | } 133 | } 134 | } 135 | 136 | debug_assert!(index <= self.len()); 137 | 138 | let emplacable_closure = value.into_fn(); 139 | 140 | let emplacer_closure = 141 | &mut move |_, (), inner_closure: &mut dyn FnMut(*mut PhantomData)| { 142 | let reserve_result = >::try_reserve_exact(self, 1); 143 | unwrap_try_reserve_result(reserve_result); 144 | 145 | // SAFETY: by precondition of function 146 | let ptr_to_elem = unsafe { self.as_mut_ptr().add(index) }; 147 | 148 | // SAFETY: by precondition of function 149 | let num_elems_to_shift = unsafe { self.len().unchecked_sub(index) }; 150 | 151 | let shifter_backer: ElementShifterBacker = ElementShifterBacker { 152 | ptr_to_index: ptr_to_elem, 153 | num_elems_to_shift, 154 | }; 155 | 156 | // SAFETY: copying back elements to make room 157 | unsafe { ptr::copy(ptr_to_elem, ptr_to_elem.add(1), num_elems_to_shift) } 158 | 159 | // If this unwinds, `shifter_backer` will be dropped 160 | // and the elements will be moved back where they belong. 161 | inner_closure(ptr_to_elem.cast()); 162 | 163 | // `inner_closure` succeeded, so don't want to move elements back now! 
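// Editor's note: the surrounding code follows the standard panic-safe order
// for writing into a `Vec`'s spare capacity: reserve first, write through a
// raw pointer, and only bump `len` once the write has succeeded. A minimal
// stable-Rust sketch of the same idea (illustrative, not part of the crate):
//
//     fn push_via_ptr<T>(v: &mut Vec<T>, val: T) {
//         v.reserve(1);
//         unsafe {
//             v.as_mut_ptr().add(v.len()).write(val); // write into spare capacity
//             v.set_len(v.len() + 1);                 // commit only after the write
//         }
//     }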
164 | mem::forget(shifter_backer); 165 | 166 | // SAFETY: `inner_closure` wrote new element at the correct index, 167 | // elems to the right were shifted back 168 | unsafe { self.set_len(self.len().unchecked_add(1)) }; 169 | }; 170 | 171 | // SAFETY: `emplacer_closure` runs the closure with a valid pointer to `index` 172 | let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) }; 173 | 174 | emplacable_closure(emplacer); 175 | } 176 | 177 | unsafe fn remove_into_unchecked(&mut self, index: usize, emplacer: &mut Emplacer<'_, T>) { 178 | debug_assert!(index < self.len()); 179 | 180 | // SAFETY: precondition of function 181 | let new_len = unsafe { self.len().unchecked_sub(1) }; 182 | 183 | // Set new length of vector 184 | 185 | // SAFETY: new_len < old_len 186 | unsafe { self.set_len(new_len) }; 187 | 188 | // Get pointer to the element we are popping out of the vec 189 | // SAFETY: offset comes from vec 190 | let thin_ptr_to_elem = unsafe { self.as_ptr().add(index) }; 191 | 192 | // SAFETY: old_len > index, so old_len - 1 == new_len >= index 193 | let how_much_to_move = unsafe { new_len.unchecked_sub(index) }; 194 | 195 | // Copy element into the place 196 | 197 | // SAFETY: we call the closure right after we unwrap it 198 | let emplacer_closure = unsafe { emplacer.into_fn() }; 199 | 200 | emplacer_closure(Layout::new::(), (), &mut |out_ptr| { 201 | if !out_ptr.is_null() { 202 | // SAFETY: we are allowed to copy `size_of::()` bytes into `out_ptr`, 203 | // by the preconditions of `Emplacer::new` 204 | unsafe { 205 | ptr::copy_nonoverlapping(thin_ptr_to_elem, out_ptr.cast(), 1); 206 | } 207 | } else { 208 | let typed_ptr_to_elem: *mut T = thin_ptr_to_elem.cast_mut().cast(); 209 | 210 | // SAFETY: we adusted vec metadata earlier, and copy elements back right after, 211 | // so this won't be double-dropped 212 | unsafe { typed_ptr_to_elem.drop_in_place() } 213 | } 214 | }); 215 | 216 | // SAFETY: copy elements back where they belong 217 | unsafe { 218 | ptr::copy( 219 | thin_ptr_to_elem.add(1), 220 | thin_ptr_to_elem.cast_mut(), 221 | how_much_to_move, 222 | ); 223 | } 224 | } 225 | 226 | #[inline] 227 | unsafe fn push_unchecked(&mut self, value: T, _size: ()) { 228 | debug_assert!(self.capacity() - self.len() > 0); 229 | 230 | let old_len = self.len(); 231 | // SAFETY: precondition of the function 232 | unsafe { 233 | self.as_mut_ptr().add(old_len).write(value); 234 | self.set_len(old_len.unchecked_add(1)); 235 | } 236 | } 237 | 238 | fn push_with(&mut self, value: Emplacable>) { 239 | let emplacable_closure = value.into_fn(); 240 | 241 | let emplacer_closure = 242 | &mut move |_, (), inner_closure: &mut dyn FnMut(*mut PhantomData)| { 243 | let reserve_result = >::try_reserve(self, 1); 244 | unwrap_try_reserve_result(reserve_result); 245 | // SAFETY: getting pointer to end of allocation 246 | let ptr_to_elem = unsafe { self.as_mut_ptr().add(self.len()) }; 247 | 248 | inner_closure(ptr_to_elem.cast()); 249 | 250 | // SAFETY: `inner_closure` wrote new element at end of vec 251 | unsafe { self.set_len(self.len().unchecked_add(1)) }; 252 | }; 253 | 254 | // SAFETY: `emplacer_closure` runs the closure with a valid pointer to the end of the vec 255 | let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) }; 256 | 257 | emplacable_closure(emplacer); 258 | } 259 | 260 | unsafe fn pop_into_unchecked(&mut self, emplacer: &mut Emplacer<'_, T>) { 261 | debug_assert!(!self.is_empty()); 262 | 263 | // Set new length of vector 264 | 265 | // SAFETY: precondition of function 266 | let new_len = 
unsafe { self.len().unchecked_sub(1) }; 267 | 268 | // SAFETY: new_len < old_len 269 | unsafe { self.set_len(new_len) }; 270 | 271 | // Get pointer to the element we are popping out of the vec 272 | // SAFETY: offset comes from vec 273 | let ptr_to_elem = unsafe { self.as_ptr().add(new_len) }; 274 | 275 | // Copy element into the place 276 | 277 | // SAFETY: we call the closure right after we unwrap it 278 | let emplace_closure = unsafe { emplacer.into_fn() }; 279 | 280 | emplace_closure(Layout::new::(), (), &mut |out_ptr| { 281 | if !out_ptr.is_null() { 282 | // SAFETY: we are allowed to copy `size_of::()` bytes into `out_ptr`, 283 | // by the preconditions of `Emplacer::new` 284 | unsafe { 285 | ptr::copy_nonoverlapping(ptr_to_elem, out_ptr.cast(), 1); 286 | } 287 | } else { 288 | let typed_ptr_to_elem: *mut T = ptr_to_elem.cast_mut().cast(); 289 | 290 | // SAFETY: we adusted vec metadata earlier, so this won't be double-dropped 291 | unsafe { typed_ptr_to_elem.drop_in_place() } 292 | } 293 | }); 294 | } 295 | 296 | #[inline] 297 | fn len(&self) -> usize { 298 | self.len() 299 | } 300 | 301 | #[inline] 302 | unsafe fn get_unchecked_raw(&self, index: usize) -> NonNull { 303 | debug_assert!(index < self.len()); 304 | 305 | // SAFETY: precondition of method 306 | unsafe { NonNull::new_unchecked(self.as_ptr().add(index).cast_mut()).cast() } 307 | } 308 | 309 | #[inline] 310 | fn iter(&self) -> Self::Iter<'_> { 311 | let slice: &[T] = self; 312 | slice.iter() 313 | } 314 | 315 | #[inline] 316 | fn iter_mut(&mut self) -> Self::IterMut<'_> { 317 | let slice: &mut [T] = self; 318 | slice.iter_mut() 319 | } 320 | 321 | fn from_sized(_: ::alloc::vec::Vec) -> Self 322 | where 323 | S: Unsize, 324 | { 325 | unreachable!("can't unsize to a sized type, that would make 0 sense") 326 | } 327 | } 328 | 329 | impl AlignedVecProvider for ::alloc::vec::Vec {} 330 | 331 | impl AlignedVecImpl for T { 332 | type Impl = ::alloc::vec::Vec; 333 | } 334 | -------------------------------------------------------------------------------- /src/inner/unaligned.rs: -------------------------------------------------------------------------------- 1 | //! The implementation of `UnsizedVec` for `T: ?Sized + ?Aligned`. 2 | 3 | use ::alloc::{alloc, collections::TryReserveErrorKind}; 4 | use core::{ 5 | alloc::{Allocator, Layout}, 6 | cmp, 7 | iter::FusedIterator, 8 | marker::{PhantomData, Unsize}, 9 | mem::{self, ManuallyDrop}, 10 | ptr::{self, NonNull, addr_of}, 11 | }; 12 | 13 | use emplacable::{Emplacable, EmplacableFn, Emplacer}; 14 | 15 | use crate::{ 16 | helper::{ 17 | MetadataRemainder, SplitMetadata, decompose, valid_align::ValidAlign, 18 | valid_size::ValidSizeUnaligned, 19 | }, 20 | marker::Aligned, 21 | unwrap_try_reserve_result, 22 | }; 23 | 24 | use super::{TryReserveError, UnsizedVecImpl, UnsizedVecProvider}; 25 | 26 | struct ElementInfo { 27 | /// The pointer metadata of the element. 28 | metadata: ::Remainder, 29 | /// The offset that the element following this one would be stored at, 30 | /// but disregarding padding due to over-alignment. 31 | /// We use this encoding to store the sizes of `Vec` elements 32 | /// because it allows for *O(1)* random access while only storing 33 | /// a single `usize`. 34 | /// 35 | /// To get the actual offset of the next element, use 36 | /// `unchecked_pad_to(end_offset, align)`. 
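// Illustrative sketch (editor's note, not part of the crate): conceptually,
// `unchecked_pad_to` rounds an offset up to the next multiple of a
// power-of-two alignment:
//
//     fn pad_to(offset: usize, align: usize) -> usize {
//         debug_assert!(align.is_power_of_two());
//         (offset + align - 1) & !(align - 1)
//     }
//
//     // pad_to(5, 4) == 8, pad_to(8, 4) == 8, pad_to(0, 4) == 0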
37 |     end_offset: ValidSizeUnaligned,
38 | }
39 | 
40 | impl<T: ?Sized> Clone for ElementInfo<T> {
41 |     fn clone(&self) -> Self {
42 |         *self
43 |     }
44 | }
45 | 
46 | impl<T: ?Sized> Copy for ElementInfo<T> {}
47 | 
48 | pub(in super::super) struct UnalignedVecInner<T: ?Sized> {
49 |     ptr: NonNull<()>,
50 |     /// # Safety
51 |     ///
52 |     /// For simplicity, must be a multiple of `self.align`.
53 |     byte_capacity: ValidSizeUnaligned,
54 |     elems_info: ManuallyDrop<::alloc::vec::Vec<ElementInfo<T>>>,
55 |     align: ValidAlign,
56 |     _marker: PhantomData<T>,
57 | }
58 | 
59 | impl<T: ?Sized> UnalignedVecInner<T> {
60 |     /// The number of bytes this vec is currently using,
61 |     /// discounting padding following the last element.
62 |     #[inline]
63 |     fn unaligned_byte_len(&self) -> ValidSizeUnaligned {
64 |         self.elems_info
65 |             .last()
66 |             .map_or(ValidSizeUnaligned::ZERO, |last| last.end_offset)
67 |     }
68 | 
69 |     /// The number of bytes this vec is currently using,
70 |     /// including padding following the last element.
71 |     #[inline]
72 |     fn aligned_byte_len(&self) -> ValidSizeUnaligned {
73 |         // SAFETY: it's an invariant of the capacity field that this be legal
74 |         unsafe { self.unaligned_byte_len().unchecked_pad_to(self.align) }
75 |     }
76 | 
77 |     /// Returns the offset of the start of this element in the vec.
78 |     ///
79 |     /// # Safety
80 |     ///
81 |     /// Does no bounds checks.
82 |     #[inline]
83 |     unsafe fn start_offset_of_unchecked(&self, index: usize) -> ValidSizeUnaligned {
84 |         index
85 |             .checked_sub(1)
86 |             .map_or(ValidSizeUnaligned::ZERO, |index_m_1|
87 |                 // SAFETY: precondition of function
88 |                 unsafe {
89 |                     self.elems_info
90 |                         .get_unchecked(index_m_1)
91 |                         .end_offset
92 |                         .unchecked_pad_to(self.align)
93 |                 })
94 |     }
95 | 
96 |     /// Returns the maximum alignment among all the elements in the vec.
97 |     /// Used by `shrink`.
98 |     #[inline]
99 |     fn max_align_of_elems(&self) -> ValidAlign {
100 |         self.iter()
101 |             .map(ValidAlign::of_val)
102 |             .max()
103 |             .unwrap_or(ValidAlign::ONE)
104 |     }
105 | 
106 |     /// Used in `try_reserve_additional_bytes_align`.
107 |     /// Returns what the length in bytes of the array would be
108 |     /// after its alignment is increased from `self.align` to `new_align`.
109 |     ///
110 |     /// # Safety
111 |     ///
112 |     /// `self.align <= new_align` must hold.
113 |     unsafe fn len_after_realign_up(&mut self, new_align: ValidAlign) -> Option<ValidSizeUnaligned> {
114 |         debug_assert!(self.align <= new_align);
115 |         let mut new_pad_to_new: ValidSizeUnaligned = ValidSizeUnaligned::ZERO;
116 |         self.elems_info.iter().try_fold(
117 |             ValidSizeUnaligned::ZERO,
118 |             |shift,
119 |              ElementInfo {
120 |                  end_offset: old_end_offset,
121 |                  ..
122 |              }| {
123 |                 let new_end_offset = old_end_offset.checked_add(shift)?;
124 | 
125 |                 new_pad_to_new = new_end_offset.checked_pad_to(new_align)?;
126 | 
127 |                 // SAFETY: `old_align <= new_align`, so if above call returned `Some`, this must be legal.
128 |                 let new_pad_to_old = unsafe { new_end_offset.unchecked_pad_to(self.align) };
129 | 
130 |                 // SAFETY: `old_align <= new_align`, so can't underflow
131 |                 let padding_difference = unsafe { new_pad_to_new.unchecked_sub(new_pad_to_old) };
132 | 
133 |                 shift.checked_add(padding_difference)
134 |             },
135 |         )?;
136 | 
137 |         Some(new_pad_to_new)
138 |     }
139 | 
140 |     /// Realigns all elements in the vec to the given `new_align`,
141 |     /// if the current align is less.
142 |     ///
143 |     /// # Safety
144 |     ///
145 |     /// `new_align > self.align` must hold.
146 |     /// `self.len() > 1` must hold.
147 |     ///
148 |     /// Realigning must not lead to overflow.
149 |     ///
150 |     /// `new_align` must be equal to the actual alignment of the allocation.
151 |     /// Also, this function does not allocate memory,
152 |     /// nor does it check that enough memory has been allocated.
153 |     ///
154 |     /// Finally, this function doesn't update `self.align`, do that yourself.
155 |     unsafe fn realign_up(&mut self, new_align: ValidAlign) {
156 |         let old_align = self.align;
157 | 
158 |         debug_assert!(self.len() > 1 && new_align > old_align);
159 | 
160 |         // We compute the new offset of each element, along with the difference from the old offset.
161 |         // Then, we copy everything over.
162 |         // Doing this without allocating requires some complicated code.
163 |         //
164 |         // First we calculate how much we need to shift the very last element,
165 |         // then we perform the copies while reversing our calculations.
166 |         //
167 |         // The first element is already in the right place, its offset is 0.
168 | 
169 |         // Starting here, our offsets are invalid, so unwinding is UB !!!
170 |         // To make this explicit, we use unchecked ops for arithmetic.
171 |         // This loop is basically `len_after_realign_up`, except it skips checks and modifies the metadata.
172 |         // TODO: get from `len_after_realign_up`?
173 |         let final_offset_shift: ValidSizeUnaligned = self.elems_info.iter_mut().fold(
174 |             ValidSizeUnaligned::ZERO,
175 |             |shift, ElementInfo { end_offset, .. }| {
176 |                 // SAFETY: precondition of function
177 |                 unsafe {
178 |                     let new_end_offset = end_offset.unchecked_add(shift);
179 |                     *end_offset = new_end_offset;
180 | 
181 |                     let new_pad_to_new = new_end_offset.unchecked_pad_to(new_align);
182 |                     let new_pad_to_old = new_end_offset.unchecked_pad_to(old_align);
183 |                     let padding_difference = new_pad_to_new.unchecked_sub(new_pad_to_old);
184 | 
185 |                     shift.unchecked_add(padding_difference)
186 |                 }
187 |             },
188 |         );
189 | 
190 |         // Now we go in reverse, and copy.
191 |         self.elems_info.array_windows::<2>().rev().fold(
192 |             final_offset_shift,
193 |             |shift_end,
194 | 
195 |              &[
196 |                  ElementInfo {
197 |                      end_offset: prev_end_offset,
198 |                      ..
199 |                  },
200 |                  ElementInfo {
201 |                      end_offset: new_end_offset,
202 |                      ..
203 |                  },
204 |              ]| {
205 |                 // SAFETY: See comments inside block
206 |                 unsafe {
207 |                     // SAFETY: Reversing computation in the last loop.
208 |                     let new_pad_to_new = new_end_offset.unchecked_pad_to(new_align);
209 |                     let new_pad_to_old = new_end_offset.unchecked_pad_to(old_align);
210 |                     let padding_difference = new_pad_to_new.unchecked_sub(new_pad_to_old);
211 |                     let shift_start = shift_end.unchecked_sub(padding_difference);
212 | 
213 |                     let new_start_offset = prev_end_offset.unchecked_pad_to(new_align);
214 |                     let old_start_offset = new_start_offset.unchecked_sub(shift_start);
215 | 
216 |                     // SAFETY: End offset >= start offset
217 |                     let size_of_val = new_end_offset.unchecked_sub(new_start_offset);
218 | 
219 |                     // SAFETY: moving element to new correct position, as computed above
220 |                     ptr::copy(
221 |                         self.ptr.as_ptr().cast::<u8>().add(old_start_offset.get()),
222 |                         self.ptr.as_ptr().cast::<u8>().add(new_start_offset.get()),
223 |                         size_of_val.get(),
224 |                     );
225 |                     shift_start
226 |                 }
227 |             },
228 |         );
229 |     }
230 | 
231 |     /// Realigns all elements in the vec to the given `new_align`,
232 |     /// if the current align is greater.
233 |     ///
234 |     /// Opposite of `realign_up`.
235 |     ///
236 |     /// # Safety
237 |     ///
238 |     /// `new_align < self.align` must hold.
239 |     /// `new_align >= self.max_align_of_elems()` must hold.
240 |     /// `self.len() > 1` must hold.
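// Editor's note on `realign_up` above: when alignment grows, every element
// moves right, so the copies must run back-to-front or a not-yet-moved
// element could be overwritten. Worked example, two 1-byte elements realigned
// from align 1 to align 4:
//
//     // old layout: elem0 at 0..1, elem1 at 1..2
//     // new layout: elem0 at 0..1, elem1 at 4..5 (start padded up to 4)
//     // copy order: elem1 first (1 -> 4); elem0 is already in place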
241 |     ///
242 |     /// This function does not shrink the allocation,
243 |     /// you will need to do that yourself,
244 |     /// *even if you don't change the allocated size*,
245 |     /// to ensure that the allocation is later deallocated
246 |     /// with the correct alignment.
247 |     ///
248 |     /// Finally, this function doesn't update `self.align`, do that yourself.
249 |     unsafe fn realign_down(&mut self, new_align: ValidAlign) {
250 |         let old_align = self.align;
251 | 
252 |         debug_assert!(self.len() > 1 && new_align < old_align);
253 | 
254 |         // We compute the new offset of each element, along with the difference from the old offset.
255 |         // Then, we copy the element over.
256 |         //
257 |         // This is a lot simpler than `realign_up`; we can do everything in a single pass.
258 | 
259 |         let mut shift_back = ValidSizeUnaligned::ZERO;
260 |         for &[
261 |             ElementInfo {
262 |                 end_offset: prev_new_end_offset,
263 |                 ..
264 |             },
265 |             ElementInfo {
266 |                 end_offset: old_end_offset,
267 |                 ..
268 |             },
269 |         ] in self.elems_info.array_windows::<2>()
270 |         {
271 |             // SAFETY: shift must be smaller than size of allocation up to this point
272 |             let new_end_offset = unsafe { old_end_offset.unchecked_sub(shift_back) };
273 | 
274 |             // SAFETY: can't overflow, or else unshifted allocation would have overflowed
275 |             let new_start_offset = unsafe { prev_new_end_offset.unchecked_pad_to(new_align) };
276 | 
277 |             // SAFETY: shift must be smaller than size of allocation up to this point
278 |             let old_start_offset = unsafe { new_start_offset.unchecked_sub(shift_back) };
279 | 
280 |             // SAFETY: End offset >= start offset
281 |             let size_of_val = unsafe { new_end_offset.unchecked_sub(new_start_offset) };
282 | 
283 |             // SAFETY: moving element to new correct position, as computed above
284 |             unsafe {
285 |                 ptr::copy(
286 |                     self.ptr.as_ptr().cast::<u8>().add(old_start_offset.get()),
287 |                     self.ptr.as_ptr().cast::<u8>().add(new_start_offset.get()),
288 |                     size_of_val.get(),
289 |                 );
290 |             }
291 | 
292 |             // SAFETY: pads can't overflow as otherwise old offsets would be invalid.
293 |             // Sub can't overflow as new_align < old_align.
294 |             // Add can't overflow as we can't shift more than the entire size of the allocation.
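// Editor's note: this is the mirror image of the `realign_up` argument above:
// when alignment shrinks, every element moves left (or stays put), so a
// single front-to-back pass never overwrites an element that hasn't been
// copied yet. E.g. going from align 4 to align 1 with 1-byte elements at 0..1
// and 4..5, elem1 simply copies 4 -> 1.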
295 | shift_back = unsafe { 296 | shift_back.unchecked_add( 297 | new_end_offset 298 | .unchecked_pad_to(old_align) 299 | .unchecked_sub(new_end_offset.unchecked_pad_to(new_align)), 300 | ) 301 | }; 302 | } 303 | } 304 | } 305 | 306 | impl Drop for UnalignedVecInner { 307 | fn drop(&mut self) { 308 | let mut start_offset: ValidSizeUnaligned = ValidSizeUnaligned::ZERO; 309 | 310 | // SAFETY: we are in `drop`, nobody will access the `ManuallyDrop` after us 311 | let elems_info = unsafe { ManuallyDrop::take(&mut self.elems_info) }; 312 | 313 | // Drop remaining elements 314 | for ElementInfo { 315 | metadata, 316 | end_offset, 317 | } in elems_info 318 | { 319 | // SAFETY: end of element can't be smaller than start 320 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 321 | let metadata = metadata.as_metadata(size_of_val); 322 | let start_of_alloc = self.ptr.as_ptr().cast::(); 323 | // SAFETY: offset is within allocation 324 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) }; 325 | let wide_ptr_to_elem: *mut T = ptr::from_raw_parts_mut(thin_ptr_to_elem, metadata); 326 | 327 | // SAFETY: align comes from the vec 328 | start_offset = unsafe { end_offset.unchecked_pad_to(self.align) }; 329 | 330 | // SAFETY: nobody will access this after us 331 | unsafe { wide_ptr_to_elem.drop_in_place() } 332 | } 333 | 334 | // Drop allocation 335 | // 336 | // SAFETY: capacity and align come from the vec. 337 | unsafe { 338 | let alloc_layout = self 339 | .byte_capacity 340 | .as_layout_with_align_unchecked(self.align); 341 | alloc::Global.deallocate(self.ptr.cast(), alloc_layout); 342 | } 343 | } 344 | } 345 | 346 | impl UnsizedVecProvider for UnalignedVecInner { 347 | type Align = ValidAlign; 348 | type Size = ValidSizeUnaligned; 349 | 350 | type Iter<'a> 351 | = UnalignedIter<'a, T> 352 | where 353 | T: 'a; 354 | type IterMut<'a> 355 | = UnalignedIterMut<'a, T> 356 | where 357 | T: 'a; 358 | 359 | const NEW_ALIGN_1: UnalignedVecInner = UnalignedVecInner { 360 | ptr: <() as Aligned>::DANGLING_THIN, 361 | byte_capacity: ValidSizeUnaligned::ZERO, 362 | elems_info: ManuallyDrop::new(::alloc::vec::Vec::new()), 363 | align: <()>::ALIGN, 364 | _marker: PhantomData, 365 | }; 366 | 367 | const NEW_ALIGN_PTR: UnalignedVecInner = UnalignedVecInner { 368 | ptr: ::DANGLING_THIN, 369 | byte_capacity: ValidSizeUnaligned::ZERO, 370 | elems_info: ManuallyDrop::new(::alloc::vec::Vec::new()), 371 | align: ::ALIGN, 372 | _marker: PhantomData, 373 | }; 374 | 375 | #[inline] 376 | fn capacity(&self) -> usize { 377 | self.elems_info.capacity() 378 | } 379 | 380 | #[inline] 381 | fn byte_capacity(&self) -> usize { 382 | self.byte_capacity.get() 383 | } 384 | 385 | #[inline] 386 | fn align(&self) -> usize { 387 | self.align.into() 388 | } 389 | 390 | #[inline] 391 | fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> { 392 | Ok(self.elems_info.try_reserve(additional)?) 393 | } 394 | 395 | #[inline] 396 | fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> { 397 | Ok(self.elems_info.try_reserve_exact(additional)?) 
398 | } 399 | 400 | fn try_reserve_additional_bytes_align( 401 | &mut self, 402 | additional_bytes: usize, 403 | align: ValidAlign, 404 | ) -> Result<(), TryReserveError> { 405 | let old_align = self.align; 406 | let new_align = cmp::max(old_align, align); 407 | 408 | let old_byte_cap = self.byte_capacity; 409 | let old_byte_len = self.aligned_byte_len(); 410 | 411 | let byte_len_of_existing_elems_realigned: ValidSizeUnaligned = if old_align < new_align { 412 | // SAFETY: just checked `old_align < new_align` 413 | unsafe { self.len_after_realign_up(new_align) } 414 | // Return early if existing elems overflow `isize` 415 | // when realigned. 416 | .ok_or(TryReserveError { 417 | kind: TryReserveErrorKind::CapacityOverflow, 418 | })? 419 | } else { 420 | if additional_bytes == 0 { 421 | return Ok(()); 422 | } 423 | 424 | old_byte_len 425 | }; 426 | 427 | let realignment_of_existing_elems_cost = 428 | // SAFETY: realignment can't make byte length shorter. 429 | unsafe { byte_len_of_existing_elems_realigned.unchecked_sub(old_byte_len) }; 430 | 431 | // Now we add on the additional size requested. 432 | let new_byte_cap = old_byte_cap 433 | .checked_add_pad(realignment_of_existing_elems_cost.get(), new_align) 434 | .and_then(|s| s.checked_add_pad(additional_bytes, new_align)) 435 | .ok_or(TryReserveError { 436 | kind: TryReserveErrorKind::CapacityOverflow, 437 | })?; 438 | 439 | if old_align < new_align || old_byte_cap < new_byte_cap { 440 | if new_byte_cap > ValidSizeUnaligned::ZERO { 441 | // SAFETY: `new_cap` checked to be legal for following call in all branches above 442 | let new_layout = unsafe { new_byte_cap.as_layout_with_align_unchecked(new_align) }; 443 | let new_ptr: NonNull<[u8]> = (if old_byte_cap == ValidSizeUnaligned::ZERO { 444 | alloc::Global.allocate(new_layout) 445 | } else { 446 | // SAFETY: `old_cap` and `old_align` come from the vec 447 | unsafe { 448 | let old_layout = old_byte_cap.as_layout_with_align_unchecked(old_align); 449 | alloc::Global.grow(self.ptr.cast(), old_layout, new_layout) 450 | } 451 | }) 452 | .map_err(|_| TryReserveError { 453 | kind: TryReserveErrorKind::AllocError { 454 | layout: new_layout, 455 | non_exhaustive: (), 456 | }, 457 | })?; 458 | 459 | self.byte_capacity = ValidSizeUnaligned::new_squished_to(new_ptr.len(), new_align); 460 | self.ptr = new_ptr.cast(); 461 | 462 | if old_align < new_align { 463 | if self.len() > 1 { 464 | // SAFETY: Just performed necessary allocation, if guard. 465 | // Overflow covered by earlier checks. 
466 | unsafe { self.realign_up(new_align) }; 467 | } 468 | 469 | self.align = new_align; 470 | } 471 | } else { 472 | self.ptr = new_align.dangling_thin(); 473 | } 474 | } 475 | Ok(()) 476 | } 477 | 478 | fn shrink_capacity_bytes_align_to( 479 | &mut self, 480 | min_capacity: usize, 481 | min_byte_capacity: usize, 482 | min_align: ValidAlign, 483 | ) { 484 | self.elems_info.shrink_to(min_capacity); 485 | 486 | let old_align = self.align; 487 | let new_align = cmp::max(cmp::min(min_align, old_align), self.max_align_of_elems()); 488 | 489 | debug_assert!(new_align <= old_align); 490 | 491 | let need_to_realign_elems = new_align < old_align && self.len() > 1; 492 | if need_to_realign_elems { 493 | // SAFETY: checked len, new vs old, max_align_of_elems above 494 | unsafe { self.realign_down(new_align) } 495 | } 496 | 497 | // SAFETY: Can't overflow, otherwise old offsets would be invalids 498 | let new_aligned_byte_len = unsafe { self.unaligned_byte_len().unchecked_pad_to(new_align) }; 499 | 500 | let old_byte_capacity = self.byte_capacity; 501 | let new_byte_capacity = cmp::max( 502 | // SAFETY: `old_byte_capacity` is a valid 503 | // `ValidSizeUnaligned`, and result of `cmp::min` 504 | // can't be bigger than it 505 | unsafe { 506 | ValidSizeUnaligned::new_unchecked(cmp::min( 507 | min_byte_capacity, 508 | old_byte_capacity.get(), 509 | )) 510 | }, 511 | new_aligned_byte_len, 512 | ); 513 | 514 | debug_assert!(new_byte_capacity <= old_byte_capacity); 515 | 516 | if new_byte_capacity < old_byte_capacity || new_align < old_align { 517 | // SAFETY: cap and align are valid as they come from the vec 518 | let old_layout = unsafe { old_byte_capacity.as_layout_with_align_unchecked(old_align) }; 519 | 520 | if new_byte_capacity > ValidSizeUnaligned::ZERO { 521 | let new_layout = 522 | // SAFETY: cap and align are <= old (valid) cap and align 523 | unsafe { new_byte_capacity.as_layout_with_align_unchecked(new_align) }; 524 | 525 | // `shrink` can unwind, in which case we need to make sure 526 | // we realign everything back to how it was. 527 | 528 | struct Realigner<'a, T: ?Sized> { 529 | vec: &'a mut UnalignedVecInner, 530 | new_align: ValidAlign, 531 | } 532 | 533 | impl Drop for Realigner<'_, T> { 534 | #[inline] 535 | fn drop(&mut self) { 536 | let old_align = self.vec.align; 537 | self.vec.align = self.new_align; 538 | 539 | // SAFETY: old_align > new_align, checked self.len(), 540 | // adjusted `self.align` 541 | unsafe { self.vec.realign_up(old_align) } 542 | 543 | self.vec.align = old_align; 544 | } 545 | } 546 | 547 | let alloc_ptr = self.ptr.cast(); 548 | 549 | // https://github.com/rust-lang/rust-clippy/issues/9427 550 | #[allow(clippy::unnecessary_lazy_evaluations)] 551 | let realigner = need_to_realign_elems.then(|| Realigner { 552 | vec: self, 553 | new_align, 554 | }); 555 | 556 | let shrink_result = 557 | // SAFETY: cap and align are <= old (valid) cap and align. 558 | // old layout and ptr come from the vec. 
559 |                 unsafe { alloc::Global.shrink(alloc_ptr, old_layout, new_layout) };
560 |                 let Ok(new_ptr) = shrink_result else {
561 |                     // `realigner` will be dropped, restoring offsets
562 |                     return;
563 |                 };
564 | 
565 |                 mem::forget(realigner);
566 | 
567 |                 self.byte_capacity = ValidSizeUnaligned::new_squished_to(new_ptr.len(), new_align);
568 |                 self.ptr = new_ptr.cast();
569 |             } else {
570 |                 if old_byte_capacity > ValidSizeUnaligned::ZERO {
571 |                     // SAFETY: `old_layout` components come from the vec
572 |                     unsafe { alloc::Global.deallocate(self.ptr.cast(), old_layout) }
573 | 
574 |                     self.byte_capacity = ValidSizeUnaligned::ZERO;
575 |                 }
576 | 
577 |                 self.ptr = new_align.dangling_thin();
578 |             }
579 | 
580 |             self.align = new_align;
581 |         }
582 |     }
583 | 
584 |     unsafe fn insert_unchecked(
585 |         &mut self,
586 |         index: usize,
587 |         element: T,
588 |         unaligned_size_of_val: ValidSizeUnaligned,
589 |     ) {
590 |         debug_assert!(index <= self.len());
591 |         debug_assert!(self.capacity() > self.len());
592 | 
593 |         // SAFETY: preconditions of function
594 |         let aligned_size_of_val = unsafe { unaligned_size_of_val.unchecked_pad_to(self.align) };
595 | 
596 |         debug_assert!(
597 |             self.byte_capacity() >= (self.aligned_byte_len().get() + aligned_size_of_val.get())
598 |         );
599 |         debug_assert!(self.align() >= mem::align_of_val(&element));
600 | 
601 |         let metadata =
602 |             <T as SplitMetadata>::Remainder::from_metadata(core::ptr::metadata(&element));
603 | 
604 |         // SAFETY: preconditions of function
605 |         unsafe {
606 |             let start_offset = self.start_offset_of_unchecked(index);
607 |             let how_much_to_move = self.unaligned_byte_len().unchecked_sub(start_offset);
608 | 
609 |             let start_ptr = self.ptr.cast::<u8>().as_ptr().add(start_offset.get());
610 | 
611 |             ptr::copy(
612 |                 start_ptr,
613 |                 start_ptr.add(aligned_size_of_val.get()),
614 |                 how_much_to_move.get(),
615 |             );
616 | 
617 |             ptr::copy_nonoverlapping(
618 |                 addr_of!(element).cast(),
619 |                 start_ptr,
620 |                 unaligned_size_of_val.get(),
621 |             );
622 | 
623 |             for ElementInfo { end_offset, .. } in self.elems_info.get_unchecked_mut(index..) {
624 |                 *end_offset = end_offset.unchecked_add(aligned_size_of_val);
625 |             }
626 | 
627 |             self.elems_info.insert_unchecked(
628 |                 index,
629 |                 ElementInfo {
630 |                     metadata,
631 |                     end_offset: start_offset.unchecked_add(unaligned_size_of_val),
632 |                 },
633 |                 (),
634 |             );
635 |         }
636 | 
637 |         mem::forget_unsized(element);
638 |     }
639 | 
640 |     unsafe fn insert_with_unchecked(
641 |         &mut self,
642 |         index: usize,
643 |         value: Emplacable<T, impl EmplacableFn<T>>,
644 |     ) {
645 |         /// Helper to ensure elements are moved back
646 |         /// where they belong in case `inner_closure`
647 |         /// panics.
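// Editor's note on `insert_unchecked` above: two sizes are deliberately in
// play. Elements are shifted by the *padded* size (`aligned_size_of_val`) so
// that every start offset stays a multiple of `self.align`, but the recorded
// `end_offset` adds only the *unpadded* size; the padding is recomputed from
// `self.align` on access. E.g. a 5-byte value in a vec with align 4 occupies
// 8 bytes of storage, yet its `end_offset` is `start + 5`.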
648 | struct ElementShifterBacker { 649 | ptr_to_index: *mut u8, 650 | num_bytes_to_shift: ValidSizeUnaligned, 651 | shift_by_bytes: ValidSizeUnaligned, 652 | } 653 | 654 | impl Drop for ElementShifterBacker { 655 | #[inline] 656 | fn drop(&mut self) { 657 | // SAFETY: shifting elements back in case of drop 658 | unsafe { 659 | ptr::copy( 660 | self.ptr_to_index.add(self.shift_by_bytes.get()), 661 | self.ptr_to_index, 662 | self.num_bytes_to_shift.get(), 663 | ); 664 | } 665 | } 666 | } 667 | 668 | debug_assert!(index <= self.len()); 669 | 670 | let emplacable_closure = value.into_fn(); 671 | 672 | let emplacer_closure = 673 | &mut |layout, metadata, inner_closure: &mut dyn FnMut(*mut PhantomData)| { 674 | let (unaligned_size_of_val, align_of_val) = decompose(layout); 675 | 676 | let reserve_result = self.try_reserve(1).and_then(|()| { 677 | self.try_reserve_additional_bytes_align( 678 | unaligned_size_of_val.get(), 679 | align_of_val, 680 | ) 681 | }); 682 | unwrap_try_reserve_result(reserve_result); 683 | 684 | let aligned_size_of_val = 685 | // SAFETY: `try_reserve` would have failed if this could fail 686 | unsafe { unaligned_size_of_val.unchecked_pad_to(self.align) }; 687 | 688 | // SAFETY: precondition of function 689 | let start_offset = unsafe { self.start_offset_of_unchecked(index) }; 690 | 691 | // SAFETY: getting pointer to element 692 | let ptr_to_elem = unsafe { self.ptr.cast::().as_ptr().add(start_offset.get()) }; 693 | 694 | let unaligned_len = self.unaligned_byte_len(); 695 | 696 | // SAFETY: by precondition of function 697 | let num_bytes_to_shift = unsafe { unaligned_len.unchecked_sub(start_offset) }; 698 | 699 | let shifter_backer = ElementShifterBacker { 700 | ptr_to_index: ptr_to_elem, 701 | num_bytes_to_shift, 702 | shift_by_bytes: aligned_size_of_val, 703 | }; 704 | 705 | // SAFETY: copying elements right to make room 706 | unsafe { 707 | ptr::copy( 708 | ptr_to_elem, 709 | ptr_to_elem.add(aligned_size_of_val.get()), 710 | num_bytes_to_shift.get(), 711 | ); 712 | } 713 | 714 | // If this unwinds, `shifter_backer` will be dropped 715 | // and the elements will be moved back where they belong. 716 | inner_closure(ptr_to_elem.cast()); 717 | 718 | // `inner_closure` succeeded, so don't want to move elements back now! 719 | mem::forget(shifter_backer); 720 | 721 | // SAFETY: by precondition of function 722 | let elems_to_move_back = unsafe { self.elems_info.get_unchecked_mut(index..) }; 723 | 724 | for ElementInfo { end_offset, .. } in elems_to_move_back { 725 | // SAFETY: make the offsets correct again 726 | *end_offset = unsafe { end_offset.unchecked_add(aligned_size_of_val) }; 727 | } 728 | 729 | // SAFETY: reserved memory earlier 730 | unsafe { 731 | self.elems_info.insert_unchecked( 732 | index, 733 | ElementInfo { 734 | metadata: ::Remainder::from_metadata(metadata), 735 | end_offset: start_offset.unchecked_add(unaligned_size_of_val), 736 | }, 737 | (), 738 | ); 739 | } 740 | }; 741 | 742 | // SAFETY: `emplacer_closure` runs the closure with a valid pointer to `index` 743 | let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) }; 744 | 745 | emplacable_closure(emplacer); 746 | } 747 | 748 | unsafe fn remove_into_unchecked(&mut self, index: usize, emplacer: &mut Emplacer<'_, T>) { 749 | debug_assert!(index < self.len()); 750 | 751 | // We can't remove the metadata yet, as `emplacer_closure` might unwind, 752 | // so we can't leave vec metadata in an invalid state. 
753 | // SAFETY: by precondition of function 754 | let removed_elem_metadata = unsafe { self.elems_info.get_unchecked(index) }; 755 | 756 | let ElementInfo { 757 | metadata, 758 | end_offset, 759 | } = removed_elem_metadata; 760 | 761 | // SAFETY: precondition of function 762 | let start_offset = unsafe { self.start_offset_of_unchecked(index) }; 763 | 764 | // SAFETY: start_offset < end_offset 765 | let unaligned_size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 766 | 767 | // SAFETY: `val` comes from the vec so must be paddable 768 | let aligned_size_of_val = unsafe { unaligned_size_of_val.unchecked_pad_to(self.align) }; 769 | 770 | let metadata = metadata.as_metadata(unaligned_size_of_val); 771 | 772 | // Get pointer to the element we are popping out of the vec 773 | // SAFETY: offset comes from vec 774 | let ptr_to_elem = unsafe { 775 | self.ptr 776 | .as_ptr() 777 | .cast_const() 778 | .cast::() 779 | .add(start_offset.get()) 780 | }; 781 | 782 | let wide_ptr_to_elem: *const T = ptr::from_raw_parts(ptr_to_elem, metadata); 783 | 784 | // SAFETY: the element is still initialized at this point 785 | let align_of_val = ValidAlign::of_val(unsafe { &*wide_ptr_to_elem }); 786 | 787 | // Copy element into the place 788 | 789 | // SAFETY: we call the closure right after we unwrap it 790 | let emplacer_closure = unsafe { emplacer.into_fn() }; 791 | 792 | emplacer_closure( 793 | // SAFETY: `size_of_val` comes from the vec 794 | unsafe { unaligned_size_of_val.as_layout_with_align_unchecked(align_of_val) }, 795 | metadata, 796 | &mut |out_ptr| { 797 | if !out_ptr.is_null() { 798 | // SAFETY: we are allowed to copy `size_of_val` bytes into `out_ptr`, 799 | // by the preconditions of `Emplacer::new` 800 | unsafe { 801 | ptr::copy_nonoverlapping( 802 | ptr_to_elem, 803 | out_ptr.cast::(), 804 | unaligned_size_of_val.get(), 805 | ); 806 | } 807 | } else { 808 | // SAFETY: we adjust vec metadata right after, so this won't be double-dropped 809 | unsafe { wide_ptr_to_elem.cast_mut().drop_in_place() } 810 | } 811 | }, 812 | ); 813 | 814 | // Now that `emplacer_closure` has run successfuly, we don't need to worry 815 | // about exception safety anymore. 816 | // FIXME elide bounds check 817 | self.elems_info.remove(index); 818 | 819 | for ElementInfo { end_offset, .. } in 820 | // SAFETY: `index` in range by preconditions of function. 821 | unsafe { self.elems_info.get_unchecked_mut(index..) 
}
822 | {
823 | // SAFETY: `end_offset >= size_of_val` for elements following something
824 | // of size `size_of_val`
825 | unsafe {
826 | *end_offset = end_offset.unchecked_sub(aligned_size_of_val);
827 | }
828 | }
829 |
830 | let unaligned_len = self.unaligned_byte_len();
831 |
832 | // SAFETY: new end of vec can't be to the left of old start of elem at `index`
833 | let how_much_to_move = unsafe { unaligned_len.unchecked_sub(start_offset) };
834 |
835 | // SAFETY: copying elements back where they belong
836 | unsafe {
837 | ptr::copy(
838 | ptr_to_elem.add(aligned_size_of_val.get()),
839 | ptr_to_elem.cast_mut(),
840 | how_much_to_move.get(),
841 | );
842 | }
843 | }
844 |
845 | #[inline]
846 | unsafe fn push_unchecked(&mut self, value: T, size_of_val: ValidSizeUnaligned) {
847 | debug_assert!(self.capacity() - self.len() > 0);
848 |
849 | debug_assert!(self.byte_capacity() >= (self.aligned_byte_len().get() + size_of_val.get()));
850 | debug_assert!(self.align() >= mem::align_of_val(&value));
851 |
852 | let metadata = ::Remainder::from_metadata(core::ptr::metadata(&value));
853 | let start_offset = self.aligned_byte_len();
854 |
855 | // SAFETY: preconditions of function
856 | unsafe {
857 | ptr::copy_nonoverlapping(
858 | addr_of!(value).cast(),
859 | self.ptr.as_ptr().cast::<u8>().add(start_offset.get()),
860 | size_of_val.get(),
861 | );
862 |
863 | self.elems_info.push_unchecked(
864 | ElementInfo {
865 | metadata,
866 | end_offset: start_offset.unchecked_add(size_of_val),
867 | },
868 | (),
869 | );
870 | }
871 |
872 | mem::forget_unsized(value);
873 | }
874 |
875 | fn push_with(&mut self, value: Emplacable>) {
876 | let emplacable_closure = value.into_fn();
877 |
878 | let emplacer_closure =
879 | &mut |layout: Layout, metadata, inner_closure: &mut dyn FnMut(*mut PhantomData)| {
880 | let (size_of_val, align_of_val) = decompose(layout);
881 |
882 | let reserve_result = self.try_reserve(1).and_then(|()| {
883 | self.try_reserve_additional_bytes_align(layout.size(), align_of_val)
884 | });
885 | unwrap_try_reserve_result(reserve_result);
886 |
887 | let start_offset = self.aligned_byte_len();
888 |
889 | // SAFETY: getting pointer to end of allocation
890 | let ptr_to_elem = unsafe { self.ptr.cast::<u8>().as_ptr().add(start_offset.get()) };
891 |
892 | inner_closure(ptr_to_elem.cast());
893 |
894 | let elem_info: ElementInfo = ElementInfo {
895 | metadata: ::Remainder::from_metadata(metadata),
896 | // SAFETY: neither operand can overflow `isize`, so sum
897 | // can't overflow `usize`
898 | end_offset: unsafe { start_offset.unchecked_add(size_of_val) },
899 | };
900 |
901 | // SAFETY: `emplacable` wrote new element at end of vec,
902 | // and we have reserved the needed space
903 | unsafe { self.elems_info.push_unchecked(elem_info, ()) };
904 | };
905 |
906 | // SAFETY: `emplacer_closure` runs the closure with a valid pointer to the end of the vec
907 | let emplacer = unsafe { Emplacer::from_fn(emplacer_closure) };
908 |
909 | emplacable_closure(emplacer);
910 | }
911 |
912 | #[inline]
913 | unsafe fn pop_into_unchecked(&mut self, emplacer: &mut Emplacer<'_, T>) {
914 | debug_assert!(!self.elems_info.is_empty());
915 |
916 | // SAFETY: precondition of function
917 | let last_elem_metadata = unsafe { self.elems_info.pop().unwrap_unchecked() };
918 |
919 | let ElementInfo {
920 | metadata,
921 | end_offset,
922 | } = last_elem_metadata;
923 |
924 | let start_offset = self.aligned_byte_len();
925 |
926 | // SAFETY: start_offset < end_offset
927 | let size_of_val = unsafe {
end_offset.unchecked_sub(start_offset) }; 928 | 929 | let metadata = metadata.as_metadata(size_of_val); 930 | 931 | // Get pointer to the element we are popping out of the vec 932 | // SAFETY: offset comes from vec 933 | let ptr_to_elem = unsafe { 934 | self.ptr 935 | .as_ptr() 936 | .cast_const() 937 | .cast::() 938 | .add(start_offset.get()) 939 | }; 940 | 941 | let wide_ptr_to_elem: *const T = ptr::from_raw_parts(ptr_to_elem, metadata); 942 | 943 | // SAFETY: the element is still initialized at this point 944 | let align_of_val = ValidAlign::of_val(unsafe { &*wide_ptr_to_elem }); 945 | 946 | // Copy element into the place 947 | 948 | // SAFETY: we call the closure right after we unwrap it 949 | let emplace_closure = unsafe { emplacer.into_fn() }; 950 | 951 | emplace_closure( 952 | // SAFETY: `size_of_val` comes from the vec 953 | unsafe { size_of_val.as_layout_with_align_unchecked(align_of_val) }, 954 | metadata, 955 | &mut |out_ptr| { 956 | if !out_ptr.is_null() { 957 | // SAFETY: we are allowed to copy `size_of_val` bytes into `out_ptr`, 958 | // by the preconditions of `Emplacer::new` 959 | unsafe { 960 | ptr::copy_nonoverlapping( 961 | ptr_to_elem, 962 | out_ptr.cast::(), 963 | size_of_val.get(), 964 | ); 965 | } 966 | } else { 967 | // SAFETY: we adjusted vec metadata earlier, so this won't be double-dropped 968 | unsafe { wide_ptr_to_elem.cast_mut().drop_in_place() } 969 | } 970 | }, 971 | ); 972 | } 973 | 974 | #[inline] 975 | fn len(&self) -> usize { 976 | self.elems_info.len() 977 | } 978 | 979 | #[inline] 980 | fn byte_len(&self) -> usize { 981 | self.aligned_byte_len().get() 982 | } 983 | 984 | #[inline] 985 | unsafe fn get_unchecked_raw(&self, index: usize) -> NonNull { 986 | debug_assert!(index < self.len()); 987 | 988 | // SAFETY: see individual comments inside block 989 | unsafe { 990 | // SAFETY: precondition of method 991 | let start_offset = self.start_offset_of_unchecked(index); 992 | let &ElementInfo { 993 | end_offset, 994 | metadata, 995 | } = self.elems_info.get_unchecked(index); 996 | 997 | // SAFETY: end >= start 998 | let size_of_val = end_offset.unchecked_sub(start_offset); 999 | let metadata = metadata.as_metadata(size_of_val); 1000 | 1001 | // SAFETY: `start_offset` in range of allocation 1002 | NonNull::from_raw_parts( 1003 | NonNull::new_unchecked(self.ptr.as_ptr().cast::().add(start_offset.get())), 1004 | metadata, 1005 | ) 1006 | } 1007 | } 1008 | 1009 | #[inline] 1010 | fn iter(&self) -> Self::Iter<'_> { 1011 | UnalignedIter { 1012 | elems_info: self.elems_info.iter(), 1013 | ptr: self.ptr, 1014 | start_offset: ValidSizeUnaligned::ZERO, 1015 | align: self.align, 1016 | } 1017 | } 1018 | 1019 | #[inline] 1020 | fn iter_mut(&mut self) -> Self::IterMut<'_> { 1021 | UnalignedIterMut { 1022 | elems_info: self.elems_info.iter(), 1023 | ptr: self.ptr, 1024 | start_offset: ValidSizeUnaligned::ZERO, 1025 | align: self.align, 1026 | } 1027 | } 1028 | 1029 | #[inline] 1030 | fn from_sized(vec: ::alloc::vec::Vec) -> Self 1031 | where 1032 | S: Unsize, 1033 | { 1034 | let mut vec = ManuallyDrop::new(vec); 1035 | let len_elems = vec.len(); 1036 | let cap_elems = vec.capacity(); 1037 | let heap_ptr = vec.as_mut_ptr(); 1038 | let heap_ptr_unsized: *mut T = heap_ptr; 1039 | let metadata = 1040 | ::Remainder::from_metadata(ptr::metadata(heap_ptr_unsized)); 1041 | // SAFETY: ptr comes from vec, can't be null 1042 | let heap_ptr_thin: NonNull<()> = unsafe { NonNull::new_unchecked(heap_ptr_unsized.cast()) }; 1043 | 1044 | // SAFETY: can't overflow, as otherwise allocation 
would be overflowing 1045 | let byte_capacity = unsafe { cap_elems.unchecked_mul(mem::size_of::()) }; 1046 | 1047 | // SAFETY: same as above 1048 | let byte_capacity = unsafe { ValidSizeUnaligned::new_unchecked(byte_capacity) }; 1049 | 1050 | let elems_info = (0..len_elems) 1051 | .map(|index| ElementInfo { 1052 | metadata, 1053 | // SAFETY: can't overflow, as otherwise allocation would be overflowing 1054 | end_offset: unsafe { 1055 | ValidSizeUnaligned::new_unchecked(index.unchecked_mul(mem::size_of::())) 1056 | }, 1057 | }) 1058 | .collect(); 1059 | 1060 | let elems_info = ManuallyDrop::new(elems_info); 1061 | 1062 | Self { 1063 | ptr: heap_ptr_thin, 1064 | byte_capacity, 1065 | elems_info, 1066 | align: S::ALIGN, 1067 | _marker: PhantomData, 1068 | } 1069 | } 1070 | } 1071 | 1072 | impl UnsizedVecImpl for T { 1073 | default type Impl = UnalignedVecInner; 1074 | } 1075 | 1076 | macro_rules! iter_ref { 1077 | ($iter_ty:ident, $from_raw_parts:ident $($muta:ident)?) => { 1078 | pub(in super::super) struct $iter_ty<'a, T: ?Sized> { 1079 | elems_info: core::slice::Iter<'a, ElementInfo>, 1080 | ptr: NonNull<()>, 1081 | start_offset: ValidSizeUnaligned, 1082 | align: ValidAlign, 1083 | } 1084 | 1085 | impl<'a, T: ?Sized + 'a> Iterator for $iter_ty<'a, T> { 1086 | type Item = &'a $($muta)? T; 1087 | 1088 | #[inline] 1089 | fn next(&mut self) -> Option { 1090 | let ElementInfo { 1091 | metadata, 1092 | end_offset, 1093 | } = *self.elems_info.next()?; 1094 | 1095 | // SAFETY: end of element can't be smaller than start 1096 | let size_of_val = unsafe { end_offset.unchecked_sub(self.start_offset) }; 1097 | let metadata = metadata.as_metadata(size_of_val); 1098 | 1099 | let start_of_alloc = self.ptr.as_ptr().cast::(); 1100 | // SAFETY: offset is within allocation 1101 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(self.start_offset.get()) }; 1102 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 1103 | 1104 | // SAFETY: pointer to element of vec 1105 | let wide_ref = unsafe { & $($muta)? *wide_ptr }; 1106 | 1107 | // SAFETY: align comes from the vec 1108 | self.start_offset = unsafe { end_offset.unchecked_pad_to(self.align) }; 1109 | 1110 | Some(wide_ref) 1111 | } 1112 | 1113 | #[inline] 1114 | fn size_hint(&self) -> (usize, Option) { 1115 | self.elems_info.size_hint() 1116 | } 1117 | 1118 | #[inline] 1119 | fn count(self) -> usize 1120 | where 1121 | Self: Sized, 1122 | { 1123 | self.elems_info.count() 1124 | } 1125 | 1126 | #[inline] 1127 | fn nth(&mut self, n: usize) -> Option { 1128 | let start_offset = n 1129 | .checked_sub(1) 1130 | .and_then(|n| self.elems_info.nth(n)) 1131 | .copied() 1132 | // SAFETY: offset comes from the vec 1133 | .map_or(ValidSizeUnaligned::ZERO, |e_i| unsafe { 1134 | e_i.end_offset.unchecked_pad_to(self.align) 1135 | }); 1136 | 1137 | let ElementInfo { 1138 | metadata, 1139 | end_offset, 1140 | } = *self.elems_info.next()?; 1141 | 1142 | // SAFETY: end of element can't be smaller than start` 1143 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 1144 | let metadata = metadata.as_metadata(size_of_val); 1145 | 1146 | let start_of_alloc = self.ptr.as_ptr().cast::(); 1147 | // SAFETY: offset is within allocation 1148 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) }; 1149 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 1150 | 1151 | // SAFETY: pointer to element of vec 1152 | let wide_ref = unsafe { & $($muta)? 
*wide_ptr }; 1153 | 1154 | // SAFETY: offset comes from the vec 1155 | self.start_offset = unsafe { end_offset.unchecked_pad_to(self.align) }; 1156 | 1157 | Some(wide_ref) 1158 | } 1159 | 1160 | #[inline] 1161 | fn last(mut self) -> Option 1162 | where 1163 | Self: Sized, 1164 | { 1165 | self.nth(self.elems_info.len().checked_sub(1)?) 1166 | } 1167 | } 1168 | 1169 | impl<'a, T: ?Sized + 'a> DoubleEndedIterator for $iter_ty<'a, T> { 1170 | #[inline] 1171 | fn next_back(&mut self) -> Option { 1172 | let ElementInfo { 1173 | metadata, 1174 | end_offset, 1175 | } = *self.elems_info.next_back()?; 1176 | 1177 | let start_offset = self 1178 | .elems_info 1179 | .as_slice() 1180 | .last() 1181 | // SAFETY: offset comes from the vec 1182 | .map_or(ValidSizeUnaligned::ZERO, |e_i| unsafe { 1183 | e_i.end_offset.unchecked_pad_to(self.align) 1184 | }); 1185 | 1186 | // SAFETY: end of element can't be smaller than start 1187 | let size_of_val = unsafe { end_offset.unchecked_sub(start_offset) }; 1188 | let metadata = metadata.as_metadata(size_of_val); 1189 | 1190 | let start_of_alloc = self.ptr.as_ptr().cast::(); 1191 | // SAFETY: offset is within allocation 1192 | let thin_ptr_to_elem = unsafe { start_of_alloc.add(start_offset.get()) }; 1193 | let wide_ptr = ptr::$from_raw_parts(thin_ptr_to_elem, metadata); 1194 | 1195 | // SAFETY: pointer to element of vec 1196 | let wide_ref = unsafe { & $($muta)? *wide_ptr }; 1197 | 1198 | Some(wide_ref) 1199 | } 1200 | } 1201 | 1202 | impl<'a, T: ?Sized + 'a> ExactSizeIterator for $iter_ty<'a, T> { 1203 | #[inline] 1204 | fn len(&self) -> usize { 1205 | self.elems_info.len() 1206 | } 1207 | } 1208 | 1209 | impl<'a, T: ?Sized + 'a> FusedIterator for $iter_ty<'a, T> {} 1210 | }; 1211 | } 1212 | 1213 | iter_ref!(UnalignedIter, from_raw_parts); 1214 | iter_ref!(UnalignedIterMut, from_raw_parts_mut mut); 1215 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! [`UnsizedVec`], like [`Vec`][alloc::vec::Vec], is a contiguous growable array 2 | //! type with heap-allocated contents. Unlike [`Vec`], it can store unsized values. 3 | //! 4 | //! Vectors have *O*(1) indexing, amortized *O*(1) push (to the end) and 5 | //! *O*(1) pop (from the end). When `T` is [`Sized`], they use one heap 6 | //! allocation; when it is not, they use two. 7 | //! 8 | //! This crate is nightly-only and experimental. 9 | //! 10 | //! # Examples 11 | //! 12 | //! You can explicitly create an [`UnsizedVec`] with [`UnsizedVec::new`]: 13 | //! 14 | //! ``` 15 | //! # use unsized_vec::UnsizedVec; 16 | //! let v: UnsizedVec = UnsizedVec::new(); 17 | //! ``` 18 | //! 19 | //! ...or by using the [`unsize_vec!`] macro: 20 | //! 21 | //! ``` 22 | //! # use core::fmt::Debug; 23 | //! # use unsized_vec::{unsize_vec, UnsizedVec}; 24 | //! let v: UnsizedVec<[u32]> = unsize_vec![]; 25 | //! 26 | //! let v: UnsizedVec = unsize_vec![1_u32, "hello!", 3.0_f64, (), -17_i32]; 27 | //! ``` 28 | //! 29 | //! You can [`push`] or [`push_unsize`] values onto the end of a vector (which will grow the vector 30 | //! as needed): 31 | //! 32 | //! ``` 33 | //! # use core::fmt::Debug; 34 | //! # use unsized_vec::{unsize_vec, UnsizedVec}; 35 | //! let mut v: UnsizedVec = unsize_vec![1_u32, "hello!", 3.0_f64, (), -17_i32]; 36 | //! 37 | //! v.push_unsize(3); 38 | //! ``` 39 | //! 40 | //! Popping values works in much the same way: 41 | //! 42 | //! ``` 43 | //! # use core::fmt::Debug; 44 | //! 
# use emplacable::box_new_with; 45 | //! # use unsized_vec::{unsize_vec, UnsizedVec}; 46 | //! let mut v: UnsizedVec = unsize_vec![1_u32, "hello!"]; 47 | //! 48 | //! // "hello!" is copied directly into a new heap allocation 49 | //! let two: Option> = v.pop_into().map(box_new_with); 50 | //! ``` 51 | //! 52 | //! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): 53 | //! 54 | //! ``` 55 | //! # use core::fmt::Debug; 56 | //! # use unsized_vec::{unsize_vec, UnsizedVec}; 57 | //! let mut v: UnsizedVec = unsize_vec![1_u32, "hello!", [(); 0]]; 58 | //! let greeting = &v[1]; 59 | //! dbg!(greeting); 60 | //! ``` 61 | //! [`Vec`]: alloc::vec::Vec 62 | //! [`push`]: UnsizedVec::push 63 | //! [`push_unsize`]: UnsizedVec::push_unsize 64 | 65 | #![allow( 66 | incomplete_features, // For `specialization` 67 | internal_features, // for `unsized_fn_params` 68 | )] 69 | #![feature( 70 | allocator_api, 71 | array_windows, 72 | forget_unsized, 73 | int_roundings, 74 | ptr_metadata, 75 | // We avoid specializing based on subtyping, 76 | // so barring compiler bugs, our usage should be sound. 77 | specialization, 78 | try_reserve_kind, 79 | type_alias_impl_trait, 80 | unsize, 81 | unsized_fn_params, 82 | )] 83 | #![no_std] 84 | 85 | mod helper; 86 | mod inner; 87 | mod marker; 88 | 89 | #[doc(hidden)] 90 | pub extern crate alloc; 91 | use alloc::{alloc::handle_alloc_error, collections::TryReserveErrorKind}; 92 | use core::{ 93 | self, cmp, 94 | fmt::{self, Debug, Formatter}, 95 | hash::Hash, 96 | iter::FusedIterator, 97 | marker::Unsize, 98 | mem, 99 | ops::{Index, IndexMut}, 100 | }; 101 | use emplacable::{Emplacable, EmplacableFn, Emplacer, unsize}; 102 | 103 | use inner::{Align, Size, UnsizedVecImpl, UnsizedVecProvider}; 104 | 105 | /// The error type for `try_reserve` methods. 106 | #[derive(Clone, Debug, PartialEq, Eq)] 107 | pub struct TryReserveError { 108 | kind: TryReserveErrorKind, 109 | } 110 | 111 | #[track_caller] 112 | #[inline] 113 | fn to_align(align: usize) -> AlignTypeFor { 114 | #[cold] 115 | #[inline(never)] 116 | fn invalid_align(align: usize) -> ! { 117 | panic!("align {align} is not a power of 2") 118 | } 119 | 120 | let Some(ret) = AlignTypeFor::::new(align) else { 121 | invalid_align(align) 122 | }; 123 | 124 | ret 125 | } 126 | 127 | #[track_caller] 128 | #[inline] 129 | fn unwrap_try_reserve_result(result: Result) -> T { 130 | #[cold] 131 | #[inline(never)] 132 | fn handle_err(e: TryReserveError) -> ! { 133 | match e.kind { 134 | TryReserveErrorKind::CapacityOverflow => panic!("Capacity overflowed `isize::MAX`"), 135 | TryReserveErrorKind::AllocError { layout, .. } => handle_alloc_error(layout), 136 | } 137 | } 138 | 139 | match result { 140 | Ok(val) => val, 141 | Err(e) => handle_err(e), 142 | } 143 | } 144 | 145 | impl From<::alloc::collections::TryReserveError> for TryReserveError { 146 | fn from(value: ::alloc::collections::TryReserveError) -> Self { 147 | TryReserveError { kind: value.kind() } 148 | } 149 | } 150 | 151 | type AlignTypeFor = <::Impl as UnsizedVecProvider>::Align; 152 | type SizeTypeFor = <::Impl as UnsizedVecProvider>::Size; 153 | 154 | /// Like [`Vec`][0], but can store unsized values. 155 | /// 156 | /// # Memory layout 157 | /// 158 | /// `UnsizedVec` is actually three different types rolled in to one; 159 | /// specialization is used to choose the optimal implementation based on the properties 160 | /// of `T`. 161 | /// 162 | /// 1. 
When `T` is a [`Sized`] type, `UnsizedVec` is a newtype around [`Vec`][0],
163 | /// with exactly the same memory layout.
164 | ///
165 | /// 2. When `T` is a slice, there are two heap allocations.
166 | /// The first holds the slices themselves; they are laid out end-to-end, one after the other,
167 | /// with no padding in between. The second heap allocation holds a list of offsets, to store
168 | /// where each element begins and ends.
169 | ///
170 | /// 3. When `T` is neither of the above, there are still two allocations.
171 | /// The first allocation still contains the elements of the vector laid out end-to-end,
172 | /// but now every element is padded to at least the alignment of the most-aligned element
173 | /// in the `UnsizedVec`. For this reason, adding a new element to the vec with a larger alignment
174 | /// than any of the elements already in it will add new padding to all the existing elements,
175 | /// which will involve a lot of copying and probably a reallocation. By default, [`UnsizedVec::new`]
176 | /// sets the alignment to [`core::mem::align_of::<usize>()`], so as long as none of your trait objects
177 | /// are aligned to more than that, you won't have to worry about re-padding.
178 | /// For this last case, the second allocation, in addition to storing offsets, also stores the pointer
179 | /// metadata of each element.
180 | ///
181 | /// ## Managing capacity
182 | ///
183 | /// [`Vec`][0] has only one kind of capacity to worry about: elementwise capacity. And so does
184 | /// `UnsizedVec`, as long as `T: Sized`. You can use functions like [`capacity`], [`with_capacity`]
185 | /// and [`reserve`] to manage this capacity.
186 | ///
187 | /// When `T` is a slice, there are two kinds of capacity: element capacity and byte capacity.
188 | /// Adding new elements to the vec is guaranteed not to reallocate as long as
189 | /// the number of elements doesn't exceed the element capacity *and* the total size of all
190 | /// the elements in bytes doesn't exceed the byte capacity. You can use functions like
191 | /// [`byte_capacity`], [`with_capacity_bytes`], and [`reserve_capacity_bytes`] to manage
192 | /// these two capacities.
193 | ///
194 | /// When `T` is a trait object, there is a third kind of capacity: alignment. To avoid
195 | /// reallocation when adding a new element to the vec, you need to ensure that you have
196 | /// sufficient element and byte capacity, and that the vec's alignment is not less than the
197 | /// alignment of the new element. Functions like [`align`], [`with_capacity_bytes_align`], and
198 | /// [`reserve_capacity_bytes_align`] can be used to manage all three capacities in this case (a short sketch follows at the end of this section).
199 | ///
200 | /// # Limitations
201 | ///
202 | /// - `UnsizedVec` is invariant with respect to `T`; ideally, it should be covariant.
203 | /// This is because Rust forces invariance on all structs that contain associated types
204 | /// referencing `T`. Hopefully, future language features will allow lifting this limitation.
205 | /// - Rust functions can't directly return unsized types. So this crate's functions return
206 | /// them indirectly, through the "emplacer" mechanism defined in the [`emplacable`] crate.
207 | /// See that crate's documentation for details, and the documentation of [`pop_into`] and
208 | /// [`remove_into`] for usage examples.
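///
/// The sketch below illustrates the three-capacity model described above;
/// the concrete numbers are illustrative assumptions, and anything reserved
/// beyond the requested minimums is an implementation detail:
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::new();
/// // Reserve room for 1 element of up to 16 bytes, aligned to at most 8.
/// v.reserve_capacity_bytes_align(1, 16, 8);
/// assert!(v.capacity() >= 1);
/// assert!(v.byte_capacity() >= 16);
/// assert!(v.align() >= 8);
/// // A `u64` (8 bytes, 8-aligned) now fits within all three reserved
/// // capacities, so pushing it should not force a reallocation.
/// v.push_unsize(42_u64);
/// ```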
209 | ///
210 | /// # Example
211 | ///
212 | /// ```
213 | /// #![allow(internal_features)] // for `unsized_fn_params`
214 | /// #![feature(unsized_fn_params)]
215 | /// use core::fmt::Debug;
216 | ///
217 | /// use emplacable::box_new_with;
218 | /// use unsized_vec::{unsize_vec, UnsizedVec};
219 | ///
220 | /// let mut vec: UnsizedVec<dyn Debug> = unsize_vec![27.53_f32, "oh the places we'll go", Some(())];
221 | ///
222 | /// for traitobj in &vec {
223 | /// dbg!(traitobj);
224 | /// };
225 | ///
226 | /// assert_eq!(vec.len(), 3);
227 | ///
228 | /// let maybe_popped: Option<Box<dyn Debug>> = vec.pop_into().map(box_new_with);
229 | /// let popped = maybe_popped.unwrap();
230 | /// dbg!(&*popped);
231 | ///
232 | /// assert_eq!(vec.len(), 2);
233 | /// ```
234 | ///
235 | /// [0]: alloc::vec::Vec
236 | /// [`emplacable`]: emplacable
237 | /// [`capacity`]: UnsizedVec::capacity
238 | /// [`with_capacity`]: UnsizedVec::with_capacity
239 | /// [`reserve`]: UnsizedVec::reserve
240 | /// [`byte_capacity`]: UnsizedVec::byte_capacity
241 | /// [`with_capacity_bytes`]: UnsizedVec::with_capacity_bytes
242 | /// [`reserve_capacity_bytes`]: UnsizedVec::reserve_capacity_bytes
243 | /// [`align`]: UnsizedVec::align
244 | /// [`with_capacity_bytes_align`]: UnsizedVec::with_capacity_bytes_align
245 | /// [`reserve_capacity_bytes_align`]: UnsizedVec::reserve_capacity_bytes_align
246 | /// [`pop_into`]: UnsizedVec::pop_into
247 | /// [`remove_into`]: UnsizedVec::remove_into
248 | #[repr(transparent)]
249 | pub struct UnsizedVec<T>
250 | where
251 | T: ?Sized,
252 | {
253 | inner: <T as UnsizedVecImpl>::Impl,
254 | }
255 |
256 | impl<T: ?Sized> UnsizedVec<T> {
257 | /// Create a new, empty `UnsizedVec`.
258 | /// Does not allocate.
259 | ///
260 | /// When `T`'s alignment is not known
261 | /// at compile-time, this uses `mem::align_of::<usize>()`
262 | /// as the default alignment.
263 | #[must_use]
264 | #[inline]
265 | pub const fn new() -> UnsizedVec<T> {
266 | UnsizedVec {
267 | inner: UnsizedVecProvider::NEW_ALIGN_PTR,
268 | }
269 | }
270 |
271 | /// Create a new, empty `UnsizedVec` with the given capacity.
272 | ///
273 | /// When `T`'s alignment is not known
274 | /// at compile-time, this uses `mem::align_of::<usize>()`
275 | /// as the default alignment.
276 | #[must_use]
277 | #[inline]
278 | pub fn with_capacity(capacity: usize) -> UnsizedVec<T> {
279 | let mut vec = UnsizedVec::new();
280 | vec.reserve_exact(capacity);
281 | vec
282 | }
283 |
284 | /// Create a new, empty `UnsizedVec` with the given capacity.
285 | /// (When `T: Aligned` does not hold, an alignment of 1 is used.)
286 | ///
287 | /// When `T`'s alignment is not known
288 | /// at compile-time, this uses `mem::align_of::<usize>()`
289 | /// as the default alignment.
290 | #[must_use]
291 | #[inline]
292 | pub fn with_capacity_bytes(capacity: usize, byte_capacity: usize) -> UnsizedVec<T> {
293 | let mut vec = UnsizedVec::new();
294 | vec.reserve_exact_capacity_bytes(capacity, byte_capacity);
295 | vec
296 | }
297 |
298 | /// Create a new, empty `UnsizedVec` with the given capacity
299 | /// (in bytes) and alignment.
300 | ///
301 | /// `align` is ignored when `T`'s alignment is known at compile time.
302 | #[must_use]
303 | #[inline]
304 | pub fn with_capacity_bytes_align(
305 | capacity: usize,
306 | byte_capacity: usize,
307 | align: usize,
308 | ) -> UnsizedVec<T> {
309 | let mut vec = UnsizedVec {
310 | inner: UnsizedVecProvider::NEW_ALIGN_1,
311 | };
312 | vec.reserve_exact_capacity_bytes_align(capacity, byte_capacity, align);
313 | vec
314 | }
315 |
316 | /// Returns the number of elements the vector can hold without
317 | /// reallocating.
318 | ///
319 | /// For `T: ?Sized`, this only concerns whether metadata
320 | /// could get reallocated, not the elements themselves.
321 | #[must_use]
322 | #[inline]
323 | pub fn capacity(&self) -> usize {
324 | self.inner.capacity()
325 | }
326 |
327 | /// Returns the number of bytes the vector can hold without
328 | /// reallocating.
329 | #[must_use]
330 | #[inline]
331 | pub fn byte_capacity(&self) -> usize {
332 | self.inner.byte_capacity()
333 | }
334 |
335 | /// Returns the maximum alignment of the values this vector
336 | /// can hold without re-padding and reallocating.
337 | ///
338 | /// Only relevant when `T`'s alignment is not known at compile time.
339 | #[must_use]
340 | #[inline]
341 | pub fn align(&self) -> usize {
342 | self.inner.align()
343 | }
344 |
345 | /// Reserves capacity for at least `additional` more elements to be inserted
346 | /// in the given `UnsizedVec`. The collection may reserve more space to
347 | /// speculatively avoid frequent reallocations.
348 | ///
349 | /// When `T` is not `Sized`, this only reserves space to store *metadata*.
350 | /// Consider using [`reserve_capacity_bytes`] instead in such cases.
351 | ///
352 | /// # Panics
353 | ///
354 | /// Panics if the new capacity exceeds `isize::MAX` bytes.
355 | ///
356 | /// [`reserve_capacity_bytes`]: UnsizedVec::reserve_capacity_bytes
357 | #[inline]
358 | pub fn reserve(&mut self, additional: usize) {
359 | unwrap_try_reserve_result(self.try_reserve(additional));
360 | }
361 |
362 | /// Reserves capacity for at least `additional` more elements,
363 | /// taking up at least `additional_bytes` bytes of space, to be inserted
364 | /// in the given `UnsizedVec`. The collection may reserve more space to
365 | /// speculatively avoid frequent reallocations.
366 | ///
367 | /// When `T`'s alignment is not known at compile time,
368 | /// the vec may still reallocate if you push a new element onto the
369 | /// vec with an alignment greater than `self.align()`. Consider
370 | /// using [`reserve_capacity_bytes_align`] instead in such cases.
371 | ///
372 | /// # Panics
373 | ///
374 | /// Panics if either of the new capacities exceeds `isize::MAX` bytes.
375 | ///
376 | /// [`reserve_capacity_bytes_align`]: UnsizedVec::reserve_capacity_bytes_align
377 | #[inline]
378 | pub fn reserve_capacity_bytes(&mut self, additional: usize, additional_bytes: usize) {
379 | unwrap_try_reserve_result(self.try_reserve_capacity_bytes(additional, additional_bytes));
380 | }
381 |
382 | /// Reserves capacity for at least `additional` more elements,
383 | /// taking up at least `additional_bytes` bytes of space,
384 | /// and with alignment of at most `align`, to be inserted
385 | /// in the given `UnsizedVec`. The collection may reserve more space to
386 | /// speculatively avoid frequent reallocations.
387 | ///
388 | /// When `T`'s alignment is known at compile time,
389 | /// `align` is ignored. Consider using [`reserve_capacity_bytes`]
390 | /// instead in such cases.
391 | ///
392 | /// # Panics
393 | ///
394 | /// Panics if either of the new capacities exceeds `isize::MAX` bytes,
395 | /// or if `align` is not a power of two.
396 | ///
397 | /// [`reserve_capacity_bytes`]: UnsizedVec::reserve_capacity_bytes
398 | #[inline]
399 | pub fn reserve_capacity_bytes_align(
400 | &mut self,
401 | additional: usize,
402 | additional_bytes: usize,
403 | align: usize,
404 | ) {
405 | unwrap_try_reserve_result(self.try_reserve_capacity_bytes_align(
406 | additional,
407 | additional_bytes,
408 | align,
409 | ));
410 | }
411 |
412 | /// Reserves capacity for at least `additional` more elements to be inserted
413 | /// in the given `UnsizedVec`. Unlike [`reserve`], this will not
414 | /// deliberately over-allocate to speculatively avoid frequent allocations.
415 | ///
416 | /// When `T` is not `Sized`, this only reserves space to store *metadata*.
417 | /// Consider using [`reserve_exact_capacity_bytes`] instead in such cases.
418 | ///
419 | /// # Panics
420 | ///
421 | /// Panics if the new capacity exceeds `isize::MAX` bytes.
422 | ///
423 | /// [`reserve`]: UnsizedVec::reserve
424 | /// [`reserve_exact_capacity_bytes`]: UnsizedVec::reserve_exact_capacity_bytes
425 | #[inline]
426 | pub fn reserve_exact(&mut self, additional: usize) {
427 | unwrap_try_reserve_result(self.try_reserve_exact_capacity_bytes(additional, 0));
428 | }
429 |
430 | /// Reserves capacity for at least `additional` more elements,
431 | /// taking up at least `additional_bytes` bytes of space, to be inserted
432 | /// in the given `UnsizedVec`. Unlike [`reserve_capacity_bytes`], this will not
433 | /// deliberately over-allocate to speculatively avoid frequent allocations.
434 | ///
435 | /// When `T`'s alignment is not known at compile time,
436 | /// the vec may still reallocate if you push a new element onto the
437 | /// vec with an alignment greater than `self.align()`. Consider
438 | /// using [`reserve_exact_capacity_bytes_align`] instead in such cases.
439 | ///
440 | /// # Panics
441 | ///
442 | /// Panics if either of the new capacities exceeds `isize::MAX` bytes.
443 | ///
444 | /// [`reserve_capacity_bytes`]: UnsizedVec::reserve_capacity_bytes
445 | /// [`reserve_exact_capacity_bytes_align`]: UnsizedVec::reserve_exact_capacity_bytes_align
446 | #[inline]
447 | pub fn reserve_exact_capacity_bytes(&mut self, additional: usize, additional_bytes: usize) {
448 | unwrap_try_reserve_result(
449 | self.try_reserve_exact_capacity_bytes(additional, additional_bytes),
450 | );
451 | }
452 |
453 | /// Reserves capacity for at least `additional` more elements,
454 | /// taking up at least `additional_bytes` bytes of space,
455 | /// and with alignment of at most `align`, to be inserted
456 | /// in the given `UnsizedVec`. Unlike [`reserve_capacity_bytes_align`], this will not
457 | /// deliberately over-allocate to speculatively avoid frequent allocations.
458 | ///
459 | /// When `T`'s alignment is known at compile time,
460 | /// `align` is ignored. Consider using [`reserve_exact_capacity_bytes`]
461 | /// instead in such cases.
462 | ///
463 | /// # Panics
464 | ///
465 | /// Panics if either of the new capacities exceeds `isize::MAX` bytes, or if `align` is not a power of two.
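///
/// A small usage sketch (the resulting capacities are lower bounds only;
/// the allocator may still round sizes up):
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::new();
/// v.reserve_exact_capacity_bytes_align(2, 24, 8);
/// assert!(v.capacity() >= 2);
/// assert!(v.byte_capacity() >= 24);
/// assert!(v.align() >= 8);
/// ```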
466 | ///
467 | /// [`reserve_capacity_bytes_align`]: UnsizedVec::reserve_capacity_bytes_align
468 | /// [`reserve_exact_capacity_bytes`]: UnsizedVec::reserve_exact_capacity_bytes
469 | #[inline]
470 | pub fn reserve_exact_capacity_bytes_align(
471 | &mut self,
472 | additional: usize,
473 | additional_bytes: usize,
474 | align: usize,
475 | ) {
476 | unwrap_try_reserve_result(self.try_reserve_exact_capacity_bytes_align(
477 | additional,
478 | additional_bytes,
479 | align,
480 | ));
481 | }
482 |
483 | /// Reserves capacity for at least `additional` more elements to be inserted
484 | /// in the given `UnsizedVec`. The collection may reserve more space to
485 | /// speculatively avoid frequent reallocations.
486 | ///
487 | /// When `T` is not `Sized`, this only reserves space to store *metadata*.
488 | /// Consider using [`try_reserve_capacity_bytes`] instead in such cases.
489 | ///
490 | /// # Errors
491 | ///
492 | /// If the capacity overflows, or the allocator reports a failure, then an error
493 | /// is returned.
494 | ///
495 | /// [`try_reserve_capacity_bytes`]: UnsizedVec::try_reserve_capacity_bytes
496 | #[inline]
497 | pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
498 | self.inner.try_reserve(additional)
499 | }
500 |
501 | /// Reserves capacity for at least `additional` more elements,
502 | /// taking up at least `additional_bytes` bytes of space, to be inserted
503 | /// in the given `UnsizedVec`. The collection may reserve more space to
504 | /// speculatively avoid frequent reallocations.
505 | ///
506 | /// When `T`'s alignment is not known at compile time,
507 | /// the vec may still reallocate if you push a new element onto the
508 | /// vec with an alignment greater than `self.align()`. Consider
509 | /// using [`try_reserve_capacity_bytes_align`] instead in such cases.
510 | ///
511 | /// # Errors
512 | ///
513 | /// If the capacity overflows, or the allocator reports a failure, then an error
514 | /// is returned.
515 | ///
516 | /// [`try_reserve_capacity_bytes_align`]: UnsizedVec::try_reserve_capacity_bytes_align
517 | #[inline]
518 | pub fn try_reserve_capacity_bytes(
519 | &mut self,
520 | additional: usize,
521 | additional_bytes: usize,
522 | ) -> Result<(), TryReserveError> {
523 | self.try_reserve_capacity_bytes_align(additional, additional_bytes, 1)
524 | }
525 |
526 | /// Reserves capacity for at least `additional` more elements,
527 | /// taking up at least `additional_bytes` bytes of space,
528 | /// and with alignment of at most `align`, to be inserted
529 | /// in the given `UnsizedVec`. The collection may reserve more space to
530 | /// speculatively avoid frequent reallocations.
531 | ///
532 | /// When `T`'s alignment is known at compile time,
533 | /// `align` is ignored. Consider using [`try_reserve_capacity_bytes`]
534 | /// instead in such cases.
535 | ///
536 | /// # Errors
537 | ///
538 | /// If the capacity overflows, or the allocator reports a failure, then an error
539 | /// is returned.
540 | ///
541 | /// # Panics
542 | ///
543 | /// Panics if `align` is not a power of two.
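///
/// For example (a sketch of handling the fallible variant):
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::new();
/// match v.try_reserve_capacity_bytes_align(8, 128, 16) {
///     Ok(()) => {
///         assert!(v.capacity() >= 8);
///         assert!(v.byte_capacity() >= 128);
///         assert!(v.align() >= 16);
///     }
///     // On failure, recover instead of aborting (e.g. free memory first).
///     Err(_) => {}
/// }
/// ```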
544 | ///
545 | /// [`try_reserve_capacity_bytes`]: UnsizedVec::try_reserve_capacity_bytes
546 | #[inline]
547 | pub fn try_reserve_capacity_bytes_align(
548 | &mut self,
549 | additional: usize,
550 | additional_bytes: usize,
551 | align: usize,
552 | ) -> Result<(), TryReserveError> {
553 | self.try_reserve(additional)?;
554 |
555 | debug_assert!(self.capacity() >= self.len() + additional);
556 |
557 | let align = to_align::<T>(align);
558 |
559 | let byte_cap = self.byte_capacity();
560 |
561 | let needed_bytes = additional_bytes.saturating_sub(self.unused_byte_cap());
562 |
563 | let optimist_bytes = if needed_bytes > 0 {
564 | cmp::max(needed_bytes, byte_cap)
565 | } else {
566 | 0
567 | };
568 |
569 | // First we try to at least double the byte capacity.
570 | // If that fails, we try again with only what we really need.
571 | if optimist_bytes > needed_bytes {
572 | let result = self
573 | .inner
574 | .try_reserve_additional_bytes_align(optimist_bytes, align);
575 |
576 | if result.is_ok() {
577 | return result;
578 | }
579 | }
580 |
581 | let result = self
582 | .inner
583 | .try_reserve_additional_bytes_align(needed_bytes, align);
584 |
585 | debug_assert!(self.byte_capacity() >= self.byte_len() + additional_bytes);
586 |
587 | result
588 | }
589 |
590 | /// Reserves capacity for at least `additional` more elements to be inserted
591 | /// in the given `UnsizedVec`. Unlike [`try_reserve`], this will not
592 | /// deliberately over-allocate to speculatively avoid frequent allocations.
593 | ///
594 | /// When `T` is not `Sized`, this only reserves space to store *metadata*.
595 | /// Consider using [`try_reserve_exact_capacity_bytes`] instead in such cases.
596 | ///
597 | /// # Errors
598 | ///
599 | /// If the capacity overflows, or the allocator reports a failure, then an error
600 | /// is returned.
601 | ///
602 | /// [`try_reserve`]: UnsizedVec::try_reserve
603 | /// [`try_reserve_exact_capacity_bytes`]: UnsizedVec::try_reserve_exact_capacity_bytes
604 | #[inline]
605 | pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
606 | self.inner.try_reserve_exact(additional)
607 | }
608 |
609 | /// Reserves capacity for at least `additional` more elements,
610 | /// taking up at least `additional_bytes` bytes of space, to be inserted
611 | /// in the given `UnsizedVec`. Unlike [`try_reserve_capacity_bytes`], this will not
612 | /// deliberately over-allocate to speculatively avoid frequent allocations.
613 | ///
614 | /// When `T`'s alignment is not known at compile time,
615 | /// the vec may still reallocate if you push a new element onto the
616 | /// vec with an alignment greater than `self.align()`. Consider
617 | /// using [`try_reserve_exact_capacity_bytes_align`] instead in such cases.
618 | ///
619 | /// # Errors
620 | ///
621 | /// If the capacity overflows, or the allocator reports a failure, then an error
622 | /// is returned.
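///
/// For example (sketch):
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::new();
/// v.try_reserve_exact_capacity_bytes(4, 64).expect("allocation failed");
/// assert!(v.capacity() >= 4);
/// assert!(v.byte_capacity() >= 64);
/// ```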
623 | ///
624 | /// [`try_reserve_capacity_bytes`]: UnsizedVec::try_reserve_capacity_bytes
625 | /// [`try_reserve_exact_capacity_bytes_align`]: UnsizedVec::try_reserve_exact_capacity_bytes_align
626 | #[inline]
627 | pub fn try_reserve_exact_capacity_bytes(
628 | &mut self,
629 | additional: usize,
630 | additional_bytes: usize,
631 | ) -> Result<(), TryReserveError> {
632 | self.try_reserve_exact_capacity_bytes_align(additional, additional_bytes, 1)
633 | }
634 |
635 | /// Reserves capacity for at least `additional` more elements,
636 | /// taking up at least `additional_bytes` bytes of space,
637 | /// and with alignment of at most `align`, to be inserted
638 | /// in the given `UnsizedVec`. Unlike [`try_reserve_capacity_bytes_align`], this will not
639 | /// deliberately over-allocate to speculatively avoid frequent allocations.
640 | ///
641 | /// When `T`'s alignment is known at compile time,
642 | /// `align` is ignored. Consider using [`try_reserve_exact_capacity_bytes`]
643 | /// instead in such cases.
644 | ///
645 | /// # Errors
646 | ///
647 | /// If the capacity overflows, or the allocator reports a failure, then an error
648 | /// is returned.
649 | ///
650 | /// # Panics
651 | ///
652 | /// Panics if `align` is not a power of two.
653 | ///
654 | /// [`try_reserve_capacity_bytes_align`]: UnsizedVec::try_reserve_capacity_bytes_align
655 | /// [`try_reserve_exact_capacity_bytes`]: UnsizedVec::try_reserve_exact_capacity_bytes
656 | #[inline]
657 | pub fn try_reserve_exact_capacity_bytes_align(
658 | &mut self,
659 | additional: usize,
660 | additional_bytes: usize,
661 | align: usize,
662 | ) -> Result<(), TryReserveError> {
663 | self.inner.try_reserve(additional)?;
664 | let align = to_align::<T>(align);
665 |
666 | self.inner
667 | .try_reserve_additional_bytes_align(additional_bytes, align)
668 | }
669 |
670 | /// Shrinks all the capacities of the vec as much as possible.
671 | #[inline]
672 | pub fn shrink_to_fit(&mut self) {
673 | self.inner
674 | .shrink_capacity_bytes_align_to(0, 0, to_align::<T>(1));
675 | }
676 |
677 | /// Shrinks the elementwise capacity of the vector with a lower bound.
678 | ///
679 | /// The capacity will remain at least as large as both the length
680 | /// and the supplied value.
681 | ///
682 | /// If the current capacity is less than the lower limit, this is a no-op.
683 | ///
684 | /// For `T: ?Sized`, this only affects elementwise capacity.
685 | /// Consider using [`shrink_capacity_bytes_to`] in such cases.
686 | ///
687 | /// [`shrink_capacity_bytes_to`]: UnsizedVec::shrink_capacity_bytes_to
688 | #[inline]
689 | pub fn shrink_to(&mut self, min_capacity: usize) {
690 | self.inner.shrink_capacity_bytes_align_to(
691 | min_capacity,
692 | usize::MAX,
693 | to_align::<T>(1 << (usize::BITS - 1)),
694 | );
695 | }
696 |
697 | /// Shrinks the elementwise and byte capacities of the vector with
698 | /// lower bounds.
699 | ///
700 | /// The capacities will remain at least as large as both the lengths
701 | /// and the supplied values.
702 | ///
703 | /// If the current capacities are less than the lower limits, this is a no-op.
704 | ///
705 | /// When `T`'s alignment is not known at compile-time, this only affects elementwise
706 | /// and bytewise capacities.
707 | /// Consider using [`shrink_capacity_bytes_align_to`] in such cases.
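///
/// For instance (a sketch; shrinking is best-effort, so only the lower
/// bounds are guaranteed):
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::with_capacity_bytes(16, 256);
/// v.push_unsize(1_u32);
/// v.shrink_capacity_bytes_to(4, 64);
/// assert!(v.capacity() >= 4);
/// assert!(v.byte_capacity() >= 64);
/// assert_eq!(v.len(), 1);
/// ```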
708 | ///
709 | /// [`shrink_capacity_bytes_align_to`]: UnsizedVec::shrink_capacity_bytes_align_to
710 | #[inline]
711 | pub fn shrink_capacity_bytes_to(&mut self, min_capacity: usize, min_byte_capacity: usize) {
712 | self.inner.shrink_capacity_bytes_align_to(
713 | min_capacity,
714 | min_byte_capacity,
715 | to_align::<T>(1 << (usize::BITS - 1)),
716 | );
717 | }
718 |
719 | /// Shrinks the elementwise, byte, and alignment capacities of the vector with
720 | /// lower bounds.
721 | ///
722 | /// The capacities will remain at least as large as both the lengths
723 | /// and the supplied values.
724 | ///
725 | /// If the current capacities are less than the lower limits, this is a no-op.
726 | ///
727 | /// # Panics
728 | ///
729 | /// Panics if `min_align` is not a power of two.
730 | #[inline]
731 | pub fn shrink_capacity_bytes_align_to(
732 | &mut self,
733 | min_capacity: usize,
734 | min_byte_capacity: usize,
735 | min_align: usize,
736 | ) {
737 | self.inner.shrink_capacity_bytes_align_to(
738 | min_capacity,
739 | min_byte_capacity,
740 | to_align::<T>(min_align),
741 | );
742 | }
743 |
744 | /// Inserts an element at position `index` within the vector, shifting all
745 | /// elements after it to the right.
746 | ///
747 | /// If `T` is not `Sized`, you will need
748 | /// `#![feature(unsized_fn_params)]` to call this.
749 | /// You may also need the [`unsize`] macro, which
750 | /// requires additional nightly features.
751 | ///
752 | /// Alternatively, you can use [`insert_unsize`][0],
753 | /// which takes care of unsizing for you.
754 | ///
755 | /// # Example
756 | ///
757 | /// ```
758 | /// #![allow(internal_features)] // for `unsized_fn_params`
759 | /// #![feature(allocator_api, ptr_metadata, unsized_fn_params)]
760 | ///
761 | /// use core::fmt::Debug;
762 | ///
763 | /// use emplacable::unsize;
764 | /// use unsized_vec::UnsizedVec;
765 | ///
766 | /// let mut vec: UnsizedVec<dyn Debug> = UnsizedVec::new();
767 | ///
768 | /// vec.push(unsize!([1, 2], ([i32; 2]) -> dyn Debug));
769 | /// vec.insert(0, unsize!("can you believe it", (&str) -> dyn Debug));
770 | /// dbg!(&vec[0]);
771 | /// ```
772 | ///
773 | /// [0]: UnsizedVec::insert_unsize
774 | #[inline]
775 | pub fn insert(&mut self, index: usize, value: T) {
776 | #[track_caller]
777 | #[cold]
778 | #[inline(never)]
779 | fn assert_failed(index: usize, len: usize) -> ! {
780 | panic!("insertion index (is {index}) should be <= len (is {len})");
781 | }
782 |
783 | if index <= self.len() {
784 | let size_of_val = SizeTypeFor::<T>::of_val(&value);
785 | self.reserve_capacity_bytes_align(1, size_of_val.get(), mem::align_of_val(&value));
786 |
787 | // SAFETY: reserved needed capacity and performed bounds check above
788 | unsafe { self.inner.insert_unchecked(index, value, size_of_val) }
789 | } else {
790 | assert_failed(index, self.len())
791 | }
792 | }
793 |
794 | /// Inserts an element at position `index` within the vector, shifting all
795 | /// elements after it to the right, after unsizing it.
796 | /// 797 | /// # Examples 798 | /// 799 | /// ``` 800 | /// use core::fmt::Debug; 801 | /// 802 | /// use unsized_vec::UnsizedVec; 803 | /// 804 | /// let mut vec: UnsizedVec = UnsizedVec::new(); 805 | /// 806 | /// vec.push_unsize([1, 2]); 807 | /// vec.insert_unsize(0, "can you believe it"); 808 | /// dbg!(&vec[0]); 809 | /// ``` 810 | #[inline] 811 | pub fn insert_unsize(&mut self, index: usize, value: S) 812 | where 813 | S: Unsize, 814 | { 815 | self.insert(index, unsize!(value, (S) -> T)); 816 | } 817 | 818 | /// Inserts an element at position `index` within the vector, shifting all 819 | /// elements after it to the right. 820 | /// 821 | /// Accepts the element as an [`Emplacable`] 822 | /// instead of `T` directly, analogously 823 | /// to [`emplacable::box_new_with`]. 824 | /// 825 | /// # Example 826 | /// 827 | /// ``` 828 | /// #![allow(internal_features)] // for `unsized_fn_params` 829 | /// #![feature(allocator_api, ptr_metadata, unsized_fn_params)] 830 | /// 831 | /// use core::fmt::Debug; 832 | /// 833 | /// use unsized_vec::{unsize_vec, UnsizedVec}; 834 | /// 835 | /// let mut vec_1: UnsizedVec = unsize_vec![32, "hello"]; 836 | /// let mut vec_2: UnsizedVec = unsize_vec![97]; 837 | /// 838 | /// vec_2.insert_with(0, vec_1.pop_into().unwrap()); 839 | /// 840 | /// assert_eq!(vec_1.len(), 1); 841 | /// assert_eq!(vec_2.len(), 2); 842 | /// dbg!(&vec_2[0]); 843 | /// ``` 844 | #[inline] 845 | pub fn insert_with(&mut self, index: usize, value: Emplacable>) { 846 | #[track_caller] 847 | #[cold] 848 | #[inline(never)] 849 | fn assert_failed(index: usize, len: usize) -> ! { 850 | panic!("insertion index (is {index}) should be <= len (is {len})"); 851 | } 852 | 853 | if index <= self.len() { 854 | // SAFETY: did bounds check just above 855 | unsafe { self.inner.insert_with_unchecked(index, value) } 856 | } else { 857 | assert_failed(index, self.len()) 858 | } 859 | } 860 | 861 | /// Removes and returns the element at position `index` within the vector, 862 | /// shifting all elements after it to the left. 863 | /// 864 | /// Because `T` might be unsized, and functions can't return 865 | /// unsized values directly, this method returns the element using 866 | /// the "emplacer" mechanism. You can pass the returned [`Emplacable`] 867 | /// to a function like [`box_new_with`] to get the contained `T`. 868 | /// 869 | /// # Example 870 | /// 871 | /// ``` 872 | /// use core::fmt::Debug; 873 | /// 874 | /// use emplacable::box_new_with; 875 | /// use unsized_vec::UnsizedVec; 876 | /// 877 | /// let mut vec = UnsizedVec::::new(); 878 | /// 879 | /// vec.push_unsize("A beautiful day today innit"); 880 | /// vec.push_unsize("Quite right ol chap"); 881 | /// 882 | /// let popped: Box = box_new_with(vec.remove_into(0)); 883 | /// dbg!(&popped); 884 | /// 885 | /// ``` 886 | /// 887 | /// [`box_new_with`]: emplacable::box_new_with 888 | #[inline] 889 | pub fn remove_into(&mut self, index: usize) -> Emplacable + '_> { 890 | #[track_caller] 891 | #[cold] 892 | #[inline(never)] 893 | fn assert_failed(index: usize, len: usize) -> ! 
{ 894 | panic!("removal index (is {index}) should be < len (is {len})"); 895 | } 896 | 897 | if index < self.len() { 898 | let closure = move |emplacer: &mut Emplacer<'_, T>| { 899 | // SAFETY: check `index < len` right above 900 | unsafe { self.inner.remove_into_unchecked(index, emplacer) }; 901 | }; 902 | // SAFETY: `remove_into_unchecked` upholds the requirements 903 | unsafe { Emplacable::from_fn(closure) } 904 | } else { 905 | assert_failed(index, self.len()) 906 | } 907 | } 908 | 909 | /// Appends an element to the back of a collection. 910 | /// 911 | /// If `T` is not `Sized`, you will need 912 | /// `#![feature(unsized_fn_params)]` to call this. 913 | /// You may also need the [`unsize`] macro, which 914 | /// requires additional nightly features. 915 | /// 916 | /// Alternatively, you can use [`push_unsize`][0], 917 | /// which takes care of unsizing for you. 918 | /// 919 | /// # Example 920 | /// 921 | /// ``` 922 | /// #![allow(internal_features)] // for `unsized_fn_params` 923 | /// #![feature(allocator_api, ptr_metadata, unsized_fn_params)] 924 | /// 925 | /// use core::fmt::Debug; 926 | /// 927 | /// use emplacable::unsize; 928 | /// use unsized_vec::UnsizedVec; 929 | /// 930 | /// let mut vec: UnsizedVec = UnsizedVec::new(); 931 | /// 932 | /// vec.push(unsize!([1, 2], ([i32; 2]) -> dyn Debug)); 933 | /// dbg!(&vec[0]); 934 | /// ``` 935 | /// 936 | /// [0]: UnsizedVec::push_unsize 937 | #[inline] 938 | pub fn push(&mut self, value: T) { 939 | let size_of_val = SizeTypeFor::::of_val(&value); 940 | 941 | self.reserve_capacity_bytes_align(1, size_of_val.get(), mem::align_of_val(&value)); 942 | 943 | // SAFETY: reserved needed capacity above 944 | unsafe { self.inner.push_unchecked(value, size_of_val) } 945 | } 946 | 947 | /// Appends an element to the back of a collection 948 | /// after coercing it to an unsized type. 949 | /// 950 | /// # Example 951 | /// 952 | /// ``` 953 | /// use core::fmt::Debug; 954 | /// 955 | /// use unsized_vec::UnsizedVec; 956 | /// 957 | /// let mut vec: UnsizedVec = UnsizedVec::new(); 958 | /// 959 | /// vec.push_unsize([1, 2]); 960 | /// dbg!(&vec[0]); 961 | /// 962 | /// ``` 963 | #[inline] 964 | pub fn push_unsize>(&mut self, value: S) { 965 | self.push(unsize!(value, (S) -> T)); 966 | } 967 | 968 | /// Appends an element to the back of a collection. 969 | /// 970 | /// Accepts the element as an [`Emplacable`] 971 | /// instead of `T` directly, analogously 972 | /// to [`emplacable::box_new_with`]. 973 | /// 974 | /// ``` 975 | /// #![allow(internal_features)] // for `unsized_fn_params` 976 | /// #![feature(allocator_api, ptr_metadata, unsized_fn_params)] 977 | /// 978 | /// use core::fmt::Debug; 979 | /// 980 | /// use unsized_vec::{unsize_vec, UnsizedVec}; 981 | /// 982 | /// let mut vec_1: UnsizedVec = unsize_vec![32, "hello"]; 983 | /// let mut vec_2: UnsizedVec = UnsizedVec::new(); 984 | /// 985 | /// vec_2.push_with(vec_1.pop_into().unwrap()); 986 | /// 987 | /// assert_eq!(vec_1.len(), 1); 988 | /// dbg!(&vec_2[0]); 989 | /// ``` 990 | #[inline] 991 | pub fn push_with(&mut self, value: Emplacable>) { 992 | self.inner.push_with(value); 993 | } 994 | 995 | /// Removes the last element from a vector and returns it, or [`None`] if it 996 | /// is empty. 997 | /// 998 | /// Because `T` might be unsized, and functions can't return 999 | /// unsized values directly, this method returns the element using 1000 | /// the "emplacer" mechanism. 
You can pass the returned [`Emplacable`] 1001 | /// to a function like [`box_new_with`] to get the contained `T`. 1002 | /// 1003 | /// # Example 1004 | /// 1005 | /// ``` 1006 | /// use core::fmt::Debug; 1007 | /// 1008 | /// use emplacable::{box_new_with, Emplacable}; 1009 | /// use unsized_vec::{UnsizedVec}; 1010 | /// 1011 | /// let mut vec = UnsizedVec::::new(); 1012 | /// 1013 | /// dbg!(vec.is_empty()); 1014 | /// let nothing: Option> = vec.pop_into().map(box_new_with); 1015 | /// assert!(nothing.is_none()); 1016 | /// 1017 | /// vec.push_unsize("A beautiful day today"); 1018 | /// let popped: Option> = vec.pop_into().map(box_new_with); 1019 | /// let unwrapped: Box = popped.unwrap(); 1020 | /// dbg!(&unwrapped); 1021 | /// 1022 | /// vec.push_unsize("innit?"); 1023 | /// dbg!(&vec); 1024 | /// 1025 | /// let mut popped_emplacable: Emplacable = vec.pop_into().unwrap(); 1026 | /// 1027 | /// // vec.push_unsize("yea"); // error: cannot borrow `vec` as mutable more than once at a time 1028 | /// // The `vec` will remain borrowed until you consume the `Emplacable`! 1029 | /// 1030 | /// // or we can just drop it... 1031 | /// // dropping an `Emplacable` drops 1032 | /// // the contained value. 1033 | /// popped_emplacable; 1034 | /// 1035 | /// assert!(vec.is_empty()); 1036 | /// 1037 | /// vec.push_unsize("yea"); // works now 1038 | /// 1039 | /// ``` 1040 | /// 1041 | /// [`box_new_with`]: emplacable::box_new_with 1042 | #[inline] 1043 | pub fn pop_into(&mut self) -> Option + '_>> { 1044 | if !self.is_empty() { 1045 | let closure = move |emplacer: &mut Emplacer<'_, T>| { 1046 | // SAFETY: checked above that vec is non-empty 1047 | unsafe { self.inner.pop_into_unchecked(emplacer) } 1048 | }; 1049 | 1050 | // SAFETY: `pop_into_unchecked` upholds the requirements of this closure 1051 | Some(unsafe { Emplacable::from_fn(closure) }) 1052 | } else { 1053 | None 1054 | } 1055 | } 1056 | 1057 | /// Returns the number of elements in the vector, also referred to 1058 | /// as its 'length'. 1059 | #[must_use] 1060 | #[inline] 1061 | pub fn len(&self) -> usize { 1062 | self.inner.len() 1063 | } 1064 | 1065 | /// Returns the number of used bytes in the vector. 1066 | #[must_use] 1067 | #[inline] 1068 | pub fn byte_len(&self) -> usize { 1069 | self.inner.byte_len() 1070 | } 1071 | 1072 | /// Returns `true` if the vector contains no elements. 1073 | #[must_use] 1074 | #[inline] 1075 | pub fn is_empty(&self) -> bool { 1076 | self.len() == 0 1077 | } 1078 | 1079 | /// Returns a reference to an element, 1080 | /// or `None` if `index` is out of range. 1081 | #[must_use] 1082 | #[inline] 1083 | pub fn get(&self, index: usize) -> Option<&T> { 1084 | // SAFETY: Bounds check done right before 1085 | (index < self.len()).then(|| unsafe { self.get_unchecked(index) }) 1086 | } 1087 | 1088 | /// Returns a mutable reference to an element, 1089 | /// or `None` if `index` is out of range. 1090 | #[must_use] 1091 | #[inline] 1092 | pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { 1093 | // SAFETY: Bounds check done right before 1094 | (index < self.len()).then(|| unsafe { self.get_unchecked_mut(index) }) 1095 | } 1096 | 1097 | /// Returns a reference to an element, without doing bounds 1098 | /// checking. 1099 | /// 1100 | /// # Safety 1101 | /// 1102 | /// Calling this method with an out-of-bounds index is *[undefined behavior]* 1103 | /// even if the resulting reference is not used. 
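///
/// A sketch of sound usage (the caller performs the bounds check):
///
/// ```
/// # use core::fmt::Debug;
/// # use unsized_vec::UnsizedVec;
/// let mut v: UnsizedVec<dyn Debug> = UnsizedVec::new();
/// v.push_unsize("hi");
/// if !v.is_empty() {
///     // SAFETY: just checked that index 0 is in bounds.
///     let first: &dyn Debug = unsafe { v.get_unchecked(0) };
///     dbg!(first);
/// }
/// ```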
1104 | #[must_use] 1105 | #[inline] 1106 | pub unsafe fn get_unchecked(&self, index: usize) -> &T { 1107 | // SAFETY: precondition of function 1108 | unsafe { self.inner.get_unchecked_raw(index).as_ref() } 1109 | } 1110 | 1111 | /// Returns a mutable reference to an element, without doing bounds 1112 | /// checking. 1113 | /// 1114 | /// # Safety 1115 | /// 1116 | /// Calling this method with an out-of-bounds index is *[undefined behavior]* 1117 | /// even if the resulting reference is not used. 1118 | #[must_use] 1119 | #[inline] 1120 | pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { 1121 | // SAFETY: precondition of function 1122 | unsafe { self.inner.get_unchecked_raw(index).as_mut() } 1123 | } 1124 | 1125 | /// Returns an iterator over references to the elements of this vec. 1126 | #[must_use] 1127 | #[inline] 1128 | pub fn iter(&self) -> UnsizedIter<'_, T> { 1129 | UnsizedIter { 1130 | inner: self.inner.iter(), 1131 | } 1132 | } 1133 | 1134 | /// Returns an iterator over mutable references to the elements of this vec. 1135 | #[must_use] 1136 | #[inline] 1137 | pub fn iter_mut(&mut self) -> UnsizedIterMut<'_, T> { 1138 | UnsizedIterMut { 1139 | inner: self.inner.iter_mut(), 1140 | } 1141 | } 1142 | 1143 | /// Coerces this Vec's elements to an unsized type. 1144 | /// 1145 | /// # Example 1146 | /// 1147 | /// ``` 1148 | /// use core::fmt::Debug; 1149 | /// 1150 | /// use unsized_vec::UnsizedVec; 1151 | /// 1152 | /// let sized: Vec = vec![3, 4, 5]; 1153 | /// let unsize: UnsizedVec = UnsizedVec::unsize(sized.into()); 1154 | /// dbg!(&unsize); 1155 | /// ``` 1156 | #[must_use] 1157 | #[inline] 1158 | pub fn unsize(self) -> UnsizedVec 1159 | where 1160 | T: Sized + Unsize, 1161 | U: ?Sized, 1162 | { 1163 | UnsizedVec { 1164 | inner: ::Impl::from_sized(self.inner), 1165 | } 1166 | } 1167 | 1168 | #[must_use] 1169 | #[inline] 1170 | fn unused_byte_cap(&self) -> usize { 1171 | // SAFETY: len <= cap 1172 | unsafe { self.byte_capacity().unchecked_sub(self.byte_len()) } 1173 | } 1174 | } 1175 | 1176 | impl Default for UnsizedVec 1177 | where 1178 | T: ?Sized, 1179 | { 1180 | #[inline] 1181 | fn default() -> Self { 1182 | Self::new() 1183 | } 1184 | } 1185 | 1186 | /// The iterator returned by [`UnsizedVec::iter`]. 1187 | #[repr(transparent)] 1188 | pub struct UnsizedIter<'a, T> 1189 | where 1190 | T: ?Sized + 'a, 1191 | { 1192 | inner: <::Impl as UnsizedVecProvider>::Iter<'a>, 1193 | } 1194 | 1195 | /// The iterator returned by [`UnsizedVec::iter_mut`]. 
1194 | 
1195 | /// The iterator returned by [`UnsizedVec::iter_mut`].
1196 | #[repr(transparent)]
1197 | pub struct UnsizedIterMut<'a, T>
1198 | where
1199 |     T: ?Sized + 'a,
1200 | {
1201 |     inner: <<T as UnsizedVecImpl>::Impl as UnsizedVecProvider<T>>::IterMut<'a>,
1202 | }
1203 | 
1204 | impl<T> From<::alloc::vec::Vec<T>> for UnsizedVec<T> {
1205 |     #[inline]
1206 |     fn from(value: ::alloc::vec::Vec<T>) -> Self {
1207 |         UnsizedVec { inner: value }
1208 |     }
1209 | }
1210 | 
1211 | impl<T> From<UnsizedVec<T>> for ::alloc::vec::Vec<T> {
1212 |     #[inline]
1213 |     fn from(value: UnsizedVec<T>) -> Self {
1214 |         value.inner
1215 |     }
1216 | }
1217 | 
1218 | impl<T> Index<usize> for UnsizedVec<T>
1219 | where
1220 |     T: ?Sized,
1221 | {
1222 |     type Output = T;
1223 | 
1224 |     #[inline]
1225 |     fn index(&self, index: usize) -> &Self::Output {
1226 |         self.get(index).expect("index out of range")
1227 |     }
1228 | }
1229 | 
1230 | impl<T> IndexMut<usize> for UnsizedVec<T>
1231 | where
1232 |     T: ?Sized,
1233 | {
1234 |     #[inline]
1235 |     fn index_mut(&mut self, index: usize) -> &mut Self::Output {
1236 |         self.get_mut(index).expect("index out of range")
1237 |     }
1238 | }
1239 | 
1240 | impl<'a, T> From<core::slice::Iter<'a, T>> for UnsizedIter<'a, T>
1241 | where
1242 |     T: 'a,
1243 | {
1244 |     #[inline]
1245 |     fn from(value: core::slice::Iter<'a, T>) -> Self {
1246 |         UnsizedIter { inner: value }
1247 |     }
1248 | }
1249 | 
1250 | impl<'a, T> From<UnsizedIter<'a, T>> for core::slice::Iter<'a, T>
1251 | where
1252 |     T: 'a,
1253 | {
1254 |     #[inline]
1255 |     fn from(value: UnsizedIter<'a, T>) -> Self {
1256 |         value.inner
1257 |     }
1258 | }
1259 | 
1260 | macro_rules! iter_ref {
1261 |     ($iter_ty:ident $($muta:ident)?) => {
1262 |         impl<'a, T> Iterator for $iter_ty<'a, T>
1263 |         where
1264 |             T: ?Sized + 'a,
1265 |         {
1266 |             type Item = &'a $($muta)? T;
1267 | 
1268 |             #[inline]
1269 |             fn next(&mut self) -> Option<Self::Item> {
1270 |                 self.inner.next()
1271 |             }
1272 | 
1273 |             #[inline]
1274 |             fn size_hint(&self) -> (usize, Option<usize>) {
1275 |                 self.inner.size_hint()
1276 |             }
1277 | 
1278 |             #[inline]
1279 |             fn count(self) -> usize {
1280 |                 self.inner.count()
1281 |             }
1282 | 
1283 |             #[inline]
1284 |             fn nth(&mut self, n: usize) -> Option<Self::Item> {
1285 |                 self.inner.nth(n)
1286 |             }
1287 | 
1288 |             #[inline]
1289 |             fn last(self) -> Option<Self::Item> {
1290 |                 self.inner.last()
1291 |             }
1292 | 
1293 |             #[inline]
1294 |             fn for_each<F>(self, f: F)
1295 |             where
1296 |                 F: FnMut(Self::Item),
1297 |             {
1298 |                 self.inner.for_each(f);
1299 |             }
1300 | 
1301 |             #[inline]
1302 |             fn all<F>(&mut self, f: F) -> bool
1303 |             where
1304 |                 F: FnMut(Self::Item) -> bool,
1305 |             {
1306 |                 self.inner.all(f)
1307 |             }
1308 | 
1309 |             #[inline]
1310 |             fn any<F>(&mut self, f: F) -> bool
1311 |             where
1312 |                 F: FnMut(Self::Item) -> bool,
1313 |             {
1314 |                 self.inner.any(f)
1315 |             }
1316 | 
1317 |             #[inline]
1318 |             fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
1319 |             where
1320 |                 P: FnMut(&Self::Item) -> bool,
1321 |             {
1322 |                 self.inner.find(predicate)
1323 |             }
1324 | 
1325 |             #[inline]
1326 |             fn find_map<B, F>(&mut self, f: F) -> Option<B>
1327 |             where
1328 |                 F: FnMut(Self::Item) -> Option<B>,
1329 |             {
1330 |                 self.inner.find_map(f)
1331 |             }
1332 | 
1333 |             #[inline]
1334 |             fn position<P>(&mut self, predicate: P) -> Option<usize>
1335 |             where
1336 |                 P: FnMut(Self::Item) -> bool,
1337 |             {
1338 |                 self.inner.position(predicate)
1339 |             }
1340 |         }
1341 | 
1342 |         impl<'a, T> DoubleEndedIterator for $iter_ty<'a, T>
1343 |         where
1344 |             T: ?Sized + 'a,
1345 |         {
1346 |             #[inline]
1347 |             fn next_back(&mut self) -> Option<Self::Item> {
1348 |                 self.inner.next_back()
1349 |             }
1350 | 
1351 |             #[inline]
1352 |             fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
1353 |                 self.inner.nth_back(n)
1354 |             }
1355 |         }
1356 | 
1357 |         impl<'a, T> ExactSizeIterator for $iter_ty<'a, T>
1358 |         where
1359 |             T: ?Sized + 'a,
1360 |         {}
1361 |         impl<'a, T> FusedIterator for $iter_ty<'a, T>
1362 |         where
1363 |             T: ?Sized + 'a,
1364 |         {}
1365 |     }
1366 | }
1367 | 
1368 | iter_ref!(UnsizedIter);
1369 | iter_ref!(UnsizedIterMut mut);
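// A short sketch of what the delegation above provides: the iterators behave
// like `Vec`'s, including `ExactSizeIterator` and `DoubleEndedIterator`.
// Assumes an `UnsizedVec<[i32]>` built with `push_unsize`; the module name
// is hypothetical.
#[cfg(test)]
mod iter_sketch {
    use crate::UnsizedVec;

    #[test]
    fn forward_and_back() {
        let mut vec: UnsizedVec<[i32]> = UnsizedVec::new();
        vec.push_unsize([1]);
        vec.push_unsize([2, 3]);

        assert_eq!(vec.iter().len(), 2);
        assert_eq!(vec.iter().next_back().unwrap(), &[2, 3]);

        // `iter_mut` yields `&mut [i32]`, so elements can be edited in place.
        for slice in vec.iter_mut() {
            for x in slice {
                *x += 10;
            }
        }
        assert_eq!(&vec[0], &[11]);
    }
}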

1370 | 
1371 | impl<'a, T> IntoIterator for &'a UnsizedVec<T>
1372 | where
1373 |     T: ?Sized + 'a,
1374 | {
1375 |     type Item = &'a T;
1376 | 
1377 |     type IntoIter = UnsizedIter<'a, T>;
1378 | 
1379 |     #[inline]
1380 |     fn into_iter(self) -> Self::IntoIter {
1381 |         self.iter()
1382 |     }
1383 | }
1384 | 
1385 | impl<'a, T> IntoIterator for &'a mut UnsizedVec<T>
1386 | where
1387 |     T: ?Sized + 'a,
1388 | {
1389 |     type Item = &'a mut T;
1390 | 
1391 |     type IntoIter = UnsizedIterMut<'a, T>;
1392 | 
1393 |     #[inline]
1394 |     fn into_iter(self) -> Self::IntoIter {
1395 |         self.iter_mut()
1396 |     }
1397 | }
1398 | 
1399 | impl<T, F> FromIterator<Emplacable<T, F>> for UnsizedVec<T>
1400 | where
1401 |     T: ?Sized,
1402 |     F: EmplacableFn<T>,
1403 | {
1404 |     #[inline]
1405 |     fn from_iter<I>(iter: I) -> Self
1406 |     where
1407 |         I: IntoIterator<Item = Emplacable<T, F>>,
1408 |     {
1409 |         let mut vec = UnsizedVec::new();
1410 |         vec.extend(iter);
1411 |         vec
1412 |     }
1413 | }
1414 | 
1415 | impl<T, F> Extend<Emplacable<T, F>> for UnsizedVec<T>
1416 | where
1417 |     T: ?Sized,
1418 |     F: EmplacableFn<T>,
1419 | {
1420 |     #[inline]
1421 |     fn extend<I>(&mut self, iter: I)
1422 |     where
1423 |         I: IntoIterator<Item = Emplacable<T, F>>,
1424 |     {
1425 |         fn extend_inner<T: ?Sized, F: EmplacableFn<T>, I: Iterator<Item = Emplacable<T, F>>>(
1426 |             vec: &mut UnsizedVec<T>,
1427 |             iter: I,
1428 |         ) {
1429 |             vec.reserve_exact(iter.size_hint().0);
1430 |             for emplacable in iter {
1431 |                 vec.push_with(emplacable);
1432 |             }
1433 |         }
1434 | 
1435 |         extend_inner(self, iter.into_iter());
1436 |     }
1437 | }
1438 | 
1439 | impl<T> Debug for UnsizedVec<T>
1440 | where
1441 |     T: ?Sized + Debug,
1442 | {
1443 |     #[inline]
1444 |     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
1445 |         f.debug_list().entries(self.iter()).finish()
1446 |     }
1447 | }
1448 | 
1449 | impl<T> Clone for UnsizedVec<T>
1450 | where
1451 |     T: Clone,
1452 | {
1453 |     #[inline]
1454 |     fn clone(&self) -> Self {
1455 |         let mut ret = UnsizedVec::with_capacity_bytes_align(
1456 |             self.capacity(),
1457 |             self.byte_capacity(),
1458 |             self.align(),
1459 |         );
1460 |         for elem in self {
1461 |             ret.push(elem.clone());
1462 |         }
1463 |         ret
1464 |     }
1465 | }
1466 | 
1467 | impl<T, U> PartialEq<UnsizedVec<U>> for UnsizedVec<T>
1468 | where
1469 |     T: ?Sized + PartialEq<U>,
1470 |     U: ?Sized,
1471 | {
1472 |     #[inline]
1473 |     fn eq(&self, other: &UnsizedVec<U>) -> bool {
1474 |         self.len() == other.len() && self.iter().zip(other).all(|(l, r)| l == r)
1475 |     }
1476 | }
1477 | 
1478 | impl<T> Eq for UnsizedVec<T> where T: ?Sized + Eq {}
1479 | 
1480 | impl<T, U> PartialOrd<UnsizedVec<U>> for UnsizedVec<T>
1481 | where
1482 |     T: ?Sized + PartialOrd<U>,
1483 |     U: ?Sized,
1484 | {
1485 |     fn partial_cmp(&self, other: &UnsizedVec<U>) -> Option<cmp::Ordering> {
1486 |         for (l, r) in self.iter().zip(other) {
1487 |             match l.partial_cmp(r) {
1488 |                 Some(cmp::Ordering::Equal) => (),
1489 |                 res => return res,
1490 |             }
1491 |         }
1492 |         self.len().partial_cmp(&other.len())
1493 |     }
1494 | }
1495 | 
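// Worked sketch of the comparison impls above: elements are compared pairwise
// in order, and when one vec is a strict prefix of the other, length decides,
// exactly as for `Vec`. Assumes `UnsizedVec<[i32]>`; the module name is
// hypothetical.
#[cfg(test)]
mod ordering_sketch {
    use crate::UnsizedVec;

    #[test]
    fn lexicographic() {
        let mut a: UnsizedVec<[i32]> = UnsizedVec::new();
        a.push_unsize([1, 2]);

        let mut b: UnsizedVec<[i32]> = UnsizedVec::new();
        b.push_unsize([1, 2]);
        b.push_unsize([0]);

        // `a` is a strict prefix of `b`, so it compares less.
        assert!(a < b);
        assert!(a != b);
    }
}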
1496 | impl<T> Ord for UnsizedVec<T>
1497 | where
1498 |     T: ?Sized + Ord,
1499 | {
1500 |     #[inline]
1501 |     fn cmp(&self, other: &Self) -> cmp::Ordering {
1502 |         for (l, r) in self.iter().zip(other) {
1503 |             match l.cmp(r) {
1504 |                 cmp::Ordering::Equal => (),
1505 |                 res => return res,
1506 |             }
1507 |         }
1508 |         self.len().cmp(&other.len())
1509 |     }
1510 | }
1511 | 
1512 | impl<T> Hash for UnsizedVec<T>
1513 | where
1514 |     T: ?Sized + Hash,
1515 | {
1516 |     #[inline]
1517 |     fn hash<H: Hasher>(&self, state: &mut H) {
1518 |         for elem in self {
1519 |             elem.hash(state);
1520 |         }
1521 |     }
1522 | }
1523 | 
1524 | #[cfg(feature = "serde")]
1525 | use serde::{Serialize, ser::SerializeSeq};
1526 | 
1527 | #[cfg(feature = "serde")]
1528 | impl<T> Serialize for UnsizedVec<T>
1529 | where
1530 |     T: ?Sized + Serialize,
1531 | {
1532 |     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
1533 |     where
1534 |         S: serde::Serializer,
1535 |     {
1536 |         let mut elem_serialize = serializer.serialize_seq(Some(self.len()))?;
1537 |         for elem in self {
1538 |             elem_serialize.serialize_element(elem)?;
1539 |         }
1540 |         elem_serialize.end()
1541 |     }
1542 | }
1543 | 
1544 | /// Implementation detail of the `unsized_vec` macro.
1545 | #[doc(hidden)]
1546 | pub trait PushToUnsizedVec<T: ?Sized> {
1547 |     fn push_to_unsized_vec(self, vec: &mut UnsizedVec<T>);
1548 | }
1549 | 
1550 | impl<T> PushToUnsizedVec<T> for T {
1551 |     #[inline]
1552 |     fn push_to_unsized_vec(self, vec: &mut UnsizedVec<T>) {
1553 |         vec.push(self);
1554 |     }
1555 | }
1556 | 
1557 | impl<T: ?Sized, F: EmplacableFn<T>> PushToUnsizedVec<T> for Emplacable<T, F> {
1558 |     #[inline]
1559 |     fn push_to_unsized_vec(self, vec: &mut UnsizedVec<T>) {
1560 |         vec.push_with(self);
1561 |     }
1562 | }
1563 | 
1564 | /// Like the standard library's [`vec`] macro.
1565 | /// Accepts both raw unsized `T`s and
1566 | /// [`Emplacable`]s.
1567 | ///
1568 | /// However, this does not accept sized values implementing
1569 | /// [`Unsize`]; you can use [`unsize_vec`] for that.
1570 | ///
1571 | /// # Example
1572 | ///
1573 | /// ```
1574 | /// #![allow(internal_features)] // for `unsized_fn_params`
1575 | /// #![feature(allocator_api, ptr_metadata, unsized_fn_params)]
1576 | ///
1577 | /// use emplacable::unsize;
1578 | /// use unsized_vec::{UnsizedVec, unsized_vec};
1579 | ///
1580 | /// let my_vec = unsized_vec![[23_u32, 17], [16, 34], [23, 47]];
1581 | ///
1582 | /// let mut my_vec_unsized: UnsizedVec<[u32]> = my_vec.unsize();
1583 | ///
1584 | /// let another_vec = unsized_vec![unsize!([42], ([u32; 1]) -> [u32]), my_vec_unsized.remove_into(2)];
1585 | /// ```
1586 | ///
1587 | /// [`vec`]: macro@alloc::vec
1588 | #[macro_export]
1589 | macro_rules! unsized_vec {
1590 |     () => (
1591 |         $crate::UnsizedVec::new()
1592 |     );
1593 |     ($($x:expr),+ $(,)?) => (
1594 |         {
1595 |             let mut ret = $crate::UnsizedVec::new();
1596 |             $($crate::PushToUnsizedVec::push_to_unsized_vec($x, &mut ret);)+
1597 |             ret
1598 |         }
1599 |     );
1600 | }
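// Minimal runnable sketch of `unsized_vec!` dispatch: a sized argument is
// routed to `push` through the blanket `PushToUnsizedVec` impl above, while
// an `Emplacable` argument would be routed to `push_with`. Only the sized
// case is exercised here; the module name is hypothetical.
#[cfg(test)]
mod unsized_vec_macro_sketch {
    #[test]
    fn sized_elements() {
        let vec = crate::unsized_vec![1_u32, 2, 3];
        assert_eq!(vec.len(), 3);
        assert_eq!(vec[2], 3);
    }
}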
1601 | 
1602 | /// Like [`unsized_vec`], but unsizes its arguments
1603 | /// using the [`Unsize`] trait.
1604 | ///
1605 | /// Accepts sized values that can coerce to an unsized `T`.
1606 | /// If you have raw unsized `T`s or [`Emplacable`]s,
1607 | /// use [`unsized_vec`] instead.
1608 | ///
1609 | /// # Example
1610 | ///
1611 | /// ```
1612 | /// use core::fmt::Debug;
1613 | ///
1614 | /// use unsized_vec::{unsize_vec, UnsizedVec};
1615 | ///
1616 | /// let my_vec: UnsizedVec<dyn Debug> = unsize_vec![1, "hello!", 97.5];
1617 | /// ```
1618 | #[macro_export]
1619 | macro_rules! unsize_vec {
1620 |     () => (
1621 |         $crate::UnsizedVec::new()
1622 |     );
1623 |     ($($x:expr),+ $(,)?) => (
1624 |         {
1625 |             let mut ret = $crate::UnsizedVec::new();
1626 |             $(ret.push_unsize($x);)+
1627 |             ret
1628 |         }
1629 |     );
1630 | }
1631 | 
--------------------------------------------------------------------------------
/src/marker.rs:
--------------------------------------------------------------------------------
1 | //! Defines the [`Aligned`] trait.
2 | 
3 | use core::{
4 |     ffi::CStr,
5 |     mem,
6 |     ptr::{self, NonNull},
7 | };
8 | 
9 | use crate::helper::valid_align::ValidAlign;
10 | 
11 | /// Implemented for types that have an alignment known at compile-time.
12 | ///
13 | /// # Safety
14 | ///
15 | /// `ALIGN` must be equal to the alignment of all values of the type.
16 | pub(crate) unsafe trait Aligned {
17 |     /// The alignment of this type.
18 |     const ALIGN: ValidAlign;
19 | 
20 |     /// A dangling, well-aligned thin pointer for the type.
21 |     const DANGLING_THIN: NonNull<()> =
22 |         NonNull::new(ptr::without_provenance_mut(Self::ALIGN.get())).unwrap();
23 | }
24 | 
25 | // SAFETY: `mem::align_of::<T>()` is correct
26 | unsafe impl<T> Aligned for T {
27 |     const ALIGN: ValidAlign = ValidAlign::new(mem::align_of::<T>()).unwrap();
28 | }
29 | 
30 | // SAFETY: alignment of `[T]` equals alignment of `T`
31 | unsafe impl<T> Aligned for [T] {
32 |     const ALIGN: ValidAlign = ValidAlign::new(mem::align_of::<T>()).unwrap();
33 | }
34 | 
35 | // SAFETY: All `str`s have the same alignment
36 | unsafe impl Aligned for str {
37 |     const ALIGN: ValidAlign = ValidAlign::new(mem::align_of_val("")).unwrap();
38 | }
39 | 
40 | // SAFETY: All `CStr`s have the same alignment
41 | unsafe impl Aligned for CStr {
42 |     const ALIGN: ValidAlign = ValidAlign::new(mem::align_of_val(c"")).unwrap();
43 | }
44 | 
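// Sanity-check sketch for the impls above: `DANGLING_THIN` is constructed at
// address `ALIGN`, so it is necessarily well-aligned. Assumes `ValidAlign::get`
// returns the alignment as a `usize`, as in `DANGLING_THIN` itself; the module
// name is hypothetical.
#[cfg(test)]
mod aligned_sketch {
    use super::Aligned;

    fn check<T: ?Sized + Aligned>() {
        let addr = T::DANGLING_THIN.as_ptr() as usize;
        assert_eq!(addr % T::ALIGN.get(), 0);
    }

    #[test]
    fn dangling_is_aligned() {
        check::<u64>();
        check::<[u16]>();
        check::<str>();
        check::<core::ffi::CStr>();
    }
}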
--------------------------------------------------------------------------------
/tests/test.rs:
--------------------------------------------------------------------------------
1 | #![allow(internal_features)] // for `unsized_fn_params`
2 | #![feature(unsized_fn_params)]
3 | 
4 | use std::{fmt::Debug, mem};
5 | 
6 | use emplacable::*;
7 | use unsized_vec::*;
8 | 
9 | #[test]
10 | fn test_capacity_growth() {
11 |     let mut v: UnsizedVec<[i32]> = UnsizedVec::with_capacity_bytes(6, mem::size_of::<i32>() * 15);
12 | 
13 |     v.push_unsize([]);
14 |     v.push_unsize([1]);
15 |     v.push_unsize([1, 2]);
16 |     v.push_unsize([1, 2, 3]);
17 |     v.push_unsize([1, 2, 3, 4]);
18 |     v.push_unsize([1, 2, 3, 4, 5]);
19 | 
20 |     assert_eq!(v.byte_capacity(), mem::size_of::<i32>() * 15);
21 | }
22 | 
23 | #[test]
24 | fn emplacable_from() {
25 |     let a: Box<i32> = box_new_with(3.into());
26 |     assert_eq!(*a, 3);
27 | }
28 | 
29 | #[test]
30 | fn test_sized() {
31 |     let mut vec = UnsizedVec::new();
32 |     assert_eq!(vec.len(), 0);
33 | 
34 |     vec.push(32);
35 |     assert_eq!(vec.len(), 1);
36 |     assert_eq!(vec[0], 32);
37 | 
38 |     vec.shrink_to_fit();
39 | 
40 |     vec.push(34);
41 |     assert_eq!(vec.len(), 2);
42 |     assert_eq!(vec[1], 34);
43 | }
44 | 
45 | const EMPTY_SLICE: &[Box<i32>] = &[];
46 | 
47 | #[test]
48 | fn test_unsized_drop() {
49 |     let mut vec: UnsizedVec<[Box<i32>]> = UnsizedVec::new();
50 |     assert_eq!(vec.len(), 0);
51 | 
52 |     let slice: Box<[Box<i32>]> = Box::new([Box::new(1), Box::new(2)]);
53 |     vec.push(*slice);
54 |     assert_eq!(vec.len(), 1);
55 |     assert_eq!(&vec[0], &[Box::new(1), Box::new(2)]);
56 | 
57 |     let slice: Box<[Box<i32>]> = Box::new([]);
58 |     vec.push(*slice);
59 |     assert_eq!(&vec[1], EMPTY_SLICE);
60 | 
61 |     let slice: Box<[Box<i32>]> = Box::new([Box::new(4), Box::new(7), Box::new(3)]);
62 |     vec.push(*slice);
63 |     vec[2][1] = Box::new(19);
64 |     assert_eq!(&vec[2], &[Box::new(4), Box::new(19), Box::new(3)]);
65 | 
66 |     let popped: Box<[Box<i32>]> = vec.pop_into().map(box_new_with).unwrap();
67 |     assert_eq!(&*popped, &[Box::new(4), Box::new(19), Box::new(3)]);
68 | 
69 |     let slice: Box<[Box<i32>]> = Box::new([
70 |         Box::new(4),
71 |         Box::new(7),
72 |         Box::new(3),
73 |         Box::new(4),
74 |         Box::new(5),
75 |         Box::new(6),
76 |         Box::new(6),
77 |         Box::new(-1),
78 |     ]);
79 |     vec.insert(0, *slice);
80 |     assert_eq!(
81 |         &vec[0],
82 |         &[
83 |             Box::new(4),
84 |             Box::new(7),
85 |             Box::new(3),
86 |             Box::new(4),
87 |             Box::new(5),
88 |             Box::new(6),
89 |             Box::new(6),
90 |             Box::new(-1)
91 |         ]
92 |     );
93 |     assert_eq!(&vec[1], &[Box::new(1), Box::new(2)]);
94 |     assert_eq!(&vec[2], EMPTY_SLICE);
95 | 
96 |     vec.shrink_to_fit();
97 | 
98 |     let removed: Box<[Box<i32>]> = box_new_with(vec.remove_into(1));
99 |     assert_eq!(&*removed, &[Box::new(1), Box::new(2)]);
100 |     assert_eq!(
101 |         &vec[0],
102 |         &[
103 |             Box::new(4),
104 |             Box::new(7),
105 |             Box::new(3),
106 |             Box::new(4),
107 |             Box::new(5),
108 |             Box::new(6),
109 |             Box::new(6),
110 |             Box::new(-1)
111 |         ]
112 |     );
113 |     assert_eq!(&vec[1], EMPTY_SLICE);
114 | }
115 | 
116 | #[test]
117 | fn test_dyn() {
118 |     let mut vec: UnsizedVec<dyn Debug> = UnsizedVec::new();
119 |     assert_eq!(vec.len(), 0);
120 | 
121 |     let obj: Box<dyn Debug> = Box::new(());
122 |     vec.push(*obj);
123 |     assert_eq!(vec.len(), 1);
124 | 
125 |     let obj: Box<dyn Debug> = Box::new(1_u16);
126 |     vec.push(*obj);
127 |     assert_eq!(vec.len(), 2);
128 | 
129 |     let popped = vec.pop_into().map(box_new_with).unwrap();
130 |     assert_eq!(vec.len(), 1);
131 |     assert_eq!(&format!("{:?}", &*popped), "1");
132 | 
133 |     vec.shrink_to_fit();
134 | 
135 |     let obj: Box<dyn Debug> = Box::new("walla walla");
136 |     vec.insert(0, *obj);
137 |     assert_eq!(vec.len(), 2);
138 |     assert_eq!(&format!("{:?}", &vec[0]), "\"walla walla\"");
139 |     assert_eq!(&format!("{:?}", &vec[1]), "()");
140 |     dbg!(&vec);
141 | 
142 |     let removed: Box<dyn Debug> = box_new_with(vec.remove_into(0));
143 |     assert_eq!(vec.len(), 1);
144 |     assert_eq!(&format!("{:?}", &*removed), "\"walla walla\"");
145 |     assert_eq!(&format!("{:?}", &vec[0]), "()");
146 | }
147 | 
148 | #[test]
149 | fn test_unsized_aligned() {
150 |     let mut vec: UnsizedVec<[i32]> = UnsizedVec::new();
151 |     assert_eq!(vec.len(), 0);
152 | 
153 |     let slice: Box<[i32]> = Box::new([1, 2]);
154 |     vec.push(*slice);
155 |     assert_eq!(vec.len(), 1);
156 |     assert_eq!(&vec[0], &[1, 2]);
157 | 
158 |     vec.push(unsize!([], ([i32; 0]) -> [i32]));
159 |     assert_eq!(&vec[1], &[]);
160 | 
161 |     vec.shrink_to_fit();
162 | 
163 |     vec.push_unsize([4, 7, 3]);
164 |     vec[2][1] = 19;
165 |     assert_eq!(&vec[2], &[4, 19, 3]);
166 | 
167 |     let popped: Box<[i32]> = vec.pop_into().map(box_new_with).unwrap();
168 |     assert_eq!(&*popped, &[4, 19, 3]);
169 | 
170 |     vec.insert_unsize(0, [4, 7, 3, 4, 5, 6, 6, -1]);
171 |     assert_eq!(&vec[0], &[4, 7, 3, 4, 5, 6, 6, -1]);
172 |     assert_eq!(&vec[1], &[1, 2]);
173 |     assert_eq!(&vec[2], &[]);
174 | 
175 |     let removed: Box<[i32]> = box_new_with(vec.remove_into(1));
176 |     assert_eq!(&*removed, &[1, 2]);
177 |     assert_eq!(&vec[0], &[4, 7, 3, 4, 5, 6, 6, -1]);
178 |     assert_eq!(&vec[1], &[]);
179 | }
180 | 
181 | #[test]
182 | fn type_inference() {
183 |     let mut vec: UnsizedVec<[i32; 3]> = unsized_vec![[33, 34, 35]];
184 |     let emplacable: Emplacable<[i32; 3], _> = vec.pop_into().unwrap();
185 |     let _: Emplacable<[i32], _> = emplacable.into();
186 | }
187 | 
--------------------------------------------------------------------------------