├── .github └── workflows │ └── rust.yml ├── .gitignore ├── .travis.yml ├── Cargo.toml ├── LICENCE ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── deploy.sh ├── index.html ├── src ├── data_buf.rs ├── fifo.rs ├── fifo │ └── impls.rs ├── lib.rs ├── stack.rs ├── stack │ └── impls.rs ├── value.rs └── value │ └── trait_impls.rs └── tests ├── fifo.rs ├── stack.rs └── value.rs /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | # Check that the build works/passes on the documented minimum version (1.36.0) 14 | build_minimum: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Switch to min version 19 | uses: actions-rs/toolchain@v1 20 | with: 21 | toolchain: 1.36.0 22 | override: true 23 | - name: Run tests 24 | run: cargo test --verbose --no-default-features --features alloc 25 | # Check that all features work on nightly 26 | build_nightly: 27 | runs-on: ubuntu-latest 28 | steps: 29 | - uses: actions/checkout@v3 30 | - name: Switch to nightly 31 | uses: actions-rs/toolchain@v1 32 | with: 33 | toolchain: nightly 34 | components: miri 35 | override: true 36 | - name: Install cargo-hack 37 | run: cargo install cargo-hack 38 | - name: Run tests (--all-features) 39 | run: cargo test --all-features 40 | - name: Check feature combinations 41 | run: cargo hack check --feature-powerset --no-dev-deps 42 | - name: Run `miri` 43 | run: cargo miri test --all-features 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .* 2 | 3 | /target/ 4 | /Cargo.lock 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | rust: nightly 3 | sudo: false 4 | script: 5 | - cargo build 6 | - cargo test 7 | - cargo test --release 8 | - cargo doc 9 | after_success: 10 | - test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh 11 | env: 12 | global: 13 | secure: F4t5MSwRFZKyi89/SqCpDdjxTwr4qTtQh2lvJsvuYaRmeIKQNVgIbOEMFQqSoxAWfzyH+Dte3RsZnl+OTjiLDlNGgkxbk7AipZwb/GlrzCa8GTOFuNM5DJT2TCZ4SvgXrMY3/af3lQZZYhBjKAoy50QsjM5oQaksLj6zxsQyXP6DgGdooBvLoZVlZ7vFQ+JnD25ArZi8yK/l9/W3ccR2hY993+T/8QJ2XazwUHl4oiAM+hzrSwGlvMayM6duDrSN23EFLwlZr9XXTUEr/4a7HNsQbleiK+lpCMuGLRVq6oAHCXq1sCXjR32hC6ZErXikE9fk9QTN7HmHPWhjyu+J85ytojXPW7KQZixRltFonCFoQSBVLqS9JAIOj4s1GC6GAy4nJDq8nP/E7M2VafT+5ykgylYVlekraYy5Hrg36gnMCit+aKe9GNbk8gAybSQZJUY/mMFuGLW16hJDtON9nwJjKlTj3pGcVhe4wDnRLG5+f5FYg07mZPjD2v8BH8nK3o4Av7B/Bcv7Rmi6ZXCY0sOtjyWJQXFjZka4CA4M8ykrFkptZoFXlrH/ehWngaXdJus/q5xH+Y2vjvxdaHS1Cy1JU4uKJTygZO9k8AdlbT1BlVEZcEfbUeWApAhb6cuHczZzq5+83FC5BU4IMGSm+AJ/Aw6u0t3MMujsQt1os0Q= 14 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "stack_dst" 3 | version = "0.8.1" 4 | rust-version = "1.36.0" # without "const_generics" 5 | authors = [ "John Hodge " ] 6 | description = "A wrapper that allows storage of unsized values of up to a fixed size inline (without boxing)" 7 | repository = "https://github.com/thepowersgang/stack_dst-rs" 8 | documentation = 
"http://thepowersgang.github.io/stack_dst-rs/target/doc/stack_dst/" 9 | readme = "README.md" 10 | keywords = ["stack", "dst"] 11 | license = "MIT OR Apache-2.0" 12 | 13 | [features] 14 | default = [ "alloc", "const_generics" ] 15 | 16 | alloc = [] 17 | unsize = [] 18 | const_generics = [] # increases MSRV to "1.51.0" 19 | # EXPERIMENTAL 20 | #full_const_generics = [] 21 | 22 | [package.metadata.docs.rs] 23 | all-features = true 24 | 25 | [dependencies] 26 | generic-array = "0.14" 27 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 John Hodge 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | 24 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 203 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 The stack_dst-rs Developers 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # stack_dst 2 | 3 | Inline (aka stack-allocated) dynamically-sized types, and collections of dynamically-sized types using the same logic 4 | 5 | # Overview 6 | This crate provides ways of storing DSTs directly within an allocation. 7 | 8 | # Basic usage 9 | This crate covers two primary use cases: 10 | - `Value` allows storing (and returning) a single DST within a fixed-size allocation 11 | - `Stack` and `Fifo` allow heterogeneous collections without needing to box each object. 12 | 13 | # Example 14 | 15 | ## Unboxed closure 16 | One of the most obvious uses is to allow returning capturing closures without having to box them. In the example below, the closure 17 | takes ownership of `value`, and is then returned using a `Value` 18 | ```rust 19 | use stack_dst::Value; 20 | 21 | // The closure is stored in two 64-bit integers (one for the vtable, the other for the value) 22 | fn make_closure(value: u64) -> Value<dyn Fn()->String, ::stack_dst::buffers::U64_2> { 23 | if value < 0x10000 { 24 | Value::new_stable(move || format!("Hello there! value={}", value), |v| v as _).ok().expect("Closure doesn't fit") 25 | } 26 | else { 27 | Value::new_stable(move || format!("Hello there! value={:#x}", value), |v| v as _).ok().expect("Closure doesn't fit") 28 | } 29 | } 30 | let closure = make_closure(12); 31 | assert_eq!( closure(), "Hello there! value=12" ); 32 | ``` 33 | 34 | # Status 35 | - Works for most test cases 36 | - miri is happy with it 37 | - Not rigorously tested across platforms 38 | 39 | # Minimum rust version 40 | - Uses `MaybeUninit`, so requires at least 1.36 41 | 42 | ## License 43 | 44 | Licensed under either of 45 | 46 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 47 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 48 | 49 | at your option.
50 | 51 | ### Contribution 52 | 53 | Unless you explicitly state otherwise, any contribution intentionally 54 | submitted for inclusion in the work by you, as defined in the Apache-2.0 55 | license, shall be dual licensed as above, without any additional terms or 56 | conditions. 57 | -------------------------------------------------------------------------------- /deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -o errexit -o nounset 4 | 5 | rev=$(git rev-parse --short HEAD) 6 | 7 | git init 8 | git config user.name "John Hodge" 9 | git config user.email "tpg+travis@mutabah.net" 10 | 11 | git remote add upstream "https://$GH_TOKEN@github.com/thepowersgang/stack_dst-rs.git" 12 | git fetch upstream 13 | git reset upstream/gh-pages 14 | 15 | touch target/doc 16 | 17 | git add -A . 18 | git add -f target/doc/ 19 | git commit -m "rebuild pages at ${rev}" 20 | git push -q upstream HEAD:gh-pages 21 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 |

The page has moved to: this page

9 | 10 | 11 | -------------------------------------------------------------------------------- /src/data_buf.rs: -------------------------------------------------------------------------------- 1 | // 2 | // Implementation of the `DataBuf` trait 3 | // 4 | use core::mem::MaybeUninit; 5 | 6 | /// Trait used to represent a data buffer, typically you'll passs a `[usize; N]` array. 7 | /// 8 | /// Can also provide a `Vec` (if the `alloc` feature is enabled) which will grow as-needed 9 | /// 10 | /// UNSAFE: Used by the internal unsafe code, must confor to the following rules 11 | /// - The `as_ref`/`as_mut` methods must return pointers to the same data 12 | /// - The pointer returned by `as_mut` must be stable until either a call to `extend` or the 13 | /// value is moved (i.e. `let a = foo.as_mut().as_ptr(); let b = foo.as_mut().as_ptr(); assert!(a == b)` always holds.) 14 | /// - `extend` must not change any contained data (but may extend with unspecified values) 15 | pub unsafe trait DataBuf { 16 | /// Inner type of the buffer 17 | type Inner: Pod; 18 | 19 | /// Get the buffer slice as an immutable borrow 20 | fn as_ref(&self) -> &[MaybeUninit]; 21 | /// Get the buffer slice as a mutable borrow 22 | fn as_mut(&mut self) -> &mut [MaybeUninit]; 23 | 24 | /// Extend the buffer (fallible) 25 | fn extend(&mut self, len: usize) -> Result<(), ()>; 26 | 27 | /// Convert a byte count to a word count (rounding up) 28 | fn round_to_words(bytes: usize) -> usize { 29 | crate::round_to_words::(bytes) 30 | } 31 | } 32 | 33 | /// Trait that indicates that a type is valid for any bit pattern 34 | pub unsafe trait Pod: Copy { 35 | /// Construct a new instance (sames as `Default::default`) 36 | fn default() -> Self; 37 | } 38 | macro_rules! impl_pod { 39 | ( $($t:ty),* ) => { 40 | $( unsafe impl Pod for $t { fn default() -> Self { 0 } } )* 41 | } 42 | } 43 | impl_pod! { u8, u16, u32, u64, u128, usize } 44 | 45 | unsafe impl DataBuf for &mut T 46 | where 47 | U: Pod, 48 | T: DataBuf, 49 | { 50 | type Inner = T::Inner; 51 | fn as_ref(&self) -> &[MaybeUninit] { 52 | (**self).as_ref() 53 | } 54 | fn as_mut(&mut self) -> &mut [MaybeUninit] { 55 | (**self).as_mut() 56 | } 57 | fn extend(&mut self, len: usize) -> Result<(), ()> { 58 | (**self).extend(len) 59 | } 60 | } 61 | 62 | #[cfg(not(feature = "const_generics"))] 63 | macro_rules! impl_databuf_array { 64 | ( $($n:expr),* ) => { 65 | $(unsafe impl DataBuf for [MaybeUninit; $n] { 66 | type Inner = T; 67 | fn as_ref(&self) -> &[MaybeUninit] { 68 | self 69 | } 70 | fn as_mut(&mut self) -> &mut [MaybeUninit] { 71 | self 72 | } 73 | fn extend(&mut self, len: usize) -> Result<(), ()> { 74 | if len > $n { 75 | Err( () ) 76 | } 77 | else { 78 | Ok( () ) 79 | } 80 | } 81 | })* 82 | } 83 | } 84 | #[cfg(not(feature = "const_generics"))] 85 | impl_databuf_array! 
{ 86 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 87 | 10,11,12,13,14,15,16,17,18,19, 88 | 20,21,22,23,24,25,26,27,28,29, 89 | 30,31, 90 | 32,48, 91 | 64,96, 92 | 128,192, 93 | 256 94 | } 95 | /// Array-specific impl 96 | #[cfg(feature = "const_generics")] 97 | unsafe impl DataBuf for [MaybeUninit; N] { 98 | type Inner = T; 99 | fn as_ref(&self) -> &[MaybeUninit] { 100 | self 101 | } 102 | fn as_mut(&mut self) -> &mut [MaybeUninit] { 103 | self 104 | } 105 | fn extend(&mut self, len: usize) -> Result<(), ()> { 106 | if len > N { 107 | Err(()) 108 | } else { 109 | Ok(()) 110 | } 111 | } 112 | } 113 | 114 | /// Vector backed structures, can be used to auto-grow the allocation 115 | /// 116 | /// ``` 117 | /// let mut buf = ::stack_dst::Fifo::>>::new(); 118 | /// buf.push_back_str("Hello world!"); 119 | /// buf.push_back_str("This is a very long string"); 120 | /// buf.push_back_str("The buffer should keep growing as it needs to"); 121 | /// for line in buf.iter() { 122 | /// println!("{}", line); 123 | /// } 124 | /// ``` 125 | #[cfg(feature = "alloc")] 126 | unsafe impl crate::DataBuf for ::alloc::vec::Vec> { 127 | type Inner = T; 128 | fn as_ref(&self) -> &[MaybeUninit] { 129 | self 130 | } 131 | fn as_mut(&mut self) -> &mut [MaybeUninit] { 132 | self 133 | } 134 | fn extend(&mut self, len: usize) -> Result<(), ()> { 135 | if len > self.len() { 136 | self.resize(len, MaybeUninit::uninit()); 137 | let cap = self.capacity(); 138 | self.resize(cap, MaybeUninit::uninit()); 139 | } 140 | Ok(()) 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/fifo.rs: -------------------------------------------------------------------------------- 1 | // See parent for docs 2 | use core::{iter, marker, mem, ops, ptr}; 3 | 4 | mod impls; 5 | 6 | // Implementation Notes 7 | // ----- 8 | // 9 | /// A First-In-First-Out queue of DSTs 10 | /// 11 | /// ``` 12 | /// let mut queue = ::stack_dst::Fifo::::new(); 13 | /// queue.push_back_str("Hello"); 14 | /// queue.push_back_str("World"); 15 | /// assert_eq!(queue.pop_front().as_ref().map(|v| &v[..]), Some("Hello")); 16 | /// ``` 17 | pub struct Fifo { 18 | _pd: marker::PhantomData<*const T>, 19 | read_pos: usize, 20 | write_pos: usize, 21 | data: D, 22 | } 23 | impl Fifo { 24 | /// Construct a new (empty) list 25 | pub fn new() -> Self 26 | where 27 | D: Default, 28 | { 29 | Self::with_buffer(D::default()) 30 | } 31 | /// Construct a new (empty) list using the provided buffer 32 | pub fn with_buffer(data: D) -> Self { 33 | Fifo { 34 | _pd: marker::PhantomData, 35 | read_pos: 0, 36 | write_pos: 0, 37 | data, 38 | } 39 | } 40 | 41 | fn meta_words() -> usize { 42 | D::round_to_words(mem::size_of::<&T>() - mem::size_of::()) 43 | } 44 | fn space_words(&self) -> usize { 45 | self.data.as_ref().len() - self.write_pos 46 | } 47 | 48 | /// Push a value at the top of the stack 49 | #[cfg(feature = "unsize")] 50 | pub fn push_back>(&mut self, v: U) -> Result<(), U> 51 | where 52 | (U, D::Inner): crate::AlignmentValid, 53 | { 54 | self.push_back_stable(v, |p| p) 55 | } 56 | 57 | /// Push a value to the end of the list (without using `Unsize`) 58 | pub fn push_back_stable &T>(&mut self, v: U, f: F) -> Result<(), U> 59 | where 60 | (U, D::Inner): crate::AlignmentValid, 61 | { 62 | <(U, D::Inner) as crate::AlignmentValid>::check(); 63 | 64 | // SAFE: Destination address is valid 65 | unsafe { 66 | match self.push_inner(crate::check_fat_pointer(&v, f)) { 67 | Ok(pii) => { 68 | ptr::write(pii.data.as_mut_ptr() as *mut U, v); 69 | Ok(()) 70 
| } 71 | Err(_) => Err(v), 72 | } 73 | } 74 | } 75 | 76 | /// Compact the list (moving the read position to zero) 77 | pub fn compact(&mut self) { 78 | if self.read_pos != 0 { 79 | self.data.as_mut().rotate_left(self.read_pos); 80 | self.write_pos -= self.read_pos; 81 | self.read_pos = 0; 82 | } 83 | } 84 | 85 | /// Checks if the queue is currently empty 86 | pub fn empty(&self) -> bool { 87 | self.read_pos == self.write_pos 88 | } 89 | 90 | /// Remove an item from the front of the list 91 | pub fn pop_front(&mut self) -> Option> { 92 | if self.read_pos == self.write_pos { 93 | None 94 | } else { 95 | Some(PopHandle { parent: self }) 96 | } 97 | } 98 | /// Peek the front of the queue 99 | pub fn front_mut(&mut self) -> Option<&mut T> { 100 | if self.read_pos == self.write_pos { 101 | None 102 | } else { 103 | Some(unsafe { &mut *self.front_raw_mut() }) 104 | } 105 | } 106 | /// Peek the front of the queue 107 | pub fn front(&self) -> Option<&T> { 108 | if self.read_pos == self.write_pos { 109 | None 110 | } else { 111 | Some(unsafe { &*self.front_raw() }) 112 | } 113 | } 114 | 115 | /// Obtain an immutable iterator (yields references to items, in insertion order) 116 | /// ``` 117 | /// let mut list = ::stack_dst::Fifo::::new(); 118 | /// list.push_back_str("Hello"); 119 | /// list.push_back_str("world"); 120 | /// let mut it = list.iter(); 121 | /// assert_eq!(it.next(), Some("Hello")); 122 | /// assert_eq!(it.next(), Some("world")); 123 | /// assert_eq!(it.next(), None); 124 | /// ``` 125 | pub fn iter(&self) -> Iter { 126 | Iter(self, self.read_pos) 127 | } 128 | /// Obtain a mutable iterator 129 | /// ``` 130 | /// let mut list = ::stack_dst::Fifo::<[u8], ::stack_dst::buffers::Ptr8>::new(); 131 | /// list.push_copied(&[1,2,3]); 132 | /// list.push_copied(&[9]); 133 | /// for v in list.iter_mut() { 134 | /// v[0] -= 1; 135 | /// } 136 | /// let mut it = list.iter(); 137 | /// assert_eq!(it.next(), Some(&[0,2,3][..])); 138 | /// assert_eq!(it.next(), Some(&[8][..])); 139 | /// assert_eq!(it.next(), None); 140 | /// ``` 141 | pub fn iter_mut(&mut self) -> IterMut { 142 | IterMut(self, self.read_pos) 143 | } 144 | // Note: No into_iter, not possible due to unsized types 145 | // Could make a `drain` that returns read handles (pops as it goes) 146 | 147 | fn front_raw(&self) -> *mut T { 148 | assert!(self.read_pos < self.write_pos); 149 | 150 | // SAFE: Internal consistency maintains the metadata validity 151 | unsafe { self.raw_at(self.read_pos) } 152 | } 153 | // UNSAFE: Caller must ensure that `pos` is the start of an object 154 | unsafe fn raw_at(&self, pos: usize) -> *mut T { 155 | assert!(pos >= self.read_pos); 156 | assert!(pos < self.write_pos); 157 | let meta = &self.data.as_ref()[pos..]; 158 | let mw = Self::meta_words(); 159 | let (meta, data) = meta.split_at(mw); 160 | super::make_fat_ptr(data.as_ptr() as *mut (), meta) 161 | } 162 | fn front_raw_mut(&mut self) -> *mut T { 163 | assert!(self.read_pos < self.write_pos); 164 | 165 | // SAFE: Internal consistency maintains the metadata validity 166 | unsafe { self.raw_at_mut(self.read_pos) } 167 | } 168 | // UNSAFE: Caller must ensure that `pos` is the start of an object 169 | unsafe fn raw_at_mut(&mut self, pos: usize) -> *mut T { 170 | assert!(pos >= self.read_pos); 171 | assert!(pos < self.write_pos); 172 | let meta = &mut self.data.as_mut()[pos..]; 173 | let mw = Self::meta_words(); 174 | let (meta, data) = meta.split_at_mut(mw); 175 | super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta) 176 | } 177 | fn pop_front_inner(&mut 
self) { 178 | // SAFE: `front_raw_mut` asserts that there's an item, rest is correct 179 | unsafe { 180 | let ptr = &mut *self.front_raw_mut(); 181 | let len = mem::size_of_val(ptr); 182 | ptr::drop_in_place(ptr); 183 | let words = D::round_to_words(len); 184 | self.read_pos += Self::meta_words() + words; 185 | } 186 | } 187 | 188 | /// Remove any items that don't meet a predicate 189 | /// 190 | /// ``` 191 | /// # extern crate core; 192 | /// use stack_dst::Fifo; 193 | /// use core::any::Any; 194 | /// use core::fmt::Debug; 195 | /// trait DebugAny: 'static + Any + Debug { fn as_any(&self) -> &dyn Any; } 196 | /// impl DebugAny for T { fn as_any(&self) -> &dyn Any { self } } 197 | /// let mut list = { 198 | /// let mut list: Fifo = Fifo::new(); 199 | /// list.push_back_stable(1234, |v| v); 200 | /// list.push_back_stable(234.5f32, |v| v); 201 | /// list.push_back_stable(5678, |v| v); 202 | /// list.push_back_stable(0.5f32, |v| v); 203 | /// list 204 | /// }; 205 | /// list.retain(|v| (*v).as_any().downcast_ref::().is_some()); 206 | /// let mut it = list.iter().map(|v| format!("{:?}", v)); 207 | /// assert_eq!(it.next(), Some("234.5".to_owned())); 208 | /// assert_eq!(it.next(), Some("0.5".to_owned())); 209 | /// assert_eq!(it.next(), None); 210 | /// ``` 211 | pub fn retain(&mut self, mut cb: Cb) 212 | where 213 | Cb: FnMut(&mut T) -> bool, 214 | { 215 | let orig_write_pos = self.write_pos; 216 | self.write_pos = self.read_pos; 217 | let mut ofs = self.read_pos; 218 | let mut writeback_pos = ofs; 219 | while ofs < orig_write_pos { 220 | let v: &mut T = unsafe { 221 | let meta = &mut self.data.as_mut()[ofs..]; 222 | let mw = Self::meta_words(); 223 | let (meta, data) = meta.split_at_mut(mw); 224 | &mut *super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta) 225 | }; 226 | let words = Self::meta_words() + D::round_to_words(mem::size_of_val(v)); 227 | if cb(v) { 228 | if writeback_pos != ofs { 229 | let d = self.data.as_mut(); 230 | // writeback is always before `ofs`, so this ordering is correct 231 | for i in 0..words { 232 | let (a, b) = d.split_at_mut(ofs + i); 233 | a[writeback_pos + i] = b[0]; 234 | } 235 | } 236 | writeback_pos += words; 237 | } else { 238 | // Don't update `writeback_pos` 239 | // SAFE: Valid pointer, won't be accessed again 240 | unsafe { 241 | ptr::drop_in_place(v); 242 | } 243 | } 244 | ofs += words; 245 | } 246 | assert!(ofs == orig_write_pos); 247 | self.write_pos = writeback_pos; 248 | } 249 | } 250 | 251 | struct PushInnerInfo<'a, DInner> { 252 | /// Buffer for value data 253 | data: &'a mut crate::BufSlice, 254 | /// Buffer for metadata (length/vtable) 255 | meta: &'a mut crate::BufSlice, 256 | /// Memory location for resetting the push 257 | reset_slot: &'a mut usize, 258 | reset_value: usize, 259 | } 260 | 261 | impl Fifo { 262 | /// Push an item to the list (setting metadata based on `fat_ptr`) 263 | /// UNSAFE: Caller must fill the buffer before any potential panic 264 | unsafe fn push_inner(&mut self, fat_ptr: &T) -> Result, ()> { 265 | let bytes = mem::size_of_val(fat_ptr); 266 | let (_data_ptr, len, v) = crate::decompose_pointer(fat_ptr); 267 | self.push_inner_raw(bytes, &v[..len]) 268 | } 269 | unsafe fn push_inner_raw( 270 | &mut self, 271 | bytes: usize, 272 | metadata: &[usize], 273 | ) -> Result, ()> { 274 | let words = D::round_to_words(bytes) + Self::meta_words(); 275 | 276 | // 1. Check if there's space for the item 277 | if self.space_words() < words { 278 | // 2. 
If not, check if compaction would help 279 | if self.space_words() + self.read_pos >= words { 280 | self.compact(); 281 | } 282 | // 3. Then, try expanding 283 | if self.space_words() < words { 284 | if let Err(_) = self.data.extend(self.write_pos + words) { 285 | // if expansion fails, return error 286 | return Err(()); 287 | } 288 | } 289 | } 290 | assert!(self.space_words() >= words); 291 | 292 | // Get the base pointer for the new item 293 | let slot = &mut self.data.as_mut()[self.write_pos..][..words]; 294 | let prev_write_pos = self.write_pos; 295 | self.write_pos += words; 296 | let (meta, rv) = slot.split_at_mut(Self::meta_words()); 297 | 298 | // Populate the metadata 299 | super::store_metadata(meta, metadata); 300 | 301 | // Increment offset and return 302 | Ok(PushInnerInfo { 303 | meta, 304 | data: rv, 305 | reset_slot: &mut self.write_pos, 306 | reset_value: prev_write_pos, 307 | }) 308 | } 309 | } 310 | 311 | impl Fifo { 312 | /// Push the contents of a string slice as an item onto the stack 313 | pub fn push_back_str(&mut self, v: &str) -> Result<(), ()> { 314 | unsafe { 315 | self.push_inner(v).map(|pii| { 316 | ptr::copy( 317 | v.as_bytes().as_ptr(), 318 | pii.data.as_mut_ptr() as *mut u8, 319 | v.len(), 320 | ) 321 | }) 322 | } 323 | } 324 | } 325 | 326 | impl Fifo<[T], D> 327 | where 328 | (T, D::Inner): crate::AlignmentValid, 329 | { 330 | /// Pushes a set of items (cloning out of the input slice) 331 | /// 332 | /// ``` 333 | /// # use ::stack_dst::Fifo; 334 | /// let mut queue = Fifo::<[String], ::stack_dst::buffers::Ptr8>::new(); 335 | /// queue.push_cloned(&["1".to_owned()]); 336 | /// ``` 337 | pub fn push_cloned(&mut self, v: &[T]) -> Result<(), ()> { 338 | <(T, D::Inner) as crate::AlignmentValid>::check(); 339 | self.push_from_iter(v.iter().cloned()) 340 | } 341 | /// Pushes a set of items (copying out of the input slice) 342 | /// 343 | /// ``` 344 | /// # use ::stack_dst::Fifo; 345 | /// let mut queue = Fifo::<[usize], ::stack_dst::buffers::Ptr8>::new(); 346 | /// queue.push_copied(&[1]); 347 | /// ``` 348 | pub fn push_copied(&mut self, v: &[T]) -> Result<(), ()> 349 | where 350 | T: Copy, 351 | { 352 | <(T, D::Inner) as crate::AlignmentValid>::check(); 353 | // SAFE: Carefully constructed to maintain consistency 354 | unsafe { 355 | self.push_inner(v).map(|pii| { 356 | ptr::copy( 357 | v.as_ptr() as *const u8, 358 | pii.data.as_mut_ptr() as *mut u8, 359 | mem::size_of_val(v), 360 | ) 361 | }) 362 | } 363 | } 364 | } 365 | impl Fifo<[T], D> 366 | where 367 | (T, D::Inner): crate::AlignmentValid, 368 | { 369 | /// Push an item, populated from an exact-sized iterator 370 | /// 371 | /// ``` 372 | /// # extern crate core; 373 | /// # use stack_dst::Fifo; 374 | /// # use core::fmt::Display; 375 | /// 376 | /// let mut stack = Fifo::<[u8], ::stack_dst::buffers::Ptr8>::new(); 377 | /// stack.push_from_iter(0..10); 378 | /// assert_eq!(stack.front().unwrap(), &[0,1,2,3,4,5,6,7,8,9]); 379 | /// ``` 380 | pub fn push_from_iter(&mut self, mut iter: impl ExactSizeIterator) -> Result<(), ()> { 381 | <(T, D::Inner) as crate::AlignmentValid>::check(); 382 | // SAFE: API used correctly 383 | unsafe { 384 | let pii = self.push_inner_raw(iter.len() * mem::size_of::(), &[0])?; 385 | crate::list_push_gen( 386 | pii.meta, 387 | pii.data, 388 | iter.len(), 389 | |_| iter.next().unwrap(), 390 | pii.reset_slot, 391 | pii.reset_value, 392 | ); 393 | Ok(()) 394 | } 395 | } 396 | } 397 | 398 | impl ops::Drop for Fifo { 399 | fn drop(&mut self) { 400 | while let Some(_) = 
self.pop_front() {} 401 | } 402 | } 403 | impl Default for Fifo { 404 | fn default() -> Self { 405 | Fifo::new() 406 | } 407 | } 408 | 409 | /// Handle returned by `Fifo::pop` (does the actual pop on drop) 410 | pub struct PopHandle<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> { 411 | parent: &'a mut Fifo, 412 | } 413 | impl<'a, T: ?Sized, D: crate::DataBuf> ops::Deref for PopHandle<'a, T, D> { 414 | type Target = T; 415 | fn deref(&self) -> &T { 416 | unsafe { &*self.parent.front_raw() } 417 | } 418 | } 419 | impl<'a, T: ?Sized, D: crate::DataBuf> ops::DerefMut for PopHandle<'a, T, D> { 420 | fn deref_mut(&mut self) -> &mut T { 421 | unsafe { &mut *self.parent.front_raw_mut() } 422 | } 423 | } 424 | impl<'a, T: ?Sized, D: crate::DataBuf> ops::Drop for PopHandle<'a, T, D> { 425 | fn drop(&mut self) { 426 | self.parent.pop_front_inner(); 427 | } 428 | } 429 | 430 | /// DST FIFO iterator (immutable) 431 | pub struct Iter<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a Fifo, usize); 432 | impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for Iter<'a, T, D> { 433 | type Item = &'a T; 434 | fn next(&mut self) -> Option<&'a T> { 435 | if self.1 == self.0.write_pos { 436 | None 437 | } else { 438 | // SAFE: Bounds checked, aliasing enforced by API 439 | let rv = unsafe { &*self.0.raw_at(self.1) }; 440 | self.1 += Fifo::::meta_words() + D::round_to_words(mem::size_of_val(rv)); 441 | Some(rv) 442 | } 443 | } 444 | } 445 | /// DST FIFO iterator (mutable) 446 | pub struct IterMut<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a mut Fifo, usize); 447 | impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for IterMut<'a, T, D> { 448 | type Item = &'a mut T; 449 | fn next(&mut self) -> Option<&'a mut T> { 450 | if self.1 == self.0.write_pos { 451 | None 452 | } else { 453 | // SAFE: Bounds checked, aliasing enforced by API 454 | let rv = unsafe { &mut *self.0.raw_at_mut(self.1) }; 455 | self.1 += Fifo::::meta_words() + D::round_to_words(mem::size_of_val(rv)); 456 | Some(rv) 457 | } 458 | } 459 | } 460 | -------------------------------------------------------------------------------- /src/fifo/impls.rs: -------------------------------------------------------------------------------- 1 | macro_rules! d { 2 | ( $t:path; $($body:tt)* ) => { 3 | impl $t for super::Fifo 4 | where 5 | T: $t, 6 | { 7 | $( $body )* 8 | } 9 | } 10 | } 11 | 12 | d! { ::core::fmt::Debug; 13 | fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { 14 | f.write_str("[")?; 15 | for v in self.iter() { 16 | v.fmt(f)?; 17 | f.write_str(",")?; 18 | } 19 | f.write_str("]")?; 20 | Ok( () ) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Support for storing dynamically-sized types within fixed-size allocations 2 | //! 3 | //! - The `Value` type provides a fixed size (7 word in the current version) buffer in which 4 | //! a trait object or array can be stored, without resorting to a heap allocation. 5 | //! - The `Fifo` and `Stack` types provide collection types (first-in-first-out and last-in-first-out). 6 | //! 7 | //! # Examples 8 | //! ## An unboxed any 9 | //! As a quick example - The following wraps a 64-bit integer up in an inline DST using the Any trait. 10 | //! 11 | //! ```rust 12 | //! # use std::any::Any; 13 | //! # use stack_dst::Value; 14 | //! # 15 | //! let dst = Value::::new_stable(1234u64, |p| p) 16 | //! 
.ok().expect("Integer did not fit in allocation"); 17 | //! println!("dst as u64 = {:?}", dst.downcast_ref::()); 18 | //! println!("dst as i8 = {:?}", dst.downcast_ref::()); 19 | //! ``` 20 | //! 21 | //! ## Stack-allocated closure! 22 | //! The following snippet shows how small (`'static`) closures can be returned using this crate 23 | //! 24 | //! ```rust 25 | //! # use stack_dst::Value; 26 | //! # 27 | //! fn make_closure(value: u64) -> ValueString, ::stack_dst::array_buf![u64; U2]> { 28 | //! Value::new_stable(move || format!("Hello there! value={}", value), |p| p as _) 29 | //! .ok().expect("Closure doesn't fit") 30 | //! } 31 | //! let mut closure = make_closure(666); 32 | //! assert_eq!( (&mut *closure)(), "Hello there! value=666" ); 33 | //! ``` 34 | //! 35 | //! ## Custom allocation sizes/types 36 | //! If you need larger alignment, you can use a different type for the backing array. 37 | //! (Note, that metadata uses at least one slot in the array) 38 | //! 39 | //! This code panics, because i128 requires 8/16 byte alignment (usually) 40 | //! ```should_panic 41 | //! # use stack_dst::Value; 42 | //! # use std::any::Any; 43 | //! let v: Value = 44 | //! Value::new_stable(123i128, |p| p as _).unwrap(); 45 | //! ``` 46 | //! This works, because the backing buffer has sufficient alignment 47 | //! ```rust 48 | //! # use stack_dst::Value; 49 | //! # use std::any::Any; 50 | //! let v: Value = 51 | //! Value::new_stable(123i128, |p| p as _).unwrap(); 52 | //! ``` 53 | //! 54 | //! # Feature flags 55 | //! ## `alloc` (default) 56 | //! Provides the `StackDstA::new_or_boxed` method (if `unsize` feature is active too) 57 | //! ## `const_generics` (default) 58 | //! Uses value/constant generics to provide a slightly nicer API (e.g. [ValueU]) 59 | //! ## `unsize` (optional) 60 | //! Uses the nightly feature `unsize` to provide a more egonomic API 61 | //! (no need for the `|p| p` closures) 62 | // //! ## `full_const_generics` (optional) 63 | // //! Uses extended const generics to give compile time alignment errors 64 | //! 65 | #![cfg_attr(feature = "unsize", feature(unsize))] // needed for Unsize 66 | #![cfg_attr(feature = "full_const_generics", feature(generic_const_exprs))] 67 | #![cfg_attr(feature = "full_const_generics", allow(incomplete_features))] 68 | #![no_std] 69 | #![deny(missing_docs)] 70 | #![allow( 71 | clippy::missing_safety_doc, 72 | clippy::redundant_pattern_matching, 73 | clippy::result_unit_err 74 | )] 75 | 76 | use core::mem::MaybeUninit; 77 | use core::{mem, ptr, slice}; 78 | 79 | // Internal helper 80 | type BufSlice = [MaybeUninit]; 81 | 82 | #[cfg(miri)] 83 | #[macro_use] 84 | extern crate std; 85 | 86 | #[cfg(feature = "alloc")] 87 | extern crate alloc; 88 | 89 | extern crate generic_array; 90 | 91 | mod data_buf; 92 | pub use self::data_buf::DataBuf; 93 | pub use self::data_buf::Pod; 94 | 95 | pub use fifo::Fifo; 96 | pub use stack::Stack; 97 | pub use value::Value; 98 | 99 | /// Shorthand for defining a array buffer 100 | /// 101 | /// The array size must be a typenum unsigned integer (e.g `U8`) 102 | /// E.g. `array_buf![u8; U32]` expands to 103 | /// `::stack_dst::buffers::ArrayBuf` 104 | #[macro_export] 105 | macro_rules! array_buf { 106 | ($t:ty; $n:ident) => { $crate::buffers::ArrayBuf<$t, $crate::buffers::n::$n> } 107 | } 108 | 109 | pub mod buffers { 110 | //! Type aliases for common buffer sizes and types 111 | //! 112 | //! Some useful suggestions: 113 | //! - [`Ptr8`] is the semi-standard buffer for holding a single object 114 | //! 
(a good balance of space used) 115 | //! - [`Ptr2`] is suitable for storing a single pointer and its vtable 116 | 117 | pub use self::array_buf::ArrayBuf; 118 | #[cfg(feature = "const_generics")] 119 | pub use self::cg_array_buf::ArrayBuf as ConstArrayBuf; 120 | /// A re-export of `typenum` for shorter names 121 | pub use generic_array::typenum as n; 122 | 123 | mod array_buf { 124 | use core::mem::MaybeUninit; 125 | 126 | /// A buffer backing onto an array (used to provide default) 127 | pub struct ArrayBuf 128 | where 129 | N: ::generic_array::ArrayLength>, 130 | { 131 | inner: ::generic_array::GenericArray, N>, 132 | } 133 | impl ::core::default::Default for ArrayBuf 134 | where 135 | N: ::generic_array::ArrayLength>, 136 | { 137 | fn default() -> Self { 138 | ArrayBuf { 139 | // `unwarp` won't fail, lengths match 140 | inner: ::generic_array::GenericArray::from_exact_iter( 141 | (0..N::USIZE).map(|_| MaybeUninit::uninit()), 142 | ) 143 | .unwrap(), 144 | } 145 | } 146 | } 147 | unsafe impl crate::DataBuf for ArrayBuf 148 | where 149 | T: crate::Pod, 150 | N: ::generic_array::ArrayLength>, 151 | { 152 | type Inner = T; 153 | fn as_ref(&self) -> &[MaybeUninit] { 154 | &self.inner 155 | } 156 | fn as_mut(&mut self) -> &mut [MaybeUninit] { 157 | &mut self.inner 158 | } 159 | fn extend(&mut self, len: usize) -> Result<(), ()> { 160 | if len > N::USIZE { 161 | Err(()) 162 | } else { 163 | Ok(()) 164 | } 165 | } 166 | } 167 | } 168 | 169 | #[cfg(feature = "const_generics")] 170 | mod cg_array_buf { 171 | /// A buffer backing onto an array (used to provide default) - using constant generics 172 | pub struct ArrayBuf { 173 | inner: [::core::mem::MaybeUninit; N], 174 | } 175 | impl ::core::default::Default for ArrayBuf 176 | where 177 | T: crate::Pod, 178 | { 179 | fn default() -> Self { 180 | ArrayBuf { 181 | inner: [::core::mem::MaybeUninit::uninit(); N], 182 | } 183 | } 184 | } 185 | unsafe impl crate::DataBuf for ArrayBuf 186 | where 187 | T: crate::Pod, 188 | { 189 | type Inner = T; 190 | fn as_ref(&self) -> &[::core::mem::MaybeUninit] { 191 | &self.inner 192 | } 193 | fn as_mut(&mut self) -> &mut [::core::mem::MaybeUninit] { 194 | &mut self.inner 195 | } 196 | fn extend(&mut self, len: usize) -> Result<(), ()> { 197 | if len > N { 198 | Err(()) 199 | } else { 200 | Ok(()) 201 | } 202 | } 203 | } 204 | } 205 | 206 | /// 8 pointers (32/64 bytes, with pointer alignment) 207 | pub type Ptr8 = ArrayBuf; 208 | /// 64 bytes, 64-bit alignment 209 | pub type U64_8 = ArrayBuf; 210 | /// 32 bytes, 8-bit alignment 211 | pub type U8_32 = ArrayBuf; 212 | 213 | /// 16 bytes, 64-bit alignment 214 | pub type U64_2 = ArrayBuf; 215 | 216 | /// 16 pointers (64/128 bytes, with pointer alignment) 217 | pub type Ptr16 = ArrayBuf; 218 | 219 | /// Two pointers, useful for wrapping a pointer along with a vtable 220 | pub type Ptr2 = ArrayBuf; 221 | /// One pointer, can only store the vtable 222 | pub type Ptr1 = ArrayBuf; 223 | 224 | /// Dyanamically allocated buffer with 8-byte alignment 225 | #[cfg(feature = "alloc")] 226 | pub type U64Vec = ::alloc::vec::Vec<::core::mem::MaybeUninit>; 227 | /// Dyanamically allocated buffer with 1-byte alignment 228 | #[cfg(feature = "alloc")] 229 | pub type U8Vec = ::alloc::vec::Vec<::core::mem::MaybeUninit>; 230 | /// Dyanamically allocated buffer with pointer alignment 231 | #[cfg(feature = "alloc")] 232 | pub type PtrVec = ::alloc::vec::Vec<::core::mem::MaybeUninit>; 233 | } 234 | 235 | /// Implementation of the FIFO list structure 236 | pub mod fifo; 237 | /// 
Implementation of the LIFO stack structure 238 | pub mod stack; 239 | /// Implementation of the single-value structure 240 | pub mod value; 241 | 242 | #[cfg(feature = "const_generics")] 243 | /// A single dynamically-sized value stored in a `usize` aligned buffer 244 | /// 245 | /// ``` 246 | /// let v = ::stack_dst::ValueU::<[u8], 16>::new_stable([1,2,3], |v| v); 247 | /// ``` 248 | pub type ValueU = 249 | Value>; 250 | #[cfg(feature = "const_generics")] 251 | /// A single LIFO stack of DSTs using a `usize` aligned buffer 252 | /// 253 | /// ``` 254 | /// let mut stack = ::stack_dst::StackU::<[u8], 16>::new(); 255 | /// stack.push_copied(&[1]); 256 | /// ``` 257 | pub type StackU = 258 | Stack>; 259 | #[cfg(feature = "const_generics")] 260 | /// A FIFO queue of DSTs using a `usize` aligned buffer 261 | /// 262 | /// ``` 263 | /// let mut queue = ::stack_dst::FifoU::<[u8], 16>::new(); 264 | /// queue.push_copied(&[1]); 265 | /// ``` 266 | pub type FifoU = 267 | Fifo>; 268 | 269 | fn decompose_pointer(mut ptr: *const T) -> (*const (), usize, [usize; 3]) { 270 | let addr = ptr as *const (); 271 | let rv = mem_as_slice(&mut ptr); 272 | let mut vals = [0; 3]; 273 | assert!( 274 | rv[0] == addr as usize, 275 | "BUG: Pointer layout is not (data_ptr, info...)" 276 | ); 277 | vals[..rv.len() - 1].copy_from_slice(&rv[1..]); 278 | (addr, rv.len() - 1, vals) 279 | } 280 | 281 | fn mem_as_slice(ptr: &mut T) -> &mut [usize] { 282 | assert!(mem::size_of::() % mem::size_of::() == 0); 283 | assert!(mem::align_of::() % mem::align_of::() == 0); 284 | let words = mem::size_of::() / mem::size_of::(); 285 | // SAFE: Points to valid memory (a raw pointer) 286 | unsafe { slice::from_raw_parts_mut(ptr as *mut _ as *mut usize, words) } 287 | } 288 | 289 | /// Re-construct a fat pointer 290 | unsafe fn make_fat_ptr(data_ptr: *mut (), meta_vals: &BufSlice) -> *mut T { 291 | #[repr(C)] 292 | #[derive(Copy, Clone)] 293 | struct Raw { 294 | ptr: *const (), 295 | meta: [usize; 4], 296 | } 297 | union Inner { 298 | ptr: *mut T, 299 | raw: Raw, 300 | } 301 | let mut rv = Inner { 302 | raw: Raw { 303 | ptr: data_ptr, 304 | meta: [0; 4], 305 | }, 306 | }; 307 | assert!(meta_vals.len() * mem::size_of::() % mem::size_of::() == 0); 308 | assert!(meta_vals.len() * mem::size_of::() <= 4 * mem::size_of::()); 309 | ptr::copy( 310 | meta_vals.as_ptr() as *const u8, 311 | rv.raw.meta.as_mut_ptr() as *mut u8, 312 | meta_vals.len() * mem::size_of::(), 313 | ); 314 | let rv = rv.ptr; 315 | assert_eq!(rv as *const (), data_ptr as *const ()); 316 | rv 317 | } 318 | /// Write metadata (abstraction around `ptr::copy`) 319 | fn store_metadata(dst: &mut BufSlice, meta_words: &[usize]) { 320 | let n_bytes = core::mem::size_of_val(meta_words); 321 | assert!( 322 | n_bytes <= dst.len() * mem::size_of::(), 323 | "nbytes [{}] <= dst.len() [{}] * sizeof [{}]", 324 | n_bytes, 325 | dst.len(), 326 | mem::size_of::() 327 | ); 328 | unsafe { 329 | ptr::copy( 330 | meta_words.as_ptr() as *const u8, 331 | dst.as_mut_ptr() as *mut u8, 332 | n_bytes, 333 | ); 334 | } 335 | } 336 | 337 | fn round_to_words(len: usize) -> usize { 338 | (len + mem::size_of::() - 1) / mem::size_of::() 339 | } 340 | 341 | /// Calls a provided function to get a fat pointer version of `v` (and checks that the returned pointer is sane) 342 | fn check_fat_pointer(v: &U, get_ref: impl FnOnce(&U) -> &T) -> &T { 343 | let ptr: &T = get_ref(v); 344 | assert_eq!( 345 | ptr as *const _ as *const u8, v as *const _ as *const u8, 346 | "MISUSE: Closure returned different pointer" 347 | ); 
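// Additionally, the returned fat pointer must cover the whole of `v`: a pointer to
// just one field or element (a "subset pointer") would under-report the value's
// size when the value is later stored.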
348 | assert_eq!( 349 | mem::size_of_val(ptr), 350 | mem::size_of::(), 351 | "MISUSE: Closure returned a subset pointer" 352 | ); 353 | ptr 354 | } 355 | 356 | /// Push items to a list using a generator function to get the items 357 | /// - `meta` - Metadata slot (must be 1 usize long) 358 | /// - `data` - Data slot, must be at least `count * sizeof(T)` long 359 | /// - `count` - Number of items to insert 360 | /// - `gen` - Generator function (is passed the current index) 361 | /// - `reset_slot` - A slot updated with `reset_value` when a panic happens before push is complete 362 | /// - `reset_value` - Value used in `reset_slot` 363 | /// 364 | /// This provides a panic-safe push as long as `reset_slot` and `reset_value` undo the allocation operation 365 | unsafe fn list_push_gen( 366 | meta: &mut BufSlice, 367 | data: &mut BufSlice, 368 | count: usize, 369 | mut gen: impl FnMut(usize) -> T, 370 | reset_slot: &mut usize, 371 | reset_value: usize, 372 | ) { 373 | /// Helper to drop/zero all pushed items, and reset data structure state if there's a panic 374 | struct PanicState<'a, T>(*mut T, usize, &'a mut usize, usize); 375 | impl<'a, T> ::core::ops::Drop for PanicState<'a, T> { 376 | fn drop(&mut self) { 377 | if self.0.is_null() { 378 | return; 379 | } 380 | // Reset the state of the data structure (leaking items) 381 | *self.2 = self.3; 382 | // Drop all partially-populated items 383 | unsafe { 384 | while self.1 != 0 { 385 | ptr::drop_in_place(&mut *self.0); 386 | ptr::write_bytes(self.0 as *mut u8, 0, mem::size_of::()); 387 | self.0 = self.0.offset(1); 388 | self.1 -= 1; 389 | } 390 | } 391 | } 392 | } 393 | 394 | let mut ptr = data.as_mut_ptr() as *mut T; 395 | let mut clr = PanicState(ptr, 0, reset_slot, reset_value); 396 | for i in 0..count { 397 | let val = gen(i); 398 | ptr::write(ptr, val); 399 | ptr = ptr.offset(1); 400 | clr.1 += 1; 401 | } 402 | // Prevent drops and prevent reset 403 | clr.0 = ptr::null_mut(); 404 | // Save the length once everything has been written 405 | crate::store_metadata(meta, &[count]); 406 | } 407 | 408 | /// Marker trait used to check alignment 409 | pub unsafe trait AlignmentValid { 410 | #[doc(hidden)] 411 | fn check(); 412 | } 413 | #[cfg(feature = "full_const_generics")] 414 | unsafe impl AlignmentValid for (S, L) 415 | where 416 | [(); mem::align_of::() - mem::align_of::()]: Sized, 417 | { 418 | fn check() {} 419 | } 420 | #[cfg(not(feature = "full_const_generics"))] 421 | unsafe impl AlignmentValid for (S, L) { 422 | fn check() { 423 | assert!( 424 | mem::align_of::() <= mem::align_of::(), 425 | "TODO: Enforce alignment >{} (requires {})", 426 | mem::align_of::(), 427 | mem::align_of::() 428 | ); 429 | } 430 | } 431 | 432 | /* 433 | #[cfg(doctest)] 434 | #[doc=include_str!("../README.md")] 435 | pub mod readme { 436 | } 437 | */ 438 | -------------------------------------------------------------------------------- /src/stack.rs: -------------------------------------------------------------------------------- 1 | use core::{iter, marker, mem, ops, ptr}; 2 | 3 | mod impls; 4 | 5 | // Implementation Notes 6 | // ----- 7 | // 8 | // The data array is filled from the back, with the metadata stored before (at a lower memory address) 9 | // the actual data. This so the code can use a single integer to track the position (using size_of_val 10 | // when popping items, and the known size when pushing). 
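//
// For example (hypothetical sizes): in an 8-word buffer, a value needing three data
// words plus one metadata word is stored as
//
//     [ free | free | free | free | meta | data | data | data ]
//        0      1      2      3      4      5      6      7
//
// and `next_ofs` becomes 4, so `data[len - next_ofs]` (here `data[4]`) is the
// metadata word of the most recently pushed item.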
11 | 12 | /// A fixed-capacity stack that can contain dynamically-sized types 13 | /// 14 | /// Uses an array of usize as a backing store for a First-In, Last-Out stack 15 | /// of items that can unsize to `T`. 16 | /// 17 | /// Note: Each item in the stack takes at least one slot in the buffer (to store the metadata) 18 | pub struct Stack { 19 | _pd: marker::PhantomData<*const T>, 20 | // Offset from the _back_ of `data` to the next free position. 21 | // I.e. data[data.len() - cur_ofs] is the first metadata word 22 | next_ofs: usize, 23 | data: D, 24 | } 25 | 26 | impl ops::Drop for Stack { 27 | fn drop(&mut self) { 28 | while !self.is_empty() { 29 | self.pop(); 30 | } 31 | } 32 | } 33 | impl Default for Stack { 34 | fn default() -> Self { 35 | Stack::new() 36 | } 37 | } 38 | 39 | impl Stack { 40 | /// Construct a new (empty) stack 41 | pub fn new() -> Self 42 | where 43 | D: Default, 44 | { 45 | Self::with_buffer(D::default()) 46 | } 47 | /// Construct a new (empty) stack using the provided buffer 48 | pub fn with_buffer(data: D) -> Self { 49 | Stack { 50 | _pd: marker::PhantomData, 51 | next_ofs: 0, 52 | data, 53 | } 54 | } 55 | 56 | /// Tests if the stack is empty 57 | pub fn is_empty(&self) -> bool { 58 | self.next_ofs == 0 59 | } 60 | 61 | fn meta_words() -> usize { 62 | D::round_to_words(mem::size_of::<&T>() - mem::size_of::()) 63 | } 64 | 65 | /// Push a value at the top of the stack 66 | /// 67 | /// ``` 68 | /// # use stack_dst::Stack; 69 | /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new(); 70 | /// stack.push([1, 2, 3]); 71 | /// ``` 72 | #[cfg(feature = "unsize")] 73 | pub fn push>(&mut self, v: U) -> Result<(), U> 74 | where 75 | (U, D::Inner): crate::AlignmentValid, 76 | { 77 | self.push_stable(v, |p| p) 78 | } 79 | 80 | /// Push a value at the top of the stack (without using `Unsize`) 81 | /// 82 | /// ``` 83 | /// # use stack_dst::Stack; 84 | /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new(); 85 | /// stack.push_stable([1, 2,3], |v| v); 86 | /// ``` 87 | pub fn push_stable &T>(&mut self, v: U, f: F) -> Result<(), U> 88 | where 89 | (U, D::Inner): crate::AlignmentValid, 90 | { 91 | <(U, D::Inner) as crate::AlignmentValid>::check(); 92 | 93 | // SAFE: Destination address is valid 94 | unsafe { 95 | match self.push_inner(crate::check_fat_pointer(&v, f)) { 96 | Ok(pii) => { 97 | ptr::write(pii.data.as_mut_ptr() as *mut U, v); 98 | Ok(()) 99 | } 100 | Err(_) => Err(v), 101 | } 102 | } 103 | } 104 | 105 | unsafe fn raw_at(&self, ofs: usize) -> *mut T { 106 | let dar = self.data.as_ref(); 107 | let meta = &dar[dar.len() - ofs..]; 108 | let mw = Self::meta_words(); 109 | let (meta, data) = meta.split_at(mw); 110 | super::make_fat_ptr(data.as_ptr() as *mut (), meta) 111 | } 112 | unsafe fn raw_at_mut(&mut self, ofs: usize) -> *mut T { 113 | let dar = self.data.as_mut(); 114 | let ofs = dar.len() - ofs; 115 | let meta = &mut dar[ofs..]; 116 | let mw = Self::meta_words(); 117 | let (meta, data) = meta.split_at_mut(mw); 118 | super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta) 119 | } 120 | // Get a raw pointer to the top of the stack 121 | fn top_raw(&self) -> Option<*mut T> { 122 | if self.next_ofs == 0 { 123 | None 124 | } else { 125 | // SAFE: Internal consistency maintains the metadata validity 126 | Some(unsafe { self.raw_at(self.next_ofs) }) 127 | } 128 | } 129 | // Get a raw pointer to the top of the stack 130 | fn top_raw_mut(&mut self) -> Option<*mut T> { 131 | if self.next_ofs == 0 { 132 | None 133 | } else { 134 | // SAFE: 
Internal consistency maintains the metadata validity 135 | Some(unsafe { self.raw_at_mut(self.next_ofs) }) 136 | } 137 | } 138 | /// Returns a pointer to the top item on the stack 139 | pub fn top(&self) -> Option<&T> { 140 | self.top_raw().map(|x| unsafe { &*x }) 141 | } 142 | /// Returns a pointer to the top item on the stack (unique/mutable) 143 | pub fn top_mut(&mut self) -> Option<&mut T> { 144 | self.top_raw_mut().map(|x| unsafe { &mut *x }) 145 | } 146 | /// Pop the top item off the stack 147 | pub fn pop(&mut self) { 148 | if let Some(ptr) = self.top_raw_mut() { 149 | assert!(self.next_ofs > 0); 150 | // SAFE: Pointer is valid, and will never be accessed after this point 151 | let words = unsafe { 152 | let size = mem::size_of_val(&*ptr); 153 | ptr::drop_in_place(ptr); 154 | D::round_to_words(size) 155 | }; 156 | self.next_ofs -= words + Self::meta_words(); 157 | } 158 | } 159 | 160 | /// Obtain an immutable iterator (yields references to items, in the order they would be popped) 161 | /// ``` 162 | /// let mut list = ::stack_dst::Stack::::new(); 163 | /// list.push_str("Hello"); 164 | /// list.push_str("world"); 165 | /// let mut it = list.iter(); 166 | /// assert_eq!(it.next(), Some("world")); 167 | /// assert_eq!(it.next(), Some("Hello")); 168 | /// assert_eq!(it.next(), None); 169 | /// ``` 170 | pub fn iter(&self) -> Iter { 171 | Iter(self, self.next_ofs) 172 | } 173 | /// Obtain unique/mutable iterator 174 | /// ``` 175 | /// let mut list = ::stack_dst::Stack::<[u8], ::stack_dst::buffers::Ptr8>::new(); 176 | /// list.push_copied(&[1,2,3]); 177 | /// list.push_copied(&[9]); 178 | /// for v in list.iter_mut() { 179 | /// v[0] -= 1; 180 | /// } 181 | /// let mut it = list.iter(); 182 | /// assert_eq!(it.next(), Some(&[8][..])); 183 | /// assert_eq!(it.next(), Some(&[0,2,3][..])); 184 | /// assert_eq!(it.next(), None); 185 | /// ``` 186 | pub fn iter_mut(&mut self) -> IterMut { 187 | IterMut(self, self.next_ofs) 188 | } 189 | } 190 | 191 | struct PushInnerInfo<'a, DInner> { 192 | /// Buffer for value data 193 | data: &'a mut crate::BufSlice, 194 | /// Buffer for metadata (length/vtable) 195 | meta: &'a mut crate::BufSlice, 196 | /// Memory location for resetting the push 197 | reset_slot: &'a mut usize, 198 | reset_value: usize, 199 | } 200 | impl Stack { 201 | /// See `push_inner_raw` 202 | unsafe fn push_inner(&mut self, fat_ptr: &T) -> Result, ()> { 203 | let bytes = mem::size_of_val(fat_ptr); 204 | let (_data_ptr, len, v) = crate::decompose_pointer(fat_ptr); 205 | self.push_inner_raw(bytes, &v[..len]) 206 | } 207 | 208 | /// Returns: 209 | /// - metadata slot 210 | /// - data slot 211 | /// - Total words used 212 | unsafe fn push_inner_raw( 213 | &mut self, 214 | bytes: usize, 215 | metadata: &[usize], 216 | ) -> Result, ()> { 217 | assert!(D::round_to_words(mem::size_of_val(metadata)) == Self::meta_words()); 218 | let words = D::round_to_words(bytes) + Self::meta_words(); 219 | 220 | let req_space = self.next_ofs + words; 221 | // Attempt resize (if the underlying buffer allows it) 222 | if req_space > self.data.as_ref().len() { 223 | let old_len = self.data.as_ref().len(); 224 | if let Ok(_) = self.data.extend(req_space) { 225 | let new_len = self.data.as_ref().len(); 226 | self.data.as_mut().rotate_right(new_len - old_len); 227 | } 228 | } 229 | 230 | // Check if there is sufficient space for the new item 231 | if req_space <= self.data.as_ref().len() { 232 | // Get the base pointer for the new item 233 | let prev_next_ofs = self.next_ofs; 234 | self.next_ofs += words; 235 
| let len = self.data.as_ref().len(); 236 | let slot = &mut self.data.as_mut()[len - self.next_ofs..][..words]; 237 | let (meta, rv) = slot.split_at_mut(Self::meta_words()); 238 | 239 | // Populate the metadata 240 | super::store_metadata(meta, metadata); 241 | 242 | // Increment offset and return 243 | Ok(PushInnerInfo { 244 | meta, 245 | data: rv, 246 | reset_slot: &mut self.next_ofs, 247 | reset_value: prev_next_ofs, 248 | }) 249 | } else { 250 | Err(()) 251 | } 252 | } 253 | } 254 | 255 | impl Stack { 256 | /// Push the contents of a string slice as an item onto the stack 257 | /// 258 | /// ``` 259 | /// # use stack_dst::Stack; 260 | /// let mut stack = Stack::::new(); 261 | /// stack.push_str("Hello!"); 262 | /// ``` 263 | pub fn push_str(&mut self, v: &str) -> Result<(), ()> { 264 | unsafe { 265 | self.push_inner(v).map(|pii| { 266 | ptr::copy( 267 | v.as_bytes().as_ptr(), 268 | pii.data.as_mut_ptr() as *mut u8, 269 | v.len(), 270 | ) 271 | }) 272 | } 273 | } 274 | } 275 | impl Stack<[T], D> 276 | where 277 | (T, D::Inner): crate::AlignmentValid, 278 | { 279 | /// Pushes a set of items (cloning out of the input slice) 280 | /// 281 | /// ``` 282 | /// # use stack_dst::Stack; 283 | /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new(); 284 | /// stack.push_cloned(&[1, 2, 3]); 285 | /// ``` 286 | pub fn push_cloned(&mut self, v: &[T]) -> Result<(), ()> { 287 | <(T, D::Inner) as crate::AlignmentValid>::check(); 288 | self.push_from_iter(v.iter().cloned()) 289 | } 290 | /// Pushes a set of items (copying out of the input slice) 291 | /// 292 | /// ``` 293 | /// # use stack_dst::Stack; 294 | /// let mut stack = Stack::<[u8], ::stack_dst::buffers::U64_8>::new(); 295 | /// stack.push_copied(&[1, 2, 3]); 296 | /// ``` 297 | pub fn push_copied(&mut self, v: &[T]) -> Result<(), ()> 298 | where 299 | T: Copy, 300 | { 301 | <(T, D::Inner) as crate::AlignmentValid>::check(); 302 | // SAFE: Carefully constructed to maintain consistency 303 | unsafe { 304 | self.push_inner(v).map(|pii| { 305 | ptr::copy( 306 | v.as_ptr() as *const u8, 307 | pii.data.as_mut_ptr() as *mut u8, 308 | mem::size_of_val(v), 309 | ) 310 | }) 311 | } 312 | } 313 | } 314 | impl Stack<[T], D> 315 | where 316 | (T, D::Inner): crate::AlignmentValid, 317 | { 318 | /// Push an item, populated from an exact-sized iterator 319 | /// 320 | /// ``` 321 | /// # extern crate core; 322 | /// # use stack_dst::Stack; 323 | /// # use core::fmt::Display; 324 | /// let mut stack = Stack::<[u8], stack_dst::buffers::Ptr8>::new(); 325 | /// stack.push_from_iter(0..10); 326 | /// assert_eq!(stack.top().unwrap(), &[0,1,2,3,4,5,6,7,8,9]); 327 | /// ``` 328 | pub fn push_from_iter(&mut self, mut iter: impl ExactSizeIterator) -> Result<(), ()> { 329 | <(T, D::Inner) as crate::AlignmentValid>::check(); 330 | // SAFE: API used correctly 331 | unsafe { 332 | let pii = self.push_inner_raw(iter.len() * mem::size_of::(), &[0])?; 333 | crate::list_push_gen( 334 | pii.meta, 335 | pii.data, 336 | iter.len(), 337 | |_| iter.next().unwrap(), 338 | pii.reset_slot, 339 | pii.reset_value, 340 | ); 341 | Ok(()) 342 | } 343 | } 344 | } 345 | 346 | /// DST Stack iterator (immutable) 347 | pub struct Iter<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a Stack, usize); 348 | impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for Iter<'a, T, D> { 349 | type Item = &'a T; 350 | fn next(&mut self) -> Option<&'a T> { 351 | if self.1 == 0 { 352 | None 353 | } else { 354 | // SAFE: Bounds checked, aliasing enforced by API 355 | let rv = unsafe { 
&*self.0.raw_at(self.1) }; 356 | self.1 -= Stack::::meta_words() + D::round_to_words(mem::size_of_val(rv)); 357 | Some(rv) 358 | } 359 | } 360 | } 361 | 362 | /// DST Stack iterator (immutable) 363 | pub struct IterMut<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf>(&'a mut Stack, usize); 364 | impl<'a, T: 'a + ?Sized, D: 'a + crate::DataBuf> iter::Iterator for IterMut<'a, T, D> { 365 | type Item = &'a mut T; 366 | fn next(&mut self) -> Option<&'a mut T> { 367 | if self.1 == 0 { 368 | None 369 | } else { 370 | // SAFE: Bounds checked, aliasing enforced by API 371 | let rv = unsafe { &mut *self.0.raw_at_mut(self.1) }; 372 | self.1 -= Stack::::meta_words() + D::round_to_words(mem::size_of_val(rv)); 373 | Some(rv) 374 | } 375 | } 376 | } 377 | -------------------------------------------------------------------------------- /src/stack/impls.rs: -------------------------------------------------------------------------------- 1 | macro_rules! d { 2 | ( $t:path; $($body:tt)* ) => { 3 | impl $t for super::Stack 4 | where 5 | T: $t, 6 | { 7 | $( $body )* 8 | } 9 | } 10 | } 11 | 12 | d! { ::core::fmt::Debug; 13 | fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { 14 | f.write_str("[")?; 15 | for v in self.iter() { 16 | v.fmt(f)?; 17 | f.write_str(",")?; 18 | } 19 | f.write_str("]")?; 20 | Ok( () ) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/value.rs: -------------------------------------------------------------------------------- 1 | //! Single DST stored inline 2 | 3 | use core::{marker, mem, ops, ptr}; 4 | 5 | /// Stack-allocated dynamically sized type 6 | /// 7 | /// `T` is the unsized type contained. 8 | /// `D` is the buffer used to hold the unsized type (both data and metadata). 
9 | /// 10 | /// ``` 11 | /// # extern crate core; 12 | /// # use stack_dst::Value; 13 | /// # use core::fmt::Display; 14 | /// let val = Value::::new_stable(123456, |v| v as _) 15 | /// .expect("Insufficient size"); 16 | /// assert_eq!( format!("{}", val), "123456" ); 17 | /// ``` 18 | pub struct Value { 19 | _pd: marker::PhantomData, 20 | // Data contains the object data first, then padding, then the pointer information 21 | data: D, 22 | } 23 | 24 | impl Value { 25 | /// Construct a stack-based DST 26 | /// 27 | /// Returns Ok(dst) if the allocation was successful, or Err(val) if it failed 28 | /// 29 | /// ``` 30 | /// # extern crate core; 31 | /// # use stack_dst::Value; 32 | /// # use core::fmt::Display; 33 | /// let val = Value::::new(1234) 34 | /// .expect("Insufficient size"); 35 | /// assert_eq!( format!("{}", val), "1234" ); 36 | /// ``` 37 | #[cfg(feature = "unsize")] 38 | pub fn new>(val: U) -> Result, U> 39 | where 40 | (U, D::Inner): crate::AlignmentValid, 41 | D: Default, 42 | { 43 | Self::new_stable(val, |p| p) 44 | } 45 | 46 | /// Construct a stack-based DST using a pre-constructed buffer 47 | /// 48 | /// Returns `Ok(dst)` if the allocation was successful, or `Err(val)` if it failed 49 | /// 50 | /// ``` 51 | /// # extern crate core; 52 | /// # use stack_dst::Value; 53 | /// # use core::fmt::Display; 54 | /// # use core::mem::MaybeUninit; 55 | /// let val = Value::::in_buffer([MaybeUninit::new(0u64); 2], 1234) 56 | /// .expect("Insufficient size"); 57 | /// assert_eq!( format!("{}", val), "1234" ); 58 | /// ``` 59 | #[cfg(feature = "unsize")] 60 | pub fn in_buffer>(buffer: D, val: U) -> Result, U> 61 | where 62 | (U, D::Inner): crate::AlignmentValid, 63 | { 64 | Self::in_buffer_stable(buffer, val, |p| p) 65 | } 66 | 67 | /// Construct a stack-based DST (without needing `Unsize`). The closure `get_ref` 68 | /// must just convert `&U` to `&U` (if the pointers don't match, an assertion triggers) 69 | /// 70 | /// Returns `Ok(dst)` if the allocation was successful, or `Err(val)` if it failed 71 | /// 72 | /// ``` 73 | /// # extern crate core; 74 | /// # use stack_dst::Value; 75 | /// # use core::fmt::Display; 76 | /// let val = Value::::new_stable(1234, |v| v as _) 77 | /// .expect("Insufficient size"); 78 | /// assert_eq!( format!("{}", val), "1234" ); 79 | /// ``` 80 | pub fn new_stable &T>(val: U, get_ref: F) -> Result, U> 81 | where 82 | (U, D::Inner): crate::AlignmentValid, 83 | D: Default, 84 | { 85 | Self::in_buffer_stable(D::default(), val, get_ref) 86 | } 87 | 88 | /// Construct a stack-based DST (without needing `Unsize`) using a provided buffer. 89 | /// See `new_stable` for requirements on the `get_ref` closure. 
90 | /// 91 | /// Returns `Ok(dst)` if the allocation was successful, or `Err(val)` if it failed 92 | /// 93 | /// ``` 94 | /// # extern crate core; 95 | /// # use stack_dst::Value; 96 | /// # use core::fmt::Display; 97 | /// # use core::mem::MaybeUninit; 98 | /// let val = Value::::in_buffer_stable([MaybeUninit::new(0u64); 2], 1234, |v| v) 99 | /// .expect("Insufficient size"); 100 | /// assert_eq!( format!("{}", val), "1234" ); 101 | /// ``` 102 | pub fn in_buffer_stable &T>( 103 | buffer: D, 104 | val: U, 105 | get_ref: F, 106 | ) -> Result, U> 107 | where 108 | (U, D::Inner): crate::AlignmentValid, 109 | { 110 | <(U, D::Inner) as crate::AlignmentValid>::check(); 111 | 112 | let rv = unsafe { 113 | let ptr: *const _ = crate::check_fat_pointer(&val, get_ref); 114 | let (raw_ptr, meta_len, meta) = super::decompose_pointer(ptr); 115 | 116 | Value::new_raw( 117 | &meta[..meta_len], 118 | raw_ptr as *mut _, 119 | mem::size_of::(), 120 | buffer, 121 | ) 122 | }; 123 | match rv { 124 | Some(r) => { 125 | // Prevent the destructor from running, now that we've copied it away 126 | mem::forget(val); 127 | Ok(r) 128 | } 129 | None => Err(val), 130 | } 131 | } 132 | 133 | #[cfg(all(feature = "alloc", feature = "unsize"))] 134 | /// Construct a stack-based DST, falling back on boxing if the value doesn't fit 135 | /// 136 | /// ``` 137 | /// # extern crate core; 138 | /// # use stack_dst::Value; 139 | /// # use core::fmt::Debug; 140 | /// let val = [1usize, 2, 3, 4]; 141 | /// assert!( Value::::new(val).is_err() ); 142 | /// let v = Value::::new_or_boxed(val); 143 | /// println!("v = {:?}", v); 144 | /// ``` 145 | pub fn new_or_boxed(val: U) -> Value 146 | where 147 | (U, D::Inner): crate::AlignmentValid, 148 | U: marker::Unsize, 149 | ::alloc::boxed::Box: marker::Unsize, 150 | D: Default, 151 | { 152 | Self::new(val).unwrap_or_else(|val| { 153 | Self::new::<::alloc::boxed::Box<_>>(::alloc::boxed::Box::new(val)) 154 | .ok() 155 | .expect("Insufficient space for Box") 156 | }) 157 | } 158 | 159 | /// UNSAFE: `data` must point to `size` bytes, which shouldn't be freed if `Some` is returned 160 | pub unsafe fn new_raw( 161 | info: &[usize], 162 | data: *mut (), 163 | size: usize, 164 | mut buffer: D, 165 | ) -> Option> { 166 | let req_words = D::round_to_words(mem::size_of_val(info)) + D::round_to_words(size); 167 | if let Err(_) = buffer.extend(req_words) { 168 | return None; 169 | } 170 | 171 | let mut rv = mem::ManuallyDrop::new(Value:: { 172 | _pd: marker::PhantomData, 173 | data: buffer, 174 | }); 175 | rv.write_value(data, size, info); 176 | Some(mem::ManuallyDrop::into_inner(rv)) 177 | } 178 | 179 | unsafe fn write_value(&mut self, data: *const (), size: usize, info: &[usize]) { 180 | let info_words = D::round_to_words(mem::size_of_val(info)); 181 | let req_words = info_words + D::round_to_words(size); 182 | let buf = self.data.as_mut(); 183 | assert!(req_words <= buf.len()); 184 | 185 | // Place pointer information at the end of the region 186 | // - Allows the data to be at the start for alignment purposes 187 | { 188 | let info_ofs = buf.len() - info_words; 189 | let info_dst = &mut buf[info_ofs..]; 190 | crate::store_metadata(info_dst, info); 191 | } 192 | 193 | ptr::copy_nonoverlapping(data as *const u8, buf.as_mut_ptr() as *mut u8, size); 194 | } 195 | 196 | /// Replace the contents without dropping the backing allocation 197 | /// 198 | /// ``` 199 | /// # extern crate core; 200 | /// # use stack_dst::Value; 201 | /// # use core::fmt::Display; 202 | /// let mut value = 
Value::::new_stable(1234, |v| v) 203 | /// .unwrap(); 204 | /// assert_eq!(format!("{}", value), "1234"); 205 | /// value.replace_stable(1.234, |v| v).unwrap(); 206 | /// assert_eq!(format!("{}", value), "1.234"); 207 | /// ``` 208 | pub fn replace_stable(&mut self, val: U, get_ref: impl Fn(&U) -> &T) -> Result<(), U> 209 | where 210 | (U, D::Inner): crate::AlignmentValid, 211 | { 212 | <(U, D::Inner) as crate::AlignmentValid>::check(); 213 | 214 | let size = mem::size_of::(); 215 | let (raw_ptr, meta_len, meta) = 216 | super::decompose_pointer(crate::check_fat_pointer(&val, get_ref)); 217 | let info = &meta[..meta_len]; 218 | 219 | // Check size requirements (allow resizing) 220 | let req_words = D::round_to_words(mem::size_of_val(info)) + D::round_to_words(size); 221 | if let Err(_) = self.data.extend(req_words) { 222 | return Err(val); 223 | } 224 | // If met, drop the existing item and move in the new item 225 | unsafe { 226 | ptr::drop_in_place::(&mut **self); 227 | self.write_value(raw_ptr, mem::size_of::(), info); 228 | } 229 | Ok(()) 230 | } 231 | #[cfg(feature = "unsize")] 232 | /// Replace the contents without dropping the backing allocation 233 | /// 234 | /// ``` 235 | /// # extern crate core; 236 | /// # use stack_dst::Value; 237 | /// # use core::fmt::Display; 238 | /// let mut value = Value::::new(1234).unwrap(); 239 | /// assert_eq!(format!("{}", value), "1234"); 240 | /// value.replace(1.234).unwrap(); 241 | /// assert_eq!(format!("{}", value), "1.234"); 242 | /// ``` 243 | pub fn replace(&mut self, val: U) -> Result<(), U> 244 | where 245 | (U, D::Inner): crate::AlignmentValid, 246 | U: marker::Unsize, 247 | { 248 | self.replace_stable(val, |v| v) 249 | } 250 | 251 | /// Obtain raw pointer to the contained data 252 | unsafe fn as_ptr(&self) -> *mut T { 253 | let data = self.data.as_ref(); 254 | let info_size = mem::size_of::<*mut T>() / mem::size_of::() - 1; 255 | let info_ofs = data.len() - D::round_to_words(info_size * mem::size_of::()); 256 | let (data, meta) = data.split_at(info_ofs); 257 | super::make_fat_ptr(data.as_ptr() as *mut (), meta) 258 | } 259 | 260 | /// Obtain raw pointer to the contained data 261 | unsafe fn as_ptr_mut(&mut self) -> *mut T { 262 | let data = self.data.as_mut(); 263 | let info_size = mem::size_of::<*mut T>() / mem::size_of::() - 1; 264 | let info_ofs = data.len() - D::round_to_words(info_size * mem::size_of::()); 265 | let (data, meta) = data.split_at_mut(info_ofs); 266 | super::make_fat_ptr(data.as_mut_ptr() as *mut (), meta) 267 | } 268 | } 269 | /// Specialisations for `str` (allowing storage of strings with single-byte alignment) 270 | impl Value { 271 | /// Create a new empty string with a default buffer 272 | pub fn empty_str() -> Result 273 | where 274 | D: Default, 275 | { 276 | Self::empty_str_in_buffer(Default::default()) 277 | } 278 | /// Create a new empty string with a provided buffer 279 | pub fn empty_str_in_buffer(buffer: D) -> Result { 280 | let rv = unsafe { 281 | let (raw_ptr, meta_len, meta) = super::decompose_pointer(""); 282 | 283 | Value::new_raw(&meta[..meta_len], raw_ptr as *mut (), 0, buffer) 284 | }; 285 | match rv { 286 | Some(r) => Ok(r), 287 | None => Err(()), 288 | } 289 | } 290 | /// Construct from a `str` using a default-constructed buffer 291 | /// ``` 292 | /// # extern crate core; 293 | /// # use stack_dst::Value; 294 | /// # use core::fmt::Display; 295 | /// let val = Value::::new_str("Hello, World") 296 | /// .expect("Insufficient size"); 297 | /// assert_eq!( &val[..], "Hello, World" ); 298 | /// ``` 
299 | pub fn new_str(v: &str) -> Result 300 | where 301 | D: Default, 302 | { 303 | Self::new_str_in_buffer(Default::default(), v) 304 | } 305 | /// Construct from a `str` using a provided buffer 306 | /// 307 | /// ``` 308 | /// # extern crate core; 309 | /// # use stack_dst::Value; 310 | /// # use core::fmt::Display; 311 | /// # use core::mem::MaybeUninit; 312 | /// let val = Value::new_str_in_buffer([MaybeUninit::new(0u8); 32], "Hello, World") 313 | /// .expect("Insufficient size"); 314 | /// assert_eq!( &val[..], "Hello, World" ); 315 | /// ``` 316 | pub fn new_str_in_buffer(buffer: D, val: &str) -> Result { 317 | let rv = unsafe { 318 | let (raw_ptr, meta_len, meta) = super::decompose_pointer(val); 319 | 320 | Value::new_raw( 321 | &meta[..meta_len], 322 | raw_ptr as *mut (), 323 | mem::size_of_val(val), 324 | buffer, 325 | ) 326 | }; 327 | match rv { 328 | Some(r) => Ok(r), 329 | None => Err(val), 330 | } 331 | } 332 | 333 | /// Add a string to the end of a string 334 | /// 335 | /// ``` 336 | /// # use stack_dst::Value; 337 | /// let mut s = Value::::new_str("Foo").unwrap(); 338 | /// s.append_str("Bar").unwrap(); 339 | /// assert_eq!(&s[..], "FooBar"); 340 | /// ``` 341 | pub fn append_str(&mut self, val: &str) -> Result<(), ()> { 342 | let info_words = D::round_to_words(mem::size_of::()); 343 | 344 | let ofs = self.len(); 345 | 346 | // Check/expand sufficient space 347 | let req_words = D::round_to_words(ofs + val.len()) + info_words; 348 | if let Err(_) = self.data.extend(req_words) { 349 | return Err(()); 350 | } 351 | 352 | // Get the metadata slot 353 | let data = self.data.as_mut(); 354 | let info_ofs = data.len() - info_words; 355 | 356 | unsafe { 357 | ptr::copy_nonoverlapping( 358 | val.as_ptr(), 359 | (data.as_mut_ptr() as *mut u8).add(ofs), 360 | val.len(), 361 | ); 362 | crate::store_metadata(&mut data[info_ofs..], &[ofs + val.len()]); 363 | } 364 | 365 | Ok(()) 366 | } 367 | 368 | /// Resize the string (discarding trailing data) 369 | /// 370 | /// ``` 371 | /// # use stack_dst::Value; 372 | /// let mut s = Value::::new_str("FooBar").unwrap(); 373 | /// s.truncate(3); 374 | /// assert_eq!(&s[..], "Foo"); 375 | /// ``` 376 | pub fn truncate(&mut self, len: usize) { 377 | if len < self.len() { 378 | let _ = &self[..][len..]; // Index to force a panic if the index isn't char-aligned 379 | 380 | let info_words = D::round_to_words(mem::size_of::()); 381 | let data = self.data.as_mut(); 382 | let info_ofs = data.len() - info_words; 383 | crate::store_metadata(&mut data[info_ofs..], &[len]); 384 | } 385 | } 386 | } 387 | /// Specialisation for slices (acting like an `ArrayVec`) 388 | impl Value<[I], D> 389 | where 390 | (I, D::Inner): crate::AlignmentValid, 391 | { 392 | /// Create a new zero-sized slice (will error only if the metadata doesn't fit) 393 | pub fn empty_slice() -> Result 394 | where 395 | D: Default, 396 | { 397 | Self::empty_slice_with_buffer(Default::default()) 398 | } 399 | /// Create a new zero-sized slice in the provided buffer (will error only if the metadata doesn't fit) 400 | pub fn empty_slice_with_buffer(mut buffer: D) -> Result { 401 | <(I, D::Inner) as crate::AlignmentValid>::check(); 402 | 403 | let info_words = D::round_to_words(mem::size_of::()); 404 | let req_words = info_words; 405 | if let Err(_) = buffer.extend(req_words) { 406 | return Err(()); 407 | } 408 | assert!(req_words <= buffer.as_ref().len()); 409 | 410 | let mut rv = Value { 411 | _pd: marker::PhantomData, 412 | data: buffer, 413 | }; 414 | 415 | let data = rv.data.as_mut(); 416 | 
let info_ofs = data.len() - info_words; 417 | let (_data_dst, info_dst) = data.split_at_mut(info_ofs); 418 | 419 | crate::store_metadata(info_dst, &[0]); 420 | Ok(rv) 421 | } 422 | 423 | /// Append an item to the end of the slice (similar to `Vec::push`) 424 | pub fn append(&mut self, v: I) -> Result<(), I> { 425 | let info_words = D::round_to_words(mem::size_of::()); 426 | 427 | let ofs = self.len(); 428 | 429 | // Check/expand sufficient space 430 | let req_words = D::round_to_words((ofs + 1) * mem::size_of::()) + info_words; 431 | if let Err(_) = self.data.extend(req_words) { 432 | return Err(v); 433 | } 434 | let data = self.data.as_mut(); 435 | assert!(req_words <= data.len()); 436 | // Write the new value 437 | // SAFE: Alignment is checked, pointer is in-bounds 438 | unsafe { 439 | let data_ptr = (data.as_ptr() as *mut I).add(ofs); 440 | ptr::write(data_ptr, v); 441 | } 442 | // Only update item count after the write 443 | let info_ofs = data.len() - info_words; 444 | crate::store_metadata(&mut data[info_ofs..], &[ofs + 1]); 445 | 446 | Ok(()) 447 | } 448 | /// Inline append an item (See Self::append) 449 | pub fn appended(mut self, v: I) -> Result { 450 | match self.append(v) { 451 | Ok(_) => Ok(self), 452 | Err(v) => Err((self, v)), 453 | } 454 | } 455 | 456 | /// Extend a slice with an iterator 457 | pub fn extend>(&mut self, mut iter: It) -> Result<(), (I, It)> { 458 | while let Some(v) = iter.next() { 459 | match self.append(v) { 460 | Ok(_) => {} 461 | Err(v) => return Err((v, iter)), 462 | } 463 | } 464 | Ok(()) 465 | } 466 | /// Helper to extend during construction (see Self::extend) 467 | pub fn extended>(mut self, iter: It) -> Result { 468 | match self.extend(iter) { 469 | Ok(_) => Ok(self), 470 | Err((v, iter)) => Err((self, v, iter)), 471 | } 472 | } 473 | 474 | /// Remove the last item from the slice 475 | pub fn pop(&mut self) -> Option { 476 | if self.len() > 0 { 477 | let ofs = self.len() - 1; 478 | let data = self.data.as_mut(); 479 | let info_words = D::round_to_words(mem::size_of::()); 480 | let info_ofs = data.len() - info_words; 481 | unsafe { 482 | crate::store_metadata(&mut data[info_ofs..], &[ofs]); 483 | Some(ptr::read((data.as_ptr() as *const I).add(ofs))) 484 | } 485 | } else { 486 | None 487 | } 488 | } 489 | } 490 | impl ops::Deref for Value { 491 | type Target = T; 492 | fn deref(&self) -> &T { 493 | unsafe { &*self.as_ptr() } 494 | } 495 | } 496 | impl ops::DerefMut for Value { 497 | fn deref_mut(&mut self) -> &mut T { 498 | unsafe { &mut *self.as_ptr_mut() } 499 | } 500 | } 501 | impl ops::Drop for Value { 502 | fn drop(&mut self) { 503 | unsafe { ptr::drop_in_place(&mut **self) } 504 | } 505 | } 506 | 507 | mod trait_impls; 508 | -------------------------------------------------------------------------------- /src/value/trait_impls.rs: -------------------------------------------------------------------------------- 1 | use core::future; 2 | use core::pin; 3 | use core::task; 4 | 5 | macro_rules! d { 6 | ( $t:path; $($body:tt)* ) => { 7 | impl $t for super::Value 8 | where 9 | T: $t, 10 | { 11 | $( $body )* 12 | } 13 | } 14 | } 15 | 16 | d! { future::Future; 17 | type Output = T::Output; 18 | fn poll(self: pin::Pin<&mut Self>, cx: &mut task::Context) -> task::Poll { 19 | unsafe { pin::Pin::new_unchecked(&mut **self.get_unchecked_mut()).poll(cx) } 20 | } 21 | } 22 | d! 
{ ::core::iter::Iterator; 23 | type Item = T::Item; 24 | fn next(&mut self) -> Option { 25 | (**self).next() 26 | } 27 | // NOTE: Only a few methods can be directly passed through 28 | // Namely, those that don't use `self` by value and don't use generics 29 | 30 | // Included because it's actually useful API information 31 | fn size_hint(&self) -> (usize, Option) { 32 | (**self).size_hint() 33 | } 34 | 35 | // Included because it can be 36 | fn nth(&mut self, n: usize) -> Option { 37 | (**self).nth(n) 38 | } 39 | } 40 | d! { ::core::iter::DoubleEndedIterator; 41 | fn next_back(&mut self) -> Option { 42 | (**self).next_back() 43 | } 44 | 45 | // Unstable in MSRV 46 | //fn nth_back(&mut self, n: usize) -> Option { 47 | // (**self).nth_back(n) 48 | //} 49 | } 50 | d! { ::core::iter::ExactSizeIterator; 51 | fn len(&self) -> usize { (**self).len() } 52 | 53 | // Unstable 54 | //fn is_empty(&self) -> bool { (**self).is_empty() } 55 | } 56 | 57 | macro_rules! impl_fmt { 58 | ( $( $t:ident )* ) => { 59 | $( 60 | d!{ ::core::fmt::$t; 61 | fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { 62 | (**self).fmt(f) 63 | } 64 | } 65 | )* 66 | } 67 | } 68 | impl_fmt! { 69 | Display Debug UpperHex LowerHex 70 | } 71 | -------------------------------------------------------------------------------- /tests/fifo.rs: -------------------------------------------------------------------------------- 1 | extern crate stack_dst; 2 | 3 | type DstFifo = stack_dst::Fifo; 4 | 5 | #[test] 6 | // A trivial check that ensures that methods are correctly called 7 | fn trivial_type() { 8 | let mut val = DstFifo::>::new(); 9 | val.push_back_stable(1234, |p| p).unwrap(); 10 | val.push_back_stable(1233, |p| p).unwrap(); 11 | assert!(*val.front().unwrap() == 1234); 12 | assert!(*val.front().unwrap() != 1233); 13 | val.pop_front(); 14 | assert!(*val.front().unwrap() != 1234); 15 | assert!(*val.front().unwrap() == 1233); 16 | } 17 | 18 | #[test] 19 | fn slice_push_panic_safety() { 20 | use std::sync::atomic::{AtomicUsize, Ordering}; 21 | static COUNT: AtomicUsize = AtomicUsize::new(0); 22 | struct Sentinel(bool); 23 | impl Clone for Sentinel { 24 | fn clone(&self) -> Self { 25 | if self.0 { 26 | panic!(); 27 | } else { 28 | Sentinel(self.0) 29 | } 30 | } 31 | } 32 | impl Drop for Sentinel { 33 | fn drop(&mut self) { 34 | COUNT.fetch_add(1, Ordering::SeqCst); 35 | } 36 | } 37 | let input = [Sentinel(false), Sentinel(true)]; 38 | 39 | let _ = ::std::panic::catch_unwind(::std::panic::AssertUnwindSafe(|| { 40 | let mut stack = DstFifo::<[Sentinel]>::new(); 41 | let _ = stack.push_cloned(&input); 42 | })); 43 | assert_eq!(COUNT.load(Ordering::SeqCst), 1); 44 | } 45 | 46 | #[test] 47 | fn retain() { 48 | use std::sync::atomic::{AtomicUsize, Ordering}; 49 | static FLAGS: AtomicUsize = AtomicUsize::new(0); 50 | struct Sentinel(usize); 51 | impl ::std::ops::Drop for Sentinel { 52 | fn drop(&mut self) { 53 | let flag = 1 << self.0; 54 | let v = FLAGS.fetch_or(1 << self.0, Ordering::SeqCst); 55 | assert!(v & flag == 0); 56 | } 57 | } 58 | impl AsRef for Sentinel { 59 | fn as_ref(&self) -> &Sentinel { 60 | self 61 | } 62 | } 63 | let mut stack: ::stack_dst::Fifo, ::stack_dst::buffers::Ptr16> = 64 | ::stack_dst::Fifo::new(); 65 | stack.push_back_stable(Sentinel(0), |v| v).ok().unwrap(); 66 | stack.push_back_stable(Sentinel(1), |v| v).ok().unwrap(); 67 | stack.push_back_stable(Sentinel(2), |v| v).ok().unwrap(); 68 | stack.push_back_stable(Sentinel(3), |v| v).ok().unwrap(); 69 | stack.push_back_stable(Sentinel(4), |v| 
v).ok().unwrap(); 70 | 71 | stack.retain(|v| v.as_ref().0 > 2); 72 | assert_eq!(FLAGS.load(Ordering::SeqCst), 0b00_111); 73 | { 74 | let mut it = stack.iter().map(|v| v.as_ref().0); 75 | assert_eq!(it.next(), Some(3)); 76 | assert_eq!(it.next(), Some(4)); 77 | assert_eq!(it.next(), None); 78 | } 79 | drop(stack); 80 | assert_eq!(FLAGS.load(Ordering::SeqCst), 0b11_111); 81 | } 82 | 83 | #[cfg(not(feature = "full_const_generics"))] 84 | mod unaligned { 85 | use stack_dst::Fifo; 86 | use std::any::Any; 87 | type Buf8_16 = ::stack_dst::buffers::ArrayBuf; 88 | #[test] 89 | #[should_panic] 90 | fn push_back_stable() { 91 | let mut stack = Fifo::::new(); 92 | let _ = stack.push_back_stable(123u32, |v| v as _); 93 | } 94 | #[test] 95 | #[should_panic] 96 | #[cfg(feature = "unsize")] 97 | fn push_back() { 98 | let mut stack = Fifo::::new(); 99 | let _ = stack.push_back(123u32); 100 | } 101 | 102 | #[test] 103 | #[should_panic] 104 | fn push_cloned() { 105 | let mut stack = Fifo::<[u32], Buf8_16>::new(); 106 | let _ = stack.push_cloned(&[123u32]); 107 | } 108 | 109 | #[test] 110 | #[should_panic] 111 | fn push_copied() { 112 | let mut stack = Fifo::<[u32], Buf8_16>::new(); 113 | let _ = stack.push_copied(&[123u32]); 114 | } 115 | #[test] 116 | #[should_panic] 117 | fn push_from_iter() { 118 | let mut stack = Fifo::<[u32], Buf8_16>::new(); 119 | let _ = stack.push_from_iter(0..1); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /tests/stack.rs: -------------------------------------------------------------------------------- 1 | use std::any::Any; 2 | 3 | extern crate stack_dst; 4 | 5 | type DstStack = stack_dst::Stack; 6 | 7 | #[test] 8 | // A trivial check that ensures that methods are correctly called 9 | fn trivial_type() { 10 | let mut val = DstStack::>::new(); 11 | val.push_stable(1234, |p| p).unwrap(); 12 | val.push_stable(1233, |p| p).unwrap(); 13 | assert!(*val.top().unwrap() != 1234); 14 | assert!(*val.top().unwrap() == 1233); 15 | val.pop(); 16 | assert!(*val.top().unwrap() == 1234); 17 | assert!(*val.top().unwrap() != 1233); 18 | } 19 | 20 | #[test] 21 | fn strings() { 22 | let mut stack: DstStack = DstStack::new(); 23 | stack.push_str("\n").unwrap(); 24 | stack.push_str("World").unwrap(); 25 | stack.push_str(" ").unwrap(); 26 | stack.push_str("Hello").unwrap(); 27 | 28 | assert_eq!(stack.top(), Some("Hello")); 29 | stack.pop(); 30 | assert_eq!(stack.top(), Some(" ")); 31 | stack.pop(); 32 | assert_eq!(stack.top(), Some("World")); 33 | stack.pop(); 34 | stack.push_str("World").unwrap(); 35 | stack.push_str("Cruel").unwrap(); 36 | assert_eq!(stack.top(), Some("Cruel")); 37 | stack.pop(); 38 | assert_eq!(stack.top(), Some("World")); 39 | stack.pop(); 40 | assert_eq!(stack.top(), Some("\n")); 41 | stack.pop(); 42 | assert_eq!(stack.top(), None); 43 | } 44 | 45 | #[test] 46 | fn slices() { 47 | let mut stack: DstStack<[u8]> = DstStack::new(); 48 | 49 | stack.push_cloned(b"123").unwrap(); 50 | stack.push_cloned(b"").unwrap(); 51 | stack.push_cloned(b"abcd").unwrap(); 52 | assert_eq!(stack.top(), Some(b"abcd" as &[_])); 53 | stack.pop(); 54 | assert_eq!(stack.top(), Some(b"" as &[_])); 55 | stack.pop(); 56 | assert_eq!(stack.top(), Some(b"123" as &[_])); 57 | stack.pop(); 58 | assert_eq!(stack.top(), None); 59 | } 60 | 61 | #[test] 62 | fn limits() { 63 | let mut val = stack_dst::Stack::::new(); 64 | // Pushing when full 65 | val.push_stable(1usize, |p| p).unwrap(); 66 | assert!(val.push_stable(2usize, |p| p).is_err()); 67 | 68 | // Popping 
past empty (should stay empty) 69 | val.pop(); 70 | assert!(val.is_empty()); 71 | val.pop(); 72 | assert!(val.is_empty()); 73 | 74 | // Zero-sized types 75 | val.push_stable((), |p| p).unwrap(); 76 | val.push_stable((), |p| p).unwrap(); 77 | assert!(val.push_stable((), |p| p).is_err()); 78 | val.pop(); 79 | 80 | // Pushing a value when there is space, but no enough space for the entire value 81 | assert!(val.push_stable(1usize, |p| p).is_err()); 82 | val.push_stable((), |p| p).unwrap(); 83 | } 84 | 85 | #[test] 86 | fn destructors() { 87 | struct DropWatch(::std::rc::Rc<::std::cell::Cell>); 88 | impl ::std::ops::Drop for DropWatch { 89 | fn drop(&mut self) { 90 | self.0.set(self.0.get() + 1); 91 | } 92 | } 93 | 94 | let v: ::std::rc::Rc<::std::cell::Cell<_>> = Default::default(); 95 | 96 | let mut stack = ::stack_dst::Stack::::new(); 97 | // Successful pushes shouldn't call destructors 98 | stack.push_stable(DropWatch(v.clone()), |p| p).ok().unwrap(); 99 | assert_eq!(v.get(), 0); 100 | stack.push_stable(DropWatch(v.clone()), |p| p).ok().unwrap(); 101 | assert_eq!(v.get(), 0); 102 | stack.push_stable(DropWatch(v.clone()), |p| p).ok().unwrap(); 103 | assert_eq!(v.get(), 0); 104 | stack.push_stable(DropWatch(v.clone()), |p| p).ok().unwrap(); 105 | assert_eq!(v.get(), 0); 106 | // Failed push should return the value (which will be dropped) 107 | assert!(stack.push_stable(DropWatch(v.clone()), |p| p).is_err()); 108 | assert_eq!(v.get(), 1); 109 | 110 | // Pop a value, drop increases 111 | stack.pop(); 112 | assert_eq!(v.get(), 2); 113 | // Drop the entire stack, the rest are dropped 114 | drop(stack); 115 | assert_eq!(v.get(), 2 + 3); 116 | } 117 | 118 | #[test] 119 | fn slice_push_panic_safety() { 120 | use std::sync::atomic::{AtomicUsize, Ordering}; 121 | static COUNT: AtomicUsize = AtomicUsize::new(0); 122 | struct Sentinel(bool); 123 | impl Clone for Sentinel { 124 | fn clone(&self) -> Self { 125 | if self.0 { 126 | panic!(); 127 | } else { 128 | Sentinel(self.0) 129 | } 130 | } 131 | } 132 | impl Drop for Sentinel { 133 | fn drop(&mut self) { 134 | COUNT.fetch_add(1, Ordering::SeqCst); 135 | } 136 | } 137 | let input = [Sentinel(false), Sentinel(true)]; 138 | 139 | let _ = ::std::panic::catch_unwind(::std::panic::AssertUnwindSafe(|| { 140 | let mut stack = ::stack_dst::Stack::<[Sentinel], ::stack_dst::buffers::Ptr8>::new(); 141 | let _ = stack.push_cloned(&input); 142 | })); 143 | assert_eq!(COUNT.load(Ordering::SeqCst), 1); 144 | } 145 | 146 | #[test] 147 | // Check that panic safety is maintained, even if the datatype isn't aligned to usize 148 | fn slice_push_panic_safety_unaligned() { 149 | use std::sync::atomic::{AtomicUsize, Ordering}; 150 | static COUNT: AtomicUsize = AtomicUsize::new(0); 151 | struct Sentinel(bool); 152 | impl Clone for Sentinel { 153 | fn clone(&self) -> Self { 154 | if !self.0 { 155 | panic!(); 156 | } else { 157 | Sentinel(self.0) 158 | } 159 | } 160 | } 161 | impl Drop for Sentinel { 162 | fn drop(&mut self) { 163 | COUNT.fetch_add(1, Ordering::SeqCst); 164 | } 165 | } 166 | let input = [ 167 | // 1 good followed by one bad 168 | Sentinel(true), 169 | Sentinel(false), 170 | ]; 171 | 172 | let _ = ::std::panic::catch_unwind(::std::panic::AssertUnwindSafe(|| { 173 | let mut stack = ::stack_dst::Stack::<[Sentinel], _>::with_buffer( 174 | [::std::mem::MaybeUninit::new(0xFFu8); 32], 175 | ); 176 | let _ = stack.push_cloned(&input); 177 | })); 178 | assert_eq!(COUNT.load(Ordering::SeqCst), 1); 179 | } 180 | 181 | #[cfg(not(feature = "full_const_generics"))] 182 | mod 
unaligned { 183 | use stack_dst::Stack; 184 | use std::any::Any; 185 | type Buf8_16 = ::stack_dst::buffers::ArrayBuf; 186 | #[test] 187 | #[should_panic] 188 | fn push_stable() { 189 | let mut stack = Stack::::new(); 190 | let _ = stack.push_stable(123u32, |v| v as _); 191 | } 192 | #[test] 193 | #[should_panic] 194 | #[cfg(feature = "unsize")] 195 | fn push() { 196 | let mut stack = Stack::::new(); 197 | let _ = stack.push(123u32); 198 | } 199 | #[test] 200 | #[should_panic] 201 | fn push_cloned() { 202 | let mut stack = Stack::<[u32], Buf8_16>::new(); 203 | let _ = stack.push_cloned(&[123u32]); 204 | } 205 | #[test] 206 | #[should_panic] 207 | fn push_copied() { 208 | let mut stack = Stack::<[u32], Buf8_16>::new(); 209 | let _ = stack.push_copied(&[123u32]); 210 | } 211 | #[test] 212 | #[should_panic] 213 | fn push_from_iter() { 214 | let mut stack = Stack::<[u32], Buf8_16>::new(); 215 | let _ = stack.push_from_iter(0..1); 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /tests/value.rs: -------------------------------------------------------------------------------- 1 | extern crate stack_dst; 2 | 3 | type Value2w = stack_dst::Value; 4 | type Value8w = stack_dst::Value; 5 | 6 | #[test] 7 | // A trivial check that ensures that methods are correctly called 8 | fn trivial_type() { 9 | let val = Value2w::>::new_stable(1234u32, |p| p).unwrap(); 10 | assert!(*val == 1234); 11 | assert!(*val != 1233); 12 | } 13 | 14 | #[test] 15 | // Create an instance with a Drop implementation, and ensure the drop handler fires when destructed 16 | // This also ensures that lifetimes are correctly handled 17 | fn ensure_drop() { 18 | use std::cell::Cell; 19 | #[derive(Debug)] 20 | struct Struct<'a>(&'a Cell); 21 | impl<'a> Drop for Struct<'a> { 22 | fn drop(&mut self) { 23 | self.0.set(true); 24 | } 25 | } 26 | 27 | let flag = Cell::new(false); 28 | let val = Value2w::::new_stable(Struct(&flag), |p| p).unwrap(); 29 | assert!(flag.get() == false); 30 | drop(val); 31 | assert!(flag.get() == true); 32 | } 33 | 34 | #[test] 35 | fn many_instances() { 36 | trait TestTrait { 37 | fn get_value(&self) -> u32; 38 | } 39 | 40 | #[inline(never)] 41 | fn instance_one() -> Value2w { 42 | #[derive(Debug)] 43 | struct OneStruct(u32); 44 | impl TestTrait for OneStruct { 45 | fn get_value(&self) -> u32 { 46 | self.0 47 | } 48 | } 49 | Value2w::new_stable(OneStruct(12345), |p| p as _).unwrap() 50 | } 51 | 52 | #[inline(never)] 53 | fn instance_two() -> Value2w { 54 | #[derive(Debug)] 55 | struct TwoStruct; 56 | impl TestTrait for TwoStruct { 57 | fn get_value(&self) -> u32 { 58 | 54321 59 | } 60 | } 61 | Value2w::new_stable(TwoStruct, |p| p as _).unwrap() 62 | } 63 | 64 | let i1 = instance_one(); 65 | let i2 = instance_two(); 66 | assert_eq!(i1.get_value(), 12345); 67 | assert_eq!(i2.get_value(), 54321); 68 | } 69 | 70 | #[test] 71 | fn closure() { 72 | let v1 = 1234u64; 73 | let c: Value8w String> = Value8w::new_stable(|| format!("{}", v1), |p| p as _) 74 | .map_err(|_| "Oops") 75 | .unwrap(); 76 | assert_eq!(c(), "1234"); 77 | } 78 | 79 | #[test] 80 | fn oversize() { 81 | use std::any::Any; 82 | const MAX_SIZE_PTRS: usize = 7; 83 | assert!(Value8w::::new_stable([0usize; MAX_SIZE_PTRS], |p| p).is_ok()); 84 | assert!(Value8w::::new_stable([0usize; MAX_SIZE_PTRS + 1], |p| p).is_err()); 85 | } 86 | 87 | #[test] 88 | fn option() { 89 | use std::any::Any; 90 | assert!(Some(Value8w::::new_stable("foo", |p| p).unwrap()).is_some()); 91 | } 92 | 93 | #[test] 94 | #[should_panic] 
95 | fn stable_closure_different_pointer() { 96 | use std::fmt::Debug; 97 | static BIG_VALUE: [i32; 4] = [0, 0, 0, 0]; 98 | // Type confusion via a different pointer 99 | let _ = Value8w::::new_stable(123, |_| &BIG_VALUE as &dyn Debug); 100 | } 101 | #[test] 102 | #[should_panic] 103 | fn stable_closure_subset() { 104 | use std::fmt::Debug; 105 | let _ = Value8w::::new_stable((1, 2), |v| &v.0 as &dyn Debug); 106 | } 107 | 108 | // Various checks that ensure that any way of creating a structure also checks the alignment 109 | // - In the future, these would compile-error (using const-generics) 110 | #[cfg(not(feature = "full_const_generics"))] 111 | mod unaligned { 112 | use stack_dst::Value; 113 | use std::any::Any; 114 | 115 | type BufU8_16 = ::stack_dst::buffers::ArrayBuf; 116 | 117 | #[test] 118 | #[should_panic] 119 | fn new_stable() { 120 | let _ = Value::::new_stable(1234u32, |v| v); 121 | } 122 | #[test] 123 | #[should_panic] 124 | fn in_buffer_stable() { 125 | let _ = Value::::in_buffer_stable( 126 | [::std::mem::MaybeUninit::new(0u8); 16], 127 | 1234u32, 128 | |v| v, 129 | ); 130 | } 131 | #[test] 132 | #[should_panic] 133 | #[cfg(feature = "unsize")] 134 | fn new() { 135 | let _ = Value::::new(1234u32); 136 | } 137 | #[test] 138 | #[should_panic] 139 | #[cfg(feature = "unsize")] 140 | fn in_buffer() { 141 | let _ = Value::::in_buffer([::std::mem::MaybeUninit::new(0u8); 16], 1234u32); 142 | } 143 | #[test] 144 | #[should_panic] 145 | #[cfg(all(feature = "unsize", feature = "alloc"))] 146 | fn new_or_boxed() { 147 | let _ = Value::::new_or_boxed(1234u32); 148 | } 149 | #[test] 150 | #[should_panic] 151 | fn empty_slice() { 152 | let _ = Value::<[u32], BufU8_16>::empty_slice(); 153 | } 154 | #[test] 155 | #[should_panic] 156 | fn empty_slice_with_buffer() { 157 | let _ = Value::<[u32], _>::empty_slice_with_buffer([::std::mem::MaybeUninit::new(0u8); 16]); 158 | } 159 | } 160 | --------------------------------------------------------------------------------
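The tests above exercise the same API surface from three angles: `Value` for a single inline DST, `Stack` for last-in-first-out storage, and `Fifo` for first-in-first-out storage. The sketch below (not part of the repository) ties them together in one program; the `dyn Display` element type and the `buffers::Ptr8` backing buffer are assumptions chosen to mirror the buffer aliases used in the tests and doc-tests, and the closure passed to each `*_stable` constructor must return the pointer it was given, as `check_fat_pointer` asserts.

    // Usage sketch only -- `dyn Display` and `buffers::Ptr8` are assumed here,
    // mirroring the aliases that appear in the tests and doc-tests.
    use core::fmt::Display;

    type Buf = stack_dst::buffers::Ptr8;

    fn main() {
        // Single inline DST value (cf. tests/value.rs)
        let val = stack_dst::Value::<dyn Display, Buf>::new_stable(12345u32, |p| p as _)
            .expect("Insufficient size");
        assert_eq!(format!("{}", val), "12345");

        // Last-in, first-out storage (cf. tests/stack.rs)
        let mut stack = stack_dst::Stack::<dyn Display, Buf>::new();
        stack.push_stable(1u8, |p| p as _).ok().expect("push failed");
        stack.push_stable("two", |p| p as _).ok().expect("push failed");
        assert_eq!(format!("{}", stack.top().unwrap()), "two");

        // First-in, first-out storage (cf. tests/fifo.rs)
        let mut fifo = stack_dst::Fifo::<dyn Display, Buf>::new();
        fifo.push_back_stable(1u8, |p| p as _).ok().expect("push failed");
        fifo.push_back_stable("two", |p| p as _).ok().expect("push failed");
        assert_eq!(format!("{}", fifo.front().unwrap()), "1");
    }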