├── .gitignore ├── .github ├── FUNDING.yml └── workflows │ └── rust.yml ├── Cargo.toml ├── rustfmt.toml ├── README.md ├── CHANGELOG.md └── src ├── volgrid2d_strided.rs ├── volgrid2d.rs ├── lib.rs ├── volregion.rs ├── voladdress_.rs ├── volseries.rs └── volblock.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [Lokathor] 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "voladdress" 3 | description = "A crate for easy volatile memory abstraction." 4 | repository = "https://github.com/rust-console/voladdress" 5 | version = "1.4.0" 6 | authors = ["Lokathor ", "Thomas Winwood "] 7 | edition = "2021" 8 | license = "Zlib OR Apache-2.0 OR MIT" 9 | 10 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | # Based on 2 | # https://github.com/rust-lang/rustfmt/blob/rustfmt-1.4.19/Configurations.md 3 | 4 | # Stable 5 | edition = "2018" 6 | fn_args_layout = "Compressed" 7 | max_width = 80 8 | tab_spaces = 2 9 | use_field_init_shorthand = true 10 | use_try_shorthand = true 11 | use_small_heuristics = "Max" 12 | 13 | # Unstable 14 | format_code_in_doc_comments = true 15 | merge_imports = true 16 | wrap_comments = true 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![License:Zlib](https://img.shields.io/badge/License-Zlib-green.svg)](https://opensource.org/licenses/Zlib) 2 | [![License:Apache2](https://img.shields.io/badge/License-Apache2-green.svg)](https://www.apache.org/licenses/LICENSE-2.0) 3 | [![License:MIT](https://img.shields.io/badge/License-MIT-green.svg)](https://opensource.org/licenses/MIT) 4 | 5 | [![crates.io](https://img.shields.io/crates/v/voladdress.svg)](https://crates.io/crates/voladdress) 6 | [![docs.rs](https://docs.rs/voladdress/badge.svg)](https://docs.rs/voladdress) 7 | 8 | # voladdress 9 | 10 | A crate to make volatile memory operations easy to work with. 11 | 12 | This is primarily used for Memory Mapped IO (MMIO). 
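A minimal sketch of typical use (the register address below is purely illustrative; real addresses come from your device's memory map):

```rust
use voladdress::{Safe, VolAddress};

// A hypothetical read/write 16-bit register at a made-up address.
pub const EXAMPLE_REG: VolAddress<u16, Safe, Safe> =
  unsafe { VolAddress::new(0x0400_0000) };

fn demo() {
  let v = EXAMPLE_REG.read(); // volatile read
  EXAMPLE_REG.write(v | 1); // volatile write
  EXAMPLE_REG.apply(|v| *v &= !1); // read-modify-write helper
}
```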
13 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: {} 5 | pull_request: {} 6 | 7 | env: 8 | RUST_BACKTRACE: 1 9 | 10 | jobs: 11 | test: 12 | name: Test Rust ${{ matrix.rust }} on ${{ matrix.os }} 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | include: 17 | - { rust: 1.57.0 } 18 | - { rust: stable } 19 | - { rust: beta } 20 | - { rust: nightly } 21 | steps: 22 | - uses: hecrj/setup-rust-action@v1 23 | with: 24 | rust-version: ${{ matrix.rust }} 25 | - uses: actions/checkout@v2 26 | - run: cargo test --verbose --no-default-features 27 | - run: cargo test --verbose 28 | - run: cargo test --verbose --all-features 29 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 1.4.0 4 | 5 | * Added `width` and `height` methods to `VolGrid2d`. 6 | 7 | ## 1.3.0 8 | 9 | * New: `VolGrid2d` works like video memory (accessed with `(x,y)`) 10 | * New `VolGrid2dStrided` has many 11 | "frames" of `VolGrid2d`, each offset by the given stride in bytes. The stride 12 | can be larger, equal to, or smaller than the number of bytes per frame. 13 | * New: `VolRegion` is a 1d span with a dynamic size, like a slice. 14 | * Removed: the "experimental" cargo features were removed from Cargo.toml. 15 | If you had opted-in to using them you will have to adjust your `[dependencies]` entry. 16 | 17 | ## 1.2.3 18 | 19 | * Fixed up unclear documentation. 20 | 21 | ## 1.2.2 22 | 23 | * **Soundness:** Previous versions of the iterators in 24 | this crate (since 0.4) had a math error in the `nth` 25 | method, causing them to potentially go out of bounds. 26 | 27 | ## 1.2 28 | 29 | * The `Safe` and `Unsafe` types now also derive `Default`, `Clone`, and `Copy`. 30 | This doesn't do too much since they're already ZSTs with a public constructor, 31 | but it doesn't hurt. 32 | * `VolAddress`: Added const fn `as_ptr` and `as_mut_ptr`. 33 | * `VolBlock`: Added const fn `as_usize`, `as_ptr`, `as_mut_ptr`, and non-const 34 | fn `as_slice_ptr` and `as_slice_mut_ptr`. 35 | * It turns out that getting the pointer to a `VolBlock` by indexing to the 0th 36 | element and then turning that into a usize and then turning that into a 37 | pointer was enough layers to confuse LLVM. Specifically, all volatile accesses 38 | have an "aligned and non-null" debug check, which wasn't getting optimized out 39 | of debug builds with `build-std`, even with `opt-level=3`. Providing these 40 | more direct conversion methods does seem to help LLVM eliminate that non-null 41 | check more often. 42 | * Added `core::fmt::Pointer` impls. While `Debug` formats the address along with 43 | extra metadata, `Pointer` just formats the address. 44 | 45 | ## 1.1 46 | 47 | * Added `VolAddress::as_volblock` for (unsafely) converting from a `VolAddress` 48 | to an array into a `VolBlock`. This is totally fine in any case I've ever 49 | seen, but the general policy of the crate is that any creation of a `VolBlock` 50 | be an unsafe action, and so this is unsafe for consistency. 51 | 52 | * Also adds the `experimental_volmatrix` cargo feature, which adds another 53 | opt-in type for people to experiment with. 
54 | 55 | ## 1.0.2 56 | 57 | * Temporarily adds the `experimental_volregion` feature, allowing a person to 58 | *experimentally* opt-in to the `VolRegion` type. This type will be part of a 59 | 1.1 release of the crate at some point. This feature is **not** part of the 60 | crate's SemVer, and it will go away entirely once `VolRegion` becomes a stable 61 | part of the crate. 62 | 63 | ## 1.0.0 64 | 65 | * Initial stable release. 66 | -------------------------------------------------------------------------------- /src/volgrid2d_strided.rs: -------------------------------------------------------------------------------- 1 | use crate::{VolAddress, VolGrid2d}; 2 | 3 | /// Models having many "frames" of [`VolGrid2d`] within a chunk of memory. 4 | /// 5 | /// Each frame may or may not overlap, according to the stride specified. 6 | /// * If the byte stride per frame is less than the byte size of a frame, the 7 | /// frames will have some amount of overlap. 8 | /// * If the stride bytes equals the frame bytes, then each frame will directly 9 | /// follow the previous one. 10 | /// * If the stride bytes exceeds the frame bytes, then there will be some 11 | /// amount of gap between frames. 12 | /// 13 | /// ## Generic Parameters 14 | /// * `T` / `R` / `W`: These parameters are applied to the [`VolAddress`] type 15 | /// returned when accessing the block in any way (indexing, iteration, etc). 16 | /// * `WIDTH` / `HEIGHT`: the width and height of a given frame. 17 | /// * `FRAMES`: the number of frames. 18 | /// * `BYTE_STRIDE`: The number of bytes between the start of each frame. 19 | /// 20 | /// ## Safety 21 | /// * This type stores a base [`VolAddress`] internally, and so you must follow 22 | /// all of those safety rules. Notably, the base address must never be zero. 23 | /// * The address space must legally contain `WIDTH * HEIGHT * FRAMES` 24 | /// contiguous values of the `T` type, starting from the base address. 25 | /// * The memory block must not wrap around past the end of the address space. 26 | #[repr(transparent)] 27 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 28 | pub struct VolGrid2dStrided< 29 | T, 30 | R, 31 | W, 32 | const WIDTH: usize, 33 | const HEIGHT: usize, 34 | const FRAMES: usize, 35 | const BYTE_STRIDE: usize, 36 | > { 37 | pub(crate) base: VolAddress, 38 | } 39 | 40 | impl< 41 | T, 42 | R, 43 | W, 44 | const WIDTH: usize, 45 | const HEIGHT: usize, 46 | const FRAMES: usize, 47 | const BYTE_STRIDE: usize, 48 | > Clone for VolGrid2dStrided 49 | { 50 | #[inline] 51 | #[must_use] 52 | fn clone(&self) -> Self { 53 | *self 54 | } 55 | } 56 | impl< 57 | T, 58 | R, 59 | W, 60 | const WIDTH: usize, 61 | const HEIGHT: usize, 62 | const FRAMES: usize, 63 | const BYTE_STRIDE: usize, 64 | > Copy for VolGrid2dStrided 65 | { 66 | } 67 | 68 | impl< 69 | T, 70 | R, 71 | W, 72 | const WIDTH: usize, 73 | const HEIGHT: usize, 74 | const FRAMES: usize, 75 | const BYTE_STRIDE: usize, 76 | > VolGrid2dStrided 77 | { 78 | /// A [`VolAddress`] with multi-frame access pattern. 79 | /// 80 | /// # Safety 81 | /// 82 | /// The given address must be a valid for `WIDTH * HEIGHT` elements per frame, 83 | /// at frame indexes `0..FRAMES`, with all non-zero frame indexes being offset 84 | /// by `BYTE_STRIDE` bytes from the previous frame. 85 | #[inline] 86 | #[must_use] 87 | pub const unsafe fn new(address: usize) -> Self { 88 | Self { base: VolAddress::new(address) } 89 | } 90 | 91 | /// Gets a single frame as a `VolGrid2d`. 92 | /// 93 | /// Returns `None` if `z` is out of bounds. 
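  ///
  /// Illustrative example (the address, dimensions, frame count, and stride
  /// here are made up for the example, not any real device's layout):
  /// ```no_run
  /// # use voladdress::*;
  /// const FRAMES: VolGrid2dStrided<u16, Safe, Safe, 8, 8, 2, 0x100> =
  ///   unsafe { VolGrid2dStrided::new(0x0600_0000) };
  /// // Frame 1 starts one stride (0x100 bytes) past the base address.
  /// assert_eq!(FRAMES.get_frame(1).unwrap().as_usize(), 0x0600_0100);
  /// // Only frame indexes `0..FRAMES` exist.
  /// assert!(FRAMES.get_frame(2).is_none());
  /// ```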
94 | #[inline] 95 | #[must_use] 96 | pub const fn get_frame( 97 | self, z: usize, 98 | ) -> Option> { 99 | if z < FRAMES { 100 | // SAFETY: 101 | // - `z` is in bounds of `FRAMES`. 102 | // - `VolGrid3d::new` safety condition guarantees that all `VolGrid2d` 103 | // values we could construct for `0..FRAMES` are valid. 104 | Some(unsafe { 105 | VolGrid2d { 106 | base: self.base.cast::().add(z * BYTE_STRIDE).cast::(), 107 | } 108 | }) 109 | } else { 110 | None 111 | } 112 | } 113 | } 114 | 115 | #[test] 116 | fn test_vol_grid_2d_strided() { 117 | let small: VolGrid2dStrided = 118 | unsafe { VolGrid2dStrided::new(0x1000) }; 119 | assert_eq!(small.get_frame(0).unwrap().as_usize(), 0x1000); 120 | assert_eq!(small.get_frame(1).unwrap().as_usize(), 0x1100); 121 | assert_eq!(small.get_frame(2).unwrap().as_usize(), 0x1200); 122 | assert_eq!(small.get_frame(3).unwrap().as_usize(), 0x1300); 123 | assert_eq!(small.get_frame(4).unwrap().as_usize(), 0x1400); 124 | assert_eq!(small.get_frame(5).unwrap().as_usize(), 0x1500); 125 | assert!(small.get_frame(6).is_none()); 126 | } 127 | -------------------------------------------------------------------------------- /src/volgrid2d.rs: -------------------------------------------------------------------------------- 1 | use crate::{VolAddress, VolBlock}; 2 | 3 | /// A 2D version of [`VolBlock`], with a const generic `WIDTH` and `HEIGHT`. 4 | /// 5 | /// This is intended for "video-like" memory that is better to logically access 6 | /// with an `x` and `y` position rather than a single `i` index. It's just an 7 | /// alternative way to manage a `VolBlock`. 8 | /// 9 | /// ## Generic Parameters 10 | /// * `T` / `R` / `W`: These parameters are applied to the [`VolAddress`] type 11 | /// returned when accessing the block in any way (indexing, iteration, etc). 12 | /// * `WIDTH` / `HEIGHT`: the matrix width and height, the total element count 13 | /// is `WIDTH * HEIGHT`. 14 | /// 15 | /// ## Safety 16 | /// * This type stores a base [`VolAddress`] internally, and so you must follow 17 | /// all of those safety rules. Notably, the base address must never be zero. 18 | /// * The address space must legally contain `WIDTH * HEIGHT` contiguous values 19 | /// of the `T` type, starting from the base address. 20 | /// * The memory block must not wrap around past the end of the address space. 21 | #[repr(transparent)] 22 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 23 | pub struct VolGrid2d { 24 | pub(crate) base: VolAddress, 25 | } 26 | 27 | impl Clone 28 | for VolGrid2d 29 | { 30 | #[inline] 31 | #[must_use] 32 | fn clone(&self) -> Self { 33 | *self 34 | } 35 | } 36 | impl Copy 37 | for VolGrid2d 38 | { 39 | } 40 | 41 | impl 42 | VolGrid2d 43 | { 44 | /// Converts the address into a `VolGrid2d` 45 | /// 46 | /// # Safety 47 | /// 48 | /// The given address must be a valid [`VolAddress`] at each position in the 49 | /// grid, as if you were making a `VolBlock`. 50 | #[inline] 51 | #[must_use] 52 | pub const unsafe fn new(address: usize) -> Self { 53 | Self { base: VolAddress::new(address) } 54 | } 55 | 56 | /// The grid's width. 57 | #[inline] 58 | #[must_use] 59 | pub const fn width(self) -> usize { 60 | WIDTH 61 | } 62 | 63 | /// The grid's height. 64 | #[inline] 65 | #[must_use] 66 | pub const fn height(self) -> usize { 67 | HEIGHT 68 | } 69 | 70 | /// Creates a `VolGrid2d` from an appropriately sized `VolBlock`. 71 | /// 72 | /// # Panics 73 | /// 74 | /// When `B != WIDTH * HEIGHT`. 75 | /// Note that such a panic should happen at compile time. 
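  ///
  /// Illustrative example (the base address and dimensions are arbitrary):
  /// ```no_run
  /// # use voladdress::*;
  /// const BLOCK: VolBlock<u16, Safe, Safe, 32> =
  ///   unsafe { VolBlock::new(0x0600_0000) };
  /// // A 32-element block can be viewed as an 8 x 4 grid.
  /// let grid: VolGrid2d<u16, Safe, Safe, 8, 4> = VolGrid2d::from_block(BLOCK);
  /// assert_eq!(grid.width() * grid.height(), BLOCK.len());
  /// ```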
76 | #[inline] 77 | #[must_use] 78 | pub const fn from_block(block: VolBlock) -> Self { 79 | // TODO: one day in the distant future, when full const_generic is 80 | // implemented in rust, someone may be interested in coming down from their 81 | // flying car, replace the `B` parameter by `{ WIDTH * HEIGHT }` and remove 82 | // the assert! (same with into_block) 83 | assert!(B == WIDTH * HEIGHT); 84 | // SAFETY: block's safety requirement is that all VolAddress accessible 85 | // within it are safe, Self can only access those addresses, so 86 | // Self::new requirement is fulfilled. 87 | Self { base: block.base } 88 | } 89 | 90 | /// Turn a `VolGrid2d` into its `VolBlock` equivalent. 91 | /// 92 | /// # Panics 93 | /// 94 | /// When `B != WIDTH * HEIGHT`. 95 | /// Note that such a panic should happen at compile time. 96 | #[inline] 97 | #[must_use] 98 | pub const fn into_block(self) -> VolBlock { 99 | assert!(B == WIDTH * HEIGHT); 100 | // SAFETY: block's safety requirement is that all VolAddress accessible 101 | // within it are safe, all constructors of `VolGrid2d` already 102 | // guarantees that. 103 | VolBlock { base: self.base } 104 | } 105 | 106 | /// Gets the address of the `(x,y)` given. 107 | /// 108 | /// Returns `None` if either coordinate it out of bounds. 109 | #[inline] 110 | #[must_use] 111 | pub const fn get(self, x: usize, y: usize) -> Option> { 112 | if x < WIDTH && y < HEIGHT { 113 | // SAFETY: if condition 114 | Some(unsafe { self.base.add(x + y * WIDTH) }) 115 | } else { 116 | None 117 | } 118 | } 119 | 120 | /// Indexes the address of the `(x,y)` given. 121 | /// 122 | /// ## Panics 123 | /// 124 | /// * If either coordinate it out of bounds this will panic. 125 | #[inline] 126 | #[must_use] 127 | #[track_caller] 128 | pub const fn index(self, x: usize, y: usize) -> VolAddress { 129 | assert!(x < WIDTH); 130 | assert!(y < HEIGHT); 131 | // safety: asserts 132 | unsafe { self.base.add(x + y * WIDTH) } 133 | } 134 | 135 | /// Get a single row of the grid as a [`VolBlock`]. 136 | #[inline] 137 | #[must_use] 138 | pub const fn get_row(self, y: usize) -> Option> { 139 | if y < HEIGHT { 140 | // SAFETY: 141 | // - `y < HEIGHT` 142 | // - `VolGrid2d::new` safety condition guarantees that all addresses 143 | // constructible for `VolBlock` are valid `VolAddress`, which 144 | // is the safety condition of `VolBlock::new`. 145 | Some(unsafe { VolBlock { base: self.base.add(y * WIDTH) } }) 146 | } else { 147 | None 148 | } 149 | } 150 | 151 | /// Converts the `VolGrid2d` the `usize` for the start of the grid. 152 | #[inline] 153 | #[must_use] 154 | pub const fn as_usize(self) -> usize { 155 | self.base.address.get() 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![deny(missing_docs)] 3 | #![allow(clippy::iter_nth_zero)] 4 | #![cfg_attr(test, allow(clippy::redundant_clone))] 5 | #![cfg_attr(test, allow(bad_style))] 6 | 7 | //! A crate for working with volatile locations, particularly Memory Mapped IO 8 | //! (MMIO). 9 | //! 10 | //! ## Types 11 | //! 12 | //! The crate's core type is [VolAddress]. 13 | //! * `T` is the element type stored at the address. It is expected that your 14 | //! element type will be something that the CPU can read and write with a 15 | //! single instruction. Generally this will be a single integer, float, data 16 | //! 
pointer, function pointer, or a `repr(transparent)` wrapper around one of 17 | //! the other types just listed. 18 | //! * `R` should be [Safe], [Unsafe], or `()`. When `R` is `Safe` then you can 19 | //! *safely* read from the address. When `R` is `Unsafe` then you can 20 | //! *unsafely* read from the address. If `R` is any other type then you cannot 21 | //! read from the address at all. While any possible type can be used here, if 22 | //! reading isn't intended you should use `()` as the canonical null type. 23 | //! * `W` works like `R` in terms of what types you should use with it, but it 24 | //! controls writing instead of reading. 25 | //! 26 | //! The `VolAddress` type uses the "unsafe creation, then safe use" style. This 27 | //! allows us to use the fewest `unsafe` blocks overall. Once a `VolAddress` has 28 | //! been unsafely declared, each individual operation using it is generally 29 | //! going to be safe. Some addresses might be unsafe to use even after creation, 30 | //! but this is relatively rare. 31 | //! 32 | //! Here are some example declarations. Note that the address values used are 33 | //! for illustration purposes only, and will vary for each device. 34 | //! ``` 35 | //! # use voladdress::*; 36 | //! // read-only 37 | //! pub const VCOUNT: VolAddress<u16, Safe, ()> = 38 | //! unsafe { VolAddress::new(0x0400_0006) }; 39 | //! 40 | //! // write-only 41 | //! pub const BG0_XOFFSET: VolAddress<u16, (), Safe> = 42 | //! unsafe { VolAddress::new(0x0400_0010) }; 43 | //! 44 | //! // read-write 45 | //! pub const BLDALPHA_A: VolAddress<u16, Safe, Safe> = 46 | //! unsafe { VolAddress::new(0x0400_0052) }; 47 | //! 48 | //! // this location has some illegal bit patterns, so it's unsafe 49 | //! // to write to with any random `u16` you might have. 50 | //! pub const RAW_DISPLAY_CONTROL: VolAddress<u16, Safe, Unsafe> = 51 | //! unsafe { VolAddress::new(0x0400_0000) }; 52 | //! 53 | //! // If we use a transparent wrapper and getter/setters, we can 54 | //! // prevent the illegal bit patterns, and now it's safe to write. 55 | //! #[repr(transparent)] 56 | //! pub struct DisplayCtrl(u16); 57 | //! pub const DISPLAY_CONTROL: VolAddress<DisplayCtrl, Safe, Safe> = 58 | //! unsafe { VolAddress::new(0x0400_0000) }; 59 | //! ``` 60 | //! 61 | //! ### Multiple Locations 62 | //! 63 | //! Often we have many identically typed values at a regular pattern in memory. 64 | //! These are handled with two very similar types. 65 | //! 66 | //! [VolBlock] is for when there's many values tightly 67 | //! packed, with no space in between. Use this type when you want to emulate how 68 | //! an array works. 69 | //! 70 | //! [VolSeries] is for when you have 71 | //! many values strided out at regular intervals, but they have extra space in 72 | //! between each element. 73 | //! 74 | //! In both cases, there's two basic ways to work with the data: 75 | //! * Using `len`, `index`, and `get`, you can produce individual `VolAddress` 76 | //! values similar to how a slice can produce references into the slice's data 77 | //! range. 78 | //! * Using `iter` or `iter_range` you can produce an iterator that will go 79 | //! over the various `VolAddress` values during the iteration. 80 | //! 81 | //! ```no_run 82 | //! # use voladdress::*; 83 | //! pub const BG_PALETTE: VolBlock<u16, Safe, Safe, 256> = 84 | //! unsafe { VolBlock::new(0x0500_0000) }; 85 | //! 86 | //! pub const COLOR_RED: u16 = 0b11111; 87 | //! BG_PALETTE.index(0).write(COLOR_RED); 88 | //! 89 | //! pub const COLOR_GREEN: u16 = 0b11111_00000; 90 | //! BG_PALETTE.iter_range(1..).for_each(|a| a.write(COLOR_GREEN)); 91 | //! 92 | //!
pub const MY_ROM_PALETTE_DATA: [u16; 256] = [0xAB; 256]; 93 | //! BG_PALETTE 94 | //! .iter() 95 | //! .zip(MY_ROM_PALETTE_DATA.iter().copied()) 96 | //! .for_each(|(a, c)| a.write(c)); 97 | //! ``` 98 | //! 99 | //! ### No Lifetimes 100 | //! 101 | //! Note that `VolAddress`, `VolBlock`, and `VolSeries` are all `Copy` data 102 | //! types, without any lifetime parameter. It is assumed that the MMIO memory 103 | //! map of your device is a fixed part of the device, and that the types from 104 | //! this crate will be used to create `const` declarations that describe that 105 | //! single memory map which is unchanging during the entire program. If the 106 | //! memory mapping of your device *can* change then you must account for this in 107 | //! your declarations. 108 | 109 | use core::{ 110 | marker::PhantomData, 111 | num::NonZeroUsize, 112 | ptr::{read_volatile, write_volatile}, 113 | }; 114 | 115 | mod voladdress_; 116 | pub use voladdress_::*; 117 | 118 | mod volblock; 119 | pub use volblock::*; 120 | 121 | mod volseries; 122 | pub use volseries::*; 123 | 124 | mod volgrid2d; 125 | pub use volgrid2d::*; 126 | 127 | mod volgrid2d_strided; 128 | pub use volgrid2d_strided::*; 129 | 130 | mod volregion; 131 | pub use volregion::*; 132 | 133 | /// Lets you put "Safe" into a generic type parameter. 134 | /// 135 | /// This type affects the read and write methods of the volatile address types, 136 | /// but has no effect on its own. 137 | #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 138 | pub struct Safe; 139 | 140 | /// Lets you put "Unsafe" into a generic type parameter. 141 | /// 142 | /// This type affects the read and write methods of the volatile address types, 143 | /// but has no effect on its own. 144 | #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 145 | pub struct Unsafe; 146 | -------------------------------------------------------------------------------- /src/volregion.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | /// A dynamically sized span of volatile memory. 4 | /// 5 | /// If you think of [VolBlock] as being similar to an array, this type is more 6 | /// similar to a slice. 7 | /// 8 | /// The primary utility of this type is just that it bundles a pointer and 9 | /// length together, which allows you to have safe dynamic bounds checking. Just 10 | /// like with `VolBlock`, It does **not** have a lifetime or participate in 11 | /// borrow checking, and it does **not** enforce exclusive access. 12 | /// 13 | /// A `VolRegion` assumes that elements of the region are directly one after the 14 | /// other (again, like how `VolBlock` works). If you need dynamic bounds 15 | /// checking on a spaced out series of values that would be some other type, 16 | /// which doesn't currently exist in the library. (Open a PR maybe?) 17 | /// 18 | /// ## Generic Parameters 19 | /// * `T` / `R` / `W`: These parameters are applied to the [`VolAddress`] type 20 | /// returned when accessing the region in any way (indexing, iteration, etc). 21 | /// 22 | /// ## Safety 23 | /// * This type stores a base [`VolAddress`] internally, and so you must follow 24 | /// all of those safety rules. Notably, the base address must never be zero. 25 | /// * The region must legally contain `len` contiguous values of the `T` type, 26 | /// starting from the base address. 27 | /// * The region must not wrap around past the end of the address space. 
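///
/// Illustrative example (the base address and length are made up):
/// ```no_run
/// # use voladdress::*;
/// const BLOCK: VolBlock<u16, Safe, Safe, 16> =
///   unsafe { VolBlock::new(0x0600_0000) };
/// // Any block can be viewed as a region of the same length.
/// let region: VolRegion<u16, Safe, Safe> = BLOCK.as_region();
/// assert_eq!(region.len(), 16);
/// // Out-of-bounds access is caught dynamically, like with a slice.
/// assert!(region.get(16).is_none());
/// region.sub_slice(..8).iter().for_each(|a| a.write(0));
/// ```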
28 | #[repr(C)] 29 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 30 | pub struct VolRegion { 31 | pub(crate) addr: VolAddress, 32 | pub(crate) len: usize, 33 | } 34 | impl Clone for VolRegion { 35 | #[inline] 36 | #[must_use] 37 | fn clone(&self) -> Self { 38 | *self 39 | } 40 | } 41 | impl Copy for VolRegion {} 42 | impl core::fmt::Debug for VolRegion { 43 | #[cold] 44 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 45 | write!(f, "VolRegion<{elem_ty}, r{readability}, w{writeability}>({address:#X}, len: {len})", 46 | elem_ty = core::any::type_name::(), 47 | readability=core::any::type_name::(), 48 | writeability=core::any::type_name::(), 49 | address=self.addr.as_usize(), 50 | len=self.len, 51 | ) 52 | } 53 | } 54 | impl From> 55 | for VolRegion 56 | { 57 | #[inline] 58 | #[must_use] 59 | fn from(block: VolBlock) -> Self { 60 | Self { addr: block.base, len: C } 61 | } 62 | } 63 | 64 | impl VolRegion { 65 | /// Constructs a region from raw parts. 66 | /// 67 | /// ## Safety 68 | /// * As per the type docs. 69 | #[inline] 70 | #[must_use] 71 | pub const unsafe fn from_raw_parts( 72 | addr: VolAddress, len: usize, 73 | ) -> Self { 74 | Self { addr, len } 75 | } 76 | 77 | /// Gets the length (in elements) of the region. 78 | #[inline] 79 | #[must_use] 80 | #[allow(clippy::len_without_is_empty)] 81 | pub const fn len(self) -> usize { 82 | self.len 83 | } 84 | 85 | /// Converts the `VolBlock` the `usize` for the start of the block. 86 | #[inline] 87 | #[must_use] 88 | pub const fn as_usize(self) -> usize { 89 | self.addr.address.get() 90 | } 91 | 92 | /// Converts the `VolBlock` into an individual const pointer. 93 | /// 94 | /// This should usually only be used when you need to call a foreign function 95 | /// that expects a pointer. 96 | #[inline] 97 | #[must_use] 98 | pub const fn as_ptr(self) -> *const T { 99 | self.addr.address.get() as *const T 100 | } 101 | 102 | /// Converts the `VolBlock` into an individual mut pointer. 103 | /// 104 | /// This should usually only be used when you need to call a foreign function 105 | /// that expects a pointer. 106 | #[inline] 107 | #[must_use] 108 | pub const fn as_mut_ptr(self) -> *mut T { 109 | self.addr.address.get() as *mut T 110 | } 111 | 112 | /// Converts the `VolBlock` into a const slice pointer. 113 | #[inline] 114 | #[must_use] 115 | // TODO(2022-10-15): const fn this at some point in the future (1.64 minimum) 116 | pub fn as_slice_ptr(self) -> *const [T] { 117 | core::ptr::slice_from_raw_parts( 118 | self.addr.address.get() as *const T, 119 | self.len, 120 | ) 121 | } 122 | 123 | /// Converts the `VolBlock` into an mut slice pointer. 124 | #[inline] 125 | #[must_use] 126 | // TODO(2022-10-15): const fn this at some point in the future (unstable) 127 | pub fn as_slice_mut_ptr(self) -> *mut [T] { 128 | core::ptr::slice_from_raw_parts_mut( 129 | self.addr.address.get() as *mut T, 130 | self.len, 131 | ) 132 | } 133 | 134 | /// Index into the region. 135 | /// 136 | /// ## Panics 137 | /// * If the index requested is out of bounds this will panic. 138 | #[inline] 139 | #[must_use] 140 | #[track_caller] 141 | pub const fn index(self, i: usize) -> VolAddress { 142 | assert!(i < self.len); 143 | unsafe { self.addr.add(i) } 144 | } 145 | 146 | /// Gets `Some(addr)` if in bounds, or `None` if out of bounds. 
147 | #[inline] 148 | #[must_use] 149 | pub const fn get(self, i: usize) -> Option> { 150 | if i < self.len { 151 | Some(unsafe { self.addr.add(i) }) 152 | } else { 153 | None 154 | } 155 | } 156 | 157 | /// Gets a sub-slice of this region as a new region. 158 | /// 159 | /// ## Panics 160 | /// * If either specified end of the range is out of bounds this will panic. 161 | #[inline] 162 | #[must_use] 163 | #[track_caller] 164 | pub fn sub_slice>(self, r: RB) -> Self { 165 | // TODO: some day make this a const fn, once start_bound and end_bound are 166 | // made into const fn, but that requires const trait impls. 167 | use core::ops::Bound; 168 | let start_inclusive: usize = match r.start_bound() { 169 | Bound::Included(i) => *i, 170 | Bound::Excluded(x) => x + 1, 171 | Bound::Unbounded => 0, 172 | }; 173 | assert!(start_inclusive < self.len); 174 | let end_exclusive: usize = match r.end_bound() { 175 | Bound::Included(i) => i + 1, 176 | Bound::Excluded(x) => *x, 177 | Bound::Unbounded => self.len, 178 | }; 179 | assert!(end_exclusive <= self.len); 180 | let len = end_exclusive.saturating_sub(start_inclusive); 181 | Self { addr: unsafe { self.addr.add(start_inclusive) }, len } 182 | } 183 | 184 | /// Gives an iterator over this region. 185 | #[inline] 186 | #[must_use] 187 | pub const fn iter(self) -> VolBlockIter { 188 | VolBlockIter { base: self.addr, count: self.len } 189 | } 190 | 191 | /// Same as `region.sub_slice(range).iter()` 192 | #[inline] 193 | #[must_use] 194 | #[track_caller] 195 | pub fn iter_range>( 196 | self, r: RB, 197 | ) -> VolBlockIter { 198 | self.sub_slice(r).iter() 199 | } 200 | } 201 | 202 | impl VolRegion 203 | where 204 | T: Copy, 205 | { 206 | /// Volatile reads each element into the provided buffer. 207 | /// 208 | /// ## Panics 209 | /// * If the buffer's length is not *exactly* this region's length. 210 | #[inline] 211 | pub fn read_to_slice(self, buffer: &mut [T]) { 212 | assert_eq!(self.len, buffer.len()); 213 | self.iter().zip(buffer.iter_mut()).for_each(|(va, s)| *s = va.read()) 214 | } 215 | } 216 | impl VolRegion 217 | where 218 | T: Copy, 219 | { 220 | /// Volatile reads each element into the provided buffer. 221 | /// 222 | /// ## Panics 223 | /// * If the buffer's length is not *exactly* this region's length. 224 | /// 225 | /// ## Safety 226 | /// * The safety rules of reading this address depend on the device. Consult 227 | /// your hardware manual. 228 | #[inline] 229 | pub unsafe fn read_to_slice(self, buffer: &mut [T]) { 230 | assert_eq!(self.len, buffer.len()); 231 | self.iter().zip(buffer.iter_mut()).for_each(|(va, s)| *s = va.read()) 232 | } 233 | } 234 | 235 | impl VolRegion 236 | where 237 | T: Copy, 238 | { 239 | /// Volatile all slice elements into this region. 240 | /// 241 | /// ## Panics 242 | /// * If the buffer's length is not *exactly* this region's length. 243 | #[inline] 244 | pub fn write_from_slice(self, buffer: &[T]) { 245 | assert_eq!(self.len, buffer.len()); 246 | self.iter().zip(buffer.iter()).for_each(|(va, s)| va.write(*s)) 247 | } 248 | } 249 | impl VolRegion 250 | where 251 | T: Copy, 252 | { 253 | /// Volatile all slice elements into this region. 254 | /// 255 | /// ## Panics 256 | /// * If the buffer's length is not *exactly* this region's length. 257 | /// 258 | /// ## Safety 259 | /// * The safety rules of writing this address depend on the device. Consult 260 | /// your hardware manual. 
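  ///
  /// Illustrative sketch (the address is made up; because this region's write
  /// permission is `Unsafe`, the call itself is `unsafe`):
  /// ```no_run
  /// # use voladdress::*;
  /// const BASE: VolAddress<u16, Safe, Unsafe> =
  ///   unsafe { VolAddress::new(0x0400_0000) };
  /// let region: VolRegion<u16, Safe, Unsafe> =
  ///   unsafe { VolRegion::from_raw_parts(BASE, 2) };
  /// // The buffer length must match the region length exactly.
  /// unsafe { region.write_from_slice(&[1, 2]) };
  /// ```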
261 | #[inline] 262 | pub unsafe fn write_from_slice(self, buffer: &[T]) { 263 | assert_eq!(self.len, buffer.len()); 264 | self.iter().zip(buffer.iter()).for_each(|(va, s)| va.write(*s)) 265 | } 266 | } 267 | 268 | #[test] 269 | fn test_volregion_sub_slice() { 270 | let region: VolRegion = 271 | unsafe { VolRegion::from_raw_parts(VolAddress::new(1), 10) }; 272 | assert_eq!(region.len, 10); 273 | 274 | let sub_region = region.sub_slice(..); 275 | assert_eq!(sub_region.len, 10); 276 | 277 | let sub_region = region.sub_slice(2..); 278 | assert_eq!(sub_region.len, 10 - 2); 279 | 280 | let sub_region = region.sub_slice(..3); 281 | assert_eq!(sub_region.len, 3); 282 | 283 | let sub_region = region.sub_slice(4..6); 284 | assert_eq!(sub_region.len, 2); 285 | } 286 | -------------------------------------------------------------------------------- /src/voladdress_.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | /// A volatile address. 4 | /// 5 | /// This type stores a memory address and provides ergonomic volatile access to 6 | /// said memory address. 7 | /// 8 | /// Note that this type has several methods for accessing the data at the 9 | /// address specified, and a particular instance of this type can use them 10 | /// unsafely, use them safely, or not use them at all based on the generic 11 | /// values of `R` and `W` (explained below). 12 | /// * `read` 13 | /// * `write` 14 | /// * `apply` (reads, runs a function, then writes) 15 | /// 16 | /// ## Generic Parameters 17 | /// 18 | /// * `T`: The type of the value stored at the address. 19 | /// * The target type type must impl `Copy` for reading and writing to be 20 | /// allowed. 21 | /// * `R`: If the address is readable. 22 | /// * If `R=Safe` then you can safely read from the address. 23 | /// * If `R=Unsafe` then you can unsafely read from the address. 24 | /// * Otherwise you cannot read from the address. 25 | /// * `W`: If the address is writable. 26 | /// * If `W=Safe` then you can safely write to the address. 27 | /// * If `W=Unsafe` then you can unsafely write to the address. 28 | /// * Otherwise you cannot write to the address. 29 | /// 30 | /// The `VolAddress` type is intended to represent a single value of a `T` type 31 | /// that is the size of a single machine register (or less). 32 | /// * If there's an array of contiguous `T` values you want to model, consider 33 | /// using [`VolBlock`] instead. 34 | /// * If there's a series of strided `T` values you want to model, consider 35 | /// using [`VolSeries`] instead. 36 | /// * If the `T` type is larger than a single machine register it's probably 37 | /// **not** a good fit for the `VolAddress` abstraction. 38 | /// 39 | /// ## Safety 40 | /// This type's safety follows the "unsafe creation, then safe use" strategy. 41 | /// 42 | /// * **Validity Invariant**: The address of a `VolAddress` must always be 43 | /// non-zero, or you will instantly trigger UB. 44 | /// * **Safety Invariant**: The address of a `VolAddress` must be an aligned and 45 | /// legal address for a `T` type value (with correct `R` and `W` permissions) 46 | /// within the device's memory space, otherwise the `read` and `write` methods 47 | /// will trigger UB when called. 48 | /// * **Synchronization Invariant**: Volatile access has **no** cross-thread 49 | /// synchronization behavior within the LLVM memory model. The results of 50 | /// *all* volatile access is target-dependent, including cross-thread access. 
51 | /// Volatile access has no automatic synchronization of its own, and so if 52 | /// your target requires some sort of synchronization for volatile accesses of 53 | /// the address in question you must provide the appropriate synchronization 54 | /// in some way external to this type. 55 | #[repr(transparent)] 56 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 57 | pub struct VolAddress { 58 | pub(crate) address: NonZeroUsize, 59 | target: PhantomData, 60 | read_status: PhantomData, 61 | write_status: PhantomData, 62 | } 63 | 64 | impl Clone for VolAddress { 65 | #[inline] 66 | #[must_use] 67 | fn clone(&self) -> Self { 68 | *self 69 | } 70 | } 71 | impl Copy for VolAddress {} 72 | 73 | impl VolAddress { 74 | /// Constructs the value. 75 | /// 76 | /// ## Safety 77 | /// * As per the type docs. 78 | #[inline] 79 | #[must_use] 80 | pub const unsafe fn new(address: usize) -> Self { 81 | Self { 82 | address: NonZeroUsize::new_unchecked(address), 83 | target: PhantomData, 84 | read_status: PhantomData, 85 | write_status: PhantomData, 86 | } 87 | } 88 | 89 | /// Changes the target type from `T` to `Z`. 90 | /// 91 | /// ## Safety 92 | /// * As per the type docs 93 | #[inline] 94 | #[must_use] 95 | pub const unsafe fn cast(self) -> VolAddress { 96 | VolAddress { 97 | address: self.address, 98 | target: PhantomData, 99 | read_status: PhantomData, 100 | write_status: PhantomData, 101 | } 102 | } 103 | 104 | /// Changes the permissions of the address to the new read and write 105 | /// permissions specified. 106 | /// 107 | /// ## Safety 108 | /// * As per the type docs 109 | #[inline] 110 | #[must_use] 111 | pub const unsafe fn change_permissions( 112 | self, 113 | ) -> VolAddress { 114 | VolAddress { 115 | address: self.address, 116 | target: PhantomData, 117 | read_status: PhantomData, 118 | write_status: PhantomData, 119 | } 120 | } 121 | 122 | /// Converts the `VolAddress` back into a normal `usize` value. 123 | #[inline] 124 | #[must_use] 125 | pub const fn as_usize(self) -> usize { 126 | self.address.get() 127 | } 128 | 129 | /// Converts the `VolAddress` into const pointer form. 130 | /// 131 | /// This should usually only be used when you need to call a foreign function 132 | /// that expects a pointer. 133 | #[inline] 134 | #[must_use] 135 | pub const fn as_ptr(self) -> *const T { 136 | self.address.get() as *const T 137 | } 138 | 139 | /// Converts the `VolAddress` into mut pointer form. 140 | /// 141 | /// This should usually only be used when you need to call a foreign function 142 | /// that expects a pointer. 143 | #[inline] 144 | #[must_use] 145 | pub const fn as_mut_ptr(self) -> *mut T { 146 | self.address.get() as *mut T 147 | } 148 | 149 | /// Advances the pointer by the given number of positions (`usize`). 150 | /// 151 | /// Shorthand for `addr.offset(count as isize)` 152 | /// 153 | /// This is intended to basically work like [`<*mut 154 | /// T>::wrapping_add`](https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_add-1). 155 | /// 156 | /// ## Safety 157 | /// * As per the type docs 158 | #[inline] 159 | #[must_use] 160 | pub const unsafe fn add(self, count: usize) -> Self { 161 | self.offset(count as isize) 162 | } 163 | 164 | /// Reverses the pointer by the given number of positions (`usize`). 165 | /// 166 | /// Shorthand for `addr.offset((count as isize).wrapping_neg())` 167 | /// 168 | /// This is intended to basically work like [`<*mut 169 | /// T>::wrapping_sub`](https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_sub-1). 
170 | /// 171 | /// ## Safety 172 | /// * As per the type docs 173 | #[inline] 174 | #[must_use] 175 | pub const unsafe fn sub(self, count: usize) -> Self { 176 | self.offset((count as isize).wrapping_neg()) 177 | } 178 | 179 | /// Offsets the address by the given number of positions (`isize`). 180 | /// 181 | /// This is intended to basically work like [`<*mut 182 | /// T>::wrapping_offset`](https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset-1). 183 | /// 184 | /// ## Safety 185 | /// * As per the type docs 186 | #[inline] 187 | #[must_use] 188 | pub const unsafe fn offset(self, count: isize) -> Self { 189 | let total_delta = core::mem::size_of::().wrapping_mul(count as usize); 190 | VolAddress { 191 | address: NonZeroUsize::new_unchecked( 192 | self.address.get().wrapping_add(total_delta), 193 | ), 194 | target: PhantomData, 195 | read_status: PhantomData, 196 | write_status: PhantomData, 197 | } 198 | } 199 | } 200 | 201 | impl VolAddress<[T; C], R, W> { 202 | /// Converts an address for an array to a block for each element of the array. 203 | /// 204 | /// ## Safety 205 | /// * As per the `VolBlock` construction rules. 206 | /// * It is *highly likely* that on any device this is safe, but because of 207 | /// possible strangeness with volatile side effects this is marked as an 208 | /// `unsafe` method. 209 | #[inline] 210 | #[must_use] 211 | pub const unsafe fn as_volblock(self) -> VolBlock { 212 | VolBlock { base: self.cast::() } 213 | } 214 | } 215 | 216 | impl VolAddress 217 | where 218 | T: Copy, 219 | { 220 | /// Volatile reads the current value of `A`. 221 | #[inline] 222 | pub fn read(self) -> T { 223 | // Safety: The declarer of the value gave this a `Safe` read typing, thus 224 | // they've asserted that this is a safe to read address. 225 | unsafe { read_volatile(self.address.get() as *const T) } 226 | } 227 | } 228 | impl VolAddress 229 | where 230 | T: Copy, 231 | { 232 | /// Volatile reads the current value of `A`. 233 | /// 234 | /// ## Safety 235 | /// * The safety rules of reading this address depend on the device. Consult 236 | /// your hardware manual. 237 | #[inline] 238 | pub unsafe fn read(self) -> T { 239 | read_volatile(self.address.get() as *const T) 240 | } 241 | } 242 | 243 | impl VolAddress 244 | where 245 | T: Copy, 246 | { 247 | /// Volatile writes a new value to `A`. 248 | #[inline] 249 | pub fn write(self, t: T) { 250 | // Safety: The declarer of the value gave this a `Safe` write typing, thus 251 | // they've asserted that this is a safe to write address. 252 | unsafe { write_volatile(self.address.get() as *mut T, t) } 253 | } 254 | } 255 | impl VolAddress 256 | where 257 | T: Copy, 258 | { 259 | /// Volatile writes a new value to `A`. 260 | /// 261 | /// ## Safety 262 | /// * The safety rules of writing this address depend on the device. Consult 263 | /// your hardware manual. 264 | #[inline] 265 | pub unsafe fn write(self, t: T) { 266 | write_volatile(self.address.get() as *mut T, t) 267 | } 268 | } 269 | 270 | impl VolAddress 271 | where 272 | T: Copy, 273 | { 274 | /// Reads the address, applies the operation, and writes back the new value. 275 | #[inline] 276 | pub fn apply(self, op: F) { 277 | let mut temp = self.read(); 278 | op(&mut temp); 279 | self.write(temp); 280 | } 281 | } 282 | impl VolAddress 283 | where 284 | T: Copy, 285 | { 286 | /// Reads the address, applies the operation, and writes back the new value. 
287 | /// 288 | /// ## Safety 289 | /// * The safety rules of reading/writing this address depend on the device. 290 | /// Consult your hardware manual. 291 | #[inline] 292 | pub unsafe fn apply(self, op: F) { 293 | let mut temp = self.read(); 294 | op(&mut temp); 295 | self.write(temp); 296 | } 297 | } 298 | impl VolAddress 299 | where 300 | T: Copy, 301 | { 302 | /// Reads the address, applies the operation, and writes back the new value. 303 | /// 304 | /// ## Safety 305 | /// * The safety rules of reading/writing this address depend on the device. 306 | /// Consult your hardware manual. 307 | #[inline] 308 | pub unsafe fn apply(self, op: F) { 309 | let mut temp = self.read(); 310 | op(&mut temp); 311 | self.write(temp); 312 | } 313 | } 314 | impl VolAddress 315 | where 316 | T: Copy, 317 | { 318 | /// Reads the address, applies the operation, and writes back the new value. 319 | /// 320 | /// ## Safety 321 | /// * The safety rules of reading/writing this address depend on the device. 322 | /// Consult your hardware manual. 323 | #[inline] 324 | pub unsafe fn apply(self, op: F) { 325 | let mut temp = self.read(); 326 | op(&mut temp); 327 | self.write(temp); 328 | } 329 | } 330 | 331 | impl core::fmt::Debug for VolAddress { 332 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 333 | write!( 334 | f, 335 | "VolAddress<{elem_ty}, r{readability}, w{writeability}>(0x{address:#X})", 336 | elem_ty = core::any::type_name::(), 337 | readability = core::any::type_name::(), 338 | writeability = core::any::type_name::(), 339 | address = self.address.get() 340 | ) 341 | } 342 | } 343 | 344 | impl core::fmt::Pointer for VolAddress { 345 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 346 | write!(f, "0x{address:#X}", address = self.address.get()) 347 | } 348 | } 349 | -------------------------------------------------------------------------------- /src/volseries.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | /// A volatile memory "series". 4 | /// 5 | /// This is intended to model when a portion of memory is a series of evenly 6 | /// spaced values that are *not* directly contiguous. 7 | /// 8 | /// ## Generic Parameters 9 | /// * `T` / `R` / `W`: These parameters are applied to the [`VolAddress`] type 10 | /// returned when accessing the series in any way (indexing, iteration, etc). 11 | /// * `C`: the count of elements in the series. 12 | /// * `S`: the stride **in bytes** between series elements. 13 | /// 14 | /// ## Safety 15 | /// * This type stores a [`VolAddress`] internally, and so you must follow all 16 | /// of those safety rules. Notably, the base address must never be zero. 17 | /// * The address space must legally contain `C` values of the `T` type, spaced 18 | /// every `S` bytes, starting from the base address. 19 | /// * The memory series must not wrap around the end of the address space. 20 | #[repr(transparent)] 21 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 22 | pub struct VolSeries { 23 | pub(crate) base: VolAddress, 24 | } 25 | 26 | impl Clone 27 | for VolSeries 28 | { 29 | #[inline] 30 | #[must_use] 31 | fn clone(&self) -> Self { 32 | *self 33 | } 34 | } 35 | impl Copy 36 | for VolSeries 37 | { 38 | } 39 | 40 | impl VolSeries { 41 | /// Constructs the value. 42 | /// 43 | /// ## Safety 44 | /// * As per the type docs. 
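  ///
  /// Illustrative declaration (address, element count, and byte stride are all
  /// made up): a series of 4 `u16` values spaced 0x10 bytes apart.
  /// ```no_run
  /// # use voladdress::*;
  /// const SERIES: VolSeries<u16, Safe, Safe, 4, 0x10> =
  ///   unsafe { VolSeries::new(0x0400_0100) };
  /// // Element 1 lives one stride (0x10 bytes) past the base address.
  /// assert_eq!(SERIES.index(1).as_usize(), 0x0400_0110);
  /// SERIES.iter().for_each(|a| a.write(0));
  /// ```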
45 | #[inline] 46 | #[must_use] 47 | pub const unsafe fn new(base: usize) -> Self { 48 | Self { base: VolAddress::new(base) } 49 | } 50 | 51 | /// The length of this series (in elements). 52 | #[inline] 53 | #[must_use] 54 | #[allow(clippy::len_without_is_empty)] 55 | pub const fn len(self) -> usize { 56 | C 57 | } 58 | 59 | /// The stride of this series (in bytes). 60 | #[inline] 61 | #[must_use] 62 | pub const fn stride(self) -> usize { 63 | S 64 | } 65 | 66 | /// Indexes to the `i`th position of the memory series. 67 | /// 68 | /// ## Panics 69 | /// * If the index is out of bounds this will panic. 70 | #[inline] 71 | #[must_use] 72 | #[track_caller] 73 | pub const fn index(self, i: usize) -> VolAddress { 74 | assert!(i < C); 75 | unsafe { self.base.cast::<[u8; S]>().add(i).cast::() } 76 | } 77 | 78 | /// Gets the address of the `i`th position, if it's in bounds. 79 | #[inline] 80 | #[must_use] 81 | pub const fn get(self, i: usize) -> Option> { 82 | if i < C { 83 | Some(unsafe { self.base.cast::<[u8; S]>().add(i).cast::() }) 84 | } else { 85 | None 86 | } 87 | } 88 | 89 | /// Creates an iterator over the addresses of the memory series. 90 | #[inline] 91 | #[must_use] 92 | pub const fn iter(self) -> VolSeriesIter { 93 | VolSeriesIter { base: self.base, count: C } 94 | } 95 | 96 | /// Makes an iterator over the range bounds given. 97 | /// 98 | /// If the range given is empty then your iterator will be empty. 99 | /// 100 | /// ## Panics 101 | /// * If the start or end of the range are out of bounds for the series. 102 | #[inline] 103 | #[must_use] 104 | #[track_caller] 105 | pub fn iter_range>( 106 | self, r: RB, 107 | ) -> VolSeriesIter { 108 | // TODO: some day make this a const fn, once start_bound and end_bound are 109 | // made into const fn, but that requires const trait impls. 
110 | use core::ops::Bound; 111 | let start_inclusive: usize = match r.start_bound() { 112 | Bound::Included(i) => *i, 113 | Bound::Excluded(x) => x + 1, 114 | Bound::Unbounded => 0, 115 | }; 116 | assert!(start_inclusive < C); 117 | let end_exclusive: usize = match r.end_bound() { 118 | Bound::Included(i) => i + 1, 119 | Bound::Excluded(x) => *x, 120 | Bound::Unbounded => C, 121 | }; 122 | assert!(end_exclusive <= C); 123 | //extern crate std; 124 | //std::println!("start_bound {:?}", r.start_bound()); 125 | //std::println!("end_bound {:?}", r.end_bound()); 126 | //std::println!("start_inclusive {:?}", start_inclusive); 127 | //std::println!("end_exclusive {:?}", end_exclusive); 128 | let count = end_exclusive.saturating_sub(start_inclusive); 129 | VolSeriesIter { base: self.index(start_inclusive), count } 130 | } 131 | } 132 | 133 | #[test] 134 | fn test_volseries_iter_range() { 135 | let series: VolSeries = 136 | unsafe { VolSeries::new(1) }; 137 | // 138 | let i = series.iter_range(..); 139 | assert_eq!(i.base.as_usize(), 1); 140 | assert_eq!(i.count, 10); 141 | // 142 | let i = series.iter_range(2..); 143 | assert_eq!(i.base.as_usize(), 1 + 2); 144 | assert_eq!(i.count, 10 - 2); 145 | // 146 | let i = series.iter_range(2..=5); 147 | assert_eq!(i.base.as_usize(), 1 + 2); 148 | assert_eq!(i.count, 4); 149 | // 150 | let i = series.iter_range(..4); 151 | assert_eq!(i.base.as_usize(), 1); 152 | assert_eq!(i.count, 4); 153 | // 154 | let i = series.iter_range(..=4); 155 | assert_eq!(i.base.as_usize(), 1); 156 | assert_eq!(i.count, 5); 157 | } 158 | 159 | #[test] 160 | #[should_panic] 161 | fn test_volseries_iter_range_low_bound_panic() { 162 | let series: VolSeries = 163 | unsafe { VolSeries::new(1) }; 164 | // 165 | let _i = series.iter_range(10..); 166 | } 167 | 168 | #[test] 169 | #[should_panic] 170 | fn test_volseries_iter_range_high_bound_panic() { 171 | let series: VolSeries = 172 | unsafe { VolSeries::new(1) }; 173 | // 174 | let _i = series.iter_range(..=10); 175 | } 176 | 177 | impl core::fmt::Debug 178 | for VolSeries 179 | { 180 | #[cold] 181 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 182 | write!(f, "VolSeries<{elem_ty}, r{readability}, w{writeability}, c{count}, s{stride:#X}>(0x{address:#X})", 183 | elem_ty = core::any::type_name::(), 184 | readability=core::any::type_name::(), 185 | writeability=core::any::type_name::(), 186 | count=C, 187 | stride=S, 188 | address=self.base.address.get()) 189 | } 190 | } 191 | 192 | impl core::fmt::Pointer 193 | for VolSeries 194 | { 195 | #[cold] 196 | fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { 197 | write!(f, "0x{address:#X}", address = self.base.address.get()) 198 | } 199 | } 200 | 201 | /// An iterator over a volatile series. 202 | /// 203 | /// You will generally not construct types of this value yourself. Instead, you 204 | /// obtain them via the [`VolSeries::iter`](VolSeries::iter) method. 
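///
/// Illustrative use (values made up):
/// ```no_run
/// # use voladdress::*;
/// const SERIES: VolSeries<u16, Safe, Safe, 3, 0x20> =
///   unsafe { VolSeries::new(0x0400_0200) };
/// // The iterator yields one `VolAddress` per element, and can also be
/// // driven from the back.
/// let mut it = SERIES.iter();
/// assert_eq!(it.next().map(|a| a.as_usize()), Some(0x0400_0200));
/// assert_eq!(it.next_back().map(|a| a.as_usize()), Some(0x0400_0240));
/// ```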
205 | #[repr(C)] 206 | pub struct VolSeriesIter { 207 | pub(crate) base: VolAddress, 208 | pub(crate) count: usize, 209 | } 210 | 211 | impl Clone for VolSeriesIter { 212 | #[inline] 213 | #[must_use] 214 | fn clone(&self) -> Self { 215 | Self { base: self.base, count: self.count } 216 | } 217 | } 218 | 219 | impl core::iter::Iterator 220 | for VolSeriesIter 221 | { 222 | type Item = VolAddress; 223 | 224 | #[inline] 225 | fn nth(&mut self, n: usize) -> Option { 226 | if n < self.count { 227 | let out = Some(unsafe { self.base.cast::<[u8; S]>().add(n).cast::() }); 228 | self.count -= n + 1; 229 | self.base = unsafe { self.base.cast::<[u8; S]>().add(n + 1).cast::() }; 230 | out 231 | } else { 232 | self.count = 0; 233 | None 234 | } 235 | } 236 | 237 | #[inline] 238 | fn next(&mut self) -> Option { 239 | self.nth(0) 240 | } 241 | 242 | #[inline] 243 | #[must_use] 244 | fn last(mut self) -> Option { 245 | if self.count > 0 { 246 | self.nth(self.count - 1) 247 | } else { 248 | None 249 | } 250 | } 251 | 252 | #[inline] 253 | #[must_use] 254 | fn size_hint(&self) -> (usize, Option) { 255 | (self.count, Some(self.count)) 256 | } 257 | 258 | #[inline] 259 | #[must_use] 260 | fn count(self) -> usize { 261 | self.count 262 | } 263 | } 264 | 265 | impl core::iter::DoubleEndedIterator 266 | for VolSeriesIter 267 | { 268 | #[inline] 269 | fn next_back(&mut self) -> Option { 270 | self.nth_back(0) 271 | } 272 | 273 | #[inline] 274 | fn nth_back(&mut self, n: usize) -> Option { 275 | if n < self.count { 276 | let out = Some(unsafe { 277 | self.base.cast::<[u8; S]>().add(self.count - (n + 1)).cast::() 278 | }); 279 | self.count -= n + 1; 280 | out 281 | } else { 282 | self.count = 0; 283 | None 284 | } 285 | } 286 | } 287 | 288 | #[test] 289 | fn test_impl_Iterator_for_VolSeriesIter() { 290 | let i: VolSeriesIter = VolSeriesIter { 291 | base: unsafe { VolAddress::new(core::mem::align_of::()) }, 292 | count: 4, 293 | }; 294 | 295 | let mut i_c = i.clone().map(|a| a.as_usize()); 296 | assert_eq!(i_c.next(), Some(0x002)); 297 | assert_eq!(i_c.next(), Some(0x102)); 298 | assert_eq!(i_c.next(), Some(0x202)); 299 | assert_eq!(i_c.next(), Some(0x302)); 300 | assert_eq!(i_c.next(), None); 301 | assert_eq!(i_c.next(), None); 302 | 303 | let i_c = i.clone(); 304 | assert_eq!(i_c.size_hint(), (4, Some(4))); 305 | 306 | let i_c = i.clone(); 307 | assert_eq!(i_c.count(), 4); 308 | 309 | let i_c = i.clone().map(|a| a.as_usize()); 310 | assert_eq!(i_c.last(), Some(0x302)); 311 | 312 | let mut i_c = i.clone().map(|a| a.as_usize()); 313 | assert_eq!(i_c.nth(0), Some(0x002)); 314 | assert_eq!(i_c.nth(0), Some(0x102)); 315 | assert_eq!(i_c.nth(0), Some(0x202)); 316 | assert_eq!(i_c.nth(0), Some(0x302)); 317 | assert_eq!(i_c.nth(0), None); 318 | assert_eq!(i_c.nth(0), None); 319 | 320 | let mut i_c = i.clone().map(|a| a.as_usize()); 321 | assert_eq!(i_c.nth(1), Some(0x102)); 322 | assert_eq!(i_c.nth(1), Some(0x302)); 323 | assert_eq!(i_c.nth(1), None); 324 | assert_eq!(i_c.nth(1), None); 325 | 326 | let mut i_c = i.clone().map(|a| a.as_usize()); 327 | assert_eq!(i_c.nth(2), Some(0x202)); 328 | assert_eq!(i_c.nth(2), None); 329 | assert_eq!(i_c.nth(2), None); 330 | 331 | let mut i_c = i.clone().map(|a| a.as_usize()); 332 | assert_eq!(i_c.nth(3), Some(0x302)); 333 | assert_eq!(i_c.nth(3), None); 334 | assert_eq!(i_c.nth(3), None); 335 | 336 | let mut i_c = i.clone().map(|a| a.as_usize()); 337 | assert_eq!(i_c.nth(4), None); 338 | assert_eq!(i_c.nth(4), None); 339 | } 340 | 341 | #[test] 342 | fn 
test_impl_DoubleEndedIterator_for_VolSeriesIter() { 343 | let i: VolSeriesIter = VolSeriesIter { 344 | base: unsafe { VolAddress::new(core::mem::align_of::()) }, 345 | count: 4, 346 | }; 347 | 348 | let mut i_c = i.clone().map(|a| a.as_usize()); 349 | assert_eq!(i_c.next_back(), Some(0x302)); 350 | assert_eq!(i_c.next_back(), Some(0x202)); 351 | assert_eq!(i_c.next_back(), Some(0x102)); 352 | assert_eq!(i_c.next_back(), Some(0x002)); 353 | assert_eq!(i_c.next_back(), None); 354 | assert_eq!(i_c.next_back(), None); 355 | 356 | let mut i_c = i.clone().map(|a| a.as_usize()); 357 | assert_eq!(i_c.nth_back(0), Some(0x302)); 358 | assert_eq!(i_c.nth_back(0), Some(0x202)); 359 | assert_eq!(i_c.nth_back(0), Some(0x102)); 360 | assert_eq!(i_c.nth_back(0), Some(0x002)); 361 | assert_eq!(i_c.nth_back(0), None); 362 | assert_eq!(i_c.nth_back(0), None); 363 | 364 | let mut i_c = i.clone().map(|a| a.as_usize()); 365 | assert_eq!(i_c.nth_back(1), Some(0x202)); 366 | assert_eq!(i_c.nth_back(1), Some(0x002)); 367 | assert_eq!(i_c.nth_back(1), None); 368 | assert_eq!(i_c.nth_back(1), None); 369 | 370 | let mut i_c = i.clone().map(|a| a.as_usize()); 371 | assert_eq!(i_c.nth_back(2), Some(0x102)); 372 | assert_eq!(i_c.nth_back(2), None); 373 | assert_eq!(i_c.nth_back(2), None); 374 | 375 | let mut i_c = i.clone().map(|a| a.as_usize()); 376 | assert_eq!(i_c.nth_back(3), Some(0x002)); 377 | assert_eq!(i_c.nth_back(3), None); 378 | assert_eq!(i_c.nth_back(3), None); 379 | 380 | let mut i_c = i.clone().map(|a| a.as_usize()); 381 | assert_eq!(i_c.nth_back(4), None); 382 | assert_eq!(i_c.nth_back(4), None); 383 | } 384 | -------------------------------------------------------------------------------- /src/volblock.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | /// A volatile memory block. 4 | /// 5 | /// This is intended to model when a portion of memory is an array of identical 6 | /// values in a row, such as a block of 256 `u16` values in a row. 7 | /// 8 | /// ## Generic Parameters 9 | /// * `T` / `R` / `W`: These parameters are applied to the [`VolAddress`] type 10 | /// returned when accessing the block in any way (indexing, iteration, etc). 11 | /// * `C`: the count of elements in the block. 12 | /// 13 | /// ## Safety 14 | /// * This type stores a base [`VolAddress`] internally, and so you must follow 15 | /// all of those safety rules. Notably, the base address must never be zero. 16 | /// * The address space must legally contain `C` contiguous values of the `T` 17 | /// type, starting from the base address. 18 | /// * The memory block must not wrap around past the end of the address space. 19 | #[repr(transparent)] 20 | #[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] 21 | pub struct VolBlock { 22 | pub(crate) base: VolAddress, 23 | } 24 | 25 | impl Clone for VolBlock { 26 | #[inline] 27 | #[must_use] 28 | fn clone(&self) -> Self { 29 | *self 30 | } 31 | } 32 | impl Copy for VolBlock {} 33 | 34 | impl VolBlock { 35 | /// Constructs the value. 36 | /// 37 | /// ## Safety 38 | /// * As per the type docs. 39 | #[inline] 40 | #[must_use] 41 | pub const unsafe fn new(base: usize) -> Self { 42 | Self { base: VolAddress::new(base) } 43 | } 44 | 45 | /// The length of this block (in elements). 46 | #[inline] 47 | #[must_use] 48 | #[allow(clippy::len_without_is_empty)] 49 | pub const fn len(self) -> usize { 50 | C 51 | } 52 | 53 | /// Converts the `VolBlock` the `usize` for the start of the block. 
54 | #[inline] 55 | #[must_use] 56 | pub const fn as_usize(self) -> usize { 57 | self.base.address.get() 58 | } 59 | 60 | /// Converts the `VolBlock` into an individual const pointer. 61 | /// 62 | /// This should usually only be used when you need to call a foreign function 63 | /// that expects a pointer. 64 | #[inline] 65 | #[must_use] 66 | pub const fn as_ptr(self) -> *const T { 67 | self.base.address.get() as *const T 68 | } 69 | 70 | /// Converts the `VolBlock` into an individual mut pointer. 71 | /// 72 | /// This should usually only be used when you need to call a foreign function 73 | /// that expects a pointer. 74 | #[inline] 75 | #[must_use] 76 | pub const fn as_mut_ptr(self) -> *mut T { 77 | self.base.address.get() as *mut T 78 | } 79 | 80 | /// Converts the `VolBlock` into a const slice pointer. 81 | #[inline] 82 | #[must_use] 83 | // TODO(2022-10-15): const fn this at some point in the future (1.64 minimum) 84 | pub fn as_slice_ptr(self) -> *const [T] { 85 | core::ptr::slice_from_raw_parts(self.base.address.get() as *const T, C) 86 | } 87 | 88 | /// Converts the `VolBlock` into a mut slice pointer. 89 | #[inline] 90 | #[must_use] 91 | // TODO(2022-10-15): const fn this at some point in the future (unstable) 92 | pub fn as_slice_mut_ptr(self) -> *mut [T] { 93 | core::ptr::slice_from_raw_parts_mut(self.base.address.get() as *mut T, C) 94 | } 95 | 96 | /// Indexes to the `i`th position of the memory block. 97 | /// 98 | /// ## Panics 99 | /// * If the index is out of bounds this will panic. 100 | #[inline] 101 | #[must_use] 102 | #[track_caller] 103 | pub const fn index(self, i: usize) -> VolAddress { 104 | assert!(i < C); 105 | unsafe { self.base.add(i) } 106 | } 107 | 108 | /// Gets the address of the `i`th position, if it's in bounds. 109 | #[inline] 110 | #[must_use] 111 | pub const fn get(self, i: usize) -> Option> { 112 | if i < C { 113 | Some(unsafe { self.base.add(i) }) 114 | } else { 115 | None 116 | } 117 | } 118 | 119 | /// Creates an iterator over the addresses of the memory block. 120 | #[inline] 121 | #[must_use] 122 | pub const fn iter(self) -> VolBlockIter { 123 | VolBlockIter { base: self.base, count: C } 124 | } 125 | 126 | /// Makes an iterator over the range bounds given. 127 | /// 128 | /// If the range given is empty then your iterator will be empty. 129 | /// 130 | /// ## Panics 131 | /// * If the start or end of the range are out of bounds for the block. 132 | #[inline] 133 | #[must_use] 134 | #[track_caller] 135 | pub fn iter_range>( 136 | self, r: RB, 137 | ) -> VolBlockIter { 138 | // TODO: some day make this a const fn, once start_bound and end_bound are 139 | // made into const fn, but that requires const trait impls. 140 | use core::ops::Bound; 141 | let start_inclusive: usize = match r.start_bound() { 142 | Bound::Included(i) => *i, 143 | Bound::Excluded(x) => x + 1, 144 | Bound::Unbounded => 0, 145 | }; 146 | assert!(start_inclusive < C); 147 | let end_exclusive: usize = match r.end_bound() { 148 | Bound::Included(i) => i + 1, 149 | Bound::Excluded(x) => *x, 150 | Bound::Unbounded => C, 151 | }; 152 | assert!(end_exclusive <= C); 153 | let count = end_exclusive.saturating_sub(start_inclusive); 154 | VolBlockIter { base: self.index(start_inclusive), count } 155 | } 156 | 157 | /// View the volatile block as an equivalent spanned region. 
158 |   ///
159 |   /// This method exists because, unfortunately, the typing of the `Deref`
160 |   /// trait doesn't allow a `VolBlock` to deref into a `VolRegion`, so we
161 |   /// provide the conversion through this manual method.
162 |   #[inline]
163 |   #[must_use]
164 |   pub const fn as_region(self) -> VolRegion<T, R, W> {
165 |     VolRegion { addr: self.base, len: C }
166 |   }
167 | }
168 | 
169 | #[test]
170 | fn test_volblock_iter_range() {
171 |   let block: VolBlock<u8, (), (), 10> = unsafe { VolBlock::new(1) };
172 |   //
173 |   let i = block.iter_range(..);
174 |   assert_eq!(i.base.as_usize(), 1);
175 |   assert_eq!(i.count, 10);
176 |   //
177 |   let i = block.iter_range(2..);
178 |   assert_eq!(i.base.as_usize(), 1 + 2);
179 |   assert_eq!(i.count, 10 - 2);
180 |   //
181 |   let i = block.iter_range(2..=5);
182 |   assert_eq!(i.base.as_usize(), 1 + 2);
183 |   assert_eq!(i.count, 4);
184 |   //
185 |   let i = block.iter_range(..4);
186 |   assert_eq!(i.base.as_usize(), 1);
187 |   assert_eq!(i.count, 4);
188 |   //
189 |   let i = block.iter_range(..=4);
190 |   assert_eq!(i.base.as_usize(), 1);
191 |   assert_eq!(i.count, 5);
192 | }
193 | 
194 | #[test]
195 | #[should_panic]
196 | fn test_volblock_iter_range_low_bound_panic() {
197 |   let block: VolBlock<u8, (), (), 10> = unsafe { VolBlock::new(1) };
198 |   //
199 |   let _i = block.iter_range(10..);
200 | }
201 | 
202 | #[test]
203 | #[should_panic]
204 | fn test_volblock_iter_range_high_bound_panic() {
205 |   let block: VolBlock<u8, (), (), 10> = unsafe { VolBlock::new(1) };
206 |   //
207 |   let _i = block.iter_range(..=10);
208 | }
209 | 
210 | impl<T, R, W, const C: usize> core::fmt::Debug for VolBlock<T, R, W, C> {
211 |   fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
212 |     write!(f, "VolBlock<{elem_ty}, r{readability}, w{writeability}, c{count}>({address:#X})",
213 |       elem_ty = core::any::type_name::<T>(),
214 |       readability = core::any::type_name::<R>(),
215 |       writeability = core::any::type_name::<W>(),
216 |       count = C,
217 |       address = self.base.address.get())
218 |   }
219 | }
220 | 
221 | impl<T, R, W, const C: usize> core::fmt::Pointer for VolBlock<T, R, W, C> {
222 |   fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
223 |     write!(f, "{address:#X}", address = self.base.address.get())
224 |   }
225 | }
226 | 
227 | /// An iterator over a volatile block.
228 | ///
229 | /// You will generally not construct values of this type yourself. Instead, you
230 | /// obtain them via the [`VolBlock::iter`](VolBlock::iter) method.
231 | #[repr(C)]
232 | pub struct VolBlockIter<T, R, W> {
233 |   pub(crate) base: VolAddress<T, R, W>,
234 |   pub(crate) count: usize,
235 | }
236 | 
237 | impl<T, R, W> Clone for VolBlockIter<T, R, W> {
238 |   #[inline]
239 |   #[must_use]
240 |   fn clone(&self) -> Self {
241 |     Self { base: self.base, count: self.count }
242 |   }
243 | }
244 | 
245 | impl<T, R, W> core::iter::Iterator for VolBlockIter<T, R, W> {
246 |   type Item = VolAddress<T, R, W>;
247 | 
248 |   #[inline]
249 |   fn nth(&mut self, n: usize) -> Option<Self::Item> {
250 |     if n < self.count {
251 |       let out = Some(unsafe { self.base.add(n) });
252 |       self.count -= n + 1;
253 |       self.base = unsafe { self.base.add(n + 1) };
254 |       out
255 |     } else {
256 |       self.count = 0;
257 |       None
258 |     }
259 |   }
260 | 
261 |   #[inline]
262 |   fn next(&mut self) -> Option<Self::Item> {
263 |     self.nth(0)
264 |   }
265 | 
266 |   #[inline]
267 |   #[must_use]
268 |   fn last(mut self) -> Option<Self::Item> {
269 |     if self.count > 0 {
270 |       self.nth(self.count - 1)
271 |     } else {
272 |       None
273 |     }
274 |   }
275 | 
276 |   #[inline]
277 |   #[must_use]
278 |   fn size_hint(&self) -> (usize, Option<usize>) {
279 |     (self.count, Some(self.count))
280 |   }
281 | 
282 |   #[inline]
283 |   #[must_use]
284 |   fn count(self) -> usize {
285 |     self.count
286 |   }
287 | }
288 | 
289 | impl<T, R, W> core::iter::DoubleEndedIterator for VolBlockIter<T, R, W> {
290 |   #[inline]
291 |   fn next_back(&mut self) -> Option<Self::Item> {
292 |     self.nth_back(0)
293 |   }
294 | 
295 |   #[inline]
296 |   fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
297 |     if n < self.count {
298 |       let out = Some(unsafe { self.base.add(self.count - (n + 1)) });
299 |       self.count -= n + 1;
300 |       out
301 |     } else {
302 |       self.count = 0;
303 |       None
304 |     }
305 |   }
306 | }
307 | 
308 | #[test]
309 | fn test_impl_Iterator_for_VolBlockIter() {
310 |   let i: VolBlockIter<u16, (), ()> = VolBlockIter {
311 |     base: unsafe { VolAddress::new(core::mem::align_of::<u16>()) },
312 |     count: 4,
313 |   };
314 | 
315 |   let mut i_c = i.clone().map(|a| a.as_usize());
316 |   assert_eq!(i_c.next(), Some(2));
317 |   assert_eq!(i_c.next(), Some(4));
318 |   assert_eq!(i_c.next(), Some(6));
319 |   assert_eq!(i_c.next(), Some(8));
320 |   assert_eq!(i_c.next(), None);
321 |   assert_eq!(i_c.next(), None);
322 | 
323 |   let i_c = i.clone();
324 |   assert_eq!(i_c.size_hint(), (4, Some(4)));
325 | 
326 |   let i_c = i.clone();
327 |   assert_eq!(i_c.count(), 4);
328 | 
329 |   let i_c = i.clone().map(|a| a.as_usize());
330 |   assert_eq!(i_c.last(), Some(8));
331 | 
332 |   let mut i_c = i.clone().map(|a| a.as_usize());
333 |   assert_eq!(i_c.nth(0), Some(2));
334 |   assert_eq!(i_c.nth(0), Some(4));
335 |   assert_eq!(i_c.nth(0), Some(6));
336 |   assert_eq!(i_c.nth(0), Some(8));
337 |   assert_eq!(i_c.nth(0), None);
338 |   assert_eq!(i_c.nth(0), None);
339 | 
340 |   let mut i_c = i.clone().map(|a| a.as_usize());
341 |   assert_eq!(i_c.nth(1), Some(4));
342 |   assert_eq!(i_c.nth(1), Some(8));
343 |   assert_eq!(i_c.nth(1), None);
344 |   assert_eq!(i_c.nth(1), None);
345 | 
346 |   let mut i_c = i.clone().map(|a| a.as_usize());
347 |   assert_eq!(i_c.nth(2), Some(6));
348 |   assert_eq!(i_c.nth(2), None);
349 |   assert_eq!(i_c.nth(2), None);
350 | 
351 |   let mut i_c = i.clone().map(|a| a.as_usize());
352 |   assert_eq!(i_c.nth(3), Some(8));
353 |   assert_eq!(i_c.nth(3), None);
354 |   assert_eq!(i_c.nth(3), None);
355 | 
356 |   let mut i_c = i.clone().map(|a| a.as_usize());
357 |   assert_eq!(i_c.nth(4), None);
358 |   assert_eq!(i_c.nth(4), None);
359 | }
360 | 
361 | #[test]
362 | fn test_impl_DoubleEndedIterator_for_VolBlockIter() {
363 |   let i: VolBlockIter<u16, (), ()> = VolBlockIter {
364 |     base: unsafe { VolAddress::new(core::mem::align_of::<u16>()) },
365 |     count: 4,
366 |   };
367 | 
368 |   let mut i_c = i.clone().map(|a| a.as_usize());
369 |   assert_eq!(i_c.next_back(), Some(8));
370 |   assert_eq!(i_c.next_back(), Some(6));
371 |   assert_eq!(i_c.next_back(), Some(4));
372 |   assert_eq!(i_c.next_back(), Some(2));
373 |   assert_eq!(i_c.next_back(), None);
374 |   assert_eq!(i_c.next_back(), None);
375 | 
376 |   let mut i_c = i.clone().map(|a| a.as_usize());
377 |   assert_eq!(i_c.nth_back(0), Some(8));
378 |   assert_eq!(i_c.nth_back(0), Some(6));
379 |   assert_eq!(i_c.nth_back(0), Some(4));
380 |   assert_eq!(i_c.nth_back(0), Some(2));
381 |   assert_eq!(i_c.nth_back(0), None);
382 |   assert_eq!(i_c.nth_back(0), None);
383 | 
384 |   let mut i_c = i.clone().map(|a| a.as_usize());
385 |   assert_eq!(i_c.nth_back(1), Some(6));
386 |   assert_eq!(i_c.nth_back(1), Some(2));
387 |   assert_eq!(i_c.nth_back(1), None);
388 |   assert_eq!(i_c.nth_back(1), None);
389 | 
390 |   let mut i_c = i.clone().map(|a| a.as_usize());
391 |   assert_eq!(i_c.nth_back(2), Some(4));
392 |   assert_eq!(i_c.nth_back(2), None);
393 |   assert_eq!(i_c.nth_back(2), None);
394 | 
395 |   let mut i_c = i.clone().map(|a| a.as_usize());
396 |   assert_eq!(i_c.nth_back(3), Some(2));
397 |   assert_eq!(i_c.nth_back(3), None);
398 |   assert_eq!(i_c.nth_back(3), None);
399 | 
400 |   let mut i_c = i.clone().map(|a| a.as_usize());
401 |   assert_eq!(i_c.nth_back(4), None);
402 |   assert_eq!(i_c.nth_back(4), None);
403 | }
404 | 
--------------------------------------------------------------------------------
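Below is a minimal usage sketch of the `VolBlock` API from `src/volblock.rs`. The element type, `Safe` access markers, block length, and the `0x0500_0000` base address are illustrative assumptions rather than values defined anywhere in this repository; real code must substitute the MMIO addresses of the target hardware.

```rust
use voladdress::{Safe, VolBlock};

// Hypothetical: 256 `u16` palette entries assumed to be memory-mapped at
// 0x0500_0000. The address and size are made up for illustration only.
const PALETTE: VolBlock<u16, Safe, Safe, 256> =
  unsafe { VolBlock::new(0x0500_0000) };

fn demo() {
  // `index` panics if out of bounds, then the returned `VolAddress` performs
  // a volatile write.
  PALETTE.index(0).write(0x7FFF);

  // `get` is the non-panicking variant.
  assert!(PALETTE.get(256).is_none());

  // Iterating a sub-range yields one `VolAddress` per element.
  for addr in PALETTE.iter_range(16..32) {
    addr.write(0);
  }
}
```

Because `VolBlock::new` is a `const unsafe fn`, blocks like this are typically declared once as constants describing the platform's memory map, after which all access goes through the `VolAddress` values the block hands out.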