├── src
│   ├── with_alloc
│   │   ├── mod.rs
│   │   ├── vecdeque.rs
│   │   └── alloc_ringbuffer.rs
│   ├── set_len_trait.rs
│   ├── with_const_generics.rs
│   ├── ringbuffer_trait.rs
│   └── lib.rs
├── .gitignore
├── tests
│   ├── compile-fail
│   │   ├── test_const_generic_array_zero_length.rs
│   │   └── test_const_generic_array_zero_length_new.rs
│   ├── compiletests.rs
│   └── conversions.rs
├── Cargo.toml
├── LICENSE
├── .github
│   └── workflows
│       ├── coverage.yml
│       └── rust.yml
├── README.md
└── benches
    └── bench.rs

--------------------------------------------------------------------------------
/src/with_alloc/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod alloc_ringbuffer;
2 | pub mod vecdeque;

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Rust
2 | /target
3 | **/*.rs.bk
4 | Cargo.lock
5 | benchmarks.txt
6 | 
7 | # Editors
8 | .idea
9 | *.iml
10 | *.swp
11 | 
12 | # Coverage
13 | cobertura.xml
14 | tarpaulin-report.html
15 | 
16 | # benchmarks
17 | .criterion
18 | 
19 | # floobits
20 | .floo
21 | .flooignore
22 | 

--------------------------------------------------------------------------------
/tests/compile-fail/test_const_generic_array_zero_length.rs:
--------------------------------------------------------------------------------
1 | extern crate ringbuffer;
2 | 
3 | use ringbuffer::ConstGenericRingBuffer;
4 | 
5 | fn main() {
6 |     let _ = ConstGenericRingBuffer::<i32, 0>::new();
7 |     //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::<i32, 0>::new::<0>`
8 |     // ringbuffer can't be zero length
9 | }

--------------------------------------------------------------------------------
/tests/compile-fail/test_const_generic_array_zero_length_new.rs:
--------------------------------------------------------------------------------
1 | extern crate ringbuffer;
2 | 
3 | use ringbuffer::{ConstGenericRingBuffer, RingBuffer};
4 | 
5 | fn main() {
6 |     let mut buf = ConstGenericRingBuffer::new::<0>();
7 |     //~^ note: the above error was encountered while instantiating `fn ringbuffer::ConstGenericRingBuffer::<i32, 0>::new::<0>`
8 |     // ringbuffer can't be zero length
9 |     let _ = buf.enqueue(5);
10 | }

--------------------------------------------------------------------------------
/tests/compiletests.rs:
--------------------------------------------------------------------------------
1 | extern crate compiletest_rs as compiletest;
2 | 
3 | use std::path::PathBuf;
4 | 
5 | #[cfg(test)]
6 | mod conversions;
7 | 
8 | fn run_mode(mode: &'static str) {
9 |     let mut config = compiletest::Config::default();
10 | 
11 |     config.mode = mode.parse().expect("Invalid mode");
12 |     config.src_base = PathBuf::from(format!("tests/{}", mode));
13 |     config.link_deps(); // Populate config.target_rustcflags with dependencies on the path
14 |     config.clean_rmeta(); // If your tests import the parent crate, this helps with E0464
15 | 
16 |     compiletest::run_tests(&config);
17 | }
18 | 
19 | #[test]
20 | #[cfg_attr(miri, ignore)]
21 | fn compile_test() {
22 |     run_mode("compile-fail");
23 | }

--------------------------------------------------------------------------------
keywords = ["ring", "cyclic", "circular", "buffer", "no-std"] 13 | categories = ["data-structures"] 14 | license = "MIT" 15 | 16 | [dev-dependencies] 17 | criterion = { version = "0.4.0", features = ["html_reports"] } 18 | compiletest_rs = "0.10.0" 19 | 20 | [features] 21 | default = ["alloc"] 22 | # disable the alloc based ringbuffer, to make RingBuffers work in no_alloc environments 23 | alloc = [] 24 | 25 | [[bench]] 26 | name = "bench" 27 | harness = false 28 | 29 | [profile.bench] 30 | opt-level = 3 31 | lto = true 32 | 33 | [profile.release] 34 | opt-level = 3 35 | lto = true 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Victor Roest and Jonathan Dönszelmann 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/set_len_trait.rs: -------------------------------------------------------------------------------- 1 | /// `SetLen` is a trait defining the unsafe `set_len` method 2 | /// on ringbuffers that support the operation. 3 | pub trait SetLen { 4 | /// Force the length of the ringbuffer to `new_len`. 5 | /// 6 | /// Note that downsizing will not call Drop on elements at `new_len..old_len`, 7 | /// potentially causing a memory leak. 8 | /// 9 | /// # Panics 10 | /// Panics if `new_len` is greater than the ringbuffer capacity. 11 | /// 12 | /// # Safety 13 | /// - Safe when `new_len <= old_len`. 14 | /// - Safe when `new_len > old_len` and all the elements at `old_len..new_len` are already initialized. 15 | unsafe fn set_len(&mut self, new_len: usize); 16 | } 17 | 18 | /// Implement `set_len` given a `readptr` and a `writeptr`. 19 | #[macro_export] 20 | macro_rules! 
/.github/workflows/coverage.yml:
--------------------------------------------------------------------------------
1 | name: Coverage
2 | 
3 | on:
4 |   pull_request:
5 |   push:
6 | 
7 | jobs:
8 |   coverage:
9 |     runs-on: ubuntu-latest
10 |     env:
11 |       CARGO_TERM_COLOR: always
12 |     steps:
13 |       - uses: actions/checkout@v3
14 |       - name: Install Rust
15 |         run: rustup update stable
16 |       - name: Install cargo-llvm-cov
17 |         uses: taiki-e/install-action@cargo-llvm-cov
18 |       - name: Generate code coverage
19 |         run: cargo llvm-cov --all-features --html
20 |       - name: Upload artifact
21 |         uses: actions/upload-pages-artifact@v3
22 |         with:
23 |           path: "./target/llvm-cov/html"
24 |       - name: Deploy to Netlify
25 |         uses: nwtgck/actions-netlify@v3.0
26 |         with:
27 |           publish-dir: './target/llvm-cov/html'
28 |           production-branch: main
29 |           github-token: ${{ secrets.GITHUB_TOKEN }}
30 |           deploy-message: "Deploy from GitHub Actions"
31 |           enable-pull-request-comment: true
32 |           enable-commit-comment: true
33 |           overwrites-pull-request-comment: true
34 |         env:
35 |           NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
36 |           NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
37 |         timeout-minutes: 1

--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
1 | name: Rust
2 | 
3 | on:
4 |   pull_request:
5 |   push:
6 | 
7 | env:
8 |   CARGO_TERM_COLOR: always
9 | 
10 | jobs:
11 |   lint:
12 |     name: Lint
13 |     runs-on: ubuntu-latest
14 |     steps:
15 |       - uses: actions/checkout@v3
16 |       - name: Install nightly toolchain with lint tools available
17 |         uses: actions-rs/toolchain@v1
18 |         with:
19 |           profile: minimal
20 |           toolchain: nightly
21 |           override: true
22 |           components: clippy, rustfmt
23 | 
24 |       - name: Run cargo fmt
25 |         uses: actions-rs/cargo@v1
26 |         with:
27 |           command: fmt
28 |           args: --all -- --check
29 | 
30 |       - uses: actions-rs/clippy-check@v1
31 |         with:
32 |           token: ${{ secrets.GITHUB_TOKEN }}
33 |           args: --all-features
34 |   test:
35 |     name: Test Stable
36 |     runs-on: ubuntu-latest
37 |     steps:
38 |       - uses: actions/checkout@v3
39 |       - name: Install stable toolchain
40 |         uses: actions-rs/toolchain@v1
41 |         with:
42 |           toolchain: stable
43 |           override: true
44 | 
45 |       - name: Run cargo test
46 |         uses: actions-rs/cargo@v1
47 |         with:
48 |           command: test
49 | 
50 |   test-beta:
51 |     name: Test Beta
52 |     runs-on: ubuntu-latest
53 | 
54 |     steps:
55 |       - uses: actions/checkout@v3
56 |       - name: Install beta toolchain
57 |         uses: actions-rs/toolchain@v1
58 |         with:
59 |           profile: minimal
60 |           toolchain: beta
61 |           override: true
62 | 
63 |       - name: Run cargo test
64 |         uses: actions-rs/cargo@v1
65 |         with:
66 |           command: test
67 |   test-nightly:
68 |     name: Test Nightly
69 |     runs-on: ubuntu-latest
70 |     steps:
71 |       - uses: actions/checkout@v3
72 | 
73 |       - name: Install nightly toolchain
74 |         uses: actions-rs/toolchain@v1
75 |         with:
76 |           toolchain: nightly
77 |           override: true
78 | 
79 |       - name: Run cargo test
80 |         uses: actions-rs/cargo@v1
81 |         with:
82 |           command: test
83 |           args: --all-features
84 | 
85 |   test-miri:
86 |     name: Test Miri
87 |     runs-on: ubuntu-latest
88 |     steps:
89 |       - uses: actions/checkout@v3
90 |       - name: Install Miri
91 |         run: |
92 |           rustup toolchain install nightly --component miri
93 |           rustup override set nightly
94 |           cargo miri setup
95 |       - name: Test with Miri
96 |         run: cargo miri test
97 | 
98 |   build-no-std:
99 |     name: Build no-std
100 |     runs-on: ubuntu-latest
101 |     steps:
102 |       - uses: actions/checkout@v2
103 | 
104 |       - name: Install stable no-std toolchain
105 |         uses: actions-rs/toolchain@v1
106 |         with:
107 |           toolchain: stable
108 |           target: thumbv7em-none-eabihf
109 |           override: true
110 | 
111 |       - name: Run cargo build
112 |         uses: actions-rs/cargo@v1
113 |         with:
114 |           command: build
115 |           args: --target thumbv7em-none-eabihf --all-features
116 | 
117 |   build-without-alloc:
118 |     name: Build no-std without alloc
119 |     runs-on: ubuntu-latest
120 |     steps:
121 |       - uses: actions/checkout@v2
122 | 
123 |       - name: Install stable no-std toolchain
124 |         uses: actions-rs/toolchain@v1
125 |         with:
126 |           toolchain: stable
127 |           target: thumbv7em-none-eabihf
128 |           override: true
129 | 
130 |       - name: Run cargo build
131 |         uses: actions-rs/cargo@v1
132 |         with:
133 |           command: build
134 |           args: --target thumbv7em-none-eabihf --no-default-features

--------------------------------------------------------------------------------
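The two `thumbv7em-none-eabihf` build jobs above exercise the crate's no_std support. For illustration, a downstream no_std crate (hypothetical; the function and target are not part of this repository) would depend on `ringbuffer = { version = "0.16", default-features = false }` and use only the stack-allocated buffer:

```rust
#![no_std]

use ringbuffer::{ConstGenericRingBuffer, RingBuffer};

// Without the `alloc` feature only `ConstGenericRingBuffer` is available;
// it lives entirely on the stack, so no allocator is needed.
fn last_two_samples(samples: &[u32]) -> (Option<u32>, Option<u32>) {
    let mut rb = ConstGenericRingBuffer::<u32, 2>::new();
    for &s in samples {
        // older samples are overwritten once the buffer is full
        let _ = rb.enqueue(s);
    }
    (rb.dequeue(), rb.dequeue())
}
```
--------------------------------------------------------------------------------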
/README.md:
--------------------------------------------------------------------------------
1 | # Ringbuffer
2 | ![Github Workflows](https://img.shields.io/github/actions/workflow/status/NULLx76/ringbuffer/rust.yml?style=for-the-badge)
3 | [![Docs.rs](https://img.shields.io/badge/docs.rs-ringbuffer-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K)](https://docs.rs/ringbuffer)
4 | [![Crates.io](https://img.shields.io/crates/v/ringbuffer?logo=rust&style=for-the-badge)](https://crates.io/crates/ringbuffer)
5 | 
6 | The ringbuffer crate provides safe fixed size circular buffers (ringbuffers) in Rust.
7 | 
8 | Implementations for three kinds of ringbuffers, with a mostly similar API, are provided:
9 | 
10 | | type                           | description |
11 | |--------------------------------|-------------|
12 | | [`AllocRingBuffer`][1]         | Ringbuffer allocated on the heap at runtime. This ringbuffer is still fixed size. This requires the alloc feature. |
13 | | [`GrowableAllocRingBuffer`][2] | Ringbuffer allocated on the heap at runtime. This ringbuffer can grow in size, and is implemented as an `alloc::VecDeque` internally. This requires the alloc feature. |
14 | | [`ConstGenericRingBuffer`][3]  | Ringbuffer which uses const generics to allocate on the stack. |
15 | 
16 | All of these ringbuffers also implement the [RingBuffer][4] trait for their shared API surface.
17 | 
18 | [1]: https://docs.rs/ringbuffer/latest/ringbuffer/struct.AllocRingBuffer.html
19 | [2]: https://docs.rs/ringbuffer/latest/ringbuffer/struct.GrowableAllocRingBuffer.html
20 | [3]: https://docs.rs/ringbuffer/latest/ringbuffer/struct.ConstGenericRingBuffer.html
21 | [4]: https://docs.rs/ringbuffer/latest/ringbuffer/trait.RingBuffer.html
22 | 
23 | MSRV: Rust 1.79
24 | 
25 | # Usage
26 | 
27 | ```rust
28 | use ringbuffer::{AllocRingBuffer, RingBuffer};
29 | 
30 | let mut buffer = AllocRingBuffer::new(2);
31 | 
32 | // First entry of the buffer is now 5.
33 | buffer.enqueue(5);
34 | 
35 | // The last item we enqueued is 5
36 | assert_eq!(buffer.back(), Some(&5));
37 | 
38 | // Second entry is now 42.
39 | buffer.enqueue(42);
40 | assert_eq!(buffer.peek(), Some(&5));
41 | assert!(buffer.is_full());
42 | 
43 | // Because capacity is reached, the next enqueue will overwrite the first item of the buffer.
44 | buffer.enqueue(1);
45 | assert_eq!(buffer.to_vec(), vec![42, 1]);
46 | ```
47 | 
48 | # Features
49 | 
50 | | name  | default | description |
51 | |-------|---------|-------------|
52 | | alloc | ✓       | Disable this feature to remove the dependency on alloc. Disabling this feature makes `ringbuffer` `no_std`. |
53 | 
54 | # License
55 | 
56 | Licensed under MIT License
57 | 

--------------------------------------------------------------------------------
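A few more operations from the same `RingBuffer` trait used in the README example above, shown here for illustration (the behavior matches the trait implementations later in this dump): `dequeue` removes the oldest element, and on a full buffer `enqueue` returns the element it overwrote:

```rust
use ringbuffer::{AllocRingBuffer, RingBuffer};

let mut buffer = AllocRingBuffer::new(2);
assert_eq!(buffer.enqueue(1), None);
assert_eq!(buffer.enqueue(2), None);

// dequeue removes the oldest element
assert_eq!(buffer.dequeue(), Some(1));

// once full, enqueue returns the value it overwrote
assert_eq!(buffer.enqueue(3), None);
assert_eq!(buffer.enqueue(4), Some(2));

// iteration runs from oldest to newest
assert_eq!(buffer.iter().copied().collect::<Vec<_>>(), vec![3, 4]);
```
--------------------------------------------------------------------------------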
/tests/conversions.rs:
--------------------------------------------------------------------------------
1 | extern crate alloc;
2 | 
3 | use alloc::collections::{LinkedList, VecDeque};
4 | use alloc::string::ToString;
5 | use core::ops::Deref;
6 | use ringbuffer::RingBuffer;
7 | use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer};
8 | use std::vec;
9 | 
10 | macro_rules! convert_test {
11 |     ($name: ident: $from: expr => $to: ty) => {
12 |         #[test]
13 |         fn $name() {
14 |             let a = $from;
15 | 
16 |             let mut b: $to = a.into();
17 |             assert_eq!(b.to_vec(), vec!['1', '2']);
18 |             b.enqueue('3');
19 |             assert_eq!(b, b);
20 |         }
21 |     };
22 | }
23 | 
24 | macro_rules! convert_tests {
25 |     (
26 |         [$($name: ident: $from: expr),* $(,)?]
27 |         => $to: ty
28 |     ) => {
29 |         $(
30 |             convert_test!($name: $from => $to);
31 |         )*
32 |     };
33 | }
34 | 
35 | convert_tests!(
36 |     [
37 |         alloc_from_vec: vec!['1', '2'],
38 |         alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l},
39 |         alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l},
40 |         alloc_from_str: "12".to_string(),
41 |         alloc_from_str_slice: "12",
42 |         alloc_from_slice: {let a: &[char] = &['1', '2']; a},
43 |         alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a},
44 |         alloc_from_arr: {let a: [char; 2] = ['1', '2']; a},
45 | 
46 |         alloc_from_cgrb: { ConstGenericRingBuffer::from(['1', '2'])},
47 |         alloc_from_garb: { GrowableAllocRingBuffer::from(['1', '2'])},
48 |     ] => AllocRingBuffer::<_>
49 | );
50 | 
51 | convert_tests!(
52 |     [
53 |         growable_alloc_from_vec: vec!['1', '2'],
54 |         growable_alloc_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l},
55 |         growable_alloc_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l},
56 |         growable_alloc_from_str: "12".to_string(),
57 |         growable_alloc_from_str_slice: "12",
58 |         growable_alloc_from_slice: {let a: &[char] = &['1', '2']; a},
59 |         growable_alloc_from_const_slice: {let a: &[char; 2] = &['1', '2']; a},
60 |         growable_alloc_from_arr: {let a: [char; 2] = ['1', '2']; a},
61 | 
62 |         growable_alloc_from_cgrb: { ConstGenericRingBuffer::from(['1', '2'])},
63 |         growable_alloc_from_arb: { AllocRingBuffer::from(['1', '2'])},
64 |     ] => GrowableAllocRingBuffer::<_>
65 | );
66 | 
67 | convert_tests!(
68 |     [
69 |         const_from_vec: vec!['1', '2'],
70 |         const_from_ll: {let mut l = LinkedList::new(); l.push_back('1'); l.push_back('2'); l},
71 |         const_from_vd: {let mut l = VecDeque::new(); l.push_back('1'); l.push_back('2'); l},
72 |         const_from_str: "12".to_string(),
73 |         const_from_str_slice: "12",
74 |         const_from_slice: {let a: &[char] = &['1', '2']; a},
75 |         const_from_const_slice: {let a: &[char; 2] = &['1', '2']; a},
76 |         const_from_arr: {let a: [char; 2] = ['1', '2']; a},
77 | 
78 |         const_from_garb: { GrowableAllocRingBuffer::from(['1', '2'])},
79 |         const_from_arb: { AllocRingBuffer::from(['1', '2'])},
80 |     ] => ConstGenericRingBuffer::<_, 2>
81 | );
82 | 
83 | #[test]
84 | fn test_extra_conversions_growable() {
85 |     let a: &mut [i32; 2] = &mut [1, 2];
86 |     let a = GrowableAllocRingBuffer::from(a);
87 |     assert_eq!(a.to_vec(), vec![1, 2]);
88 | 
89 |     let a: &mut [i32] = &mut [1, 2];
90 |     let a = GrowableAllocRingBuffer::from(a);
91 |     assert_eq!(a.to_vec(), vec![1, 2]);
92 | 
93 |     let mut b = VecDeque::<i32>::new();
94 |     b.push_back(1);
95 |     b.push_back(2);
96 |     assert_eq!(a.deref(), &b);
97 |     assert_eq!(a.as_ref(), &b);
98 | }
99 | 
100 | #[test]
101 | fn test_extra_conversions_alloc() {
102 |     let a: &mut [i32; 2] = &mut [1, 2];
103 |     let a = AllocRingBuffer::from(a);
104 |     assert_eq!(a.to_vec(), vec![1, 2]);
105 | 
106 |     let a: &mut [i32] = &mut [1, 2];
107 |     let a = AllocRingBuffer::from(a);
108 |     assert_eq!(a.to_vec(), vec![1, 2]);
109 | }
110 | 
111 | #[test]
112 | fn test_extra_conversions_const() {
113 |     let a: &mut [i32; 2] = &mut [1, 2];
114 |     let a = ConstGenericRingBuffer::<_, 2>::from(a);
115 |     assert_eq!(a.to_vec(), vec![1, 2]);
116 | 
117 |     let a: &mut [i32] = &mut [1, 2];
118 |     let a = ConstGenericRingBuffer::<_, 2>::from(a);
119 |     assert_eq!(a.to_vec(), vec![1, 2]);
120 | }
121 | 
122 | #[test]
123 | fn test_const_generic_new_parameter() {
124 |     // Can we specify size only on the method?
125 |     let mut a = ConstGenericRingBuffer::new::<2>();
126 |     let _ = a.enqueue(5);
127 | 
128 |     // Can we specify size in both positions?
129 |     let mut a = ConstGenericRingBuffer::<i32, 50>::new::<50>();
130 |     let _ = a.enqueue(5);
131 | 
132 |     // Can we specify size only on the struct?
133 |     let mut a = ConstGenericRingBuffer::<i32, 2>::new();
134 |     let _ = a.enqueue(5);
135 | }

--------------------------------------------------------------------------------
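For reference, here is approximately what one instance of the `convert_test!` macro above expands to; the concrete name and input are taken from the `alloc_from_vec` entry, so this block is illustrative rather than part of the test file:

```rust
#[test]
fn alloc_from_vec() {
    let a = vec!['1', '2'];

    // `$to` was `AllocRingBuffer::<_>`, so `a.into()` resolves through the
    // `From<Vec<T>>` impl in src/with_alloc/alloc_ringbuffer.rs.
    let mut b: AllocRingBuffer<_> = a.into();
    assert_eq!(b.to_vec(), vec!['1', '2']);
    b.enqueue('3');
    assert_eq!(b, b);
}
```
--------------------------------------------------------------------------------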
/src/with_alloc/vecdeque.rs:
--------------------------------------------------------------------------------
1 | use crate::ringbuffer_trait::{RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator};
2 | use crate::{AllocRingBuffer, RingBuffer};
3 | use alloc::collections::VecDeque;
4 | use core::ops::{Deref, DerefMut, Index, IndexMut};
5 | 
6 | /// A growable ringbuffer. Once capacity is reached, the size is doubled.
7 | /// Wrapper of the built-in [`VecDeque`] struct.
8 | ///
9 | /// The reason this is a wrapper, is that we want `RingBuffers` to implement `Index`,
10 | /// which we cannot do for remote types like `VecDeque`
11 | #[derive(Debug, Clone, PartialEq, Eq)]
12 | pub struct GrowableAllocRingBuffer<T>(VecDeque<T>);
13 | 
14 | impl<T, const N: usize> From<[T; N]> for GrowableAllocRingBuffer<T> {
15 |     fn from(value: [T; N]) -> Self {
16 |         Self(VecDeque::from(value))
17 |     }
18 | }
19 | 
20 | impl<T> From<VecDeque<T>> for GrowableAllocRingBuffer<T> {
21 |     fn from(value: VecDeque<T>) -> Self {
22 |         Self(value)
23 |     }
24 | }
25 | 
26 | impl<T: Clone, const N: usize> From<&[T; N]> for GrowableAllocRingBuffer<T> {
27 |     // the cast here is actually not trivial
28 |     #[allow(trivial_casts)]
29 |     fn from(value: &[T; N]) -> Self {
30 |         Self::from(value as &[T])
31 |     }
32 | }
33 | 
34 | impl<T: Clone> From<&[T]> for GrowableAllocRingBuffer<T> {
35 |     fn from(value: &[T]) -> Self {
36 |         let mut rb = Self::new();
37 |         rb.extend(value.iter().cloned());
38 |         rb
39 |     }
40 | }
41 | 
42 | impl<T> From<AllocRingBuffer<T>> for GrowableAllocRingBuffer<T> {
43 |     fn from(mut v: AllocRingBuffer<T>) -> GrowableAllocRingBuffer<T> {
44 |         let mut rb = GrowableAllocRingBuffer::new();
45 |         rb.extend(v.drain());
46 |         rb
47 |     }
48 | }
49 | 
50 | impl<T: Clone> From<&mut [T]> for GrowableAllocRingBuffer<T> {
51 |     fn from(value: &mut [T]) -> Self {
52 |         Self::from(&*value)
53 |     }
54 | }
55 | 
56 | impl<T: Clone, const CAP: usize> From<&mut [T; CAP]> for GrowableAllocRingBuffer<T> {
57 |     fn from(value: &mut [T; CAP]) -> Self {
58 |         Self::from(value.clone())
59 |     }
60 | }
61 | 
62 | impl<T> From<alloc::vec::Vec<T>> for GrowableAllocRingBuffer<T> {
63 |     fn from(value: alloc::vec::Vec<T>) -> Self {
64 |         let mut res = GrowableAllocRingBuffer::new();
65 |         res.extend(value);
66 |         res
67 |     }
68 | }
69 | 
70 | impl<T> From<alloc::collections::LinkedList<T>> for GrowableAllocRingBuffer<T> {
71 |     fn from(value: alloc::collections::LinkedList<T>) -> Self {
72 |         let mut res = GrowableAllocRingBuffer::new();
73 |         res.extend(value);
74 |         res
75 |     }
76 | }
77 | 
78 | impl From<alloc::string::String> for GrowableAllocRingBuffer<char> {
79 |     fn from(value: alloc::string::String) -> Self {
80 |         let mut res = GrowableAllocRingBuffer::new();
81 |         res.extend(value.chars());
82 |         res
83 |     }
84 | }
85 | 
86 | impl From<&str> for GrowableAllocRingBuffer<char> {
87 |     fn from(value: &str) -> Self {
88 |         let mut res = GrowableAllocRingBuffer::new();
89 |         res.extend(value.chars());
90 |         res
91 |     }
92 | }
93 | 
94 | impl<T, const CAP: usize> From<crate::ConstGenericRingBuffer<T, CAP>>
95 |     for GrowableAllocRingBuffer<T>
96 | {
97 |     fn from(mut value: crate::ConstGenericRingBuffer<T, CAP>) -> Self {
98 |         let mut res = GrowableAllocRingBuffer::new();
99 |         res.extend(value.drain());
100 |         res
101 |     }
102 | }
103 | 
104 | impl<T> Deref for GrowableAllocRingBuffer<T> {
105 |     type Target = VecDeque<T>;
106 | 
107 |     fn deref(&self) -> &Self::Target {
108 |         &self.0
109 |     }
110 | }
111 | 
112 | impl<T> DerefMut for GrowableAllocRingBuffer<T> {
113 |     fn deref_mut(&mut self) -> &mut Self::Target {
114 |         &mut self.0
115 |     }
116 | }
117 | 
118 | impl<T> Default for GrowableAllocRingBuffer<T> {
119 |     fn default() -> Self {
120 |         Self::new()
121 |     }
122 | }
123 | 
124 | impl<T> AsRef<VecDeque<T>> for GrowableAllocRingBuffer<T> {
125 |     fn as_ref(&self) -> &VecDeque<T> {
126 |         &self.0
127 |     }
128 | }
129 | 
130 | impl<T> GrowableAllocRingBuffer<T> {
131 |     /// Creates an empty ringbuffer.
132 |     #[must_use]
133 |     pub fn new() -> Self {
134 |         Self(VecDeque::new())
135 |     }
136 | 
137 |     /// Creates an empty ringbuffer with space for at least capacity elements.
138 |     #[must_use]
139 |     pub fn with_capacity(capacity: usize) -> Self {
140 |         Self(VecDeque::with_capacity(capacity))
141 |     }
142 | }
143 | 
144 | impl<T> IntoIterator for GrowableAllocRingBuffer<T> {
145 |     type Item = T;
146 |     type IntoIter = RingBufferIntoIterator<T, GrowableAllocRingBuffer<T>>;
147 | 
148 |     fn into_iter(self) -> Self::IntoIter {
149 |         RingBufferIntoIterator::new(self)
150 |     }
151 | }
152 | 
153 | impl<'a, T> IntoIterator for &'a GrowableAllocRingBuffer<T> {
154 |     type Item = &'a T;
155 |     type IntoIter = RingBufferIterator<'a, T, GrowableAllocRingBuffer<T>>;
156 | 
157 |     fn into_iter(self) -> Self::IntoIter {
158 |         self.iter()
159 |     }
160 | }
161 | 
162 | impl<'a, T> IntoIterator for &'a mut GrowableAllocRingBuffer<T> {
163 |     type Item = &'a mut T;
164 |     type IntoIter = RingBufferMutIterator<'a, T, GrowableAllocRingBuffer<T>>;
165 | 
166 |     fn into_iter(self) -> Self::IntoIter {
167 |         self.iter_mut()
168 |     }
169 | }
170 | 
171 | unsafe impl<T> RingBuffer<T> for GrowableAllocRingBuffer<T> {
172 |     unsafe fn ptr_len(rb: *const Self) -> usize {
173 |         (*rb).0.len()
174 |     }
175 | 
176 |     #[inline]
177 |     unsafe fn ptr_capacity(rb: *const Self) -> usize {
178 |         (*rb).0.capacity()
179 |     }
180 |     #[inline]
181 |     unsafe fn ptr_buffer_size(rb: *const Self) -> usize {
182 |         (*rb).0.capacity()
183 |     }
184 | 
185 |     fn dequeue(&mut self) -> Option<T> {
186 |         self.pop_front()
187 |     }
188 | 
189 |     fn enqueue(&mut self, value: T) -> Option<T> {
190 |         self.push_back(value);
191 |         None
192 |     }
193 | 
194 |     fn fill_with<F: FnMut() -> T>(&mut self, mut f: F) {
195 |         self.clear();
196 |         let initial_capacity = self.0.capacity();
197 |         for _ in 0..initial_capacity {
198 |             self.0.push_back(f());
199 |         }
200 | 
201 |         debug_assert_eq!(initial_capacity, self.0.capacity());
202 |     }
203 | 
204 |     fn clear(&mut self) {
205 |         self.0.clear();
206 |     }
207 | 
208 |     fn get(&self, index: usize) -> Option<&T> {
209 |         if self.is_empty() {
210 |             None
211 |         } else {
212 |             self.0.get(crate::mask_modulo(self.0.len(), index))
213 |         }
214 |     }
215 | 
216 |     fn get_signed(&self, index: isize) -> Option<&T> {
217 |         if self.is_empty() {
218 |             None
219 |         } else if index >= 0 {
220 |             self.0
221 |                 .get(crate::mask_modulo(self.0.len(), index.unsigned_abs()))
222 |         } else {
223 |             let positive_index = index.unsigned_abs() - 1;
224 |             let masked = crate::mask_modulo(self.0.len(), positive_index);
225 |             let index = self.0.len() - 1 - masked;
226 | 
227 |             self.0.get(index)
228 |         }
229 |     }
230 | 
231 |     unsafe fn ptr_get_mut_signed(rb: *mut Self, index: isize) -> Option<*mut T> {
232 |         #[allow(trivial_casts)]
233 |         if RingBuffer::ptr_len(rb) == 0 {
234 |             None
235 |         } else if index >= 0 {
236 |             (*rb).0.get_mut(index.unsigned_abs())
237 |         } else {
238 |             let len = Self::ptr_len(rb);
239 | 
240 |             let positive_index = index.unsigned_abs() - 1;
241 |             let masked = crate::mask_modulo(len, positive_index);
242 |             let index = len - 1 - masked;
243 | 
244 |             (*rb).0.get_mut(index)
245 |         }
246 |         .map(|i| i as *mut T)
247 |     }
248 | 
249 |     unsafe fn ptr_get_mut(rb: *mut Self, index: usize) -> Option<*mut T> {
250 |         #[allow(trivial_casts)]
251 |         if RingBuffer::ptr_len(rb) == 0 {
252 |             None
253 |         } else {
254 |             (*rb).0.get_mut(index)
255 |         }
256 |         .map(|i| i as *mut T)
257 |     }
258 | 
259 |     unsafe fn ptr_copy_to_slice(rb: *const Self, offset: usize, dst: &mut [T])
260 |     where
261 |         T: Copy,
262 |     {
263 |         let len = Self::ptr_len(rb);
264 |         let dst_len = dst.len();
265 |         assert!(
266 |             (offset == 0 && len == 0) || offset < len,
267 |             "offset ({offset}) is out of bounds for the current buffer length ({len})"
268 |         );
269 |         assert!(len - offset == dst_len, "destination slice length ({dst_len}) doesn't match buffer length ({len}) when considering the specified offset ({offset})");
270 | 
271 |         if dst_len == 0 {
272 |             return;
273 |         }
274 | 
275 |         let (front, back) = (*rb).0.as_slices();
276 |         let first_len = front.len();
277 | 
278 |         if offset < first_len {
279 |             let n_in_first = first_len - offset;
280 |             dst[..n_in_first].copy_from_slice(&front[offset..]);
281 | 
282 |             if n_in_first < dst_len {
283 |                 dst[n_in_first..].copy_from_slice(&back[..dst_len - n_in_first]);
284 |             }
285 |         } else {
286 |             dst.copy_from_slice(&back[offset - first_len..]);
287 |         }
288 |     }
289 | 
290 |     unsafe fn ptr_copy_from_slice(rb: *mut Self, offset: usize, src: &[T])
291 |     where
292 |         T: Copy,
293 |     {
294 |         let len = Self::ptr_len(rb);
295 |         let src_len = src.len();
296 |         assert!(
297 |             (offset == 0 && len == 0) || offset < len,
298 |             "offset ({offset}) is out of bounds for the current buffer length ({len})"
299 |         );
300 |         assert!(len - offset == src_len, "source slice length ({src_len}) doesn't match buffer length ({len}) when considering the specified offset ({offset})");
301 | 
302 |         if src_len == 0 {
303 |             return;
304 |         }
305 | 
306 |         let (front, back) = (*rb).0.as_mut_slices();
307 |         let first_len = front.len();
308 | 
309 |         if offset < first_len {
310 |             let n_in_first = first_len - offset;
311 |             front[offset..].copy_from_slice(&src[..n_in_first]);
312 | 
313 |             if n_in_first < src_len {
314 |                 back[..src_len - n_in_first].copy_from_slice(&src[n_in_first..]);
315 |             }
316 |         } else {
317 |             back[offset - first_len..].copy_from_slice(src);
318 |         }
319 |     }
320 | }
321 | 
322 | impl<T> Extend<T> for GrowableAllocRingBuffer<T> {
323 |     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
324 |         self.0.extend(iter);
325 |     }
326 | }
327 | 
328 | impl<T> Index<usize> for GrowableAllocRingBuffer<T> {
329 |     type Output = T;
330 | 
331 |     fn index(&self, index: usize) -> &Self::Output {
332 |         self.get(index).expect("index out of bounds")
333 |     }
334 | }
335 | 
336 | impl<T> IndexMut<usize> for GrowableAllocRingBuffer<T> {
337 |     fn index_mut(&mut self, index: usize) -> &mut Self::Output {
338 |         self.get_mut(index).expect("index out of bounds")
339 |     }
340 | }
341 | 
342 | impl<T> FromIterator<T> for GrowableAllocRingBuffer<T> {
343 |     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
344 |         Self(VecDeque::from_iter(iter))
345 |     }
346 | }

--------------------------------------------------------------------------------
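A short sketch (illustrative, not part of the test suite) contrasting `GrowableAllocRingBuffer` with the fixed-size buffers: `enqueue` grows the deque instead of overwriting, so it always returns `None`, and `get_signed` indexes from the back with negative indices:

```rust
use ringbuffer::{GrowableAllocRingBuffer, RingBuffer};

let mut rb = GrowableAllocRingBuffer::new();

// Unlike AllocRingBuffer/ConstGenericRingBuffer, enqueue never displaces
// an element; the underlying VecDeque simply grows.
for i in 0..100 {
    assert_eq!(rb.enqueue(i), None);
}
assert_eq!(rb.len(), 100);

// Negative indices count back from the most recently enqueued element.
assert_eq!(rb.get_signed(-1), Some(&99));
assert_eq!(rb.dequeue(), Some(0));
```
--------------------------------------------------------------------------------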
/benches/bench.rs:
--------------------------------------------------------------------------------
1 | #![feature(coverage_attribute)]
2 | #![coverage(off)]
3 | use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion};
4 | use ringbuffer::{AllocRingBuffer, ConstGenericRingBuffer, RingBuffer, SetLen};
5 | 
6 | fn benchmark_push<T: RingBuffer<i32>, F: Fn() -> T>(b: &mut Bencher, new: F) {
7 |     b.iter(|| {
8 |         let mut rb = new();
9 | 
10 |         for i in 0..1_000_000 {
11 |             rb.enqueue(i);
12 |             black_box(());
13 |         }
14 | 
15 |         rb
16 |     })
17 | }
18 | 
19 | fn benchmark_push_dequeue<T: RingBuffer<i32>, F: Fn() -> T>(b: &mut Bencher, new: F) {
20 |     b.iter(|| {
21 |         let mut rb = new();
22 | 
23 |         for _i in 0..100_000 {
24 |             let _ = rb.enqueue(1);
25 |             black_box(());
26 |             let _ = rb.enqueue(2);
27 |             black_box(());
28 | 
29 |             assert_eq!(black_box(rb.dequeue()), Some(1));
30 |             assert_eq!(black_box(rb.dequeue()), Some(2));
31 | 
32 |             let _ = rb.enqueue(1);
33 |             black_box(());
34 |             let _ = rb.enqueue(2);
35 |             black_box(());
36 | 
37 |             assert_eq!(black_box(rb.dequeue()), Some(1));
38 |             assert_eq!(black_box(rb.dequeue()), Some(2));
39 | 
40 |             let _ = rb.enqueue(1);
41 |             black_box(());
42 |             let _ = rb.enqueue(2);
43 |             black_box(());
44 | 
45 |             assert_eq!(black_box(rb.get_signed(-1)), Some(&2));
46 |             assert_eq!(black_box(rb.get_signed(-2)), Some(&1));
47 |         }
48 | 
49 |         rb
50 |     })
51 | }
52 | 
53 | fn benchmark_various<T: RingBuffer<i32>, F: Fn() -> T>(b: &mut Bencher, new: F) {
54 |     b.iter(|| {
55 |         let mut rb = new();
56 | 
57 |         for i in 0..100_000 {
58 |             rb.enqueue(i);
59 |             black_box(());
60 |             black_box(rb.back());
61 |         }
62 | 
63 |         rb
64 |     })
65 | }
66 | 
67 | fn benchmark_skip<T: RingBuffer<i32>, F: Fn() -> T>(b: &mut Bencher, new: F) {
68 |     let mut rb = new();
69 |     rb.fill(9);
70 |     b.iter(|| {
71 |         for i in 0..rb.len() {
72 |             assert_eq!(rb.iter().skip(i).next(), Some(&9));
73 |         }
74 |     })
75 | }
76 | 
77 | fn benchmark_copy_to_slice_vs_extend<T: RingBuffer<i32>, F: Fn() -> T>(
78 |     rb_size: usize,
79 |     rb_type: &str,
80 |     fn_name: &str,
81 |     c: &mut Criterion,
82 |     new: F,
83 | ) {
84 |     let mut group = c.benchmark_group(format!("{fn_name}({rb_type}, {rb_size})"));
85 |     let mut output = vec![0; rb_size];
86 |     group.bench_function(format!("CopyTo({rb_type}; {rb_size})"), |b| {
87 |         let mut rb = new();
88 |         rb.fill(9);
89 |         // making sure the read/write pointers wrap around
90 |         for _ in 0..rb_size / 2 {
91 |             let _ = rb.dequeue();
92 |             let _ = rb.enqueue(9);
93 |         }
94 |         b.iter(|| {
95 |             rb.copy_to_slice(0, &mut output);
96 |             assert_eq!(output[output.len() / 2], 9);
97 |             assert_eq!(output.len(), rb_size);
98 |         })
99 |     });
100 |     let mut output: Vec<i32> = Vec::with_capacity(rb_size);
101 |     group.bench_function(format!("ExtendVec({rb_type}; {rb_size})"), |b| {
102 |         let mut rb = new();
103 |         rb.fill(9);
104 |         // making sure the read/write pointers wrap around
105 |         for _ in 0..rb_size / 2 {
106 |             let _ = rb.dequeue();
107 |             let _ = rb.enqueue(9);
108 |         }
109 |         b.iter(|| {
110 |             unsafe { output.set_len(0) };
111 |             output.extend(rb.iter());
112 |             assert_eq!(output[output.len() / 2], 9);
113 |             assert_eq!(output.len(), rb_size);
114 |         })
115 |     });
116 |     group.finish();
117 | }
118 | 
119 | fn benchmark_copy_from_slice_vs_extend<T: RingBuffer<i32> + SetLen, F: Fn() -> T>(
120 |     rb_size: usize,
121 |     rb_type: &str,
122 |     fn_name: &str,
123 |     c: &mut Criterion,
124 |     new: F,
125 | ) {
126 |     let mut group = c.benchmark_group(format!("{fn_name}({rb_type}, {rb_size})"));
127 |     let input = vec![9; rb_size];
128 |     group.bench_function(format!("CopyFrom({rb_type}; {rb_size})"), |b| {
129 |         let mut rb = new();
130 |         rb.fill(0);
131 |         // making sure the read/write pointers wrap around
132 |         for _ in 0..rb_size / 2 {
133 |             let _ = rb.dequeue();
134 |             let _ = rb.enqueue(0);
135 |         }
136 |         b.iter(|| {
137 |             rb.copy_from_slice(0, &input);
138 |             assert_eq!(rb[rb.len() / 2], 9);
139 |             assert_eq!(rb.len(), rb_size);
140 |         })
141 |     });
142 |     group.bench_function(format!("ExtendRb({rb_type}; {rb_size})"), |b| {
143 |         let mut rb = new();
144 |         // making sure the read/write pointers wrap around
145 |         for _ in 0..rb_size / 2 {
146 |             let _ = rb.dequeue();
147 |             let _ = rb.enqueue(0);
148 |         }
149 |         b.iter(|| {
150 |             unsafe { rb.set_len(0) };
151 |             rb.extend(input.iter().copied());
152 |             assert_eq!(rb[rb.len() / 2], 9);
153 |             assert_eq!(rb.len(), rb_size);
154 |         })
155 |     });
156 |     group.finish();
157 | }
158 | 
159 | macro_rules! generate_benches {
160 |     (called, $c: tt, $rb: tt, $ty: tt, $fn: tt, $bmfunc: tt, $($i:tt),*) => {
161 |         $(
162 |             $c.bench_function(&format!("{} {} 1M capacity {}", stringify!($rb), stringify!($bmfunc), stringify!($i)), |b| $bmfunc(b, || {
163 |                 $rb::<$ty>::$fn($i)
164 |             }));
165 |         )*
166 |     };
167 |     (non_power_two, $c: tt, $rb: tt, $ty: tt, $fn: tt, $bmfunc: tt, $($i:tt),*) => {
168 |         $(
169 |             $c.bench_function(&format!("{} {} 1M capacity not power of two {}", stringify!($rb), stringify!($bmfunc), stringify!($i)), |b| $bmfunc(b, || {
170 |                 $rb::<$ty>::$fn($i)
171 |             }));
172 |         )*
173 |     };
174 | 
175 |     (typed, $c: tt, $rb: tt, $ty: tt, $fn: tt, $bmfunc: tt, $($i:tt),*) => {
176 |         $(
177 |             $c.bench_function(&format!("{} {} 1M capacity {}", stringify!($rb), stringify!($bmfunc) ,stringify!($i)), |b| $bmfunc(b, || {
178 |                 $rb::<$ty, $i>::$fn()
179 |             }));
180 |         )*
181 |     };
182 | 
183 |     (compare, $c: tt, $rb: tt, $ty: tt, $fn: tt, $bmfunc: tt, $($i:tt),*) => {
184 |         $(
185 |             $bmfunc($i, stringify!($rb), stringify!($bmfunc), $c, || {
186 |                 $rb::<$ty>::$fn($i)
187 |             });
188 |         )*
189 |     };
190 | 
191 |     (compare_typed, $c: tt, $rb: tt, $ty: tt, $fn: tt, $bmfunc: tt, $($i:tt),*) => {
192 |         $(
193 |             $bmfunc($i, stringify!($rb), stringify!($bmfunc), $c, || {
194 |                 $rb::<$ty, $i>::$fn()
195 |             });
196 |         )*
197 |     };
198 | }
199 | 
200 | fn criterion_benchmark(c: &mut Criterion) {
201 |     // TODO: Improve benchmarks
202 |     // * What are representative operations
203 |     // * Make sure it's accurate
204 |     // * more general benchmarks but preferably fewer/quicker
205 | 
206 |     generate_benches![
207 |         called,
208 |         c,
209 |         AllocRingBuffer,
210 |         i32,
211 |         new,
212 |         benchmark_push,
213 |         16,
214 |         1024,
215 |         4096,
216 |         8192
217 |     ];
218 |     generate_benches![
219 |         typed,
220 |         c,
221 |         ConstGenericRingBuffer,
222 |         i32,
223 |         new,
224 |         benchmark_push,
225 |         16,
226 |         1024,
227 |         4096,
228 |         8192
229 |     ];
230 |     generate_benches![
231 |         called,
232 |         c,
233 |         AllocRingBuffer,
234 |         i32,
235 |         new,
236 |         benchmark_various,
237 |         16,
238 |         1024,
239 |         4096,
240 |         8192
241 |     ];
242 |     generate_benches![
243 |         typed,
244 |         c,
245 |         ConstGenericRingBuffer,
246 |         i32,
247 |         new,
248 |         benchmark_various,
249 |         16,
250 |         1024,
251 |         4096,
252 |         8192
253 |     ];
254 |     generate_benches![
255 |         called,
256 |         c,
257 |         AllocRingBuffer,
258 |         i32,
259 |         new,
260 |         benchmark_push_dequeue,
261 |         16,
262 |         1024,
263 |         4096,
264 |         8192
265 |     ];
266 |     generate_benches![
267 |         typed,
268 |         c,
269 |         ConstGenericRingBuffer,
270 |         i32,
271 |         new,
272 |         benchmark_push_dequeue,
273 |         16,
274 |         1024,
275 |         4096,
276 |         8192
277 |     ];
278 |     generate_benches![
279 |         non_power_two,
280 |         c,
281 |         AllocRingBuffer,
282 |         i32,
283 |         new,
284 |         benchmark_various,
285 |         16,
286 |         17,
287 |         1024,
288 |         4096,
289 |         8192,
290 |         8195
291 |     ];
292 |     generate_benches![
293 |         typed,
294 |         c,
295 |         ConstGenericRingBuffer,
296 |         i32,
297 |         new,
298 |         benchmark_skip,
299 |         16,
300 |         1024,
301 |         4096,
302 |         8192
303 |     ];
304 |     generate_benches![
305 |         called,
306 |         c,
307 |         AllocRingBuffer,
308 |         i32,
309 |         new,
310 |         benchmark_skip,
311 |         16,
312 |         17,
313 |         1024,
314 |         4096,
315 |         8192,
316 |         8195
317 |     ];
318 |     generate_benches![
319 |         compare,
320 |         c,
321 |         AllocRingBuffer,
322 |         i32,
323 |         new,
324 |         benchmark_copy_to_slice_vs_extend,
325 |         16,
326 |         1024,
327 |         4096,
328 |         8192,
329 |         1_000_000,
330 |         1_048_576
331 |     ];
332 |     generate_benches![
333 |         compare_typed,
334 |         c,
335 |         ConstGenericRingBuffer,
336 |         i32,
337 |         new,
338 |         benchmark_copy_to_slice_vs_extend,
339 |         16,
340 |         1024,
341 |         4096,
342 |         8192,
343 |         1_000_000,
344 |         1_048_576
345 |     ];
346 |     generate_benches![
347 |         compare,
348 |         c,
349 |         AllocRingBuffer,
350 |         i32,
351 |         new,
352 |         benchmark_copy_from_slice_vs_extend,
353 |         16,
354 |         1024,
355 |         4096,
356 |         8192,
357 |         1_000_000,
358 |         1_048_576
359 |     ];
360 |     generate_benches![
361 |         compare_typed,
362 |         c,
363 |         ConstGenericRingBuffer,
364 |         i32,
365 |         new,
366 |         benchmark_copy_from_slice_vs_extend,
367 |         16,
368 |         1024,
369 |         4096,
370 |         8192,
371 |         1_000_000,
372 |         1_048_576
373 |     ];
374 | }
375 | 
376 | criterion_group!(benches, criterion_benchmark);
377 | criterion_main!(benches);

--------------------------------------------------------------------------------
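The benchmarks distinguish power-of-two from non-power-of-two capacities because of how the buffers mask their read/write pointers. A minimal illustration of the underlying identity (`mask_and` and `mask_modulo` are internal helpers defined in lib.rs, which is not shown in this excerpt; this standalone version is only a sketch):

```rust
// For a power-of-two size, a bitwise AND with (size - 1) is equivalent to
// the (usually slower) modulo operation. AllocRingBuffer exploits this by
// rounding its allocation up to the next power of two.
fn mask_and(size: usize, index: usize) -> usize {
    debug_assert!(size.is_power_of_two());
    index & (size - 1)
}

fn main() {
    let size = 16;
    for index in [0, 5, 16, 21, 37] {
        assert_eq!(mask_and(size, index), index % size);
    }
    println!("masking matches modulo for power-of-two sizes");
}
```
--------------------------------------------------------------------------------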
/src/with_alloc/alloc_ringbuffer.rs:
--------------------------------------------------------------------------------
1 | use core::ops::{Index, IndexMut};
2 | 
3 | use crate::ringbuffer_trait::{
4 |     RingBuffer, RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator,
5 | };
6 | 
7 | extern crate alloc;
8 | 
9 | // We need a heap allocation, so depend on alloc
10 | use crate::{impl_ring_buffer_set_len, mask_and, GrowableAllocRingBuffer, SetLen};
11 | use core::ptr;
12 | 
13 | /// The `AllocRingBuffer` is a `RingBuffer` backed by a raw heap allocation. This means it allocates at runtime
14 | /// on the heap, and therefore needs the [`alloc`] crate. This struct and therefore the dependency on
15 | /// alloc can be disabled by disabling the `alloc` (default) feature.
16 | ///
17 | /// # Example
18 | /// ```
19 | /// use ringbuffer::{AllocRingBuffer, RingBuffer};
20 | ///
21 | /// let mut buffer = AllocRingBuffer::new(2);
22 | ///
23 | /// // First entry of the buffer is now 5.
24 | /// buffer.enqueue(5);
25 | ///
26 | /// // The last item we enqueued is 5
27 | /// assert_eq!(buffer.back(), Some(&5));
28 | ///
29 | /// // Second entry is now 42.
30 | /// buffer.enqueue(42);
31 | ///
32 | /// assert_eq!(buffer.peek(), Some(&5));
33 | /// assert!(buffer.is_full());
34 | ///
35 | /// // Because capacity is reached, the next enqueue will overwrite the first item of the buffer.
36 | /// buffer.enqueue(1);
37 | /// assert_eq!(buffer.to_vec(), vec![42, 1]);
38 | /// ```
39 | #[derive(Debug)]
40 | pub struct AllocRingBuffer<T> {
41 |     pub(crate) buf: *mut T,
42 | 
43 |     // the size of the allocation. Next power of 2 up from the capacity
44 |     size: usize,
45 |     // maximum number of elements actually allowed in the ringbuffer.
46 |     // Always less than or equal to the size
47 |     capacity: usize,
48 | 
49 |     readptr: usize,
50 |     writeptr: usize,
51 | }
52 | 
53 | // SAFETY: all methods that require mutable access take &mut,
54 | // being send and sync was the old behavior but broke when we switched to *mut T.
55 | unsafe impl<T: Sync> Sync for AllocRingBuffer<T> {}
56 | unsafe impl<T: Send> Send for AllocRingBuffer<T> {}
57 | 
58 | impl<T, const N: usize> From<[T; N]> for AllocRingBuffer<T> {
59 |     fn from(value: [T; N]) -> Self {
60 |         let mut rb = Self::new(value.len());
61 |         rb.extend(value);
62 |         rb
63 |     }
64 | }
65 | 
66 | impl<T: Clone, const N: usize> From<&[T; N]> for AllocRingBuffer<T> {
67 |     // the cast here is actually not trivial
68 |     #[allow(trivial_casts)]
69 |     fn from(value: &[T; N]) -> Self {
70 |         Self::from(value as &[T])
71 |     }
72 | }
73 | 
74 | impl<T: Clone> From<&[T]> for AllocRingBuffer<T> {
75 |     fn from(value: &[T]) -> Self {
76 |         let mut rb = Self::new(value.len());
77 |         rb.extend(value.iter().cloned());
78 |         rb
79 |     }
80 | }
81 | 
82 | impl<T> From<GrowableAllocRingBuffer<T>> for AllocRingBuffer<T> {
83 |     fn from(mut v: GrowableAllocRingBuffer<T>) -> AllocRingBuffer<T> {
84 |         let mut rb = AllocRingBuffer::new(v.len());
85 |         rb.extend(v.drain());
86 |         rb
87 |     }
88 | }
89 | 
90 | impl<T: Clone> From<&mut [T]> for AllocRingBuffer<T> {
91 |     fn from(value: &mut [T]) -> Self {
92 |         Self::from(&*value)
93 |     }
94 | }
95 | 
96 | impl<T: Clone, const CAP: usize> From<&mut [T; CAP]> for AllocRingBuffer<T> {
97 |     fn from(value: &mut [T; CAP]) -> Self {
98 |         Self::from(value.clone())
99 |     }
100 | }
101 | 
102 | impl<T> From<alloc::vec::Vec<T>> for AllocRingBuffer<T> {
103 |     fn from(value: alloc::vec::Vec<T>) -> Self {
104 |         let mut res = AllocRingBuffer::new(value.len());
105 |         res.extend(value);
106 |         res
107 |     }
108 | }
109 | 
110 | impl<T> From<alloc::collections::VecDeque<T>> for AllocRingBuffer<T> {
111 |     fn from(value: alloc::collections::VecDeque<T>) -> Self {
112 |         let mut res = AllocRingBuffer::new(value.len());
113 |         res.extend(value);
114 |         res
115 |     }
116 | }
117 | 
118 | impl<T> From<alloc::collections::LinkedList<T>> for AllocRingBuffer<T> {
119 |     fn from(value: alloc::collections::LinkedList<T>) -> Self {
120 |         let mut res = AllocRingBuffer::new(value.len());
121 |         res.extend(value);
122 |         res
123 |     }
124 | }
125 | 
126 | impl From<alloc::string::String> for AllocRingBuffer<char> {
127 |     fn from(value: alloc::string::String) -> Self {
128 |         let mut res = AllocRingBuffer::new(value.len());
129 |         res.extend(value.chars());
130 |         res
131 |     }
132 | }
133 | 
134 | impl From<&str> for AllocRingBuffer<char> {
135 |     fn from(value: &str) -> Self {
136 |         let mut res = AllocRingBuffer::new(value.len());
137 |         res.extend(value.chars());
138 |         res
139 |     }
140 | }
141 | 
142 | impl<T, const CAP: usize> From<crate::ConstGenericRingBuffer<T, CAP>> for AllocRingBuffer<T> {
143 |     fn from(mut value: crate::ConstGenericRingBuffer<T, CAP>) -> Self {
144 |         let mut res = AllocRingBuffer::new(value.len());
145 |         res.extend(value.drain());
146 |         res
147 |     }
148 | }
149 | 
150 | impl<T> Drop for AllocRingBuffer<T> {
151 |     fn drop(&mut self) {
152 |         self.drain().for_each(drop);
153 | 
154 |         let layout = alloc::alloc::Layout::array::<T>(self.size).unwrap();
155 |         unsafe {
156 |             alloc::alloc::dealloc(self.buf.cast(), layout);
157 |         }
158 |     }
159 | }
160 | 
161 | impl<T: Clone> Clone for AllocRingBuffer<T> {
162 |     fn clone(&self) -> Self {
163 |         debug_assert_ne!(self.capacity, 0);
164 | 
165 |         let mut new = Self::new(self.capacity);
166 |         new.extend(self.iter().cloned());
167 |         new
168 |     }
169 | }
170 | 
171 | impl<T: PartialEq> PartialEq for AllocRingBuffer<T> {
172 |     fn eq(&self, other: &Self) -> bool {
173 |         self.capacity == other.capacity
174 |             && self.len() == other.len()
175 |             && self.iter().zip(other.iter()).all(|(a, b)| a == b)
176 |     }
177 | }
178 | 
179 | impl<T: Eq> Eq for AllocRingBuffer<T> {}
180 | 
181 | impl<T> IntoIterator for AllocRingBuffer<T> {
182 |     type Item = T;
183 |     type IntoIter = RingBufferIntoIterator<T, AllocRingBuffer<T>>;
184 | 
185 |     fn into_iter(self) -> Self::IntoIter {
186 |         RingBufferIntoIterator::new(self)
187 |     }
188 | }
189 | 
190 | #[allow(clippy::into_iter_without_iter)]
191 | // iter() is implemented on the trait
192 | impl<'a, T> IntoIterator for &'a AllocRingBuffer<T> {
193 |     type Item = &'a T;
194 |     type IntoIter = RingBufferIterator<'a, T, AllocRingBuffer<T>>;
195 | 
196 |     fn into_iter(self) -> Self::IntoIter {
197 |         self.iter()
198 |     }
199 | }
200 | 
201 | #[allow(clippy::into_iter_without_iter)]
202 | // iter_mut() is implemented on the trait
203 | impl<'a, T> IntoIterator for &'a mut AllocRingBuffer<T> {
204 |     type Item = &'a mut T;
205 |     type IntoIter = RingBufferMutIterator<'a, T, AllocRingBuffer<T>>;
206 | 
207 |     fn into_iter(self) -> Self::IntoIter {
208 |         self.iter_mut()
209 |     }
210 | }
211 | 
212 | impl<T> Extend<T> for AllocRingBuffer<T> {
213 |     fn extend<A: IntoIterator<Item = T>>(&mut self, iter: A) {
214 |         let iter = iter.into_iter();
215 | 
216 |         for i in iter {
217 |             let _ = self.enqueue(i);
218 |         }
219 |     }
220 | }
221 | 
222 | unsafe impl<T> RingBuffer<T> for AllocRingBuffer<T> {
223 |     #[inline]
224 |     unsafe fn ptr_capacity(rb: *const Self) -> usize {
225 |         (*rb).capacity
226 |     }
227 | 
228 |     #[inline]
229 |     unsafe fn ptr_buffer_size(rb: *const Self) -> usize {
230 |         (*rb).size
231 |     }
232 | 
233 |     impl_ringbuffer!(readptr, writeptr);
234 | 
235 |     #[inline]
236 |     fn enqueue(&mut self, value: T) -> Option<T> {
237 |         let mut ret = None;
238 | 
239 |         if self.is_full() {
240 |             // mask with and is allowed here because size is always a power of two
241 |             let previous_value =
242 |                 unsafe { ptr::read(get_unchecked_mut(self, mask_and(self.size, self.readptr))) };
243 | 
244 |             ret = Some(previous_value);
245 |             self.readptr += 1;
246 |         }
247 | 
248 |         // mask with and is allowed here because size is always a power of two
249 |         let index = mask_and(self.size, self.writeptr);
250 | 
251 |         unsafe {
252 |             ptr::write(get_unchecked_mut(self, index), value);
253 |         }
254 | 
255 |         self.writeptr += 1;
256 | 
257 |         ret
258 |     }
259 | 
260 |     fn dequeue(&mut self) -> Option<T> {
261 |         if self.is_empty() {
262 |             None
263 |         } else {
264 |             // mask with and is allowed here because size is always a power of two
265 |             let index = mask_and(self.size, self.readptr);
266 |             let res = unsafe { get_unchecked_mut(self, index) };
267 |             self.readptr += 1;
268 | 
269 |             // Safety: the fact that we got this maybeuninit from the buffer (with mask) means that
270 |             // it's initialized. If it wasn't the is_empty call would have caught it. Values
271 |             // are always initialized when inserted so this is safe.
272 |             unsafe { Some(ptr::read(res)) }
273 |         }
274 |     }
275 | 
276 |     impl_ringbuffer_ext!(
277 |         get_base_ptr,
278 |         get_base_mut_ptr,
279 |         get_unchecked,
280 |         get_unchecked_mut,
281 |         readptr,
282 |         writeptr,
283 |         mask_and
284 |     );
285 | 
286 |     #[inline]
287 |     fn fill_with<F: FnMut() -> T>(&mut self, mut f: F) {
288 |         self.clear();
289 | 
290 |         self.readptr = 0;
291 |         self.writeptr = self.capacity;
292 | 
293 |         for i in 0..self.capacity {
294 |             unsafe { ptr::write(get_unchecked_mut(self, i), f()) };
295 |         }
296 |     }
297 | }
298 | 
299 | impl<T> AllocRingBuffer<T> {
300 |     /// Creates an `AllocRingBuffer` with a certain capacity. The actual capacity is two raised to
301 |     /// the power of the input (effectively, the input is the log2 of the actual capacity).
302 |     #[inline]
303 |     #[must_use]
304 |     pub fn with_capacity_power_of_2(cap_power_of_two: usize) -> Self {
305 |         Self::new(1 << cap_power_of_two)
306 |     }
307 | 
308 |     #[inline]
309 |     /// Alias of [`new`](AllocRingBuffer::new).
310 |     #[must_use]
311 |     #[deprecated = "alias of new"]
312 |     pub fn with_capacity(cap: usize) -> Self {
313 |         Self::new(cap)
314 |     }
315 | 
316 |     /// Creates an `AllocRingBuffer` with a certain capacity. The capacity must not be zero.
317 |     ///
318 |     /// # Panics
319 |     /// Panics when capacity is zero
320 |     #[inline]
321 |     #[must_use]
322 |     pub fn new(capacity: usize) -> Self {
323 |         assert_ne!(capacity, 0, "Capacity must be greater than 0");
324 |         let size = capacity.next_power_of_two();
325 |         let layout = alloc::alloc::Layout::array::<T>(size).unwrap();
326 |         let buf = unsafe { alloc::alloc::alloc(layout).cast() };
327 |         Self {
328 |             buf,
329 |             size,
330 |             capacity,
331 |             readptr: 0,
332 |             writeptr: 0,
333 |         }
334 |     }
335 | }
336 | 
337 | /// Get a const pointer to the buffer
338 | unsafe fn get_base_ptr<T>(rb: *const AllocRingBuffer<T>) -> *const T {
339 |     (*rb).buf.cast()
340 | }
341 | 
342 | /// Get a mut pointer to the buffer
343 | unsafe fn get_base_mut_ptr<T>(rb: *mut AllocRingBuffer<T>) -> *mut T {
344 |     (*rb).buf
345 | }
346 | 
347 | /// Get a reference from the buffer without checking it is initialized.
348 | ///
349 | /// Caller must be sure the index is in bounds, or behavior is undefined.
350 | #[inline]
351 | unsafe fn get_unchecked<'a, T>(rb: *const AllocRingBuffer<T>, index: usize) -> &'a T {
352 |     let p = (*rb).buf.add(index);
353 |     // Safety: caller makes sure the index is in bounds for the ringbuffer.
354 |     // All in bounds values in the ringbuffer are initialized
355 |     &*p
356 | }
357 | 
358 | /// Get a mut reference from the buffer without checking it is initialized.
359 | ///
360 | /// Caller must be sure the index is in bounds, or behavior is undefined.
361 | #[inline]
362 | unsafe fn get_unchecked_mut<T>(rb: *mut AllocRingBuffer<T>, index: usize) -> *mut T {
363 |     let p = (*rb).buf.add(index);
364 | 
365 |     // Safety: caller makes sure the index is in bounds for the ringbuffer.
366 |     // All in bounds values in the ringbuffer are initialized
367 |     p.cast()
368 | }
369 | 
370 | impl<T> Index<usize> for AllocRingBuffer<T> {
371 |     type Output = T;
372 | 
373 |     fn index(&self, index: usize) -> &Self::Output {
374 |         self.get(index).expect("index out of bounds")
375 |     }
376 | }
377 | 
378 | impl<T> IndexMut<usize> for AllocRingBuffer<T> {
379 |     fn index_mut(&mut self, index: usize) -> &mut Self::Output {
380 |         self.get_mut(index).expect("index out of bounds")
381 |     }
382 | }
383 | 
384 | impl<T> SetLen for AllocRingBuffer<T> {
385 |     impl_ring_buffer_set_len!(readptr, writeptr);
386 | }
387 | 
388 | #[cfg(test)]
389 | mod tests {
390 |     use crate::{AllocRingBuffer, RingBuffer};
391 | 
392 |     // just test that this compiles
393 |     #[test]
394 |     fn test_generic_clone() {
395 |         fn helper(a: &AllocRingBuffer<i32>) -> AllocRingBuffer<i32> {
396 |             a.clone()
397 |         }
398 | 
399 |         _ = helper(&AllocRingBuffer::new(2));
400 |         _ = helper(&AllocRingBuffer::new(5));
401 |     }
402 | 
403 |     #[test]
404 |     fn test_not_power_of_two() {
405 |         let mut rb = AllocRingBuffer::new(10);
406 |         const NUM_VALS: usize = 1000;
407 | 
408 |         // recycle the ringbuffer a bunch of times to see if none of the logic
409 |         // messes up
410 |         for _ in 0..100 {
411 |             for i in 0..NUM_VALS {
412 |                 let _ = rb.enqueue(i);
413 |             }
414 |             assert!(rb.is_full());
415 | 
416 |             for i in 0..10 {
417 |                 assert_eq!(Some(i + NUM_VALS - rb.capacity()), rb.dequeue());
418 |             }
419 | 
420 |             assert!(rb.is_empty());
421 |         }
422 |     }
423 | 
424 |     #[test]
425 |     fn test_with_capacity_power_of_two() {
426 |         let b = AllocRingBuffer::<i32>::with_capacity_power_of_2(2);
427 |         assert_eq!(b.capacity, 4);
428 |     }
429 | 
430 |     #[test]
431 |     #[should_panic]
432 |     fn test_index_zero_length() {
433 |         let b = AllocRingBuffer::<i32>::new(2);
434 |         let _ = b[2];
435 |     }
436 | 
437 |     #[test]
438 |     fn test_extend() {
439 |         let mut buf = AllocRingBuffer::<i32>::new(4);
440 |         (0..4).for_each(|_| {
441 |             let _ = buf.enqueue(0);
442 |         });
443 | 
444 |         let new_data = [0, 1, 2];
445 |         buf.extend(new_data);
446 | 
447 |         let expected = [0, 0, 1, 2];
448 | 
449 |         for i in 0..4 {
450 |             let actual = buf[i];
451 |             let expected = expected[i];
452 |             assert_eq!(actual, expected);
453 |         }
454 |     }
455 | 
456 |     #[test]
457 |     fn test_extend_with_overflow() {
458 |         let mut buf = AllocRingBuffer::<i32>::new(8);
459 |         (0..8).for_each(|_| {
460 |             let _ = buf.enqueue(0);
461 |         });
462 | 
463 |         let new_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
464 |         buf.extend(new_data);
465 | 
466 |         let expected = [2, 3, 4, 5, 6, 7, 8, 9];
467 | 
468 |         for i in 0..8 {
469 |             let actual = buf[i];
470 |             let expected = expected[i];
471 |             assert_eq!(actual, expected);
472 |         }
473 |     }
474 | 
475 |     #[test]
476 |     fn test_conversions() {
477 |         // from &[T]
478 |         let data: &[i32] = &[1, 2, 3, 4];
479 |         let buf = AllocRingBuffer::from(data);
480 |         assert_eq!(buf.capacity, 4);
481 |         assert_eq!(buf.to_vec(), alloc::vec![1, 2, 3, 4]);
482 | 
483 |         // from &[T; N]
484 |         let buf = AllocRingBuffer::from(&[1, 2, 3, 4]);
485 |         assert_eq!(buf.capacity, 4);
486 |         assert_eq!(buf.to_vec(), alloc::vec![1, 2, 3, 4]);
487 | 
488 |         // from [T; N]
489 |         let buf = AllocRingBuffer::from([1, 2, 3, 4]);
490 |         assert_eq!(buf.capacity, 4);
491 |         assert_eq!(buf.to_vec(), alloc::vec![1, 2, 3, 4]);
492 |     }
493 | }

--------------------------------------------------------------------------------
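A small usage sketch (illustrative, not from the test suite above) of the two behaviors this file implements: the logical capacity is kept as requested even though the allocation is rounded up to a power of two, and `enqueue` on a full buffer returns the overwritten element:

```rust
use ringbuffer::{AllocRingBuffer, RingBuffer};

// Capacity 10 is not a power of two; internally the allocation is rounded
// up to 16, while the logical capacity stays 10.
let mut rb = AllocRingBuffer::new(10);
assert_eq!(rb.capacity(), 10);

for i in 0..10 {
    // returns None while there is free space
    assert_eq!(rb.enqueue(i), None);
}

// Once full, enqueue displaces the oldest element and returns it.
assert_eq!(rb.enqueue(10), Some(0));
assert_eq!(rb.dequeue(), Some(1));
```
--------------------------------------------------------------------------------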
/src/with_const_generics.rs:
--------------------------------------------------------------------------------
1 | use crate::ringbuffer_trait::{RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator};
2 | use crate::{impl_ring_buffer_set_len, RingBuffer, SetLen};
3 | use core::iter::FromIterator;
4 | use core::mem::MaybeUninit;
5 | use core::mem::{self, ManuallyDrop};
6 | use core::ops::{Index, IndexMut};
7 | 
8 | /// The `ConstGenericRingBuffer` struct is a `RingBuffer` implementation which does not require `alloc` but
9 | /// uses const generics instead.
10 | ///
11 | /// [`ConstGenericRingBuffer`] allocates the ringbuffer on the stack, and the size must be known at
12 | /// compile time through const-generics.
13 | ///
14 | /// # Example
15 | /// ```
16 | /// use ringbuffer::{ConstGenericRingBuffer, RingBuffer};
17 | ///
18 | /// let mut buffer = ConstGenericRingBuffer::<_, 2>::new();
19 | ///
20 | /// // First entry of the buffer is now 5.
21 | /// buffer.enqueue(5);
22 | ///
23 | /// // The last item we enqueued is 5
24 | /// assert_eq!(buffer.back(), Some(&5));
25 | ///
26 | /// // Second entry is now 42.
27 | /// buffer.enqueue(42);
28 | ///
29 | /// assert_eq!(buffer.peek(), Some(&5));
30 | /// assert!(buffer.is_full());
31 | ///
32 | /// // Because capacity is reached, the next enqueue will overwrite the first item of the buffer.
33 | /// buffer.enqueue(1);
34 | /// assert_eq!(buffer.to_vec(), vec![42, 1]);
35 | /// ```
36 | #[derive(Debug)]
37 | pub struct ConstGenericRingBuffer<T, const CAP: usize> {
38 |     pub(crate) buf: [MaybeUninit<T>; CAP],
39 |     readptr: usize,
40 |     writeptr: usize,
41 | }
42 | 
43 | impl<T, const CAP: usize> From<[T; CAP]> for ConstGenericRingBuffer<T, CAP> {
44 |     fn from(value: [T; CAP]) -> Self {
45 |         let v = ManuallyDrop::new(value);
46 |         Self {
47 |             // Safety:
48 |             // T has the same layout as MaybeUninit<T>
49 |             // [T; N] has the same layout as [MaybeUninit<T>; N]
50 |             // Without ManuallyDrop this would be unsound as
51 |             // transmute_copy doesn't take ownership
52 |             buf: unsafe { mem::transmute_copy(&v) },
53 |             readptr: 0,
54 |             writeptr: CAP,
55 |         }
56 |     }
57 | }
58 | 
59 | impl<T: Clone, const CAP: usize> From<&[T; CAP]> for ConstGenericRingBuffer<T, CAP> {
60 |     fn from(value: &[T; CAP]) -> Self {
61 |         Self::from(value.clone())
62 |     }
63 | }
64 | 
65 | impl<T: Clone, const CAP: usize> From<&[T]> for ConstGenericRingBuffer<T, CAP> {
66 |     fn from(value: &[T]) -> Self {
67 |         value.iter().cloned().collect()
68 |     }
69 | }
70 | 
71 | impl<T: Clone, const CAP: usize> From<&mut [T; CAP]> for ConstGenericRingBuffer<T, CAP> {
72 |     fn from(value: &mut [T; CAP]) -> Self {
73 |         Self::from(value.clone())
74 |     }
75 | }
76 | 
77 | impl<T: Clone, const CAP: usize> From<&mut [T]> for ConstGenericRingBuffer<T, CAP> {
78 |     fn from(value: &mut [T]) -> Self {
79 |         value.iter().cloned().collect()
80 |     }
81 | }
82 | 
83 | #[cfg(feature = "alloc")]
84 | impl<T, const CAP: usize> From<alloc::vec::Vec<T>> for ConstGenericRingBuffer<T, CAP> {
85 |     fn from(value: alloc::vec::Vec<T>) -> Self {
86 |         value.into_iter().collect()
87 |     }
88 | }
89 | 
90 | #[cfg(feature = "alloc")]
91 | impl<T, const CAP: usize> From<alloc::collections::VecDeque<T>> for ConstGenericRingBuffer<T, CAP> {
92 |     fn from(value: alloc::collections::VecDeque<T>) -> Self {
93 |         value.into_iter().collect()
94 |     }
95 | }
96 | 
97 | #[cfg(feature = "alloc")]
98 | impl<T, const CAP: usize> From<alloc::collections::LinkedList<T>>
99 |     for ConstGenericRingBuffer<T, CAP>
100 | {
101 |     fn from(value: alloc::collections::LinkedList<T>) -> Self {
102 |         value.into_iter().collect()
103 |     }
104 | }
105 | 
106 | #[cfg(feature = "alloc")]
107 | impl<const CAP: usize> From<alloc::string::String> for ConstGenericRingBuffer<char, CAP> {
108 |     fn from(value: alloc::string::String) -> Self {
109 |         value.chars().collect()
110 |     }
111 | }
112 | 
113 | impl<const CAP: usize> From<&str> for ConstGenericRingBuffer<char, CAP> {
114 |     fn from(value: &str) -> Self {
115 |         value.chars().collect()
116 |     }
117 | }
118 | 
119 | #[cfg(feature = "alloc")]
120 | impl<T, const CAP: usize> From<crate::GrowableAllocRingBuffer<T>>
121 |     for ConstGenericRingBuffer<T, CAP>
122 | {
123 |     fn from(mut value: crate::GrowableAllocRingBuffer<T>) -> Self {
124 |         value.drain().collect()
125 |     }
126 | }
127 | 
| 128 | #[cfg(feature = "alloc")] 129 | impl<T, const CAP: usize> From<crate::AllocRingBuffer<T>> for ConstGenericRingBuffer<T, CAP> { 130 | fn from(mut value: crate::AllocRingBuffer<T>) -> Self { 131 | value.drain().collect() 132 | } 133 | } 134 | 135 | impl<T, const CAP: usize> Drop for ConstGenericRingBuffer<T, CAP> { 136 | fn drop(&mut self) { 137 | self.drain().for_each(drop); 138 | } 139 | } 140 | 141 | impl<T: Clone, const CAP: usize> Clone for ConstGenericRingBuffer<T, CAP> { 142 | fn clone(&self) -> Self { 143 | let mut new = ConstGenericRingBuffer::<T, CAP>::new(); 144 | new.extend(self.iter().cloned()); 145 | new 146 | } 147 | } 148 | 149 | // We need to manually implement PartialEq because MaybeUninit<T> isn't PartialEq 150 | impl<T: PartialEq, const CAP: usize> PartialEq for ConstGenericRingBuffer<T, CAP> { 151 | fn eq(&self, other: &Self) -> bool { 152 | if self.len() == other.len() { 153 | for (a, b) in self.iter().zip(other.iter()) { 154 | if a != b { 155 | return false; 156 | } 157 | } 158 | true 159 | } else { 160 | false 161 | } 162 | } 163 | } 164 | 165 | impl<T: Eq, const CAP: usize> Eq for ConstGenericRingBuffer<T, CAP> {} 166 | 167 | impl<T, const CAP: usize> ConstGenericRingBuffer<T, CAP> { 168 | const ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO: () = 169 | assert!(CAP != 0, "Capacity is not allowed to be zero"); 170 | 171 | /// Creates a const generic ringbuffer; the size is passed as a const generic parameter. 172 | /// 173 | /// Note that the size does not have to be a power of two, but that not using a power 174 | /// of two might be significantly (up to 3 times) slower. 175 | #[inline] 176 | #[must_use] 177 | pub const fn new<const N: usize>() -> Self 178 | where 179 | ConstGenericRingBuffer<T, CAP>: From<ConstGenericRingBuffer<T, N>>, 180 | { 181 | #[allow(clippy::let_unit_value)] 182 | let () = Self::ERROR_CAPACITY_IS_NOT_ALLOWED_TO_BE_ZERO; 183 | 184 | Self { 185 | buf: [const { MaybeUninit::<T>::uninit() }; CAP], 186 | writeptr: 0, 187 | readptr: 0, 188 | } 189 | } 190 | } 191 | 192 | /// Get a const pointer to the buffer 193 | unsafe fn get_base_ptr<T, const N: usize>(rb: *const ConstGenericRingBuffer<T, N>) -> *const T { 194 | (*rb).buf.as_ptr().cast() 195 | } 196 | 197 | /// Get a mut pointer to the buffer 198 | unsafe fn get_base_mut_ptr<T, const N: usize>(rb: *mut ConstGenericRingBuffer<T, N>) -> *mut T { 199 | (*rb).buf.as_mut_ptr().cast() 200 | } 201 | 202 | /// Get a reference from the buffer without checking that it is initialized. 203 | /// The caller MUST be sure this index is initialized, or undefined behavior will happen. 204 | unsafe fn get_unchecked<'a, T, const N: usize>( 205 | rb: *const ConstGenericRingBuffer<T, N>, 206 | index: usize, 207 | ) -> &'a T { 208 | (*rb).buf[index] 209 | .as_ptr() 210 | .as_ref() 211 | .expect("const array ptr shouldn't be null!") 212 | } 213 | 214 | /// Get a mutable reference from the buffer without checking that it is initialized. 215 | /// The caller MUST be sure this index is initialized, or undefined behavior will happen. 216 | unsafe fn get_unchecked_mut<T, const N: usize>( 217 | rb: *mut ConstGenericRingBuffer<T, N>, 218 | index: usize, 219 | ) -> *mut T { 220 | (*rb).buf[index] 221 | .as_mut_ptr() 222 | .as_mut() 223 | .expect("const array ptr shouldn't be null!") 224 | } 225 | 226 | impl<T, const CAP: usize> IntoIterator for ConstGenericRingBuffer<T, CAP> { 227 | type Item = T; 228 | type IntoIter = RingBufferIntoIterator<T, Self>; 229 | 230 | fn into_iter(self) -> Self::IntoIter { 231 | RingBufferIntoIterator::new(self) 232 | } 233 | } 234 | 235 | #[allow(clippy::into_iter_without_iter)] 236 | // iter() is implemented on the trait 237 | impl<'a, T, const CAP: usize> IntoIterator for &'a ConstGenericRingBuffer<T, CAP> { 238 | type Item = &'a T; 239 | type IntoIter = RingBufferIterator<'a, T, ConstGenericRingBuffer<T, CAP>>; 240 | 241 | fn into_iter(self) -> Self::IntoIter { 242 | self.iter() 243 | } 244 | } 245 | 246 | 
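// NOTE (added sketch, not part of the original source): together with the
// `IntoIterator` impls directly above and below, a `ConstGenericRingBuffer`
// can be iterated by value, by reference, and by mutable reference. The
// function name is hypothetical and underscore-prefixed so that it stays
// dead-code clean under the crate's `#![deny(warnings)]`.
#[cfg(test)]
fn _iteration_modes_sketch() {
    let mut rb = ConstGenericRingBuffer::<i32, 4>::new();
    rb.extend([1, 2, 3]);

    // `&rb` yields `&i32` without consuming the buffer.
    let sum: i32 = (&rb).into_iter().sum();
    assert_eq!(sum, 6);

    // `&mut rb` yields `&mut i32`, allowing in-place updates.
    for x in &mut rb {
        *x *= 2;
    }

    // By-value iteration dequeues every element in insertion order.
    assert!(rb.into_iter().eq([2, 4, 6]));
}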
#[allow(clippy::into_iter_without_iter)] 247 | // iter_mut() is implemented on the trait 248 | impl<'a, T, const CAP: usize> IntoIterator for &'a mut ConstGenericRingBuffer { 249 | type Item = &'a mut T; 250 | type IntoIter = RingBufferMutIterator<'a, T, ConstGenericRingBuffer>; 251 | 252 | fn into_iter(self) -> Self::IntoIter { 253 | self.iter_mut() 254 | } 255 | } 256 | 257 | impl Extend for ConstGenericRingBuffer { 258 | fn extend>(&mut self, iter: A) { 259 | let iter = iter.into_iter(); 260 | 261 | for i in iter { 262 | let _ = self.enqueue(i); 263 | } 264 | } 265 | } 266 | 267 | unsafe impl RingBuffer for ConstGenericRingBuffer { 268 | #[inline] 269 | unsafe fn ptr_capacity(_: *const Self) -> usize { 270 | CAP 271 | } 272 | 273 | #[inline] 274 | unsafe fn ptr_buffer_size(_: *const Self) -> usize { 275 | CAP 276 | } 277 | 278 | impl_ringbuffer!(readptr, writeptr); 279 | 280 | #[inline] 281 | fn enqueue(&mut self, value: T) -> Option { 282 | let mut ret = None; 283 | 284 | if self.is_full() { 285 | let previous_value = mem::replace( 286 | &mut self.buf[crate::mask_modulo(CAP, self.readptr)], 287 | MaybeUninit::uninit(), 288 | ); 289 | // make sure we drop whatever is being overwritten 290 | // SAFETY: the buffer is full, so this must be initialized 291 | // : also, index has been masked 292 | ret = Some(unsafe { previous_value.assume_init() }); 293 | self.readptr += 1; 294 | } 295 | let index = crate::mask_modulo(CAP, self.writeptr); 296 | self.buf[index] = MaybeUninit::new(value); 297 | self.writeptr += 1; 298 | 299 | ret 300 | } 301 | 302 | fn dequeue(&mut self) -> Option { 303 | if self.is_empty() { 304 | None 305 | } else { 306 | let index = crate::mask_modulo(CAP, self.readptr); 307 | let res = mem::replace(&mut self.buf[index], MaybeUninit::uninit()); 308 | self.readptr += 1; 309 | 310 | // Safety: the fact that we got this maybeuninit from the buffer (with mask) means that 311 | // it's initialized. If it wasn't the is_empty call would have caught it. Values 312 | // are always initialized when inserted so this is safe. 313 | unsafe { Some(res.assume_init()) } 314 | } 315 | } 316 | 317 | impl_ringbuffer_ext!( 318 | get_base_ptr, 319 | get_base_mut_ptr, 320 | get_unchecked, 321 | get_unchecked_mut, 322 | readptr, 323 | writeptr, 324 | crate::mask_modulo 325 | ); 326 | 327 | #[inline] 328 | fn fill_with T>(&mut self, mut f: F) { 329 | self.clear(); 330 | self.readptr = 0; 331 | self.writeptr = CAP; 332 | self.buf.fill_with(|| MaybeUninit::new(f())); 333 | } 334 | } 335 | 336 | impl Default for ConstGenericRingBuffer { 337 | /// Creates a buffer with a capacity specified through the Cap type parameter. 
338 | /// # Panics 339 | /// Panics if `CAP` is 0 340 | #[inline] 341 | fn default() -> Self { 342 | Self::new() 343 | } 344 | } 345 | 346 | impl FromIterator for ConstGenericRingBuffer { 347 | fn from_iter>(iter: T) -> Self { 348 | let mut res = Self::default(); 349 | for i in iter { 350 | let _ = res.enqueue(i); 351 | } 352 | 353 | res 354 | } 355 | } 356 | 357 | impl Index for ConstGenericRingBuffer { 358 | type Output = T; 359 | 360 | fn index(&self, index: usize) -> &Self::Output { 361 | self.get(index).expect("index out of bounds") 362 | } 363 | } 364 | 365 | impl IndexMut for ConstGenericRingBuffer { 366 | fn index_mut(&mut self, index: usize) -> &mut Self::Output { 367 | self.get_mut(index).expect("index out of bounds") 368 | } 369 | } 370 | 371 | impl SetLen for ConstGenericRingBuffer { 372 | impl_ring_buffer_set_len!(readptr, writeptr); 373 | } 374 | 375 | #[cfg(test)] 376 | mod tests { 377 | use crate::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer, RingBuffer}; 378 | use alloc::collections::{LinkedList, VecDeque}; 379 | use alloc::string::ToString; 380 | use alloc::vec; 381 | 382 | #[test] 383 | fn test_not_power_of_two() { 384 | let mut rb = ConstGenericRingBuffer::::new(); 385 | const NUM_VALS: usize = 1000; 386 | 387 | // recycle the ringbuffer a bunch of time to see if noneof the logic 388 | // messes up 389 | for _ in 0..100 { 390 | for i in 0..NUM_VALS { 391 | let _ = rb.enqueue(i); 392 | } 393 | assert!(rb.is_full()); 394 | 395 | for i in 0..10 { 396 | assert_eq!(Some(i + NUM_VALS - rb.capacity()), rb.dequeue()); 397 | } 398 | 399 | assert!(rb.is_empty()); 400 | } 401 | } 402 | 403 | #[test] 404 | #[should_panic] 405 | fn test_index_zero_length() { 406 | let b = ConstGenericRingBuffer::::new(); 407 | let _ = b[2]; 408 | } 409 | 410 | #[test] 411 | fn test_extend() { 412 | let mut buf = ConstGenericRingBuffer::::new(); 413 | (0..4).for_each(|_| { 414 | let _ = buf.enqueue(0); 415 | }); 416 | 417 | let new_data = [0, 1, 2]; 418 | buf.extend(new_data); 419 | 420 | let expected = [0, 0, 1, 2]; 421 | 422 | for i in 0..4 { 423 | let actual = buf[i]; 424 | let expected = expected[i]; 425 | assert_eq!(actual, expected); 426 | } 427 | } 428 | 429 | #[test] 430 | fn test_extend_with_overflow() { 431 | let mut buf = ConstGenericRingBuffer::::new(); 432 | (0..8).for_each(|_| { 433 | let _ = buf.enqueue(0); 434 | }); 435 | 436 | let new_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; 437 | buf.extend(new_data); 438 | 439 | let expected = [2, 3, 4, 5, 6, 7, 8, 9]; 440 | 441 | for i in 0..8 { 442 | let actual = buf[i]; 443 | let expected = expected[i]; 444 | assert_eq!(actual, expected); 445 | } 446 | } 447 | 448 | #[test] 449 | fn from() { 450 | assert_eq!( 451 | ConstGenericRingBuffer::::from([1, 2, 3]).to_vec(), 452 | vec![1, 2, 3] 453 | ); 454 | 455 | let v: &[i32; 3] = &[1, 2, 3]; 456 | assert_eq!( 457 | ConstGenericRingBuffer::::from(v).to_vec(), 458 | vec![1, 2, 3] 459 | ); 460 | 461 | let v: &[i32] = &[1, 2, 3]; 462 | assert_eq!( 463 | ConstGenericRingBuffer::::from(v).to_vec(), 464 | vec![1, 2, 3] 465 | ); 466 | 467 | let v: &mut [i32; 3] = &mut [1, 2, 3]; 468 | assert_eq!( 469 | ConstGenericRingBuffer::::from(v).to_vec(), 470 | vec![1, 2, 3] 471 | ); 472 | 473 | let v: &mut [i32] = &mut [1, 2, 3]; 474 | assert_eq!( 475 | ConstGenericRingBuffer::::from(v).to_vec(), 476 | vec![1, 2, 3] 477 | ); 478 | 479 | assert_eq!( 480 | ConstGenericRingBuffer::::from(vec![1, 2, 3]).to_vec(), 481 | vec![1, 2, 3] 482 | ); 483 | assert_eq!( 484 | ConstGenericRingBuffer::::from( 485 | 
vec![1, 2, 3].into_iter().collect::<VecDeque<i32>>() 486 | ) 487 | .to_vec(), 488 | vec![1, 2, 3] 489 | ); 490 | assert_eq!( 491 | ConstGenericRingBuffer::<i32, 3>::from( 492 | vec![1, 2, 3].into_iter().collect::<LinkedList<i32>>() 493 | ) 494 | .to_vec(), 495 | vec![1, 2, 3] 496 | ); 497 | assert_eq!( 498 | ConstGenericRingBuffer::<_, 3>::from("abc".to_string()).to_vec(), 499 | vec!['a', 'b', 'c'] 500 | ); 501 | assert_eq!( 502 | ConstGenericRingBuffer::<_, 3>::from("abc").to_vec(), 503 | vec!['a', 'b', 'c'] 504 | ); 505 | assert_eq!( 506 | ConstGenericRingBuffer::<_, 3>::from(GrowableAllocRingBuffer::from(vec![1, 2, 3])) 507 | .to_vec(), 508 | vec![1, 2, 3] 509 | ); 510 | assert_eq!( 511 | ConstGenericRingBuffer::<_, 3>::from(AllocRingBuffer::from(vec![1, 2, 3])).to_vec(), 512 | vec![1, 2, 3] 513 | ); 514 | } 515 | } 516 | -------------------------------------------------------------------------------- /src/ringbuffer_trait.rs: -------------------------------------------------------------------------------- 1 | use core::ops::{Index, IndexMut}; 2 | 3 | #[cfg(feature = "alloc")] 4 | extern crate alloc; 5 | #[cfg(feature = "alloc")] 6 | use alloc::vec::Vec; 7 | 8 | /// `RingBuffer` is a trait defining the standard interface for all `RingBuffer` 9 | /// implementations ([`AllocRingBuffer`](crate::AllocRingBuffer), [`ConstGenericRingBuffer`](crate::ConstGenericRingBuffer)) 10 | /// 11 | /// This trait is not object safe, so it can't be used dynamically. However, it is possible to 12 | /// define a generic function over types implementing `RingBuffer`. 13 | /// 14 | /// # Safety 15 | /// Implementing this trait implies that the ringbuffer upholds some safety 16 | /// guarantees, such as returning a different value from `get_mut` 17 | /// for every different index passed in. See the exact requirements 18 | /// in the safety comment on the `next` function of the mutable Iterator 19 | /// implementation, since these safety guarantees are necessary for 20 | /// [`iter_mut`](RingBuffer::iter_mut) to work. 21 | pub unsafe trait RingBuffer<T>: 22 | Sized + IntoIterator<Item = T> + Extend<T> + Index<usize, Output = T> + IndexMut<usize> 23 | { 24 | /// Returns the length of the internal buffer. 25 | /// This length grows up to the capacity and then stops growing. 26 | /// This is because once the capacity is reached, new items overwrite the oldest items at the start. 27 | fn len(&self) -> usize { 28 | // Safety: self is a RingBuffer 29 | unsafe { Self::ptr_len(self) } 30 | } 31 | 32 | /// Raw pointer version of `len`. 33 | /// 34 | /// # Safety 35 | /// ONLY SAFE WHEN `rb` is a `*const` to an implementor of `RingBuffer` 36 | #[doc(hidden)] 37 | unsafe fn ptr_len(rb: *const Self) -> usize; 38 | 39 | /// Returns true if the buffer is entirely empty. 40 | #[inline] 41 | fn is_empty(&self) -> bool { 42 | self.len() == 0 43 | } 44 | 45 | /// Returns true when the length of the ringbuffer equals the capacity. This happens whenever 46 | /// at least `capacity` elements have been pushed to the buffer. 47 | #[inline] 48 | fn is_full(&self) -> bool { 49 | self.len() == self.capacity() 50 | } 51 | 52 | /// Returns the capacity of the buffer. 53 | fn capacity(&self) -> usize { 54 | // Safety: self is a RingBuffer 55 | unsafe { Self::ptr_capacity(self) } 56 | } 57 | 58 | /// Returns the number of elements allocated for this ringbuffer (can be larger than capacity). 59 | fn buffer_size(&self) -> usize { 60 | // Safety: self is a RingBuffer 61 | unsafe { Self::ptr_buffer_size(self) } 62 | } 63 | 64 | /// Raw pointer version of `capacity`. 
65 | /// 66 | /// # Safety 67 | /// ONLY SAFE WHEN `rb` is a `*const` to an implementor of `RingBuffer` 68 | #[doc(hidden)] 69 | unsafe fn ptr_capacity(rb: *const Self) -> usize; 70 | 71 | /// Raw pointer version of `buffer_size`. 72 | /// 73 | /// # Safety 74 | /// ONLY SAFE WHEN `rb` is a `*const` to an implementor of `RingBuffer` 75 | #[doc(hidden)] 76 | unsafe fn ptr_buffer_size(rb: *const Self) -> usize; 77 | 78 | /// Alias for [`enqueue`](RingBuffer::enqueue). 79 | #[deprecated = "use enqueue instead"] 80 | #[inline] 81 | fn push(&mut self, value: T) { 82 | let _ = self.enqueue(value); 83 | } 84 | 85 | /// Adds a value onto the buffer. 86 | /// 87 | /// Cycles around if capacity is reached, returning the overwritten element, if any. 88 | /// Forms a more natural counterpart to [`dequeue`](RingBuffer::dequeue). 89 | /// An alias is provided with [`push`](RingBuffer::push). 90 | fn enqueue(&mut self, value: T) -> Option<T>; 91 | 92 | /// Dequeues the top item off the ringbuffer and moves this item out. 93 | fn dequeue(&mut self) -> Option<T>; 94 | 95 | /// Dequeues the top item off the queue, but does not return it. Instead it is dropped. 96 | /// If the ringbuffer is empty, this function is a no-op. 97 | #[inline] 98 | #[deprecated = "use dequeue instead"] 99 | fn skip(&mut self) { 100 | let _ = self.dequeue(); 101 | } 102 | 103 | /// Returns an iterator over the elements in the ringbuffer, 104 | /// dequeueing elements as they are iterated over. 105 | /// 106 | /// ``` 107 | /// use ringbuffer::{AllocRingBuffer, RingBuffer}; 108 | /// 109 | /// let mut rb = AllocRingBuffer::new(16); 110 | /// for i in 0..8 { 111 | ///     let _ = rb.enqueue(i); 112 | /// } 113 | /// 114 | /// assert_eq!(rb.len(), 8); 115 | /// 116 | /// for i in rb.drain() { 117 | ///     // prints the numbers 0 through 7 118 | ///     println!("{}", i); 119 | /// } 120 | /// 121 | /// // No elements remain 122 | /// assert_eq!(rb.len(), 0); 123 | /// 124 | /// ``` 125 | fn drain(&mut self) -> RingBufferDrainingIterator<'_, T, Self> { 126 | RingBufferDrainingIterator::new(self) 127 | } 128 | 129 | /// Sets every element in the ringbuffer to the value returned by `f`. 130 | fn fill_with<F: FnMut() -> T>(&mut self, f: F); 131 | 132 | /// Sets every element in the ringbuffer to its default value 133 | fn fill_default(&mut self) 134 | where 135 | T: Default, 136 | { 137 | self.fill_with(Default::default); 138 | } 139 | 140 | /// Sets every element in the ringbuffer to `value` 141 | fn fill(&mut self, value: T) 142 | where 143 | T: Clone, 144 | { 145 | self.fill_with(|| value.clone()); 146 | } 147 | 148 | /// Empties the buffer entirely. Sets the length to 0 but keeps the capacity allocated. 149 | fn clear(&mut self); 150 | 151 | /// Gets a value relative to the current read index. 0 and up address the items that were 152 | /// pushed the longest ago, while -1 and down address the elements pushed most recently. 153 | fn get_signed(&self, index: isize) -> Option<&T>; 154 | 155 | /// Gets a value relative to the current read index. 0 is the item that was pushed the longest ago. 156 | fn get(&self, index: usize) -> Option<&T>; 157 | 158 | /// Gets a value relative to the current read index mutably. 0 and up address the items that were 159 | /// pushed the longest ago, while -1 and down address the elements pushed most recently. 160 | #[inline] 161 | fn get_mut_signed(&mut self, index: isize) -> Option<&mut T> { 162 | // Safety: self is a RingBuffer 163 | unsafe { Self::ptr_get_mut_signed(self, index).map(|i| &mut *i) } 164 | } 165 | 166 | /// Gets a value relative to the current read index mutably. 
0 is the item that was pushed the longest ago. 167 | #[inline] 168 | fn get_mut(&mut self, index: usize) -> Option<&mut T> { 169 | // Safety: self is a RingBuffer 170 | unsafe { Self::ptr_get_mut(self, index).map(|i| &mut *i) } 171 | } 172 | 173 | /// Same as [`get_mut`](RingBuffer::get_mut) but on raw pointers. 174 | /// 175 | /// # Safety 176 | /// ONLY SAFE WHEN `rb` is a `*mut` to an implementor of `RingBuffer` 177 | #[doc(hidden)] 178 | unsafe fn ptr_get_mut(rb: *mut Self, index: usize) -> Option<*mut T>; 179 | 180 | /// Same as [`get_mut_signed`](RingBuffer::get_mut_signed) but on raw pointers. 181 | /// 182 | /// # Safety 183 | /// ONLY SAFE WHEN `rb` is a `*mut` to an implementor of `RingBuffer` 184 | #[doc(hidden)] 185 | unsafe fn ptr_get_mut_signed(rb: *mut Self, index: isize) -> Option<*mut T>; 186 | 187 | /// Returns the value at the current read index. 188 | /// This is the value that will be overwritten by the next push once the buffer is full, and also the value pushed 189 | /// the longest ago. (alias of [`Self::front`]) 190 | #[inline] 191 | fn peek(&self) -> Option<&T> { 192 | self.front() 193 | } 194 | 195 | /// Returns the value at the front of the queue. 196 | /// This is the value that will be overwritten by the next push once the buffer is full, and also the value pushed 197 | /// the longest ago. 198 | /// (alias of [`peek`](Self::peek)) 199 | #[inline] 200 | fn front(&self) -> Option<&T> { 201 | self.get(0) 202 | } 203 | 204 | /// Returns a mutable reference to the value at the front of the queue. 205 | /// This is the value that will be overwritten by the next push once the buffer is full. 206 | /// (mutable counterpart of [`front`](Self::front)) 207 | #[inline] 208 | fn front_mut(&mut self) -> Option<&mut T> { 209 | self.get_mut(0) 210 | } 211 | 212 | /// Returns the value at the back of the queue. 213 | /// This is the item that was pushed most recently. 214 | #[inline] 215 | fn back(&self) -> Option<&T> { 216 | self.get_signed(-1) 217 | } 218 | 219 | /// Returns a mutable reference to the value at the back of the queue. 220 | /// This is the item that was pushed most recently. 221 | #[inline] 222 | fn back_mut(&mut self) -> Option<&mut T> { 223 | self.get_mut_signed(-1) 224 | } 225 | 226 | /// Creates a mutable iterator over the buffer starting from the item pushed the longest ago, 227 | /// and ending at the element most recently pushed. 228 | #[inline] 229 | fn iter_mut(&mut self) -> RingBufferMutIterator<'_, T, Self> { 230 | RingBufferMutIterator::new(self) 231 | } 232 | 233 | /// Creates an iterator over the buffer starting from the item pushed the longest ago, 234 | /// and ending at the element most recently pushed. 235 | #[inline] 236 | fn iter(&self) -> RingBufferIterator<'_, T, Self> { 237 | RingBufferIterator::new(self) 238 | } 239 | 240 | /// Converts the buffer to a vector. This clones all elements in the ringbuffer. 241 | #[cfg(feature = "alloc")] 242 | fn to_vec(&self) -> Vec<T> 243 | where 244 | T: Clone, 245 | { 246 | self.iter().cloned().collect() 247 | } 248 | 249 | /// Returns true if `elem` is in the ringbuffer. 250 | fn contains(&self, elem: &T) -> bool 251 | where 252 | T: PartialEq, 253 | { 254 | self.iter().any(|i| i == elem) 255 | } 256 | 257 | /// Efficiently copy items from the ringbuffer to a target slice. 258 | /// 259 | /// # Panics 260 | /// Panics if the buffer length minus the offset is NOT equal to `dst.len()`. 
261 | /// 262 | /// # Safety 263 | /// ONLY SAFE WHEN self is a *const to to an implementor of `RingBuffer` 264 | unsafe fn ptr_copy_to_slice(rb: *const Self, offset: usize, dst: &mut [T]) 265 | where 266 | T: Copy; 267 | 268 | /// Efficiently copy items from the ringbuffer to a target slice. 269 | /// 270 | /// # Panics 271 | /// Panics if the buffer length minus the offset is NOT equal to `target.len()`. 272 | fn copy_to_slice(&self, offset: usize, dst: &mut [T]) 273 | where 274 | T: Copy, 275 | { 276 | unsafe { Self::ptr_copy_to_slice(self, offset, dst) } 277 | } 278 | 279 | /// Efficiently copy items from a slice to the ringbuffer. 280 | /// # Panics 281 | /// Panics if the buffer length minus the offset is NOT equal to `source.len()`. 282 | /// 283 | /// # Safety 284 | /// ONLY SAFE WHEN self is a *mut to to an implementor of `RingBuffer` 285 | unsafe fn ptr_copy_from_slice(rb: *mut Self, offset: usize, src: &[T]) 286 | where 287 | T: Copy; 288 | 289 | /// Efficiently copy items from a slice to the ringbuffer. 290 | /// 291 | /// # Panics 292 | /// Panics if the buffer length minus the offset is NOT equal to `source.len()`. 293 | fn copy_from_slice(&mut self, offset: usize, src: &[T]) 294 | where 295 | T: Copy, 296 | { 297 | unsafe { Self::ptr_copy_from_slice(self, offset, src) } 298 | } 299 | } 300 | 301 | mod iter { 302 | use crate::RingBuffer; 303 | use core::iter::FusedIterator; 304 | use core::marker::PhantomData; 305 | use core::ptr::NonNull; 306 | 307 | /// `RingBufferIterator` holds a reference to a `RingBuffer` and iterates over it. `index` is the 308 | /// current iterator position. 309 | pub struct RingBufferIterator<'rb, T, RB: RingBuffer> { 310 | obj: &'rb RB, 311 | len: usize, 312 | index: usize, 313 | phantom: PhantomData, 314 | } 315 | 316 | impl<'rb, T, RB: RingBuffer> RingBufferIterator<'rb, T, RB> { 317 | #[inline] 318 | pub fn new(obj: &'rb RB) -> Self { 319 | Self { 320 | obj, 321 | len: obj.len(), 322 | index: 0, 323 | phantom: PhantomData, 324 | } 325 | } 326 | } 327 | 328 | impl<'rb, T: 'rb, RB: RingBuffer> Iterator for RingBufferIterator<'rb, T, RB> { 329 | type Item = &'rb T; 330 | 331 | #[inline] 332 | fn next(&mut self) -> Option { 333 | if self.index < self.len { 334 | let res = self.obj.get(self.index); 335 | self.index += 1; 336 | res 337 | } else { 338 | None 339 | } 340 | } 341 | 342 | fn size_hint(&self) -> (usize, Option) { 343 | (self.len, Some(self.len)) 344 | } 345 | 346 | fn nth(&mut self, n: usize) -> Option { 347 | self.index = (self.index + n).min(self.len); 348 | self.next() 349 | } 350 | } 351 | 352 | impl<'rb, T: 'rb, RB: RingBuffer> FusedIterator for RingBufferIterator<'rb, T, RB> {} 353 | 354 | impl<'rb, T: 'rb, RB: RingBuffer> ExactSizeIterator for RingBufferIterator<'rb, T, RB> {} 355 | 356 | impl<'rb, T: 'rb, RB: RingBuffer> DoubleEndedIterator for RingBufferIterator<'rb, T, RB> { 357 | #[inline] 358 | fn next_back(&mut self) -> Option { 359 | if self.len > 0 && self.index < self.len { 360 | let res = self.obj.get(self.len - 1); 361 | self.len -= 1; 362 | res 363 | } else { 364 | None 365 | } 366 | } 367 | 368 | fn nth_back(&mut self, n: usize) -> Option { 369 | self.len = self.len - n.min(self.len); 370 | self.next_back() 371 | } 372 | } 373 | 374 | /// `RingBufferMutIterator` holds a reference to a `RingBuffer` and iterates over it. `index` is the 375 | /// current iterator position. 376 | /// 377 | /// WARNING: NEVER ACCESS THE `obj` FIELD OUTSIDE OF NEXT. 
It's private on purpose, and 378 | /// can technically be accessed in the same module. However, this breaks the safety of `next()` 379 | pub struct RingBufferMutIterator<'rb, T, RB: RingBuffer> { 380 | obj: NonNull, 381 | index: usize, 382 | len: usize, 383 | phantom: PhantomData<&'rb mut T>, 384 | } 385 | 386 | impl<'rb, T, RB: RingBuffer> RingBufferMutIterator<'rb, T, RB> { 387 | pub fn new(obj: &'rb mut RB) -> Self { 388 | Self { 389 | len: obj.len(), 390 | obj: NonNull::from(obj), 391 | index: 0, 392 | phantom: PhantomData, 393 | } 394 | } 395 | } 396 | 397 | impl<'rb, T: 'rb, RB: RingBuffer + 'rb> FusedIterator for RingBufferMutIterator<'rb, T, RB> {} 398 | 399 | impl<'rb, T: 'rb, RB: RingBuffer + 'rb> ExactSizeIterator for RingBufferMutIterator<'rb, T, RB> {} 400 | 401 | impl<'rb, T: 'rb, RB: RingBuffer + 'rb> DoubleEndedIterator 402 | for RingBufferMutIterator<'rb, T, RB> 403 | { 404 | #[inline] 405 | fn next_back(&mut self) -> Option { 406 | if self.len > 0 && self.index < self.len { 407 | self.len -= 1; 408 | let res = unsafe { RB::ptr_get_mut(self.obj.as_ptr(), self.len) }; 409 | res.map(|i| unsafe { &mut *i }) 410 | } else { 411 | None 412 | } 413 | } 414 | 415 | fn nth_back(&mut self, n: usize) -> Option { 416 | self.len = self.len - n.min(self.len); 417 | self.next_back() 418 | } 419 | } 420 | 421 | impl<'rb, T, RB: RingBuffer + 'rb> Iterator for RingBufferMutIterator<'rb, T, RB> { 422 | type Item = &'rb mut T; 423 | 424 | fn next(&mut self) -> Option { 425 | if self.index < self.len { 426 | let res = unsafe { RB::ptr_get_mut(self.obj.as_ptr(), self.index) }; 427 | self.index += 1; 428 | // Safety: ptr_get_mut always returns a valid pointer 429 | res.map(|i| unsafe { &mut *i }) 430 | } else { 431 | None 432 | } 433 | } 434 | 435 | fn size_hint(&self) -> (usize, Option) { 436 | (self.len, Some(self.len)) 437 | } 438 | 439 | fn nth(&mut self, n: usize) -> Option { 440 | self.index = (self.index + n).min(self.len); 441 | self.next() 442 | } 443 | } 444 | 445 | /// `RingBufferMutIterator` holds a reference to a `RingBuffer` and iterates over it. 446 | pub struct RingBufferDrainingIterator<'rb, T, RB: RingBuffer> { 447 | obj: &'rb mut RB, 448 | phantom: PhantomData, 449 | } 450 | 451 | impl<'rb, T, RB: RingBuffer> RingBufferDrainingIterator<'rb, T, RB> { 452 | #[inline] 453 | pub fn new(obj: &'rb mut RB) -> Self { 454 | Self { 455 | obj, 456 | phantom: PhantomData, 457 | } 458 | } 459 | } 460 | 461 | impl> Iterator for RingBufferDrainingIterator<'_, T, RB> { 462 | type Item = T; 463 | 464 | fn next(&mut self) -> Option { 465 | self.obj.dequeue() 466 | } 467 | 468 | fn size_hint(&self) -> (usize, Option) { 469 | (self.obj.len(), Some(self.obj.len())) 470 | } 471 | } 472 | 473 | /// `RingBufferIntoIterator` holds a `RingBuffer` and iterates over it. 
474 | pub struct RingBufferIntoIterator> { 475 | obj: RB, 476 | phantom: PhantomData, 477 | } 478 | 479 | impl> RingBufferIntoIterator { 480 | #[inline] 481 | pub fn new(obj: RB) -> Self { 482 | Self { 483 | obj, 484 | phantom: PhantomData, 485 | } 486 | } 487 | } 488 | 489 | impl> Iterator for RingBufferIntoIterator { 490 | type Item = T; 491 | 492 | #[inline] 493 | fn next(&mut self) -> Option { 494 | self.obj.dequeue() 495 | } 496 | 497 | fn size_hint(&self) -> (usize, Option) { 498 | (self.obj.len(), Some(self.obj.len())) 499 | } 500 | } 501 | } 502 | 503 | pub use iter::{ 504 | RingBufferDrainingIterator, RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator, 505 | }; 506 | 507 | /// Implement various functions on implementors of [`RingBuffer`]. 508 | /// This is to avoid duplicate code. 509 | macro_rules! impl_ringbuffer { 510 | ($readptr: ident, $writeptr: ident) => { 511 | #[inline] 512 | unsafe fn ptr_len(rb: *const Self) -> usize { 513 | (*rb).$writeptr - (*rb).$readptr 514 | } 515 | }; 516 | } 517 | 518 | /// Implement various functions on implementors of [`RingBuffer`]. 519 | /// This is to avoid duplicate code. 520 | macro_rules! impl_ringbuffer_ext { 521 | ($get_base_ptr: ident, $get_base_mut_ptr: ident, $get_unchecked: ident, $get_unchecked_mut: ident, $readptr: ident, $writeptr: ident, $mask: expr) => { 522 | #[inline] 523 | fn get_signed(&self, index: isize) -> Option<&T> { 524 | use core::ops::Not; 525 | self.is_empty().not().then(move || { 526 | let index_from_readptr = if index >= 0 { 527 | index 528 | } else { 529 | self.len() as isize + index 530 | }; 531 | 532 | let normalized_index = 533 | self.$readptr as isize + index_from_readptr.rem_euclid(self.len() as isize); 534 | 535 | unsafe { 536 | // SAFETY: index has been modulo-ed to be within range 537 | // to be within bounds 538 | $get_unchecked(self, $mask(self.buffer_size(), normalized_index as usize)) 539 | } 540 | }) 541 | } 542 | 543 | #[inline] 544 | fn get(&self, index: usize) -> Option<&T> { 545 | use core::ops::Not; 546 | self.is_empty().not().then(move || { 547 | let normalized_index = self.$readptr + index.rem_euclid(self.len()); 548 | unsafe { 549 | // SAFETY: index has been modulo-ed to be within range 550 | // to be within bounds 551 | $get_unchecked(self, $mask(self.buffer_size(), normalized_index)) 552 | } 553 | }) 554 | } 555 | 556 | #[inline] 557 | #[doc(hidden)] 558 | unsafe fn ptr_get_mut_signed(rb: *mut Self, index: isize) -> Option<*mut T> { 559 | (Self::ptr_len(rb) != 0).then(move || { 560 | let index_from_readptr = if index >= 0 { 561 | index 562 | } else { 563 | Self::ptr_len(rb) as isize + index 564 | }; 565 | 566 | let normalized_index = (*rb).$readptr as isize 567 | + index_from_readptr.rem_euclid(Self::ptr_len(rb) as isize); 568 | 569 | unsafe { 570 | // SAFETY: index has been modulo-ed to be within range 571 | // to be within bounds 572 | $get_unchecked_mut( 573 | rb, 574 | $mask(Self::ptr_buffer_size(rb), normalized_index as usize), 575 | ) 576 | } 577 | }) 578 | } 579 | 580 | #[inline] 581 | #[doc(hidden)] 582 | unsafe fn ptr_get_mut(rb: *mut Self, index: usize) -> Option<*mut T> { 583 | (Self::ptr_len(rb) != 0).then(move || { 584 | let normalized_index = (*rb).$readptr + index.rem_euclid(Self::ptr_len(rb)); 585 | 586 | unsafe { 587 | // SAFETY: index has been modulo-ed to be within range 588 | // to be within bounds 589 | $get_unchecked_mut(rb, $mask(Self::ptr_buffer_size(rb), normalized_index)) 590 | } 591 | }) 592 | } 593 | 594 | #[inline] 595 | fn clear(&mut self) { 596 
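// NOTE (added comment, not part of the original source): the loop below
// drains and drops every remaining element first; only after that are the
// read and write pointers reset to zero.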
| for i in self.drain() { 597 | drop(i); 598 | } 599 | 600 | self.$readptr = 0; 601 | self.$writeptr = 0; 602 | } 603 | 604 | unsafe fn ptr_copy_to_slice(rb: *const Self, offset: usize, dst: &mut [T]) 605 | where 606 | T: Copy, 607 | { 608 | let len = Self::ptr_len(rb); 609 | let dst_len = dst.len(); 610 | assert!( 611 | (offset == 0 && len == 0) || offset < len, 612 | "offset ({offset}) is out of bounds for the current buffer length ({len})" 613 | ); 614 | assert!(len - offset == dst_len, "destination slice length ({dst_len}) doesn't match buffer length ({len}) when considering the specified offset ({offset})"); 615 | 616 | if dst_len == 0 { 617 | return; 618 | } 619 | 620 | let base: *const T = $get_base_ptr(rb); 621 | let size = Self::ptr_buffer_size(rb); 622 | let offset_readptr = (*rb).$readptr + offset; 623 | 624 | let from_idx = $mask(size, offset_readptr); 625 | let to_idx = $mask(size, offset_readptr + dst_len); 626 | 627 | if from_idx < to_idx { 628 | dst.copy_from_slice(unsafe { 629 | // SAFETY: index has been modulo-ed to be within range 630 | // to be within bounds 631 | core::slice::from_raw_parts(base.add(from_idx), dst_len) 632 | }); 633 | } else { 634 | dst[..size - from_idx].copy_from_slice(unsafe { 635 | // SAFETY: index has been modulo-ed to be within range 636 | // to be within bounds 637 | core::slice::from_raw_parts(base.add(from_idx), size - from_idx) 638 | }); 639 | dst[size - from_idx..].copy_from_slice(unsafe { 640 | // SAFETY: index has been modulo-ed to be within range 641 | // to be within bounds 642 | core::slice::from_raw_parts(base, to_idx) 643 | }); 644 | } 645 | } 646 | 647 | unsafe fn ptr_copy_from_slice(rb: *mut Self, offset: usize, src: &[T]) 648 | where 649 | T: Copy, 650 | { 651 | let len = Self::ptr_len(rb); 652 | let src_len = src.len(); 653 | assert!( 654 | (offset == 0 && len == 0) || offset < len, 655 | "offset ({offset}) is out of bounds for the current buffer length ({len})" 656 | ); 657 | assert!(len - offset == src_len, "source slice length ({src_len}) doesn't match buffer length ({len}) when considering the specified offset ({offset})"); 658 | 659 | if src_len == 0 { 660 | return; 661 | } 662 | 663 | let base: *mut T = $get_base_mut_ptr(rb); 664 | let size = Self::ptr_buffer_size(rb); 665 | let offset_readptr = (*rb).$readptr + offset; 666 | 667 | let from_idx = $mask(size, offset_readptr); 668 | let to_idx = $mask(size, offset_readptr + src_len); 669 | 670 | if from_idx < to_idx { 671 | unsafe { 672 | // SAFETY: index has been modulo-ed to be within range 673 | // to be within bounds 674 | core::slice::from_raw_parts_mut(base.add(from_idx), src_len) 675 | } 676 | .copy_from_slice(src); 677 | } else { 678 | unsafe { 679 | // SAFETY: index has been modulo-ed to be within range 680 | // to be within bounds 681 | core::slice::from_raw_parts_mut(base.add(from_idx), size - from_idx) 682 | } 683 | .copy_from_slice(&src[..size - from_idx]); 684 | unsafe { 685 | // SAFETY: index has been modulo-ed to be within range 686 | // to be within bounds 687 | core::slice::from_raw_parts_mut(base, to_idx) 688 | } 689 | .copy_from_slice(&src[size - from_idx..]); 690 | } 691 | } 692 | }; 693 | } 694 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | #![deny(missing_docs)] 3 | #![deny(warnings)] 4 | #![deny(unused_import_braces)] 5 | #![deny(unused_results)] 6 | #![deny(trivial_casts)] 7 | 
#![deny(trivial_numeric_casts)] 8 | #![deny(unused_qualifications)] 9 | #![deny(clippy::must_use_candidate)] 10 | #![deny(clippy::default_trait_access)] 11 | #![deny(clippy::doc_markdown)] 12 | #![deny(clippy::semicolon_if_nothing_returned)] 13 | #![allow(unused_unsafe)] // to support older rust versions 14 | #![doc = include_str!("../README.md")] 15 | 16 | #[cfg(feature = "alloc")] 17 | extern crate alloc; 18 | 19 | #[macro_use] 20 | pub(crate) mod ringbuffer_trait; 21 | 22 | pub use ringbuffer_trait::RingBuffer; 23 | 24 | mod set_len_trait; 25 | pub use set_len_trait::SetLen; 26 | 27 | #[cfg(feature = "alloc")] 28 | mod with_alloc; 29 | #[cfg(feature = "alloc")] 30 | pub use with_alloc::alloc_ringbuffer::AllocRingBuffer; 31 | #[cfg(feature = "alloc")] 32 | pub use with_alloc::vecdeque::GrowableAllocRingBuffer; 33 | 34 | mod with_const_generics; 35 | pub use with_const_generics::ConstGenericRingBuffer; 36 | 37 | /// Used internally. Computes the bitmask used to properly wrap the ringbuffers. 38 | #[inline] 39 | #[cfg(feature = "alloc")] 40 | const fn mask_and(cap: usize, index: usize) -> usize { 41 | debug_assert!(cap.is_power_of_two()); 42 | index & (cap - 1) 43 | } 44 | 45 | /// Used internally. Computes the bitmask used to properly wrap the ringbuffers. 46 | #[inline] 47 | const fn mask_modulo(cap: usize, index: usize) -> usize { 48 | index % cap 49 | } 50 | 51 | #[cfg(test)] 52 | #[allow(non_upper_case_globals)] 53 | mod tests { 54 | extern crate std; 55 | 56 | use core::fmt::Debug; 57 | use std::vec; 58 | use std::vec::Vec; 59 | 60 | use crate::ringbuffer_trait::{RingBufferIterator, RingBufferMutIterator}; 61 | use crate::{AllocRingBuffer, ConstGenericRingBuffer, GrowableAllocRingBuffer, RingBuffer}; 62 | 63 | #[test] 64 | fn run_test_neg_index() { 65 | //! 
Test for issue #43 66 | 67 | const capacity: usize = 8; 68 | fn test_neg_index(mut b: impl RingBuffer) { 69 | for i in 0..capacity + 2 { 70 | let _ = b.enqueue(i); 71 | assert_eq!(b.get_signed(-1), Some(&i)); 72 | } 73 | } 74 | 75 | test_neg_index(AllocRingBuffer::new(capacity)); 76 | test_neg_index(ConstGenericRingBuffer::::new()); 77 | test_neg_index(GrowableAllocRingBuffer::with_capacity(capacity)); 78 | } 79 | 80 | #[test] 81 | fn run_test_default() { 82 | fn test_default(b: impl RingBuffer) { 83 | assert_eq!(b.capacity(), 8); 84 | assert_eq!(b.len(), 0); 85 | } 86 | 87 | test_default(AllocRingBuffer::new(8)); 88 | test_default(GrowableAllocRingBuffer::with_capacity(8)); 89 | test_default(ConstGenericRingBuffer::::new()); 90 | } 91 | 92 | #[test] 93 | fn run_test_new() { 94 | fn test_new(b: impl RingBuffer) { 95 | assert_eq!(b.capacity(), 8); 96 | assert_eq!(b.len(), 0); 97 | } 98 | 99 | test_new(AllocRingBuffer::new(8)); 100 | test_new(GrowableAllocRingBuffer::with_capacity(8)); 101 | test_new(ConstGenericRingBuffer::::new()); 102 | } 103 | 104 | #[test] 105 | fn test_default_eq_new() { 106 | assert_eq!( 107 | GrowableAllocRingBuffer::::default(), 108 | GrowableAllocRingBuffer::::new() 109 | ); 110 | assert_eq!( 111 | ConstGenericRingBuffer::::default(), 112 | ConstGenericRingBuffer::::new() 113 | ); 114 | } 115 | 116 | #[test] 117 | fn run_test_len() { 118 | fn test_len(mut b: impl RingBuffer) { 119 | assert_eq!(0, b.len()); 120 | let _ = b.enqueue(1); 121 | assert_eq!(1, b.len()); 122 | let _ = b.enqueue(2); 123 | assert_eq!(2, b.len()); 124 | } 125 | 126 | test_len(AllocRingBuffer::new(8)); 127 | test_len(GrowableAllocRingBuffer::with_capacity(8)); 128 | test_len(ConstGenericRingBuffer::::new()); 129 | } 130 | 131 | #[test] 132 | fn run_test_len_wrap() { 133 | fn test_len_wrap(mut b: impl RingBuffer) { 134 | assert_eq!(0, b.len()); 135 | let _ = b.enqueue(1); 136 | assert_eq!(1, b.len()); 137 | let _ = b.enqueue(2); 138 | assert_eq!(2, b.len()); 139 | // Now we are wrapping 140 | let _ = b.enqueue(3); 141 | assert_eq!(2, b.len()); 142 | let _ = b.enqueue(4); 143 | assert_eq!(2, b.len()); 144 | } 145 | 146 | test_len_wrap(AllocRingBuffer::new(2)); 147 | test_len_wrap(ConstGenericRingBuffer::::new()); 148 | 149 | // the growable ringbuffer actually should grow instead of wrap 150 | let mut grb = GrowableAllocRingBuffer::with_capacity(2); 151 | assert_eq!(0, grb.len()); 152 | let _ = grb.enqueue(0); 153 | assert_eq!(1, grb.len()); 154 | let _ = grb.enqueue(1); 155 | assert_eq!(2, grb.len()); 156 | let _ = grb.enqueue(2); 157 | assert_eq!(3, grb.len()); 158 | } 159 | 160 | #[test] 161 | fn run_test_clear() { 162 | fn test_clear(mut b: impl RingBuffer) { 163 | let _ = b.enqueue(1); 164 | let _ = b.enqueue(2); 165 | let _ = b.enqueue(3); 166 | 167 | b.clear(); 168 | assert!(b.is_empty()); 169 | assert_eq!(0, b.len()); 170 | } 171 | 172 | test_clear(AllocRingBuffer::new(8)); 173 | test_clear(GrowableAllocRingBuffer::with_capacity(8)); 174 | test_clear(ConstGenericRingBuffer::::new()); 175 | } 176 | 177 | #[test] 178 | fn run_test_empty() { 179 | fn test_empty(mut b: impl RingBuffer) { 180 | assert!(b.is_empty()); 181 | let _ = b.enqueue(1); 182 | let _ = b.enqueue(2); 183 | let _ = b.enqueue(3); 184 | assert!(!b.is_empty()); 185 | 186 | b.clear(); 187 | assert!(b.is_empty()); 188 | assert_eq!(0, b.len()); 189 | } 190 | 191 | test_empty(AllocRingBuffer::new(8)); 192 | test_empty(GrowableAllocRingBuffer::with_capacity(8)); 193 | test_empty(ConstGenericRingBuffer::::new()); 194 | } 195 | 196 
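// NOTE (added sketch, not part of the original source): `enqueue` returns the
// element it evicted, which the surrounding tests never check explicitly.
// The test name is hypothetical, for illustration only.
#[test]
fn sketch_enqueue_reports_evicted_element() {
    fn third_enqueue(mut b: impl RingBuffer<i32>) -> Option<i32> {
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);
        // A third enqueue into a two-slot buffer may evict the oldest item.
        b.enqueue(3)
    }

    // The fixed-size buffers overwrite and hand back the oldest element...
    assert_eq!(third_enqueue(AllocRingBuffer::new(2)), Some(1));
    assert_eq!(third_enqueue(ConstGenericRingBuffer::<i32, 2>::new()), Some(1));
    // ...while the growable buffer grows instead and evicts nothing.
    assert_eq!(third_enqueue(GrowableAllocRingBuffer::with_capacity(2)), None);
}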
| #[test] 197 | fn run_test_iter() { 198 | fn test_iter(mut b: impl RingBuffer) { 199 | let _ = b.enqueue(1); 200 | let _ = b.enqueue(2); 201 | let _ = b.enqueue(3); 202 | let _ = b.enqueue(4); 203 | let _ = b.enqueue(5); 204 | let _ = b.enqueue(6); 205 | let _ = b.enqueue(7); 206 | 207 | let mut iter = b.iter(); 208 | assert_eq!(&1, iter.next().unwrap()); 209 | assert_eq!(&7, iter.next_back().unwrap()); 210 | assert_eq!(&2, iter.next().unwrap()); 211 | assert_eq!(&3, iter.next().unwrap()); 212 | assert_eq!(&6, iter.next_back().unwrap()); 213 | assert_eq!(&5, iter.next_back().unwrap()); 214 | assert_eq!(&4, iter.next().unwrap()); 215 | assert_eq!(None, iter.next()); 216 | } 217 | 218 | test_iter(AllocRingBuffer::new(8)); 219 | test_iter(GrowableAllocRingBuffer::with_capacity(8)); 220 | test_iter(ConstGenericRingBuffer::::new()); 221 | } 222 | 223 | #[test] 224 | fn run_test_forward_iter_non_power_of_two() { 225 | fn test_iter(mut b: impl RingBuffer) { 226 | let _ = b.enqueue(1); 227 | let _ = b.enqueue(2); 228 | let _ = b.enqueue(3); 229 | let _ = b.enqueue(4); 230 | let _ = b.enqueue(5); 231 | let _ = b.enqueue(6); 232 | let _ = b.enqueue(7); 233 | 234 | let mut iter = b.iter(); 235 | assert_eq!(&1, iter.next().unwrap()); 236 | assert_eq!(&2, iter.next().unwrap()); 237 | assert_eq!(&3, iter.next().unwrap()); 238 | assert_eq!(&4, iter.next().unwrap()); 239 | assert_eq!(&5, iter.next().unwrap()); 240 | assert_eq!(&6, iter.next().unwrap()); 241 | assert_eq!(&7, iter.next().unwrap()); 242 | assert_eq!(None, iter.next()); 243 | } 244 | 245 | test_iter(AllocRingBuffer::new(7)); 246 | test_iter(GrowableAllocRingBuffer::with_capacity(7)); 247 | test_iter(ConstGenericRingBuffer::::new()); 248 | } 249 | 250 | #[test] 251 | fn run_test_iter_non_power_of_two() { 252 | fn test_iter(mut b: impl RingBuffer) { 253 | let _ = b.enqueue(1); 254 | let _ = b.enqueue(2); 255 | let _ = b.enqueue(3); 256 | let _ = b.enqueue(4); 257 | let _ = b.enqueue(5); 258 | let _ = b.enqueue(6); 259 | let _ = b.enqueue(7); 260 | 261 | let mut iter = b.iter(); 262 | assert_eq!(&1, iter.next().unwrap()); 263 | assert_eq!(&7, iter.next_back().unwrap()); 264 | assert_eq!(&2, iter.next().unwrap()); 265 | assert_eq!(&3, iter.next().unwrap()); 266 | assert_eq!(&6, iter.next_back().unwrap()); 267 | assert_eq!(&5, iter.next_back().unwrap()); 268 | assert_eq!(&4, iter.next().unwrap()); 269 | assert_eq!(None, iter.next()); 270 | } 271 | 272 | test_iter(AllocRingBuffer::new(7)); 273 | test_iter(GrowableAllocRingBuffer::with_capacity(7)); 274 | test_iter(ConstGenericRingBuffer::::new()); 275 | } 276 | 277 | #[test] 278 | fn run_test_iter_ref() { 279 | fn test_iter(mut b: B) 280 | where 281 | B: RingBuffer, 282 | for<'a> &'a B: IntoIterator>, 283 | { 284 | let _ = b.enqueue(1); 285 | let _ = b.enqueue(2); 286 | let _ = b.enqueue(3); 287 | let _ = b.enqueue(4); 288 | let _ = b.enqueue(5); 289 | let _ = b.enqueue(6); 290 | let _ = b.enqueue(7); 291 | 292 | let mut iter = (&b).into_iter(); 293 | assert_eq!(&1, iter.next().unwrap()); 294 | assert_eq!(&7, iter.next_back().unwrap()); 295 | assert_eq!(&2, iter.next().unwrap()); 296 | assert_eq!(&3, iter.next().unwrap()); 297 | assert_eq!(&6, iter.next_back().unwrap()); 298 | assert_eq!(&5, iter.next_back().unwrap()); 299 | assert_eq!(&4, iter.next().unwrap()); 300 | assert_eq!(None, iter.next()); 301 | } 302 | 303 | test_iter(AllocRingBuffer::new(8)); 304 | test_iter(GrowableAllocRingBuffer::with_capacity(8)); 305 | test_iter(ConstGenericRingBuffer::::new()); 306 | } 307 | 308 | #[test] 309 | fn 
run_test_into_iter() { 310 | fn test_iter(mut b: impl RingBuffer) { 311 | let _ = b.enqueue(1); 312 | let _ = b.enqueue(2); 313 | let _ = b.enqueue(3); 314 | let _ = b.enqueue(4); 315 | let _ = b.enqueue(5); 316 | let _ = b.enqueue(6); 317 | let _ = b.enqueue(7); 318 | 319 | let mut iter = b.into_iter(); 320 | assert_eq!(1, iter.next().unwrap()); 321 | assert_eq!(2, iter.next().unwrap()); 322 | assert_eq!(3, iter.next().unwrap()); 323 | assert_eq!(4, iter.next().unwrap()); 324 | assert_eq!(5, iter.next().unwrap()); 325 | assert_eq!(6, iter.next().unwrap()); 326 | assert_eq!(7, iter.next().unwrap()); 327 | assert_eq!(None, iter.next()); 328 | } 329 | 330 | test_iter(AllocRingBuffer::new(8)); 331 | test_iter(GrowableAllocRingBuffer::with_capacity(8)); 332 | test_iter(ConstGenericRingBuffer::::new()); 333 | } 334 | 335 | #[cfg(feature = "alloc")] 336 | #[test] 337 | fn run_test_iter_with_lifetimes() { 338 | fn test_iter<'a>(string: &'a str, mut b: impl RingBuffer<&'a str>) { 339 | let _ = b.enqueue(&string[0..1]); 340 | let _ = b.enqueue(&string[1..2]); 341 | let _ = b.enqueue(&string[2..3]); 342 | 343 | let mut iter = b.iter(); 344 | assert_eq!(&&string[0..1], iter.next().unwrap()); 345 | assert_eq!(&&string[1..2], iter.next().unwrap()); 346 | assert_eq!(&&string[2..3], iter.next().unwrap()); 347 | } 348 | 349 | extern crate alloc; 350 | use alloc::string::ToString as _; 351 | let string = "abc".to_string(); 352 | 353 | test_iter(&string, AllocRingBuffer::new(8)); 354 | test_iter(&string, GrowableAllocRingBuffer::with_capacity(8)); 355 | test_iter(&string, ConstGenericRingBuffer::<&str, 8>::new()); 356 | } 357 | 358 | #[test] 359 | fn run_test_double_iter() { 360 | fn test_double_iter(mut b: impl RingBuffer) { 361 | let _ = b.enqueue(1); 362 | let _ = b.enqueue(2); 363 | let _ = b.enqueue(3); 364 | 365 | let mut iter1 = b.iter(); 366 | let mut iter2 = b.iter(); 367 | 368 | assert_eq!(&1, iter1.next().unwrap()); 369 | assert_eq!(&2, iter1.next().unwrap()); 370 | assert_eq!(&3, iter1.next().unwrap()); 371 | assert_eq!(&1, iter2.next().unwrap()); 372 | assert_eq!(&2, iter2.next().unwrap()); 373 | assert_eq!(&3, iter2.next().unwrap()); 374 | } 375 | 376 | test_double_iter(AllocRingBuffer::new(8)); 377 | test_double_iter(GrowableAllocRingBuffer::with_capacity(8)); 378 | test_double_iter(ConstGenericRingBuffer::::new()); 379 | } 380 | 381 | #[test] 382 | fn run_test_iter_wrap() { 383 | fn test_iter_wrap(mut b: impl RingBuffer) { 384 | let _ = b.enqueue(1); 385 | let _ = b.enqueue(2); 386 | // Wrap 387 | let _ = b.enqueue(3); 388 | 389 | let mut iter = b.iter(); 390 | assert_eq!(&2, iter.next().unwrap()); 391 | assert_eq!(&3, iter.next().unwrap()); 392 | } 393 | 394 | test_iter_wrap(AllocRingBuffer::new(2)); 395 | test_iter_wrap(ConstGenericRingBuffer::::new()); 396 | 397 | // the growable ringbuffer shouldn't actually stop growing 398 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 399 | 400 | let _ = b.enqueue(1); 401 | let _ = b.enqueue(2); 402 | // No wrap 403 | let _ = b.enqueue(3); 404 | 405 | let mut iter = b.iter(); 406 | assert_eq!(&1, iter.next().unwrap()); 407 | assert_eq!(&2, iter.next().unwrap()); 408 | assert_eq!(&3, iter.next().unwrap()); 409 | assert!(iter.next().is_none()); 410 | } 411 | 412 | #[test] 413 | fn run_test_iter_mut() { 414 | fn test_iter_mut(mut b: impl RingBuffer) { 415 | let _ = b.enqueue(1); 416 | let _ = b.enqueue(2); 417 | let _ = b.enqueue(3); 418 | 419 | for el in b.iter_mut() { 420 | *el += 1; 421 | } 422 | 423 | assert_eq!(vec![2, 3, 4], b.to_vec()); 
424 | } 425 | 426 | test_iter_mut(AllocRingBuffer::new(8)); 427 | test_iter_mut(GrowableAllocRingBuffer::with_capacity(8)); 428 | test_iter_mut(ConstGenericRingBuffer::::new()); 429 | } 430 | 431 | #[test] 432 | fn run_test_iter_mut_ref() { 433 | fn test_iter_mut(mut b: B) 434 | where 435 | B: RingBuffer, 436 | for<'a> &'a mut B: 437 | IntoIterator>, 438 | { 439 | let _ = b.enqueue(1); 440 | let _ = b.enqueue(2); 441 | let _ = b.enqueue(3); 442 | 443 | for el in &mut b { 444 | *el += 1; 445 | } 446 | 447 | assert_eq!(vec![2, 3, 4], b.to_vec()); 448 | } 449 | 450 | test_iter_mut(AllocRingBuffer::new(8)); 451 | test_iter_mut(GrowableAllocRingBuffer::with_capacity(8)); 452 | test_iter_mut(ConstGenericRingBuffer::::new()); 453 | } 454 | 455 | #[test] 456 | fn test_iter_mut_wrap() { 457 | fn run_test_iter_mut_wrap(mut b: impl RingBuffer) { 458 | let _ = b.enqueue(1); 459 | let _ = b.enqueue(2); 460 | let _ = b.enqueue(3); 461 | 462 | for i in b.iter_mut() { 463 | *i += 1; 464 | } 465 | 466 | assert_eq!(vec![3, 4], b.to_vec()); 467 | } 468 | 469 | run_test_iter_mut_wrap(AllocRingBuffer::new(2)); 470 | run_test_iter_mut_wrap(ConstGenericRingBuffer::::new()); 471 | 472 | // The growable ringbuffer actually shouldn't wrap 473 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 474 | 475 | let _ = b.enqueue(1); 476 | let _ = b.enqueue(2); 477 | let _ = b.enqueue(3); 478 | 479 | for i in b.iter_mut() { 480 | *i += 1; 481 | } 482 | 483 | assert_eq!(vec![2, 3, 4], b.to_vec()); 484 | } 485 | 486 | #[test] 487 | fn test_iter_mut_miri_fail() { 488 | fn run_test_iter_mut_wrap(mut b: impl RingBuffer) { 489 | let _ = b.enqueue(1); 490 | let _ = b.enqueue(2); 491 | let _ = b.enqueue(3); 492 | 493 | let buf = b.iter_mut().collect::>(); 494 | 495 | for i in buf { 496 | *i += 1; 497 | } 498 | 499 | assert_eq!(vec![3, 4], b.to_vec()); 500 | } 501 | 502 | run_test_iter_mut_wrap(AllocRingBuffer::new(2)); 503 | run_test_iter_mut_wrap(ConstGenericRingBuffer::::new()); 504 | 505 | // the growable ringbuffer actually shouldn't wrap 506 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 507 | let _ = b.enqueue(1); 508 | let _ = b.enqueue(2); 509 | let _ = b.enqueue(3); 510 | 511 | let buf = b.iter_mut().collect::>(); 512 | 513 | for i in buf { 514 | *i += 1; 515 | } 516 | 517 | assert_eq!(vec![2, 3, 4], b.to_vec()); 518 | } 519 | 520 | #[test] 521 | fn run_test_to_vec() { 522 | fn test_to_vec(mut b: impl RingBuffer) { 523 | let _ = b.enqueue(1); 524 | let _ = b.enqueue(2); 525 | let _ = b.enqueue(3); 526 | 527 | assert_eq!(vec![1, 2, 3], b.to_vec()); 528 | } 529 | 530 | test_to_vec(AllocRingBuffer::new(8)); 531 | test_to_vec(GrowableAllocRingBuffer::with_capacity(8)); 532 | test_to_vec(ConstGenericRingBuffer::::new()); 533 | } 534 | 535 | #[test] 536 | fn run_test_to_vec_wrap() { 537 | fn test_to_vec_wrap(mut b: impl RingBuffer) { 538 | let _ = b.enqueue(1); 539 | let _ = b.enqueue(2); 540 | // Wrap 541 | let _ = b.enqueue(3); 542 | 543 | assert_eq!(vec![2, 3], b.to_vec()); 544 | } 545 | 546 | test_to_vec_wrap(AllocRingBuffer::new(2)); 547 | test_to_vec_wrap(ConstGenericRingBuffer::::new()); 548 | 549 | // The growable ringbuffer should actually remember all items 550 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 551 | 552 | let _ = b.enqueue(1); 553 | let _ = b.enqueue(2); 554 | let _ = b.enqueue(3); 555 | 556 | assert_eq!(vec![1, 2, 3], b.to_vec()); 557 | } 558 | 559 | #[test] 560 | fn run_test_index() { 561 | fn test_index(mut b: impl RingBuffer) { 562 | let _ = b.enqueue(2); 563 | 
assert_eq!(b[0], 2); 564 | } 565 | 566 | test_index(AllocRingBuffer::new(8)); 567 | test_index(GrowableAllocRingBuffer::with_capacity(8)); 568 | test_index(ConstGenericRingBuffer::::new()); 569 | } 570 | 571 | #[test] 572 | fn run_test_get() { 573 | fn test_index(mut b: impl RingBuffer) { 574 | let _ = b.enqueue(0); 575 | let _ = b.enqueue(1); 576 | let _ = b.enqueue(2); 577 | let _ = b.enqueue(3); 578 | let _ = b.enqueue(4); 579 | let _ = b.enqueue(5); 580 | let _ = b.enqueue(6); 581 | let _ = b.enqueue(7); 582 | 583 | assert_eq!(b.get(0), Some(&0)); 584 | assert_eq!(b.get(1), Some(&1)); 585 | assert_eq!(b.get(2), Some(&2)); 586 | assert_eq!(b.get(3), Some(&3)); 587 | assert_eq!(b.get(4), Some(&4)); 588 | assert_eq!(b.get(5), Some(&5)); 589 | assert_eq!(b.get(6), Some(&6)); 590 | assert_eq!(b.get(7), Some(&7)); 591 | } 592 | 593 | test_index(AllocRingBuffer::new(8)); 594 | test_index(GrowableAllocRingBuffer::with_capacity(8)); 595 | test_index(ConstGenericRingBuffer::::new()); 596 | } 597 | 598 | #[test] 599 | fn run_test_index_mut() { 600 | fn test_index_mut(mut b: impl RingBuffer) { 601 | let _ = b.enqueue(2); 602 | 603 | assert_eq!(b[0], 2); 604 | 605 | b[0] = 5; 606 | 607 | assert_eq!(b[0], 5); 608 | } 609 | 610 | test_index_mut(AllocRingBuffer::new(8)); 611 | test_index_mut(GrowableAllocRingBuffer::with_capacity(8)); 612 | test_index_mut(ConstGenericRingBuffer::::new()); 613 | } 614 | 615 | #[test] 616 | fn run_test_peek_some() { 617 | fn test_peek_some(mut b: impl RingBuffer) { 618 | let _ = b.enqueue(1); 619 | let _ = b.enqueue(2); 620 | 621 | assert_eq!(b.peek(), Some(&1)); 622 | } 623 | 624 | test_peek_some(AllocRingBuffer::new(2)); 625 | test_peek_some(GrowableAllocRingBuffer::with_capacity(2)); 626 | test_peek_some(ConstGenericRingBuffer::::new()); 627 | } 628 | 629 | #[test] 630 | fn run_test_peek_none() { 631 | fn test_peek_none(b: impl RingBuffer) { 632 | assert_eq!(b.peek(), None); 633 | } 634 | 635 | test_peek_none(AllocRingBuffer::new(8)); 636 | test_peek_none(GrowableAllocRingBuffer::with_capacity(8)); 637 | test_peek_none(ConstGenericRingBuffer::::new()); 638 | } 639 | 640 | #[test] 641 | fn run_test_get_relative() { 642 | fn test_get_relative(mut b: impl RingBuffer) { 643 | let _ = b.enqueue(0); 644 | let _ = b.enqueue(1); 645 | 646 | // get[(index + 1) % len] = 1 647 | assert_eq!(b.get(0).unwrap(), &0); 648 | assert_eq!(b.get(1).unwrap(), &1); 649 | 650 | // Wraps around 651 | assert_eq!(b.get(2).unwrap(), &0); 652 | assert_eq!(b.get(3).unwrap(), &1); 653 | } 654 | 655 | test_get_relative(AllocRingBuffer::new(8)); 656 | test_get_relative(GrowableAllocRingBuffer::with_capacity(8)); 657 | test_get_relative(ConstGenericRingBuffer::::new()); 658 | } 659 | 660 | #[test] 661 | fn run_test_wrapping_get_relative() { 662 | fn test_wrapping_get_relative(mut b: impl RingBuffer) { 663 | let _ = b.enqueue(0); 664 | let _ = b.enqueue(1); 665 | let _ = b.enqueue(2); 666 | 667 | // [0, ...] 
668 | // ^ 669 | // [0, 1] 670 | // ^ 671 | // [2, 1] 672 | // ^ 673 | // get(0) == b[index] = 1 674 | // get(1) == b[(index+1) % len] = 1 675 | assert_eq!(b.get(0).unwrap(), &1); 676 | assert_eq!(b.get(1).unwrap(), &2); 677 | } 678 | 679 | test_wrapping_get_relative(AllocRingBuffer::new(2)); 680 | test_wrapping_get_relative(ConstGenericRingBuffer::::new()); 681 | 682 | // the growable ringbuffer actually shouldn't wrap 683 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 684 | let _ = b.enqueue(0); 685 | let _ = b.enqueue(1); 686 | let _ = b.enqueue(2); 687 | 688 | assert_eq!(b.get(0).unwrap(), &0); 689 | assert_eq!(b.get(1).unwrap(), &1); 690 | assert_eq!(b.get(2).unwrap(), &2); 691 | } 692 | 693 | #[test] 694 | fn run_test_get_relative_zero_length() { 695 | fn test_get_relative_zero_length(b: impl RingBuffer) { 696 | assert!(b.get(1).is_none()); 697 | } 698 | 699 | test_get_relative_zero_length(AllocRingBuffer::new(8)); 700 | test_get_relative_zero_length(GrowableAllocRingBuffer::with_capacity(8)); 701 | test_get_relative_zero_length(ConstGenericRingBuffer::::new()); 702 | } 703 | 704 | #[test] 705 | fn run_test_get_relative_mut() { 706 | fn test_get_relative_mut(mut b: impl RingBuffer) { 707 | let _ = b.enqueue(0); 708 | let _ = b.enqueue(1); 709 | 710 | // [0, ...] 711 | // ^ 712 | // [0, 1, ...] 713 | // ^ 714 | // get[(index + 0) % len] = 0 (wrap to 0 because len == 2) 715 | // get[(index + 1) % len] = 1 716 | *b.get_mut(0).unwrap() = 3; 717 | *b.get_mut(1).unwrap() = 4; 718 | 719 | assert_eq!(b.get(0).unwrap(), &3); 720 | assert_eq!(b.get(1).unwrap(), &4); 721 | } 722 | 723 | test_get_relative_mut(AllocRingBuffer::new(8)); 724 | test_get_relative_mut(GrowableAllocRingBuffer::with_capacity(8)); 725 | test_get_relative_mut(ConstGenericRingBuffer::::new()); 726 | } 727 | 728 | #[test] 729 | fn run_test_wrapping_get_relative_mut() { 730 | fn test_wrapping_get_relative_mut(mut b: impl RingBuffer) { 731 | let _ = b.enqueue(0); 732 | let _ = b.enqueue(1); 733 | let _ = b.enqueue(2); 734 | 735 | *b.get_mut(0).unwrap() = 3; 736 | 737 | // [0, ...] 
738 | // ^ 739 | // [0, 1] 740 | // ^ 741 | // [2, 1] 742 | // ^ 743 | // get(0) == b[index] = 1 744 | // get(1) == b[(index+1) % len] = 1 745 | assert_eq!(b.get(0).unwrap(), &3); 746 | assert_eq!(b.get(1).unwrap(), &2); 747 | } 748 | 749 | test_wrapping_get_relative_mut(AllocRingBuffer::new(2)); 750 | test_wrapping_get_relative_mut(ConstGenericRingBuffer::::new()); 751 | 752 | // the growable ringbuffer actually shouldn't wrap 753 | let mut b = GrowableAllocRingBuffer::with_capacity(2); 754 | 755 | let _ = b.enqueue(0); 756 | let _ = b.enqueue(1); 757 | let _ = b.enqueue(2); 758 | 759 | *b.get_mut(0).unwrap() = 3; 760 | 761 | assert_eq!(b.get(0).unwrap(), &3); 762 | assert_eq!(b.get(1).unwrap(), &1); 763 | assert_eq!(b.get(2).unwrap(), &2); 764 | } 765 | 766 | #[test] 767 | fn run_test_get_relative_mut_zero_length() { 768 | fn test_get_relative_mut_zero_length(mut b: impl RingBuffer) { 769 | assert!(b.get_mut(1).is_none()); 770 | } 771 | 772 | test_get_relative_mut_zero_length(AllocRingBuffer::new(8)); 773 | test_get_relative_mut_zero_length(GrowableAllocRingBuffer::with_capacity(8)); 774 | test_get_relative_mut_zero_length(ConstGenericRingBuffer::::new()); 775 | } 776 | 777 | #[test] 778 | fn run_test_from_iterator() { 779 | fn test_from_iterator + FromIterator>() { 780 | let b: T = std::iter::repeat(1).take(1024).collect(); 781 | assert_eq!(b.len(), 1024); 782 | assert_eq!(b.to_vec(), vec![1; 1024]); 783 | } 784 | 785 | test_from_iterator::>(); 786 | test_from_iterator::>(); 787 | } 788 | 789 | #[test] 790 | fn run_test_from_iterator_wrap() { 791 | fn test_from_iterator_wrap + FromIterator>() { 792 | let b: T = std::iter::repeat(1).take(8000).collect(); 793 | assert_eq!(b.len(), b.capacity()); 794 | assert_eq!(b.to_vec(), vec![1; b.capacity()]); 795 | } 796 | 797 | test_from_iterator_wrap::>(); 798 | test_from_iterator_wrap::>(); 799 | } 800 | 801 | #[test] 802 | fn run_test_get_relative_negative() { 803 | fn test_get_relative_negative(mut b: impl RingBuffer) { 804 | let _ = b.enqueue(0); 805 | let _ = b.enqueue(1); 806 | 807 | // [0, ...] 808 | // ^ 809 | // [0, 1, ...] 
810 | // ^ 811 | // get[(index + -1) % len] = 1 812 | // get[(index + -2) % len] = 0 (wrap to 1 because len == 2) 813 | assert_eq!(b.get_signed(-1).unwrap(), &1); 814 | assert_eq!(b.get_signed(-2).unwrap(), &0); 815 | 816 | // Wraps around 817 | assert_eq!(b.get_signed(-3).unwrap(), &1); 818 | assert_eq!(b.get_signed(-4).unwrap(), &0); 819 | } 820 | 821 | test_get_relative_negative(AllocRingBuffer::new(8)); 822 | test_get_relative_negative(ConstGenericRingBuffer::::new()); 823 | } 824 | 825 | #[test] 826 | fn run_test_contains() { 827 | fn test_contains(mut b: impl RingBuffer) { 828 | let _ = b.enqueue(1); 829 | let _ = b.enqueue(2); 830 | 831 | assert!(b.contains(&1)); 832 | assert!(b.contains(&2)); 833 | } 834 | 835 | test_contains(AllocRingBuffer::new(8)); 836 | test_contains(GrowableAllocRingBuffer::with_capacity(8)); 837 | test_contains(ConstGenericRingBuffer::::new()); 838 | } 839 | 840 | #[test] 841 | fn run_test_is_full() { 842 | fn test_is_full(mut b: impl RingBuffer) { 843 | assert!(!b.is_full()); 844 | let _ = b.enqueue(1); 845 | assert!(!b.is_full()); 846 | let _ = b.enqueue(2); 847 | assert!(b.is_full()); 848 | } 849 | 850 | test_is_full(AllocRingBuffer::new(2)); 851 | test_is_full(GrowableAllocRingBuffer::with_capacity(2)); 852 | test_is_full(ConstGenericRingBuffer::::new()); 853 | } 854 | 855 | #[test] 856 | fn run_test_front_some() { 857 | fn test_front_some(mut b: impl RingBuffer) { 858 | let _ = b.enqueue(1); 859 | let _ = b.enqueue(2); 860 | 861 | assert_eq!(b.front(), Some(&1)); 862 | } 863 | 864 | test_front_some(AllocRingBuffer::new(2)); 865 | test_front_some(GrowableAllocRingBuffer::with_capacity(2)); 866 | test_front_some(ConstGenericRingBuffer::::new()); 867 | } 868 | 869 | #[test] 870 | fn run_test_front_none() { 871 | fn test_front_none(b: impl RingBuffer) { 872 | assert_eq!(b.front(), None); 873 | } 874 | 875 | test_front_none(AllocRingBuffer::new(8)); 876 | test_front_none(GrowableAllocRingBuffer::with_capacity(8)); 877 | test_front_none(ConstGenericRingBuffer::::new()); 878 | } 879 | 880 | #[test] 881 | fn run_test_back_some() { 882 | fn test_back_some(mut b: impl RingBuffer) { 883 | let _ = b.enqueue(1); 884 | let _ = b.enqueue(2); 885 | 886 | assert_eq!(b.back(), Some(&2)); 887 | } 888 | 889 | test_back_some(AllocRingBuffer::new(2)); 890 | test_back_some(GrowableAllocRingBuffer::with_capacity(2)); 891 | test_back_some(ConstGenericRingBuffer::::new()); 892 | } 893 | 894 | #[test] 895 | fn run_test_back_none() { 896 | fn test_back_none(b: impl RingBuffer) { 897 | assert_eq!(b.back(), None); 898 | } 899 | 900 | test_back_none(AllocRingBuffer::new(8)); 901 | test_back_none(GrowableAllocRingBuffer::with_capacity(8)); 902 | test_back_none(ConstGenericRingBuffer::::new()); 903 | } 904 | 905 | #[test] 906 | fn run_test_front_some_mut() { 907 | fn test_front_some_mut(mut b: impl RingBuffer) { 908 | let _ = b.enqueue(1); 909 | let _ = b.enqueue(2); 910 | 911 | assert_eq!(b.front_mut(), Some(&mut 1)); 912 | } 913 | 914 | test_front_some_mut(AllocRingBuffer::new(2)); 915 | test_front_some_mut(GrowableAllocRingBuffer::with_capacity(2)); 916 | test_front_some_mut(ConstGenericRingBuffer::::new()); 917 | } 918 | 919 | #[test] 920 | fn run_test_front_none_mut() { 921 | fn test_front_none_mut(mut b: impl RingBuffer) { 922 | assert_eq!(b.front_mut(), None); 923 | } 924 | 925 | test_front_none_mut(AllocRingBuffer::new(8)); 926 | test_front_none_mut(GrowableAllocRingBuffer::with_capacity(8)); 927 | test_front_none_mut(ConstGenericRingBuffer::::new()); 928 | } 929 | 930 | #[test] 
    #[test]
    fn run_test_back_some_mut() {
        fn test_back_some_mut(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.back_mut(), Some(&mut 2));
        }

        test_back_some_mut(AllocRingBuffer::new(2));
        test_back_some_mut(GrowableAllocRingBuffer::with_capacity(2));
        test_back_some_mut(ConstGenericRingBuffer::<i32, 2>::new());
    }

    #[test]
    fn run_test_back_none_mut() {
        fn test_back_none_mut(mut b: impl RingBuffer<i32>) {
            assert_eq!(b.back_mut(), None);
        }

        test_back_none_mut(AllocRingBuffer::new(8));
        test_back_none_mut(GrowableAllocRingBuffer::with_capacity(8));
        test_back_none_mut(ConstGenericRingBuffer::<i32, 8>::new());
    }

    #[test]
    fn run_test_dequeue() {
        fn run_test_dequeue(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(0);
            let _ = b.enqueue(1);

            assert_eq!(b.len(), 2);

            assert_eq!(b.dequeue(), Some(0));
            assert_eq!(b.dequeue(), Some(1));

            assert_eq!(b.len(), 0);

            assert_eq!(b.dequeue(), None);
        }

        run_test_dequeue(AllocRingBuffer::new(8));
        run_test_dequeue(GrowableAllocRingBuffer::with_capacity(8));
        run_test_dequeue(ConstGenericRingBuffer::<i32, 8>::new());
    }

    #[test]
    fn run_test_skip() {
        #[allow(deprecated)]
        fn test_skip(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(0);
            let _ = b.enqueue(1);

            assert_eq!(b.len(), 2);

            b.skip();
            b.skip();

            assert_eq!(b.len(), 0);
        }

        test_skip(AllocRingBuffer::new(8));
        test_skip(GrowableAllocRingBuffer::with_capacity(8));
        test_skip(ConstGenericRingBuffer::<i32, 8>::new());
    }

    #[test]
    fn run_test_skip_2() {
        #[allow(deprecated)]
        fn test_skip2(mut rb: impl RingBuffer<i32>) {
            rb.skip();
            rb.skip();
            rb.skip();
            let _ = rb.enqueue(1);
            assert_eq!(rb.dequeue(), Some(1));
            assert_eq!(rb.dequeue(), None);
            rb.skip();
            assert_eq!(rb.dequeue(), None);
        }

        test_skip2(AllocRingBuffer::new(2));
        test_skip2(GrowableAllocRingBuffer::with_capacity(2));
        test_skip2(ConstGenericRingBuffer::<i32, 2>::new());
    }

    #[test]
    #[allow(deprecated)]
    fn run_test_push_pop() {
        fn test_push_pop(mut b: impl RingBuffer<i32>) {
            b.push(0);
            b.push(1);

            assert_eq!(b.dequeue(), Some(0));
            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), None);

            b.push(0);
            b.push(1);

            assert_eq!(b.dequeue(), Some(0));
            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), None);
        }

        test_push_pop(AllocRingBuffer::new(8));
        test_push_pop(GrowableAllocRingBuffer::with_capacity(8));
        test_push_pop(ConstGenericRingBuffer::<i32, 8>::new());
    }

    #[test]
    fn run_test_enqueue_dequeue_enqueue() {
        fn test_enqueue_dequeue_enqueue(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(0);
            let _ = b.enqueue(1);

            assert_eq!(b.dequeue(), Some(0));
            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), None);

            let _ = b.enqueue(0);
            let _ = b.enqueue(1);

            assert_eq!(b.dequeue(), Some(0));
            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), None);
        }

        test_enqueue_dequeue_enqueue(AllocRingBuffer::new(8));
        test_enqueue_dequeue_enqueue(GrowableAllocRingBuffer::with_capacity(8));
        test_enqueue_dequeue_enqueue(ConstGenericRingBuffer::<i32, 8>::new());
    }
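    // `get_signed` indexes from the back when given a negative value: -1 is the
    // most recently enqueued element, and indices past -len wrap around modulo
    // the current length, as the assertions below demonstrate.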
    #[test]
    fn large_negative_index() {
        fn test_large_negative_index(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);
            assert_eq!(b.get_signed(1), Some(&2));
            assert_eq!(b.get_signed(0), Some(&1));
            assert_eq!(b.get_signed(-1), Some(&2));
            assert_eq!(b.get_signed(-2), Some(&1));
            assert_eq!(b.get_signed(-3), Some(&2));
        }

        test_large_negative_index(AllocRingBuffer::new(2));
        test_large_negative_index(ConstGenericRingBuffer::<i32, 2>::new());
        test_large_negative_index(GrowableAllocRingBuffer::<i32>::new());
    }

    #[test]
    fn large_negative_index_mut() {
        fn test_large_negative_index(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);
            assert_eq!(b.get_mut_signed(1), Some(&mut 2));
            assert_eq!(b.get_mut_signed(0), Some(&mut 1));
            assert_eq!(b.get_mut_signed(-1), Some(&mut 2));
            assert_eq!(b.get_mut_signed(-2), Some(&mut 1));
            assert_eq!(b.get_mut_signed(-3), Some(&mut 2));
        }

        test_large_negative_index(AllocRingBuffer::new(2));
        test_large_negative_index(ConstGenericRingBuffer::<i32, 2>::new());
        test_large_negative_index(GrowableAllocRingBuffer::<i32>::new());
    }
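    // For the fixed-capacity buffers, enqueueing a third element into a full
    // capacity-2 buffer overwrites the oldest value, so only 1 and 2 remain;
    // the growable buffer grows instead and keeps all three elements.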
    #[test]
    fn run_test_enqueue_dequeue_enqueue_full() {
        fn test_enqueue_dequeue_enqueue_full(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(0);
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), Some(2));
            assert_eq!(b.dequeue(), None);

            let _ = b.enqueue(0);
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), Some(2));
            assert_eq!(b.dequeue(), None);
        }

        test_enqueue_dequeue_enqueue_full(AllocRingBuffer::new(2));
        test_enqueue_dequeue_enqueue_full(ConstGenericRingBuffer::<i32, 2>::new());

        // the growable ringbuffer should actually keep growing and dequeue all items
        let mut b = GrowableAllocRingBuffer::with_capacity(2);
        let _ = b.enqueue(0);
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);

        assert_eq!(b.dequeue(), Some(0));
        assert_eq!(b.dequeue(), Some(1));
        assert_eq!(b.dequeue(), Some(2));
        assert_eq!(b.dequeue(), None);

        let _ = b.enqueue(0);
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);

        assert_eq!(b.dequeue(), Some(0));
        assert_eq!(b.dequeue(), Some(1));
        assert_eq!(b.dequeue(), Some(2));
        assert_eq!(b.dequeue(), None);
    }

    #[test]
    fn run_test_enqueue_dequeue_enqueue_full_get() {
        fn test_enqueue_dequeue_enqueue_full_get(mut b: impl RingBuffer<i32>) {
            let _ = b.enqueue(0);
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), Some(2));
            assert_eq!(b.dequeue(), None);

            let _ = b.enqueue(0);
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.dequeue(), Some(1));
            assert_eq!(b.dequeue(), Some(2));
            assert_eq!(b.dequeue(), None);

            let _ = b.enqueue(0);
            let _ = b.enqueue(1);
            let _ = b.enqueue(2);

            assert_eq!(b.get_signed(-1), Some(&2));
            assert_eq!(b.get_signed(-2), Some(&1));
            assert_eq!(b.get_signed(-3), Some(&2));
        }

        test_enqueue_dequeue_enqueue_full_get(AllocRingBuffer::new(2));
        test_enqueue_dequeue_enqueue_full_get(ConstGenericRingBuffer::<i32, 2>::new());

        // the growable ringbuffer should actually keep growing and dequeue all items
        let mut b = GrowableAllocRingBuffer::with_capacity(2);

        let _ = b.enqueue(0);
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);

        assert_eq!(b.dequeue(), Some(0));
        assert_eq!(b.dequeue(), Some(1));
        assert_eq!(b.dequeue(), Some(2));
        assert_eq!(b.dequeue(), None);

        let _ = b.enqueue(0);
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);

        assert_eq!(b.dequeue(), Some(0));
        assert_eq!(b.dequeue(), Some(1));
        assert_eq!(b.dequeue(), Some(2));
        assert_eq!(b.dequeue(), None);

        let _ = b.enqueue(0);
        let _ = b.enqueue(1);
        let _ = b.enqueue(2);

        assert_eq!(b.get_signed(-1), Some(&2));
        assert_eq!(b.get_signed(-2), Some(&1));
        assert_eq!(b.get_signed(-3), Some(&0));
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    // this test takes far too long with Miri enabled
    fn run_test_enqueue_dequeue_enqueue_full_get_rep() {
        fn test_enqueue_dequeue_enqueue_full_get_rep(mut rb: impl RingBuffer<i32>) {
            for _ in 0..100_000 {
                let _ = rb.enqueue(1);
                let _ = rb.enqueue(2);

                assert_eq!(rb.dequeue(), Some(1));
                assert_eq!(rb.dequeue(), Some(2));

                let _ = rb.enqueue(1);
                let _ = rb.enqueue(2);

                assert_eq!(rb.dequeue(), Some(1));
                assert_eq!(rb.dequeue(), Some(2));

                let _ = rb.enqueue(1);
                let _ = rb.enqueue(2);

                assert_eq!(rb.get_signed(-1), Some(&2));
                assert_eq!(rb.get_signed(-2), Some(&1));
            }
        }

        test_enqueue_dequeue_enqueue_full_get_rep(AllocRingBuffer::new(8));
        test_enqueue_dequeue_enqueue_full_get_rep(GrowableAllocRingBuffer::with_capacity(8));
        test_enqueue_dequeue_enqueue_full_get_rep(ConstGenericRingBuffer::<i32, 8>::new());
    }

    #[test]
    fn run_test_clone() {
        fn test_clone(mut rb: impl RingBuffer<i32> + Clone + Eq + Debug) {
            let _ = rb.enqueue(42);
            let _ = rb.enqueue(32);
            let _ = rb.enqueue(22);

            let mut other = rb.clone();

            assert_eq!(rb, other);

            let _ = rb.enqueue(11);
            let _ = rb.enqueue(12);
            let _ = other.enqueue(11);
            let _ = other.enqueue(12);

            assert_eq!(rb, other);
        }

        test_clone(AllocRingBuffer::new(4));
        test_clone(GrowableAllocRingBuffer::with_capacity(4));
        test_clone(ConstGenericRingBuffer::<i32, 4>::new());
    }

    #[test]
    fn run_test_default_fill() {
        fn test_default_fill(mut rb: impl RingBuffer<i32>) {
            for i in 0..rb.capacity() {
                for _ in 0..i {
                    let _ = rb.enqueue(1);
                }

                assert_eq!(rb.len(), i);
                rb.fill_default();
                assert_eq!(rb.len(), 4);

                // 4x
                assert_eq!(rb.dequeue(), Some(0));
                assert_eq!(rb.dequeue(), Some(0));
                assert_eq!(rb.dequeue(), Some(0));
                assert_eq!(rb.dequeue(), Some(0));
            }
        }

        test_default_fill(AllocRingBuffer::new(4));
        test_default_fill(GrowableAllocRingBuffer::with_capacity(4));
        test_default_fill(ConstGenericRingBuffer::<i32, 4>::new());
    }

    #[test]
    fn run_test_eq() {
        let mut alloc_a = ConstGenericRingBuffer::<i32, 4>::new();
        let mut alloc_b = ConstGenericRingBuffer::<i32, 4>::new();

        assert!(alloc_a.eq(&alloc_b));
        let _ = alloc_a.enqueue(1);
        assert!(!alloc_b.eq(&alloc_a));
        let _ = alloc_b.enqueue(1);
        assert!(alloc_a.eq(&alloc_b));
        let _ = alloc_a.enqueue(4);
        let _ = alloc_b.enqueue(2);
        assert!(!alloc_b.eq(&alloc_a));
    }
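    // The iterators are double-ended: `next` yields elements from the front
    // (oldest) towards the back, `next_back` walks from the back, and the two
    // cursors meet in the middle.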
    #[test]
    fn run_next_back_test() {
        fn next_back_test(mut rb: impl RingBuffer<i32>) {
            for i in 1..=4 {
                let _ = rb.enqueue(i);
            }

            let mut it = rb.iter();
            assert_eq!(Some(&4), it.next_back());
            assert_eq!(Some(&3), it.next_back());
            assert_eq!(Some(&1), it.next());
            assert_eq!(Some(&2), it.next_back());
            assert_eq!(None, it.next_back());
        }

        next_back_test(ConstGenericRingBuffer::<i32, 8>::new());
        next_back_test(AllocRingBuffer::new(8));
        next_back_test(GrowableAllocRingBuffer::with_capacity(8));
    }

    #[test]
    fn run_next_back_test_mut() {
        fn next_back_test_mut(mut rb: impl RingBuffer<i32>) {
            for i in 1..=4 {
                let _ = rb.enqueue(i);
            }

            let mut it = rb.iter_mut();
            assert_eq!(Some(&mut 4), it.next_back());
            assert_eq!(Some(&mut 3), it.next_back());
            assert_eq!(Some(&mut 1), it.next());
            assert_eq!(Some(&mut 2), it.next_back());
            assert_eq!(None, it.next_back());
        }

        next_back_test_mut(ConstGenericRingBuffer::<i32, 8>::new());
        next_back_test_mut(AllocRingBuffer::new(8));
        next_back_test_mut(GrowableAllocRingBuffer::with_capacity(8));
    }

    #[test]
    fn test_fill() {
        let mut b = AllocRingBuffer::from([vec![1], vec![2]]);
        b.fill(vec![2]);
        assert_eq!(b.dequeue(), Some(vec![2]));
        assert_eq!(b.dequeue(), Some(vec![2]));
    }

    #[test]
    fn run_test_fill() {
        fn test_fill(mut rb: impl RingBuffer<i32>) {
            for i in 0..rb.capacity() {
                for _ in 0..i {
                    let _ = rb.enqueue(1);
                }

                assert_eq!(rb.len(), i);
                rb.fill(3);
                assert_eq!(rb.len(), 4);

                // 4x
                assert_eq!(rb.dequeue(), Some(3));
                assert_eq!(rb.dequeue(), Some(3));
                assert_eq!(rb.dequeue(), Some(3));
                assert_eq!(rb.dequeue(), Some(3));
            }
        }

        test_fill(AllocRingBuffer::new(4));
        test_fill(GrowableAllocRingBuffer::with_capacity(4));
        test_fill(ConstGenericRingBuffer::<i32, 4>::new());
    }
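    // The tests below verify that dropping (or overwriting) buffer contents
    // actually runs `Drop`: a `Dropee` holds a `RefMut` into a heap-allocated
    // flag which its `Drop` impl sets, and the flag is checked once the buffer
    // has gone out of scope.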
    mod test_dropping {
        use super::*;
        use std::boxed::Box;
        use std::cell::{RefCell, RefMut};

        struct DropTest {
            flag: bool,
        }

        struct Dropee<'a> {
            parent: Option<RefMut<'a, DropTest>>,
        }

        impl<'a> Drop for Dropee<'a> {
            fn drop(&mut self) {
                if let Some(parent) = &mut self.parent {
                    parent.flag = true;
                }
            }
        }

        macro_rules! test_dropped {
            ($constructor: block) => {{
                let dt = Box::into_raw(Box::new(RefCell::new(DropTest { flag: false })));
                {
                    let d = Dropee {
                        // Safety:
                        // We know the pointer is initialized as it was created just above.
                        // Also no other mutable borrow can exist at this time
                        parent: Some(unsafe { dt.as_ref() }.unwrap().borrow_mut()),
                    };
                    let mut rb = { $constructor };
                    let _ = rb.enqueue(d);
                    let _ = rb.enqueue(Dropee { parent: None });
                }
                {
                    // Safety:
                    // We know the pointer exists and is no longer borrowed as the block above limited it
                    assert!(unsafe { dt.as_ref() }.unwrap().borrow().flag);
                }
                // Safety:
                // No other references exist to the box so we can safely drop it
                unsafe {
                    drop(Box::from_raw(dt));
                }
            }};
        }

        #[test]
        fn run_test_drops_contents_alloc() {
            test_dropped!({ AllocRingBuffer::new(1) });
        }

        #[test]
        fn run_test_drops_contents_const_generic() {
            test_dropped!({ ConstGenericRingBuffer::<_, 1>::new() });
        }

        #[test]
        fn run_test_drops_contents_growable_alloc() {
            test_dropped!({ GrowableAllocRingBuffer::with_capacity(1) });
        }
    }

    #[test]
    fn test_clone() {
        macro_rules! test_clone {
            ($e: expr) => {
                let mut e1 = $e;
                let _ = e1.enqueue(1);
                let _ = e1.enqueue(2);

                let mut e2 = e1.clone();

                let _ = e2.enqueue(11);
                let _ = e2.enqueue(12);

                assert_eq!(e1.to_vec(), vec![1, 2]);
                assert_eq!(e2.to_vec(), vec![1, 2, 11, 12]);
            };
        }

        test_clone!(ConstGenericRingBuffer::<_, 4>::new());
        test_clone!(GrowableAllocRingBuffer::<_>::new());
        test_clone!(AllocRingBuffer::<_>::new(4));
    }

    #[test]
    fn iter_nth_override() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let rb = $rb_init([1, 2, 3, 4]);
                assert_eq!(rb.iter().nth(0), Some(&1));
                assert_eq!(rb.iter().nth(1), Some(&2));
                assert_eq!(rb.iter().nth(2), Some(&3));
                assert_eq!(rb.iter().nth(3), Some(&4));
                assert_eq!(rb.iter().nth(4), None);

                let mut rb = $rb_init([1, 2, 3, 4]);
                assert_eq!(rb.iter_mut().nth(0), Some(&mut 1));
                assert_eq!(rb.iter_mut().nth(1), Some(&mut 2));
                assert_eq!(rb.iter_mut().nth(2), Some(&mut 3));
                assert_eq!(rb.iter_mut().nth(3), Some(&mut 4));
                assert_eq!(rb.iter_mut().nth(4), None);

                let rb = $rb_init([1, 2, 3, 4]);
                assert_eq!(rb.clone().into_iter().nth(0), Some(1));
                assert_eq!(rb.clone().into_iter().nth(1), Some(2));
                assert_eq!(rb.clone().into_iter().nth(2), Some(3));
                assert_eq!(rb.clone().into_iter().nth(3), Some(4));
                assert_eq!(rb.clone().into_iter().nth(4), None);
            };
        }

        test_concrete!(|values: [i32; 4]| ConstGenericRingBuffer::<_, 4>::from(values));
        test_concrete!(|values: [i32; 4]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 4]| AllocRingBuffer::<_>::from(values));
    }

    #[test]
    fn iter_nth_back_override() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let rb = $rb_init([1, 2, 3, 4]);
                assert_eq!(rb.iter().nth_back(0), Some(&4));
                assert_eq!(rb.iter().nth_back(1), Some(&3));
                assert_eq!(rb.iter().nth_back(2), Some(&2));
                assert_eq!(rb.iter().nth_back(3), Some(&1));
                assert_eq!(rb.iter().nth_back(4), None);

                let mut rb = $rb_init([1, 2, 3, 4]);
                assert_eq!(rb.iter_mut().nth_back(0), Some(&mut 4));
                assert_eq!(rb.iter_mut().nth_back(1), Some(&mut 3));
                assert_eq!(rb.iter_mut().nth_back(2), Some(&mut 2));
                assert_eq!(rb.iter_mut().nth_back(3), Some(&mut 1));
                assert_eq!(rb.iter_mut().nth_back(4), None);
            };
        }

        test_concrete!(|values: [i32; 4]| ConstGenericRingBuffer::<_, 4>::from(values));
        test_concrete!(|values: [i32; 4]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 4]| AllocRingBuffer::<_>::from(values));
    }
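    // The copy_{from,to}_slice tests run one macro against all three buffer
    // types. The recurring `enqueue(0)` followed by a conditional `dequeue`
    // advances the read/write pointers by exactly one element for every kind
    // of buffer: the growable buffer grows on enqueue (so the surplus element
    // is dequeued again), while the fixed-size buffers overwrite in place.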
    #[test]
    fn test_copy_from_slice_power_of_two() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                // same-sized slice
                let mut rb = $rb_init([1, 2, 3, 4]);
                rb.copy_from_slice(0, &[5, 6, 7, 8]);
                assert_eq!(rb.to_vec(), alloc::vec![5, 6, 7, 8]);

                // same-sized slice after a push
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                rb.copy_from_slice(0, &[5, 6, 7, 8]);
                assert_eq!(rb.to_vec(), alloc::vec![5, 6, 7, 8]);

                // same-sized slice after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                for _ in 0..rb.len() {
                    let _ = rb.enqueue(0);
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                rb.copy_from_slice(0, &[5, 6, 7, 8]);
                assert_eq!(rb.to_vec(), alloc::vec![5, 6, 7, 8]);

                // from offset
                let mut rb = $rb_init([1, 2, 3, 4]);
                rb.copy_from_slice(2, &[5, 6]);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 5, 6]);

                // from offset after a push
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                rb.copy_from_slice(2, &[5, 6]);
                assert_eq!(rb.to_vec(), alloc::vec![2, 3, 5, 6]);

                // from offset after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                for _ in 0..rb.len() {
                    let _ = rb.enqueue(0);
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                rb.copy_from_slice(2, &[5, 6]);
                assert_eq!(rb.to_vec(), alloc::vec![0, 0, 5, 6]);
            };
        }

        test_concrete!(|values: [i32; 4]| ConstGenericRingBuffer::<_, 4>::from(values));
        test_concrete!(|values: [i32; 4]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 4]| AllocRingBuffer::<_>::from(values));
    }
    #[test]
    fn test_copy_from_slice_capacity_smaller_than_size() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                // same-sized slice
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                rb.copy_from_slice(0, &[8, 9, 10, 11, 12, 13, 14]);
                assert_eq!(rb.to_vec(), alloc::vec![8, 9, 10, 11, 12, 13, 14]);

                // same-sized slice after a push
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                rb.copy_from_slice(0, &[8, 9, 10, 11, 12, 13, 14]);
                assert_eq!(rb.to_vec(), alloc::vec![8, 9, 10, 11, 12, 13, 14]);

                // same-sized slice after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                for _ in 0..rb.len() {
                    let _ = rb.enqueue(0);
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                rb.copy_from_slice(0, &[8, 9, 10, 11, 12, 13, 14]);
                assert_eq!(rb.to_vec(), alloc::vec![8, 9, 10, 11, 12, 13, 14]);

                // from offset
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                rb.copy_from_slice(2, &[8, 9, 10, 11, 12]);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 8, 9, 10, 11, 12]);

                // from offset after a push
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                rb.copy_from_slice(2, &[8, 9, 10, 11, 12]);
                assert_eq!(rb.to_vec(), alloc::vec![2, 3, 8, 9, 10, 11, 12]);

                // from offset after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                for _ in 0..rb.len() {
                    let _ = rb.enqueue(0);
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                rb.copy_from_slice(2, &[8, 9, 10, 11, 12]);
                assert_eq!(rb.to_vec(), alloc::vec![0, 0, 8, 9, 10, 11, 12]);
            };
        }

        test_concrete!(|values: [i32; 7]| ConstGenericRingBuffer::<_, 7>::from(values));
        test_concrete!(|values: [i32; 7]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 7]| AllocRingBuffer::<_>::from(values));
    }
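    // `copy_from_slice` writes into the buffer's current contents: the offset
    // is relative to the logical front and the length of the buffer is never
    // changed, so a partially filled buffer stays partially filled.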
    #[test]
    fn test_copy_from_slice_non_full_rb() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init(&[3, 2, 1]);
                assert_eq!(rb.capacity(), 7);
                // we have some space left
                assert!(rb.len() < rb.capacity());

                // copy preserves length
                rb.copy_from_slice(0, &[1, 2, 3]);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3]);

                let _ = rb.enqueue(4);
                let _ = rb.enqueue(5);
                let _ = rb.enqueue(6);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4, 5, 6]);

                // still preserving length
                rb.copy_from_slice(0, &[6, 5, 4, 3, 2, 1]);
                assert_eq!(rb.to_vec(), alloc::vec![6, 5, 4, 3, 2, 1]);

                // making sure the read/write ptrs have traversed the ring
                for i in 0..6 {
                    let _ = rb.enqueue(i + 1);
                    let _ = rb.dequeue();
                }

                // sanity check
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4, 5, 6]);
                // copy from offset
                rb.copy_from_slice(3, &[3, 2, 1]);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 3, 2, 1]);
                // copy again
                rb.copy_from_slice(0, &[6, 5, 4, 1, 2, 3]);
                assert_eq!(rb.to_vec(), alloc::vec![6, 5, 4, 1, 2, 3]);
            };
        }

        test_concrete!(|values: &[i32]| {
            let mut rb = ConstGenericRingBuffer::<_, 7>::new();
            rb.extend(values.iter().copied());
            rb
        });
        test_concrete!(|values: &[i32]| {
            let mut rb = GrowableAllocRingBuffer::<_>::with_capacity(7);
            rb.extend(values.iter().copied());
            rb
        });
        test_concrete!(|values: &[i32]| {
            let mut rb = AllocRingBuffer::<_>::new(7);
            rb.extend(values.iter().copied());
            rb
        });
    }

    #[test]
    fn test_copy_from_slice_empty() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init();
                rb.copy_from_slice(0, &[0; 0]);
                assert_eq!(rb.to_vec(), alloc::vec![]);
            };
        }

        test_concrete!(ConstGenericRingBuffer::<i32, 1>::new);
        test_concrete!(|| GrowableAllocRingBuffer::<i32>::with_capacity(1));
        test_concrete!(|| AllocRingBuffer::<i32>::new(1));
    }
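    // `copy_to_slice` is the read-side counterpart: it copies the logical
    // contents, front to back, starting at the given offset into the target
    // slice, leaving the buffer itself untouched.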
    #[test]
    fn test_copy_to_slice_power_of_two() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                // same-sized slice
                let rb = $rb_init([1, 2, 3, 4]);
                let mut slice = [0; 4];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4]);

                // same-sized slice after a push
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                let mut slice = [0; 4];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[2, 3, 4, 0]);

                // same-sized slice after a roundtrip
                let mut rb = $rb_init([4, 3, 2, 1]);
                let initial_len = rb.len();
                for i in 0..rb.len() {
                    let _ = rb.enqueue((i + 1).try_into().unwrap());
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                let mut slice = [0; 4];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4]);

                // from offset
                let rb = $rb_init([1, 2, 3, 4]);
                let mut slice = [0; 2];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[3, 4]);

                // from offset after a push
                let mut rb = $rb_init([1, 2, 3, 4]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                let mut slice = [0; 2];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[4, 0]);

                // from offset after a roundtrip
                let mut rb = $rb_init([4, 3, 2, 1]);
                let initial_len = rb.len();
                for i in 0..rb.len() {
                    let _ = rb.enqueue((i + 1).try_into().unwrap());
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                let mut slice = [0; 2];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[3, 4]);
            };
        }

        test_concrete!(|values: [i32; 4]| ConstGenericRingBuffer::<_, 4>::from(values));
        test_concrete!(|values: [i32; 4]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 4]| AllocRingBuffer::<_>::from(values));
    }
    #[test]
    fn test_copy_to_slice_capacity_smaller_than_size() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                // same-sized slice
                let rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let mut slice = [0; 7];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4, 5, 6, 7]);

                // same-sized slice after a push
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                let mut slice = [0; 7];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[2, 3, 4, 5, 6, 7, 0]);

                // same-sized slice after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                for i in 0..rb.len() {
                    let _ = rb.enqueue((i + 1).try_into().unwrap());
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                let mut slice = [0; 7];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4, 5, 6, 7]);

                // from offset
                let rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let mut slice = [0; 5];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[3, 4, 5, 6, 7]);

                // from offset after a push
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                let _ = rb.enqueue(0);
                if rb.len() > initial_len {
                    let _ = rb.dequeue();
                }
                let mut slice = [0; 5];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[4, 5, 6, 7, 0]);

                // from offset after a roundtrip
                let mut rb = $rb_init([1, 2, 3, 4, 5, 6, 7]);
                let initial_len = rb.len();
                for i in 0..rb.len() {
                    let _ = rb.enqueue((i + 1).try_into().unwrap());
                    if rb.len() > initial_len {
                        let _ = rb.dequeue();
                    }
                }
                let mut slice = [0; 5];
                rb.copy_to_slice(2, &mut slice);
                assert_eq!(slice.as_slice(), &[3, 4, 5, 6, 7]);
            };
        }

        test_concrete!(|values: [i32; 7]| ConstGenericRingBuffer::<_, 7>::from(values));
        test_concrete!(|values: [i32; 7]| GrowableAllocRingBuffer::<_>::from(values));
        test_concrete!(|values: [i32; 7]| AllocRingBuffer::<_>::from(values));
    }

    #[test]
    fn test_copy_to_slice_non_full_rb() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init(&[1, 2, 3]);
                assert_eq!(rb.capacity(), 7);
                // we have some space left
                assert!(rb.len() < rb.capacity());

                // copy based on length
                let mut slice = [0; 3];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3]);

                let _ = rb.enqueue(4);
                let _ = rb.enqueue(5);
                let _ = rb.enqueue(6);
                // still based on length
                let mut slice = [0; 6];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4, 5, 6]);

                // making sure the read/write ptrs have traversed the ring
                for i in 0..6 {
                    let _ = rb.enqueue(i + 1);
                    let _ = rb.dequeue();
                }

                // sanity check
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4, 5, 6]);
                // copy again
                let mut slice = [0; 6];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[1, 2, 3, 4, 5, 6]);
            };
        }

        test_concrete!(|values: &[i32]| {
            let mut rb = ConstGenericRingBuffer::<_, 7>::new();
            rb.extend(values.iter().copied());
            rb
        });
        test_concrete!(|values: &[i32]| {
            let mut rb = GrowableAllocRingBuffer::<_>::with_capacity(7);
            rb.extend(values.iter().copied());
            rb
        });
        test_concrete!(|values: &[i32]| {
            let mut rb = AllocRingBuffer::<_>::new(7);
            rb.extend(values.iter().copied());
            rb
        });
    }

    #[test]
    fn test_copy_to_slice_empty() {
        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let rb = $rb_init();
                let mut slice = [];
                rb.copy_to_slice(0, &mut slice);
                assert_eq!(slice.as_slice(), &[0; 0]);
            };
        }

        test_concrete!(ConstGenericRingBuffer::<i32, 1>::new);
        test_concrete!(|| GrowableAllocRingBuffer::<i32>::with_capacity(1));
        test_concrete!(|| AllocRingBuffer::<i32>::new(1));
    }
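    // The remaining tests exercise `SetLen::set_len`: shrinking an 8-element
    // buffer to 4 hides the tail without changing the capacity, and growing
    // back to 8 exposes the original elements again.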
    #[test]
    fn test_set_len_primitive() {
        use crate::SetLen;

        let values = [1, 2, 3, 4, 5, 6, 7, 8];

        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init();
                let initial_capacity = rb.capacity();
                unsafe { rb.set_len(4) };
                assert_eq!(rb.capacity(), initial_capacity);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4]);
                unsafe { rb.set_len(8) };
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4, 5, 6, 7, 8]);
            };
        }

        test_concrete!(|| ConstGenericRingBuffer::<i32, 8>::from(values));
        test_concrete!(|| AllocRingBuffer::<i32>::from(values));
    }

    #[test]
    fn test_set_len_leak() {
        use crate::SetLen;

        #[derive(Default, Clone)]
        struct Droppable {
            dropped: bool,
        }
        impl Drop for Droppable {
            fn drop(&mut self) {
                self.dropped = true;
            }
        }

        let values = (0..8).map(|_| Droppable::default()).collect::<alloc::vec::Vec<_>>();

        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init();
                let initial_capacity = rb.capacity();
                unsafe { rb.set_len(4) };
                assert_eq!(rb.capacity(), initial_capacity);
                assert!(rb.to_vec().iter().all(|item| !item.dropped));
                unsafe { rb.set_len(8) };
                assert!(rb.to_vec().iter().all(|item| !item.dropped));
                rb.clear();
                assert!(rb.to_vec().iter().all(|item| item.dropped));
            };
        }

        test_concrete!(|| ConstGenericRingBuffer::<Droppable, 8>::from(values.clone()));
        test_concrete!(|| AllocRingBuffer::<Droppable>::from(values));
    }
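    // The uninitialised-memory variants below reach into the private `buf`
    // field to initialise the first four slots by hand before calling
    // `set_len(4)`. Note: `with_capacity_power_of_2(3)` is assumed here to
    // allocate 2^3 = 8 slots.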
    #[test]
    fn test_set_len_uninit_primitive() {
        use crate::SetLen;

        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init();
                assert_eq!(rb.len(), 0);
                unsafe { rb.set_len(4) };
                assert_eq!(rb.len(), 4);
                assert_eq!(rb.to_vec(), alloc::vec![1, 2, 3, 4]);
            };
        }

        test_concrete!(|| {
            let mut rb = ConstGenericRingBuffer::<i32, 8>::new();
            let _ = rb.buf[0].write(1);
            let _ = rb.buf[1].write(2);
            let _ = rb.buf[2].write(3);
            let _ = rb.buf[3].write(4);
            rb
        });
        test_concrete!(|| {
            let rb = AllocRingBuffer::<i32>::with_capacity_power_of_2(3);
            unsafe {
                *rb.buf = 1;
                *rb.buf.add(1) = 2;
                *rb.buf.add(2) = 3;
                *rb.buf.add(3) = 4;
            }
            rb
        });
    }

    #[test]
    fn test_set_len_uninit_droppable() {
        use crate::SetLen;

        #[derive(Default, Clone)]
        struct Droppable {
            dropped: bool,
        }
        impl Drop for Droppable {
            fn drop(&mut self) {
                self.dropped = true;
            }
        }

        macro_rules! test_concrete {
            ($rb_init: expr) => {
                let mut rb = $rb_init();
                assert_eq!(rb.len(), 0);
                assert!(rb.to_vec().iter().all(|item| !item.dropped));
                unsafe { rb.set_len(4) };
                assert_eq!(rb.len(), 4);
                assert!(rb.to_vec().iter().all(|item| !item.dropped));
                rb.clear();
                assert!(rb.to_vec().iter().all(|item| item.dropped));
            };
        }

        test_concrete!(|| {
            let mut rb = ConstGenericRingBuffer::<Droppable, 8>::new();
            let _ = rb.buf[0].write(Droppable::default());
            let _ = rb.buf[1].write(Droppable::default());
            let _ = rb.buf[2].write(Droppable::default());
            let _ = rb.buf[3].write(Droppable::default());
            rb
        });
        test_concrete!(|| {
            let rb = AllocRingBuffer::<Droppable>::with_capacity_power_of_2(3);
            unsafe {
                // use `write` rather than plain assignment: assigning through the
                // raw pointer would drop the uninitialised "old" value, which is UB
                rb.buf.write(Droppable::default());
                rb.buf.add(1).write(Droppable::default());
                rb.buf.add(2).write(Droppable::default());
                rb.buf.add(3).write(Droppable::default());
            }
            rb
        });
    }
}
--------------------------------------------------------------------------------