├── .gitignore ├── .github ├── FUNDING.yml ├── workflows │ ├── release.yaml │ └── build.yaml ├── CONTRIBUTING.md └── CODE_OF_CONDUCT.md ├── src ├── free │ ├── v1_40.rs │ ├── v1_59.rs │ ├── v1_47.rs │ ├── v1_51.rs │ ├── v1_54.rs │ ├── v1_53.rs │ ├── v1_48.rs │ ├── v1_43.rs │ └── v1_59 │ │ ├── zip.rs │ │ └── available_parallelism.rs ├── inherent │ ├── v1_48.rs │ ├── v1_41.rs │ ├── v1_55.rs │ ├── v1_51 │ │ ├── str.rs │ │ └── slice.rs │ ├── v1_59.rs │ ├── v1_47.rs │ ├── v1_49.rs │ ├── v1_46.rs │ ├── v1_45.rs │ ├── v1_57.rs │ ├── v1_43.rs │ ├── v1_42.rs │ ├── v1_60.rs │ ├── v1_37.rs │ ├── v1_52.rs │ ├── mod.rs │ ├── v1_58.rs │ ├── v1_50.rs │ ├── v1_40.rs │ ├── v1_44.rs │ ├── v1_51.rs │ ├── v1_53.rs │ ├── v1_54.rs │ ├── v1_38.rs │ └── v1_49 │ │ └── sort.rs ├── pattern │ ├── memchr.rs │ └── mod.rs └── lib.rs ├── .editorconfig ├── rustfmt.toml ├── LICENSE-MIT ├── Cargo.toml ├── README.md └── LICENSE-Apache /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: jhpratt 2 | custom: ["paypal.me/jhpratt"] 3 | -------------------------------------------------------------------------------- /src/free/v1_40.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod mem { 2 | pub fn take(dest: &mut T) -> T { 3 | core::mem::replace(dest, T::default()) 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.yaml] 12 | indent_size = 2 13 | -------------------------------------------------------------------------------- /src/free/v1_59.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "std")] mod available_parallelism; 2 | mod zip; 3 | 4 | #[cfg(feature = "std")] 5 | #[allow(unreachable_pub)] // false positive 6 | pub use available_parallelism::*; 7 | #[allow(unreachable_pub)] // false positive 8 | pub use zip::*; 9 | -------------------------------------------------------------------------------- /src/free/v1_47.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod f32 { 2 | pub(crate) mod consts { 3 | pub const TAU: f32 = 6.28318530717958647692528676655900577_f32; 4 | } 5 | } 6 | 7 | pub(crate) mod f64 { 8 | pub(crate) mod consts { 9 | pub const TAU: f64 = 6.28318530717958647692528676655900577_f64; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/free/v1_51.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] 2 | pub(crate) mod task { 3 | use alloc::sync::Arc; 4 | 5 | pub trait Wake { 6 | fn wake(self: Arc); 7 | #[cfg(since = "1.41")] 8 | fn wake_by_ref(self: &Arc) { 9 | self.clone().wake(); 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/inherent/v1_48.rs: -------------------------------------------------------------------------------- 1 | use core::ops::Range; 2 | 3 | use easy_ext::ext; 4 | 5 
| use crate::inherent::Sealed; 6 | #[ext] 7 | pub impl [T] 8 | where Self: Sealed<[T]> 9 | { 10 | fn as_ptr_range(&self) -> Range<*const T> { 11 | let start = self.as_ptr(); 12 | let end = unsafe { start.add(self.len()) }; 13 | start..end 14 | } 15 | 16 | fn as_mut_ptr_range(&mut self) -> Range<*mut T> { 17 | let start = self.as_mut_ptr(); 18 | let end = unsafe { start.add(self.len()) }; 19 | start..end 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/inherent/v1_41.rs: -------------------------------------------------------------------------------- 1 | use easy_ext::ext; 2 | 3 | use crate::inherent::Sealed; 4 | 5 | #[ext] 6 | pub impl Result 7 | where Self: Sealed> 8 | { 9 | fn map_or U>(self, default: U, f: F) -> U { 10 | match self { 11 | Ok(t) => f(t), 12 | Err(_) => default, 13 | } 14 | } 15 | 16 | fn map_or_else U, F: FnOnce(T) -> U>(self, default: D, f: F) -> U { 17 | match self { 18 | Ok(t) => f(t), 19 | Err(e) => default(e), 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | newline_style = "unix" 2 | use_field_init_shorthand = true 3 | use_try_shorthand = true 4 | 5 | # Unstable features below 6 | unstable_features = true 7 | version = "Two" 8 | comment_width = 100 9 | error_on_line_overflow = true 10 | format_code_in_doc_comments = true 11 | format_macro_bodies = true 12 | format_macro_matchers = true 13 | format_strings = true 14 | imports_granularity = "Module" 15 | group_imports = "StdExternalCrate" 16 | normalize_doc_attributes = true 17 | wrap_comments = true 18 | use_small_heuristics = "max" 19 | where_single_line = true 20 | inline_attribute_width = 50 21 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags: 4 | - "v[0-9]+.[0-9]+.[0-9]+" 5 | 6 | name: Create release 7 | 8 | jobs: 9 | build: 10 | name: Create release 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout code 14 | uses: actions/checkout@v2 15 | 16 | - name: Create release 17 | uses: actions/create-release@v1 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | with: 21 | tag_name: ${{ github.ref }} 22 | release_name: ${{ github.ref }} 23 | body: "" 24 | draft: false 25 | prerelease: false 26 | -------------------------------------------------------------------------------- /src/free/v1_54.rs: -------------------------------------------------------------------------------- 1 | #[allow(unreachable_pub)] // false positive 2 | pub(crate) mod prelude { 3 | pub(crate) mod rust_2015 { 4 | #[cfg(feature = "alloc")] 5 | pub use alloc::borrow::ToOwned; 6 | #[cfg(feature = "alloc")] 7 | pub use alloc::boxed::Box; 8 | #[cfg(feature = "alloc")] 9 | pub use alloc::string::{String, ToString}; 10 | #[cfg(feature = "alloc")] pub use alloc::vec::Vec; 11 | pub use core::prelude::v1::*; 12 | } 13 | pub(crate) mod rust_2018 { 14 | pub use super::rust_2015::*; 15 | } 16 | pub(crate) mod rust_2021 { 17 | pub use core::convert::{TryFrom, TryInto}; 18 | pub use core::iter::FromIterator; 19 | 20 | pub use super::rust_2015::*; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/inherent/v1_55.rs: -------------------------------------------------------------------------------- 1 | use 
core::mem::MaybeUninit; 2 | use core::ops::Bound; 3 | 4 | use easy_ext::ext; 5 | 6 | use crate::inherent::Sealed; 7 | 8 | #[ext] 9 | pub impl Bound 10 | where Self: Sealed> 11 | { 12 | fn cloned(self) -> Self { 13 | match self { 14 | Bound::Unbounded => Bound::Unbounded, 15 | Bound::Included(x) => Bound::Included(x.clone()), 16 | Bound::Excluded(x) => Bound::Excluded(x.clone()), 17 | } 18 | } 19 | } 20 | 21 | #[ext] 22 | pub impl MaybeUninit 23 | where Self: Sealed> 24 | { 25 | unsafe fn assume_init_mut(&mut self) -> &mut T { 26 | &mut *self.as_mut_ptr() 27 | } 28 | 29 | unsafe fn assume_init_ref(&self) -> &T { 30 | &*self.as_ptr() 31 | } 32 | 33 | fn write(&mut self, value: T) -> &mut T { 34 | *self = MaybeUninit::new(value); 35 | unsafe { self.assume_init_mut() } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/inherent/v1_51/str.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | 3 | use crate::pattern::Pattern; 4 | 5 | pub struct SplitInclusive<'a, P: Pattern<'a>>(pub(super) SplitInternal<'a, P>); 6 | 7 | pub(super) struct SplitInternal<'a, P: Pattern<'a>> { 8 | pub(super) start: usize, 9 | pub(super) end: usize, 10 | pub(super) matcher: P::Searcher, 11 | pub(super) allow_trailing_empty: bool, 12 | pub(super) finished: bool, 13 | } 14 | 15 | impl<'a, P> fmt::Debug for SplitInternal<'a, P> 16 | where 17 | P: Pattern<'a>, 18 | P::Searcher: fmt::Debug, 19 | { 20 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 21 | f.debug_struct("SplitInternal") 22 | .field("start", &self.start) 23 | .field("end", &self.end) 24 | .field("matcher", &self.matcher) 25 | .field("allow_trailing_empty", &self.allow_trailing_empty) 26 | .field("finished", &self.finished) 27 | .finish() 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/inherent/v1_59.rs: -------------------------------------------------------------------------------- 1 | use core::num::{NonZeroU128, NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize}; 2 | 3 | use easy_ext::ext; 4 | 5 | use crate::inherent::Sealed; 6 | 7 | #[ext] 8 | pub impl<'a, T: 'a, E> Result<&'a T, E> 9 | where Self: Sealed> 10 | { 11 | fn copied(self) -> Result 12 | where T: Copy { 13 | self.map(|&t| t) 14 | } 15 | 16 | fn cloned(self) -> Result 17 | where T: Clone { 18 | self.map(|t| t.clone()) 19 | } 20 | } 21 | 22 | macro_rules! 
impl_nonzero { 23 | ($($t:ty)+) => {$( 24 | #[allow(unreachable_pub)] // false positive 25 | #[ext] 26 | pub impl $t 27 | where Self: Sealed<$t> 28 | { 29 | fn is_power_of_two(self) -> bool { 30 | self.get().is_power_of_two() 31 | } 32 | } 33 | )*}; 34 | } 35 | 36 | impl_nonzero![NonZeroU8 NonZeroU16 NonZeroU32 NonZeroU64 NonZeroU128 NonZeroUsize]; 37 | -------------------------------------------------------------------------------- /src/inherent/v1_47.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] use alloc::boxed::Box; 2 | #[cfg(feature = "alloc")] use alloc::vec::Vec; 3 | use core::ops::{DerefMut, Range}; 4 | 5 | use easy_ext::ext; 6 | 7 | use crate::inherent::Sealed; 8 | 9 | #[ext] 10 | pub impl> Range 11 | where Self: Sealed> 12 | { 13 | fn is_empty(&self) -> bool { 14 | !(self.start < self.end) 15 | } 16 | } 17 | 18 | #[ext] 19 | pub impl Result 20 | where Self: Sealed> 21 | { 22 | fn as_deref(&self) -> Result<&T::Target, &E> { 23 | self.as_ref().map(|t| t.deref()) 24 | } 25 | 26 | fn as_deref_mut(&mut self) -> Result<&mut T::Target, &mut E> { 27 | self.as_mut().map(|t| t.deref_mut()) 28 | } 29 | } 30 | 31 | #[cfg(feature = "alloc")] 32 | #[ext] 33 | pub impl Vec 34 | where Self: Sealed> 35 | { 36 | fn leak<'a>(self) -> &'a mut [T] 37 | where T: 'a { 38 | Box::leak(self.into_boxed_slice()) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Jacob Pratt 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 
20 | -------------------------------------------------------------------------------- /src/inherent/v1_49.rs: -------------------------------------------------------------------------------- 1 | mod sort; 2 | 3 | use core::cmp::Ordering; 4 | 5 | use easy_ext::ext; 6 | 7 | use crate::inherent::Sealed; 8 | 9 | #[ext] 10 | pub impl [T] 11 | where Self: Sealed<[T]> 12 | { 13 | fn select_nth_unstable(&mut self, index: usize) -> (&mut [T], &mut T, &mut [T]) 14 | where T: Ord { 15 | let mut f = |a: &T, b: &T| a.lt(b); 16 | sort::partition_at_index(self, index, &mut f) 17 | } 18 | 19 | fn select_nth_unstable_by( 20 | &mut self, 21 | index: usize, 22 | mut compare: F, 23 | ) -> (&mut [T], &mut T, &mut [T]) 24 | where 25 | F: FnMut(&T, &T) -> Ordering, 26 | { 27 | let mut f = |a: &T, b: &T| compare(a, b) == Ordering::Less; 28 | sort::partition_at_index(self, index, &mut f) 29 | } 30 | 31 | fn select_nth_unstable_by_key( 32 | &mut self, 33 | index: usize, 34 | mut f: F, 35 | ) -> (&mut [T], &mut T, &mut [T]) 36 | where 37 | F: FnMut(&T) -> K, 38 | K: Ord, 39 | { 40 | let mut g = |a: &T, b: &T| f(a).lt(&f(b)); 41 | sort::partition_at_index(self, index, &mut g) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/free/v1_53.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod array { 2 | pub fn from_ref(s: &T) -> &[T; 1] { 3 | unsafe { &*(s as *const T as *const [T; 1]) } 4 | } 5 | pub fn from_mut(s: &mut T) -> &mut [T; 1] { 6 | unsafe { &mut *(s as *mut T as *mut [T; 1]) } 7 | } 8 | } 9 | 10 | pub(crate) mod cmp { 11 | use core::cmp::Ordering; 12 | 13 | #[must_use] 14 | pub fn min_by Ordering>(v1: T, v2: T, compare: F) -> T { 15 | match compare(&v1, &v2) { 16 | Ordering::Less | Ordering::Equal => v1, 17 | Ordering::Greater => v2, 18 | } 19 | } 20 | #[must_use] 21 | pub fn min_by_key K, K: Ord>(v1: T, v2: T, mut f: F) -> T { 22 | min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2))) 23 | } 24 | #[must_use] 25 | pub fn max_by Ordering>(v1: T, v2: T, compare: F) -> T { 26 | match compare(&v1, &v2) { 27 | Ordering::Less | Ordering::Equal => v2, 28 | Ordering::Greater => v1, 29 | } 30 | } 31 | #[must_use] 32 | pub fn max_by_key K, K: Ord>(v1: T, v2: T, mut f: F) -> T { 33 | max_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2))) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/inherent/v1_46.rs: -------------------------------------------------------------------------------- 1 | use easy_ext::ext; 2 | 3 | use crate::inherent::Sealed; 4 | 5 | macro_rules! 
impl_int_v1_46 { 6 | ($($signed_type:ty, $unsigned_type:ty),+) => {$( 7 | #[ext] 8 | pub impl $signed_type where Self: Sealed<$signed_type>, { 9 | fn leading_ones(self) -> u32 { 10 | (self as $unsigned_type).leading_ones() 11 | } 12 | 13 | fn trailing_ones(self) -> u32 { 14 | (self as $unsigned_type).trailing_ones() 15 | } 16 | } 17 | 18 | #[ext] 19 | pub impl $unsigned_type where Self: Sealed<$unsigned_type>, { 20 | fn leading_ones(self) -> u32 { 21 | (!self).leading_zeros() 22 | } 23 | 24 | fn trailing_ones(self) -> u32 { 25 | (!self).trailing_zeros() 26 | } 27 | } 28 | )*}; 29 | } 30 | 31 | impl_int_v1_46![i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize]; 32 | 33 | #[ext] 34 | pub impl Option 35 | where Self: Sealed> 36 | { 37 | fn zip(self, other: Option) -> Option<(T, U)> { 38 | match (self, other) { 39 | (Some(a), Some(b)) => Some((a, b)), 40 | _ => None, 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/free/v1_48.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod future { 2 | use core::future::Future; 3 | use core::pin::Pin; 4 | use core::task::{Context, Poll}; 5 | 6 | pub struct Ready(Option); 7 | 8 | impl Unpin for Ready {} 9 | 10 | impl Future for Ready { 11 | type Output = T; 12 | 13 | fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll { 14 | Poll::Ready(self.0.take().expect("Ready polled after completion")) 15 | } 16 | } 17 | 18 | pub fn ready(t: T) -> Ready { 19 | Ready(Some(t)) 20 | } 21 | 22 | #[derive(Debug)] 23 | #[must_use = "futures do nothing unless you `.await` or poll them"] 24 | pub struct Pending { 25 | _data: core::marker::PhantomData, 26 | } 27 | 28 | pub fn pending() -> Pending { 29 | Pending { _data: core::marker::PhantomData } 30 | } 31 | 32 | impl Future for Pending { 33 | type Output = T; 34 | 35 | fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll { 36 | Poll::Pending 37 | } 38 | } 39 | 40 | impl Unpin for Pending {} 41 | 42 | impl Clone for Pending { 43 | fn clone(&self) -> Self { 44 | pending() 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/inherent/v1_45.rs: -------------------------------------------------------------------------------- 1 | use easy_ext::ext; 2 | 3 | use crate::inherent::Sealed; 4 | use crate::pattern::{Pattern, ReverseSearcher}; 5 | 6 | macro_rules! impl_int_v1_45 { 7 | ($($type:ident)+) => {$( 8 | #[ext] 9 | pub impl $type where Self: Sealed<$type>, { 10 | fn saturating_neg(self) -> Self { 11 | if self == core::$type::MIN { 12 | core::$type::MAX 13 | } else { 14 | -self 15 | } 16 | } 17 | 18 | fn saturating_abs(self) -> Self { 19 | if self.is_negative() { 20 | self.saturating_neg() 21 | } else { 22 | self 23 | } 24 | } 25 | } 26 | )*}; 27 | } 28 | 29 | impl_int_v1_45![i8 i16 i32 i64 i128 isize]; 30 | 31 | #[ext] 32 | pub impl str 33 | where Self: Sealed 34 | { 35 | #[must_use = "this returns the remaining substring as a new slice, without modifying the \ 36 | original"] 37 | fn strip_prefix<'a, P: Pattern<'a>>(&'a self, prefix: P) -> Option<&'a str> { 38 | prefix.strip_prefix_of(self) 39 | } 40 | 41 | #[must_use = "this returns the remaining substring as a new slice, without modifying the \ 42 | original"] 43 | fn strip_suffix<'a, P>(&'a self, suffix: P) -> Option<&'a str> 44 | where 45 | P: Pattern<'a>, 46 |
<P as Pattern<'a>
>::Searcher: ReverseSearcher<'a>, 47 | { 48 | suffix.strip_suffix_of(self) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/free/v1_43.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod f32 { 2 | pub(crate) mod consts { 3 | pub const LOG10_2: f32 = 0.301029995663981195213738894724493027_f32; 4 | pub const LOG2_10: f32 = 3.32192809488736234787031942948939018_f32; 5 | } 6 | } 7 | 8 | pub(crate) mod f64 { 9 | pub(crate) mod consts { 10 | pub const LOG10_2: f64 = 0.301029995663981195213738894724493027_f64; 11 | pub const LOG2_10: f64 = 3.32192809488736234787031942948939018_f64; 12 | } 13 | } 14 | 15 | pub(crate) mod iter { 16 | use core::iter::FusedIterator; 17 | 18 | pub fn once_with A>(gen: F) -> OnceWith { 19 | OnceWith { gen: Some(gen) } 20 | } 21 | 22 | #[derive(Copy, Clone, Debug)] 23 | pub struct OnceWith { 24 | gen: Option, 25 | } 26 | 27 | impl A> Iterator for OnceWith { 28 | type Item = A; 29 | 30 | fn next(&mut self) -> Option { 31 | let f = self.gen.take()?; 32 | Some(f()) 33 | } 34 | 35 | fn size_hint(&self) -> (usize, Option) { 36 | self.gen.iter().size_hint() 37 | } 38 | } 39 | 40 | impl A> DoubleEndedIterator for OnceWith { 41 | fn next_back(&mut self) -> Option { 42 | self.next() 43 | } 44 | } 45 | 46 | impl A> ExactSizeIterator for OnceWith { 47 | fn len(&self) -> usize { 48 | self.gen.iter().len() 49 | } 50 | } 51 | 52 | impl A> FusedIterator for OnceWith {} 53 | } 54 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "standback" 3 | version = "0.4.4" 4 | authors = ["Jacob Pratt ", "The Rust Project Developers"] 5 | edition = "2018" 6 | repository = "https://github.com/jhpratt/standback" 7 | keywords = ["std", "back-compatible", "polyfill"] 8 | categories = ["no-std", "rust-patterns"] 9 | readme = "README.md" 10 | license = "MIT OR Apache-2.0" 11 | description = "New standard library, old compiler." 12 | build = "build.rs" 13 | include = ["src/**/*", "LICENSE-*", "README.md", "CHANGELOG.md", "build.rs"] 14 | 15 | [package.metadata.docs.rs] 16 | targets = ["x86_64-unknown-linux-gnu"] 17 | 18 | [features] 19 | default = ["std", "msrv-1-36"] 20 | std = ["alloc"] 21 | alloc = [] 22 | 23 | # List all versions from the MSRV to one before the current release. 
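# Each `msrv-1-XX` feature simply enables the feature for the following release,
# so selecting the feature that matches your minimum supported compiler
# transitively turns on every later version's feature as well.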
24 | msrv-1-36 = ["msrv-1-37"] 25 | msrv-1-37 = ["msrv-1-38"] 26 | msrv-1-38 = ["msrv-1-39"] 27 | msrv-1-39 = ["msrv-1-40"] 28 | msrv-1-40 = ["msrv-1-41"] 29 | msrv-1-41 = ["msrv-1-42"] 30 | msrv-1-42 = ["msrv-1-43"] 31 | msrv-1-43 = ["msrv-1-44"] 32 | msrv-1-44 = ["msrv-1-45"] 33 | msrv-1-45 = ["msrv-1-46"] 34 | msrv-1-46 = ["msrv-1-47"] 35 | msrv-1-47 = ["msrv-1-48"] 36 | msrv-1-48 = ["msrv-1-49"] 37 | msrv-1-49 = ["msrv-1-50"] 38 | msrv-1-50 = ["msrv-1-51"] 39 | msrv-1-51 = ["msrv-1-52"] 40 | msrv-1-52 = ["msrv-1-53"] 41 | msrv-1-53 = ["msrv-1-54"] 42 | msrv-1-54 = ["msrv-1-55"] 43 | msrv-1-55 = ["msrv-1-56"] 44 | msrv-1-56 = ["msrv-1-57"] 45 | msrv-1-57 = ["msrv-1-58"] 46 | msrv-1-58 = ["msrv-1-59"] 47 | msrv-1-59 = [] 48 | 49 | [dependencies] 50 | easy-ext = "1.0.0" 51 | 52 | [target.'cfg(unix)'.dependencies] 53 | libc = "0.2.119" 54 | 55 | [build-dependencies] 56 | version_check = "0.9.3" 57 | -------------------------------------------------------------------------------- /src/inherent/v1_57.rs: -------------------------------------------------------------------------------- 1 | use core::convert::Infallible; 2 | use core::fmt; 3 | 4 | use easy_ext::ext; 5 | 6 | use crate::inherent::sealed::Sealed; 7 | 8 | #[ext] 9 | pub impl T 10 | where 11 | T: Iterator, 12 | Self: Sealed, 13 | { 14 | fn map_while(self, predicate: P) -> MapWhile 15 | where 16 | Self: Sized, 17 | P: FnMut(Self::Item) -> Option, 18 | { 19 | MapWhile { iter: self, predicate } 20 | } 21 | } 22 | 23 | #[must_use = "iterators are lazy and do nothing unless consumed"] 24 | #[derive(Clone)] 25 | pub struct MapWhile { 26 | iter: I, 27 | predicate: P, 28 | } 29 | 30 | impl fmt::Debug for MapWhile { 31 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 32 | f.debug_struct("MapWhile").field("iter", &self.iter).finish() 33 | } 34 | } 35 | 36 | impl Iterator for MapWhile 37 | where P: FnMut(I::Item) -> Option 38 | { 39 | type Item = B; 40 | 41 | fn next(&mut self) -> Option { 42 | let x = self.iter.next()?; 43 | (self.predicate)(x) 44 | } 45 | 46 | fn size_hint(&self) -> (usize, Option) { 47 | let (_, upper) = self.iter.size_hint(); 48 | (0, upper) 49 | } 50 | 51 | fn fold(mut self, init: Acc, fold: Fold) -> Acc 52 | where 53 | Self: Sized, 54 | Fold: FnMut(Acc, Self::Item) -> Acc, 55 | { 56 | fn ok(mut f: impl FnMut(B, T) -> B) -> impl FnMut(B, T) -> Result { 57 | move |acc, x| Ok(f(acc, x)) 58 | } 59 | 60 | self.try_fold(init, ok(fold)).unwrap() 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Standback 2 | 3 | ![build status](https://github.com/jhpratt/standback/workflows/Build/badge.svg?branch=master&event=push) 4 | ![license](https://img.shields.io/badge/license-MIT%20or%20Apache--2-brightgreen) 5 | ![version](https://img.shields.io/crates/v/standback) 6 | ![rustc 1.36.0](https://img.shields.io/badge/rustc-1.36.0-blue) 7 | 8 | [Documentation](https://docs.rs/standback) 9 | 10 | Standback exists to allow the usage of various APIs that have been stabilized 11 | since rustc 1.36.0 _without_ having to require users to upgrade their compiler. 12 | The best part? Only old features are built from scratch; anything stable on the 13 | compiler in use will just be re-exported. 14 | 15 | Note that it is sometimes the case that newly stabilized methods would require 16 | internal methods, direct access to fields, or nightly features to work. 
As such, 17 | not every feature is backported. Found a neat way to implement a method or type 18 | that is possible on stable? Pull requests are accepted! 19 | 20 | ## License 21 | 22 | A majority of this code comes directly from the Rust standard library, where its 23 | license is the following. All new code is also released under this license. 24 | 25 | This project is licensed under either of 26 | 27 | - [Apache License, Version 2.0](https://github.com/jhpratt/standback/blob/master/LICENSE-Apache) 28 | - [MIT license](https://github.com/jhpratt/standback/blob/master/LICENSE-MIT) 29 | 30 | at your option. 31 | 32 | Unless you explicitly state otherwise, any contribution intentionally submitted 33 | for inclusion in time by you, as defined in the Apache-2.0 license, shall be 34 | dual licensed as above, without any additional terms or conditions. 35 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Bugs 4 | 5 | Think you found a bug? Create an issue on the [issue tracker]. Be sure to include enough information to reproduce the issue, trying to eliminate anything unnecessary. 6 | 7 | If an issue already exists for your bug, feel free to add another example! Even if it's similar to an existing one, it might help in narrowing down the cause. 8 | 9 | ## Pull Requests 10 | 11 | All pull requests are appreciated! Even if it's just fixing a typo in documentation, it's an improvement from what we had before. If you're planning on making a larger change to the codebase, it's recommended that you either ask on Matrix or file an issue before doing so - it will save you time if the PR won't be accepted! 12 | 13 | ### Tests 14 | 15 | Any PR modifying code should be accompanied by a change to tests (and possibly additions). Documentation should also be updated if necessary. 16 | 17 | ### Commits 18 | 19 | Commits will almost certainly be squashed when merged, so don't stress about having the perfect commit message. It is appreciated to keep commits to logical chunks, as this makes it easier to review. 20 | 21 | ## Continuous Integration 22 | 23 | standback uses [GitHub Actions] for continuous integration. Types are checked and the full test suite is run on Windows, Mac, and Linux on both the minimum supported Rust version and the most recent stable. Formatting is verified, as are clippy lints. 24 | 25 | ## Formatting 26 | 27 | standback uses rustfmt for formatting, and uses a number of nightly features. As such, you will need to run `cargo +nightly fmt` before committing. Formatting is important, so not doing this may cause CI to fail! 
28 | 29 | [issue tracker]: https://github.com/jhpratt/standback/issues/new 30 | [GitHub Actions]: https://github.com/features/actions 31 | -------------------------------------------------------------------------------- /src/inherent/v1_43.rs: -------------------------------------------------------------------------------- 1 | use easy_ext::ext; 2 | 3 | use crate::inherent::Sealed; 4 | 5 | #[ext] 6 | pub impl f32 7 | where Self: Sealed 8 | { 9 | const DIGITS: u32 = 6; 10 | const EPSILON: f32 = 1.19209290e-07_f32; 11 | const INFINITY: f32 = 1.0_f32 / 0.0_f32; 12 | const MANTISSA_DIGITS: u32 = 24; 13 | const MAX: f32 = 3.40282347e+38_f32; 14 | const MAX_10_EXP: i32 = 38; 15 | const MAX_EXP: i32 = 128; 16 | const MIN: f32 = -3.40282347e+38_f32; 17 | const MIN_10_EXP: i32 = -37; 18 | const MIN_EXP: i32 = -125; 19 | const MIN_POSITIVE: f32 = 1.17549435e-38_f32; 20 | const NAN: f32 = 0.0_f32 / 0.0_f32; 21 | const NEG_INFINITY: f32 = -1.0_f32 / 0.0_f32; 22 | const RADIX: u32 = 2; 23 | } 24 | 25 | #[ext] 26 | pub impl f64 27 | where Self: Sealed 28 | { 29 | const DIGITS: u32 = 15; 30 | const EPSILON: f64 = 2.2204460492503131e-16_f64; 31 | const INFINITY: f64 = 1.0_f64 / 0.0_f64; 32 | const MANTISSA_DIGITS: u32 = 53; 33 | const MAX: f64 = 1.7976931348623157e+308_f64; 34 | const MAX_10_EXP: i32 = 308; 35 | const MAX_EXP: i32 = 1024; 36 | const MIN: f64 = -1.7976931348623157e+308_f64; 37 | const MIN_10_EXP: i32 = -307; 38 | const MIN_EXP: i32 = -1021; 39 | const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64; 40 | const NAN: f64 = 0.0_f64 / 0.0_f64; 41 | const NEG_INFINITY: f64 = -1.0_f64 / 0.0_f64; 42 | const RADIX: u32 = 2; 43 | } 44 | 45 | macro_rules! impl_int_v1_43 { 46 | ($($signed_type:ty, $unsigned_type:ty),+) => {$( 47 | #[ext] 48 | impl $signed_type where Self: Sealed<$signed_type>, { 49 | const MIN: Self = !0 ^ ((!0 as $unsigned_type) >> 1) as Self; 50 | const MAX: Self = !Self::MIN; 51 | } 52 | 53 | #[ext] 54 | impl $unsigned_type where Self: Sealed<$unsigned_type>, { 55 | const MIN: Self = 0; 56 | const MAX: Self = !0; 57 | } 58 | )+} 59 | } 60 | 61 | impl_int_v1_43![i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize]; 62 | -------------------------------------------------------------------------------- /src/inherent/v1_42.rs: -------------------------------------------------------------------------------- 1 | use core::mem::ManuallyDrop; 2 | use core::ptr; 3 | #[cfg(feature = "std")] 4 | use std::{ 5 | sync::{Condvar, LockResult, MutexGuard, WaitTimeoutResult}, 6 | time::{Duration, Instant}, 7 | }; 8 | 9 | use easy_ext::ext; 10 | 11 | use crate::inherent::Sealed; 12 | 13 | #[cfg(feature = "std")] 14 | fn new_wait_timeout_result(value: bool) -> WaitTimeoutResult { 15 | unsafe { core::mem::transmute(value) } 16 | } 17 | 18 | #[cfg(feature = "std")] 19 | #[ext] 20 | pub impl Condvar 21 | where Self: Sealed 22 | { 23 | fn wait_while<'a, T, F>( 24 | &self, 25 | mut guard: MutexGuard<'a, T>, 26 | mut condition: F, 27 | ) -> LockResult> 28 | where 29 | F: FnMut(&mut T) -> bool, 30 | { 31 | while condition(&mut *guard) { 32 | guard = self.wait(guard)?; 33 | } 34 | Ok(guard) 35 | } 36 | 37 | fn wait_timeout_while<'a, T, F>( 38 | &self, 39 | mut guard: MutexGuard<'a, T>, 40 | dur: Duration, 41 | mut condition: F, 42 | ) -> LockResult<(MutexGuard<'a, T>, WaitTimeoutResult)> 43 | where 44 | F: FnMut(&mut T) -> bool, 45 | { 46 | let start = Instant::now(); 47 | loop { 48 | if !condition(&mut *guard) { 49 | return Ok((guard, new_wait_timeout_result(false))); 50 | } 51 | let timeout = 
match dur.checked_sub(start.elapsed()) { 52 | Some(timeout) => timeout, 53 | None => return Ok((guard, new_wait_timeout_result(true))), 54 | }; 55 | guard = self.wait_timeout(guard, timeout)?.0; 56 | } 57 | } 58 | } 59 | 60 | #[ext] 61 | pub impl ManuallyDrop 62 | where Self: Sealed> 63 | { 64 | #[must_use = "if you don't need the value, you can use `ManuallyDrop::drop` instead"] 65 | unsafe fn take(slot: &mut ManuallyDrop) -> T { 66 | ptr::read(slot as *mut _ as *const _) 67 | } 68 | } 69 | 70 | #[macro_export] 71 | macro_rules! matches { 72 | ($expression:expr, $( $pattern:pat )|+ $(if $guard:expr)? $(,)?) => { 73 | match $expression { 74 | $( $pattern )|+ $(if $guard)? => true, 75 | _ => false 76 | } 77 | }; 78 | } 79 | -------------------------------------------------------------------------------- /src/inherent/v1_60.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] 2 | use alloc::vec::Vec; 3 | use core::mem::MaybeUninit; 4 | #[cfg(feature = "alloc")] 5 | use core::slice; 6 | use core::{ascii, mem, ptr}; 7 | 8 | use easy_ext::ext; 9 | 10 | use crate::inherent::sealed::Sealed; 11 | 12 | #[ext] 13 | pub impl u8 14 | where 15 | Self: Sealed, 16 | { 17 | fn escape_ascii(self) -> ascii::EscapeDefault { 18 | ascii::escape_default(self) 19 | } 20 | } 21 | 22 | #[cfg(feature = "alloc")] 23 | #[ext] 24 | pub impl Vec 25 | where 26 | Self: Sealed>, 27 | { 28 | fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { 29 | unsafe { 30 | slice::from_raw_parts_mut( 31 | self.as_mut_ptr().add(self.len()) as *mut MaybeUninit, 32 | self.capacity() - self.len(), 33 | ) 34 | } 35 | } 36 | } 37 | 38 | #[ext] 39 | pub impl MaybeUninit 40 | where 41 | Self: Sealed>, 42 | { 43 | unsafe fn assume_init_drop(&mut self) { 44 | ptr::drop_in_place(self.as_mut_ptr()) 45 | } 46 | 47 | unsafe fn assume_init_read(&self) -> T { 48 | self.as_ptr().read() 49 | } 50 | } 51 | 52 | macro_rules! 
impl_abs_diff { 53 | ($($unsigned:ident $signed:ident)*) => {$( 54 | #[ext] 55 | pub impl $unsigned 56 | where Self: Sealed<$unsigned> 57 | { 58 | fn abs_diff(self, other: $unsigned) -> $unsigned { 59 | if mem::size_of::() == 1 { 60 | (self as i32).wrapping_sub(other as i32).abs() as Self 61 | } else if self < other { 62 | other - self 63 | } else { 64 | self - other 65 | } 66 | } 67 | } 68 | 69 | #[ext] 70 | pub impl $signed 71 | where Self: Sealed<$signed> { 72 | fn abs_diff(self, other: $signed) -> $unsigned { 73 | if self < other { 74 | (other as $unsigned).wrapping_sub(self as $unsigned) 75 | } else { 76 | (self as $unsigned).wrapping_sub(other as $unsigned) 77 | } 78 | } 79 | } 80 | )*}; 81 | } 82 | 83 | impl_abs_diff![ 84 | u8 i8 85 | u16 i16 86 | u32 i32 87 | u64 i64 88 | u128 i128 89 | usize isize 90 | ]; 91 | -------------------------------------------------------------------------------- /src/inherent/v1_37.rs: -------------------------------------------------------------------------------- 1 | use core::cell::Cell; 2 | use core::ops::{Bound, RangeBounds}; 3 | use core::ptr; 4 | 5 | use easy_ext::ext; 6 | 7 | use crate::inherent::Sealed; 8 | 9 | #[ext] 10 | pub impl Cell 11 | where Self: Sealed> 12 | { 13 | fn from_mut(t: &mut T) -> &Cell { 14 | unsafe { &*(t as *mut T as *const Cell) } 15 | } 16 | } 17 | 18 | #[ext] 19 | pub impl Cell<[T]> 20 | where Self: Sealed> 21 | { 22 | fn as_slice_of_cells(&self) -> &[Cell] { 23 | unsafe { &*(self as *const Cell<[T]> as *const [Cell]) } 24 | } 25 | } 26 | 27 | #[ext] 28 | pub impl Option 29 | where Self: Sealed> 30 | { 31 | fn xor(self, optb: Option) -> Option { 32 | match (self, optb) { 33 | (Some(a), None) => Some(a), 34 | (None, Some(b)) => Some(b), 35 | _ => None, 36 | } 37 | } 38 | } 39 | 40 | #[ext] 41 | pub impl [T] 42 | where Self: Sealed<[T]> 43 | { 44 | fn copy_within>(&mut self, src: R, dest: usize) 45 | where T: Copy { 46 | let src_start = match src.start_bound() { 47 | Bound::Included(&n) => n, 48 | Bound::Excluded(&n) => n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail()), 49 | Bound::Unbounded => 0, 50 | }; 51 | let src_end = match src.end_bound() { 52 | Bound::Included(&n) => n.checked_add(1).unwrap_or_else(|| slice_index_overflow_fail()), 53 | Bound::Excluded(&n) => n, 54 | Bound::Unbounded => self.len(), 55 | }; 56 | assert!(src_start <= src_end, "src end is before src start"); 57 | assert!(src_end <= self.len(), "src is out of bounds"); 58 | let count = src_end - src_start; 59 | assert!(dest <= self.len() - count, "dest is out of bounds"); 60 | unsafe { 61 | ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count); 62 | } 63 | } 64 | } 65 | 66 | #[cold] 67 | fn slice_index_overflow_fail() -> ! 
{ 68 | panic!("attempted to index slice up to maximum usize"); 69 | } 70 | 71 | #[ext] 72 | pub impl Iter 73 | where Self: DoubleEndedIterator 74 | { 75 | fn nth_back(&mut self, mut n: usize) -> Option { 76 | for x in self.rev() { 77 | if n == 0 { 78 | return Some(x); 79 | } 80 | n -= 1; 81 | } 82 | None 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/free/v1_59/zip.rs: -------------------------------------------------------------------------------- 1 | use core::cmp; 2 | use core::fmt::{self, Debug}; 3 | use core::iter::{DoubleEndedIterator, ExactSizeIterator, FusedIterator, Iterator}; 4 | 5 | pub fn zip(a: A, b: B) -> Zip { 6 | Zip { a: a.into_iter(), b: b.into_iter() } 7 | } 8 | 9 | #[derive(Clone)] 10 | #[must_use = "iterators are lazy and do nothing unless consumed"] 11 | pub struct Zip { 12 | a: A, 13 | b: B, 14 | } 15 | 16 | impl Debug for Zip { 17 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 18 | f.debug_struct("Zip").finish() 19 | } 20 | } 21 | 22 | impl Iterator for Zip { 23 | type Item = (A::Item, B::Item); 24 | 25 | fn next(&mut self) -> Option<(A::Item, B::Item)> { 26 | let x = self.a.next()?; 27 | let y = self.b.next()?; 28 | Some((x, y)) 29 | } 30 | 31 | fn size_hint(&self) -> (usize, Option) { 32 | let (a_lower, a_upper) = self.a.size_hint(); 33 | let (b_lower, b_upper) = self.b.size_hint(); 34 | 35 | let lower = cmp::min(a_lower, b_lower); 36 | 37 | let upper = match (a_upper, b_upper) { 38 | (Some(x), Some(y)) => Some(cmp::min(x, y)), 39 | (Some(x), None) => Some(x), 40 | (None, Some(y)) => Some(y), 41 | (None, None) => None, 42 | }; 43 | 44 | (lower, upper) 45 | } 46 | } 47 | 48 | impl 49 | DoubleEndedIterator for Zip 50 | { 51 | fn next_back(&mut self) -> Option<(A::Item, B::Item)> { 52 | let a_sz = self.a.len(); 53 | let b_sz = self.b.len(); 54 | if a_sz != b_sz { 55 | if a_sz > b_sz { 56 | for _ in 0..a_sz - b_sz { 57 | self.a.next_back(); 58 | } 59 | } else { 60 | for _ in 0..b_sz - a_sz { 61 | self.b.next_back(); 62 | } 63 | } 64 | } 65 | match (self.a.next_back(), self.b.next_back()) { 66 | (Some(x), Some(y)) => Some((x, y)), 67 | (None, None) => None, 68 | _ => unreachable!(), 69 | } 70 | } 71 | } 72 | 73 | impl ExactSizeIterator for Zip {} 74 | impl FusedIterator for Zip {} 75 | -------------------------------------------------------------------------------- /src/inherent/v1_52.rs: -------------------------------------------------------------------------------- 1 | use core::char::DecodeUtf16; 2 | 3 | use easy_ext::ext; 4 | 5 | use crate::inherent::Sealed; 6 | use crate::pattern::{Pattern, ReverseSearcher, Searcher}; 7 | 8 | #[ext] 9 | pub impl char 10 | where Self: Sealed 11 | { 12 | const MAX: char = '\u{10ffff}'; 13 | const REPLACEMENT_CHARACTER: char = '\u{FFFD}'; 14 | const UNICODE_VERSION: (u8, u8, u8) = { 15 | #[cfg(before = "1.38")] 16 | { 17 | (11, 0, 0) 18 | } 19 | #[cfg(all(since = "1.38", before = "1.44"))] 20 | { 21 | (12, 1, 0) 22 | } 23 | #[cfg(all(since = "1.44", before = "1.45"))] 24 | { 25 | (13, 0, 0) 26 | } 27 | #[cfg(since = "1.45")] 28 | { 29 | core::char::UNICODE_VERSION 30 | } 31 | }; 32 | 33 | fn decode_utf16>(iter: I) -> DecodeUtf16 { 34 | core::char::decode_utf16(iter) 35 | } 36 | 37 | fn from_digit(num: u32, radix: u32) -> Option { 38 | core::char::from_digit(num, radix) 39 | } 40 | 41 | unsafe fn from_u32_unchecked(i: u32) -> char { 42 | core::char::from_u32_unchecked(i) 43 | } 44 | 45 | fn from_u32(i: u32) -> Option { 46 | core::char::from_u32(i) 47 | } 48 | } 
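// The extension block above backports the items that became inherent associated
// items on `char` in Rust 1.52; the functions forward directly to the
// long-stable free functions in `core::char`, so behaviour matches the standard
// library (e.g. `from_digit(4, 10)` yields `Some('4')`, and `from_u32` returns
// `None` for surrogate code points and for values above `char::MAX`).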
49 | 50 | #[ext] 51 | pub impl [T] 52 | where Self: Sealed<[T]> 53 | { 54 | fn partition_point bool>(&self, mut pred: P) -> usize { 55 | let mut left = 0; 56 | let mut right = self.len(); 57 | 58 | while left != right { 59 | let mid = left + (right - left) / 2; 60 | let value = unsafe { self.get_unchecked(mid) }; 61 | if pred(value) { 62 | left = mid + 1; 63 | } else { 64 | right = mid; 65 | } 66 | } 67 | 68 | left 69 | } 70 | } 71 | 72 | #[ext] 73 | pub impl str 74 | where Self: Sealed 75 | { 76 | fn rsplit_once<'a, P>(&'a self, delimiter: P) -> Option<(&'a str, &'a str)> 77 | where 78 | P: Pattern<'a>, 79 |
<P as Pattern<'a>
>::Searcher: ReverseSearcher<'a>, 80 | { 81 | let (start, end) = delimiter.into_searcher(self).next_match_back()?; 82 | Some((&self[..start], &self[end..])) 83 | } 84 | 85 | fn split_once<'a, P: Pattern<'a>>(&'a self, delimiter: P) -> Option<(&'a str, &'a str)> { 86 | let (start, end) = delimiter.into_searcher(self).next_match()?; 87 | Some((&self[..start], &self[end..])) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/inherent/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(shim = "1.37")] mod v1_37; 2 | #[cfg(shim = "1.38")] mod v1_38; 3 | #[cfg(shim = "1.40")] mod v1_40; 4 | #[cfg(shim = "1.41")] mod v1_41; 5 | #[cfg(shim = "1.42")] mod v1_42; 6 | #[cfg(shim = "1.43")] mod v1_43; 7 | #[cfg(shim = "1.44")] mod v1_44; 8 | #[cfg(shim = "1.45")] mod v1_45; 9 | #[cfg(shim = "1.46")] mod v1_46; 10 | #[cfg(shim = "1.47")] mod v1_47; 11 | #[cfg(shim = "1.48")] mod v1_48; 12 | #[cfg(shim = "1.49")] mod v1_49; 13 | #[cfg(shim = "1.50")] mod v1_50; 14 | #[cfg(shim = "1.51")] mod v1_51; 15 | #[cfg(shim = "1.52")] mod v1_52; 16 | #[cfg(shim = "1.53")] mod v1_53; 17 | #[cfg(shim = "1.54")] mod v1_54; 18 | #[cfg(shim = "1.55")] mod v1_55; 19 | #[cfg(shim = "1.57")] mod v1_57; 20 | #[cfg(shim = "1.58")] mod v1_58; 21 | #[cfg(shim = "1.59")] mod v1_59; 22 | #[cfg(shim = "1.60")] mod v1_60; 23 | 24 | #[allow(unused_imports)] use sealed::Sealed; 25 | mod sealed { 26 | #[allow(unreachable_pub)] 27 | pub trait Sealed {} 28 | impl Sealed for T {} 29 | } 30 | 31 | #[cfg(shim = "1.37")] 32 | #[allow(unreachable_pub)] 33 | pub use v1_37::*; 34 | #[cfg(shim = "1.38")] 35 | #[allow(unreachable_pub)] 36 | pub use v1_38::*; 37 | #[cfg(shim = "1.40")] 38 | #[allow(unreachable_pub)] 39 | pub use v1_40::*; 40 | #[cfg(shim = "1.41")] 41 | #[allow(unreachable_pub)] 42 | pub use v1_41::*; 43 | #[cfg(shim = "1.42")] 44 | #[allow(unreachable_pub)] 45 | pub use v1_42::*; 46 | #[cfg(shim = "1.43")] 47 | #[allow(unreachable_pub)] 48 | pub use v1_43::*; 49 | #[cfg(shim = "1.44")] 50 | #[allow(unreachable_pub)] 51 | pub use v1_44::*; 52 | #[cfg(shim = "1.45")] 53 | #[allow(unreachable_pub)] 54 | pub use v1_45::*; 55 | #[cfg(shim = "1.46")] 56 | #[allow(unreachable_pub)] 57 | pub use v1_46::*; 58 | #[cfg(shim = "1.47")] 59 | #[allow(unreachable_pub)] 60 | pub use v1_47::*; 61 | #[cfg(shim = "1.48")] 62 | #[allow(unreachable_pub)] 63 | pub use v1_48::*; 64 | #[cfg(shim = "1.49")] 65 | #[allow(unreachable_pub)] 66 | pub use v1_49::*; 67 | #[cfg(shim = "1.50")] 68 | #[allow(unreachable_pub)] 69 | pub use v1_50::*; 70 | #[cfg(shim = "1.51")] 71 | #[allow(unreachable_pub)] 72 | pub use v1_51::*; 73 | #[cfg(shim = "1.52")] 74 | #[allow(unreachable_pub)] 75 | pub use v1_52::*; 76 | #[cfg(shim = "1.53")] 77 | #[allow(unreachable_pub)] 78 | pub use v1_53::*; 79 | #[cfg(shim = "1.54")] 80 | #[allow(unreachable_pub)] 81 | pub use v1_54::*; 82 | #[cfg(shim = "1.55")] 83 | #[allow(unreachable_pub)] 84 | pub use v1_55::*; 85 | #[cfg(shim = "1.57")] 86 | #[allow(unreachable_pub)] 87 | pub use v1_57::*; 88 | #[cfg(shim = "1.58")] 89 | #[allow(unreachable_pub)] 90 | pub use v1_58::*; 91 | #[cfg(shim = "1.59")] 92 | #[allow(unreachable_pub)] 93 | pub use v1_59::*; 94 | #[cfg(shim = "1.60")] 95 | #[allow(unreachable_pub)] 96 | pub use v1_60::*; 97 | -------------------------------------------------------------------------------- /.github/workflows/build.yaml: -------------------------------------------------------------------------------- 1 
| name: Build 2 | 3 | on: 4 | push: 5 | branches: ["**"] 6 | tags-ignore: ["**"] 7 | paths-ignore: 8 | - "**.md" 9 | - LICENSE-Apache 10 | - LICENSE-MIT 11 | pull_request: 12 | paths-ignore: 13 | - "**.md" 14 | - LICENSE-Apache 15 | - LICENSE-MIT 16 | 17 | jobs: 18 | check: 19 | name: Type checking (${{ matrix.target.triple }}) 20 | runs-on: ubuntu-latest 21 | strategy: 22 | matrix: 23 | target: 24 | - { triple: x86_64-unknown-linux-gnu, std: true } 25 | - { triple: x86_64-pc-windows-msvc, std: true } 26 | - { triple: x86_64-apple-darwin, std: true } 27 | - { triple: thumbv7em-none-eabihf, std: false } 28 | 29 | steps: 30 | - name: Checkout sources 31 | uses: actions/checkout@v2 32 | 33 | - name: Install toolchain 34 | uses: actions-rs/toolchain@v1 35 | with: 36 | profile: minimal 37 | toolchain: stable 38 | target: ${{ matrix.target.triple }} 39 | 40 | - name: Install cargo-hack 41 | shell: bash 42 | run: | 43 | curl -LsSf https://github.com/taiki-e/cargo-hack/releases/latest/download/cargo-hack-x86_64-unknown-linux-gnu.tar.gz | tar xzf - -C ~/.cargo/bin 44 | 45 | - name: Check powerset 46 | uses: actions-rs/cargo@v1 47 | with: 48 | command: hack 49 | args: | 50 | check 51 | --target ${{ matrix.target.triple }} 52 | --version-range 1.36.. 53 | --clean-per-version 54 | --feature-powerset 55 | --exclude-features default 56 | --exclude-no-default-features 57 | if: matrix.target.std == true 58 | 59 | - name: Check powerset 60 | uses: actions-rs/cargo@v1 61 | with: 62 | command: hack 63 | args: | 64 | check 65 | --target ${{ matrix.target.triple }} 66 | --version-range 1.36.. 67 | --clean-per-version 68 | --feature-powerset 69 | --exclude-features default,std 70 | --exclude-no-default-features 71 | if: matrix.target.std == false 72 | 73 | fmt: 74 | name: Formatting 75 | runs-on: ubuntu-latest 76 | 77 | steps: 78 | - name: Checkout sources 79 | uses: actions/checkout@v2 80 | 81 | - name: Install toolchain 82 | uses: actions-rs/toolchain@v1 83 | with: 84 | profile: minimal 85 | toolchain: nightly 86 | override: true 87 | components: rustfmt 88 | 89 | - name: Run `cargo fmt -- --check` 90 | uses: actions-rs/cargo@v1 91 | with: 92 | command: fmt 93 | args: -- --check 94 | -------------------------------------------------------------------------------- /src/inherent/v1_58.rs: -------------------------------------------------------------------------------- 1 | use core::num::{NonZeroU128, NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize}; 2 | #[cfg(feature = "std")] 3 | use std::fs::{File, Metadata, OpenOptions}; 4 | #[cfg(feature = "std")] use std::path::Path; 5 | 6 | use easy_ext::ext; 7 | 8 | use crate::inherent::Sealed; 9 | 10 | #[cfg(feature = "std")] 11 | #[ext] 12 | pub impl Metadata 13 | where Self: Sealed 14 | { 15 | fn is_symlink(&self) -> bool { 16 | self.file_type().is_symlink() 17 | } 18 | } 19 | 20 | #[cfg(feature = "std")] 21 | #[ext] 22 | pub impl Path 23 | where Self: Sealed 24 | { 25 | fn is_symlink(&self) -> bool { 26 | std::fs::symlink_metadata(self).map(|m| m.is_symlink()).unwrap_or(false) 27 | } 28 | } 29 | 30 | macro_rules! 
impl_int_v1_58 { 31 | ($($signed_type:ty, $unsigned_type:ty),+) => {$( 32 | #[ext] 33 | pub impl $signed_type where Self: Sealed<$signed_type> { 34 | fn saturating_div(self, rhs: Self) -> Self { 35 | match self.overflowing_div(rhs) { 36 | (result, false) => result, 37 | (_, true) => Self::max_value(), 38 | } 39 | } 40 | } 41 | 42 | #[ext] 43 | pub impl $unsigned_type where Self: Sealed<$unsigned_type> { 44 | fn saturating_div(self, rhs: Self) -> Self { 45 | self.wrapping_div(rhs) 46 | } 47 | } 48 | )*}; 49 | } 50 | 51 | impl_int_v1_58![i8, u8, i16, u16, i32, u32, i64, u64, i128, u128, isize, usize]; 52 | 53 | #[ext] 54 | pub impl Option 55 | where Self: Sealed> 56 | { 57 | unsafe fn unwrap_unchecked(self) -> T { 58 | debug_assert!(self.is_some()); 59 | match self { 60 | Some(val) => val, 61 | None => core::hint::unreachable_unchecked(), 62 | } 63 | } 64 | } 65 | 66 | #[ext] 67 | pub impl Result 68 | where Self: Sealed> 69 | { 70 | unsafe fn unwrap_unchecked(self) -> T { 71 | debug_assert!(self.is_ok()); 72 | match self { 73 | Ok(val) => val, 74 | Err(_) => core::hint::unreachable_unchecked(), 75 | } 76 | } 77 | 78 | unsafe fn unwrap_err_unchecked(self) -> E { 79 | debug_assert!(self.is_err()); 80 | match self { 81 | Ok(_) => core::hint::unreachable_unchecked(), 82 | Err(err) => err, 83 | } 84 | } 85 | } 86 | 87 | macro_rules! impl_nonzero_v1_58 { 88 | ($($t:ty)*) => {$( 89 | #[ext] 90 | pub impl $t 91 | where Self: Sealed<$t> 92 | { 93 | fn is_power_of_two(self) -> bool { 94 | self.get().is_power_of_two() 95 | } 96 | } 97 | )*}; 98 | } 99 | 100 | impl_nonzero_v1_58![NonZeroU8 NonZeroU16 NonZeroU32 NonZeroU64 NonZeroU128 NonZeroUsize]; 101 | 102 | #[cfg(feature = "std")] 103 | #[ext] 104 | pub impl File 105 | where Self: Sealed 106 | { 107 | fn with_options() -> OpenOptions { 108 | OpenOptions::new() 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/pattern/memchr.rs: -------------------------------------------------------------------------------- 1 | use core::{cmp, mem, usize}; 2 | 3 | #[allow(unused_imports)] use crate::shim::*; 4 | 5 | const LO_U64: u64 = 0x0101010101010101; 6 | const HI_U64: u64 = 0x8080808080808080; 7 | 8 | const LO_USIZE: usize = LO_U64 as usize; 9 | const HI_USIZE: usize = HI_U64 as usize; 10 | const USIZE_BYTES: usize = mem::size_of::(); 11 | 12 | fn contains_zero_byte(x: usize) -> bool { 13 | x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0 14 | } 15 | 16 | #[cfg(target_pointer_width = "16")] 17 | fn repeat_byte(b: u8) -> usize { 18 | (b as usize) << 8 | b as usize 19 | } 20 | 21 | #[cfg(not(target_pointer_width = "16"))] 22 | fn repeat_byte(b: u8) -> usize { 23 | (b as usize) * (usize::MAX / 255) 24 | } 25 | 26 | pub(super) fn memchr(x: u8, text: &[u8]) -> Option { 27 | if text.len() < 2 * USIZE_BYTES { 28 | return text.iter().position(|elt| *elt == x); 29 | } 30 | 31 | memchr_general_case(x, text) 32 | } 33 | 34 | fn memchr_general_case(x: u8, text: &[u8]) -> Option { 35 | let len = text.len(); 36 | let ptr = text.as_ptr(); 37 | let mut offset = ptr.align_offset(USIZE_BYTES); 38 | 39 | if offset > 0 { 40 | offset = cmp::min(offset, len); 41 | if let Some(index) = text[..offset].iter().position(|elt| *elt == x) { 42 | return Some(index); 43 | } 44 | } 45 | 46 | let repeated_x = repeat_byte(x); 47 | while offset <= len - 2 * USIZE_BYTES { 48 | unsafe { 49 | let u = *(ptr.add(offset) as *const usize); 50 | let v = *(ptr.add(offset + USIZE_BYTES) as *const usize); 51 | 52 | let zu = contains_zero_byte(u ^ 
repeated_x); 53 | let zv = contains_zero_byte(v ^ repeated_x); 54 | if zu || zv { 55 | break; 56 | } 57 | } 58 | offset += USIZE_BYTES * 2; 59 | } 60 | 61 | text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i) 62 | } 63 | 64 | pub(super) fn memrchr(x: u8, text: &[u8]) -> Option { 65 | let len = text.len(); 66 | let ptr = text.as_ptr(); 67 | type Chunk = usize; 68 | 69 | let (min_aligned_offset, max_aligned_offset) = { 70 | let (prefix, _, suffix) = unsafe { text.align_to::<(Chunk, Chunk)>() }; 71 | (prefix.len(), len - suffix.len()) 72 | }; 73 | 74 | let mut offset = max_aligned_offset; 75 | if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) { 76 | return Some(offset + index); 77 | } 78 | 79 | let repeated_x = repeat_byte(x); 80 | let chunk_bytes = mem::size_of::(); 81 | 82 | while offset > min_aligned_offset { 83 | unsafe { 84 | let u = *(ptr.offset(offset as isize - 2 * chunk_bytes as isize) as *const Chunk); 85 | let v = *(ptr.offset(offset as isize - chunk_bytes as isize) as *const Chunk); 86 | 87 | let zu = contains_zero_byte(u ^ repeated_x); 88 | let zv = contains_zero_byte(v ^ repeated_x); 89 | if zu || zv { 90 | break; 91 | } 92 | } 93 | offset -= 2 * chunk_bytes; 94 | } 95 | 96 | text[..offset].iter().rposition(|elt| *elt == x) 97 | } 98 | -------------------------------------------------------------------------------- /src/inherent/v1_50.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] 2 | use alloc::collections::btree_map; 3 | use core::cell::{RefCell, UnsafeCell}; 4 | #[cfg(feature = "std")] 5 | use std::collections::hash_map; 6 | 7 | use easy_ext::ext; 8 | 9 | use crate::inherent::Sealed; 10 | 11 | #[ext] 12 | pub impl bool 13 | where Self: Sealed 14 | { 15 | fn then T>(self, f: F) -> Option { 16 | if self { Some(f()) } else { None } 17 | } 18 | } 19 | 20 | #[cfg(feature = "alloc")] 21 | #[ext] 22 | pub impl<'a, K: Ord + 'a, V: 'a> btree_map::Entry<'a, K, V> 23 | where Self: Sealed> 24 | { 25 | fn or_insert_with_key V>(self, default: F) -> &'a mut V { 26 | match self { 27 | btree_map::Entry::Occupied(entry) => entry.into_mut(), 28 | btree_map::Entry::Vacant(entry) => { 29 | let value = default(entry.key()); 30 | entry.insert(value) 31 | } 32 | } 33 | } 34 | } 35 | 36 | #[cfg(feature = "std")] 37 | #[ext] 38 | pub impl<'a, K: 'a, V: 'a> hash_map::Entry<'a, K, V> 39 | where Self: Sealed> 40 | { 41 | fn or_insert_with_key V>(self, default: F) -> &'a mut V { 42 | match self { 43 | hash_map::Entry::Occupied(entry) => entry.into_mut(), 44 | hash_map::Entry::Vacant(entry) => { 45 | let value = default(entry.key()); 46 | entry.insert(value) 47 | } 48 | } 49 | } 50 | } 51 | 52 | #[ext] 53 | pub impl f32 54 | where Self: Sealed 55 | { 56 | #[must_use = "method returns a new number and does not mutate the original value"] 57 | fn clamp(self, min: f32, max: f32) -> f32 { 58 | assert!(min <= max); 59 | let mut x = self; 60 | if x < min { 61 | x = min; 62 | } 63 | if x > max { 64 | x = max; 65 | } 66 | x 67 | } 68 | } 69 | 70 | #[ext] 71 | pub impl f64 72 | where Self: Sealed 73 | { 74 | #[must_use = "method returns a new number and does not mutate the original value"] 75 | fn clamp(self, min: f64, max: f64) -> f64 { 76 | assert!(min <= max); 77 | let mut x = self; 78 | if x < min { 79 | x = min; 80 | } 81 | if x > max { 82 | x = max; 83 | } 84 | x 85 | } 86 | } 87 | 88 | #[ext] 89 | pub impl T 90 | where Self: Sealed 91 | { 92 | #[must_use] 93 | fn clamp(self, min: Self, max: Self) -> Self 
94 | where Self: Sized { 95 | assert!(min <= max); 96 | if self < min { 97 | min 98 | } else if self > max { 99 | max 100 | } else { 101 | self 102 | } 103 | } 104 | } 105 | 106 | #[ext] 107 | pub impl RefCell 108 | where Self: Sealed> 109 | { 110 | fn take(&self) -> T { 111 | self.replace(Default::default()) 112 | } 113 | } 114 | 115 | #[ext] 116 | pub impl [T] 117 | where Self: Sealed<[T]> 118 | { 119 | fn fill(&mut self, value: T) 120 | where T: Clone { 121 | if let Some((last, elems)) = self.split_last_mut() { 122 | for el in elems { 123 | el.clone_from(&value); 124 | } 125 | 126 | *last = value 127 | } 128 | } 129 | } 130 | 131 | #[ext] 132 | pub impl UnsafeCell 133 | where Self: Sealed> 134 | { 135 | fn get_mut(&mut self) -> &mut T { 136 | unsafe { &mut *self.get() } 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/inherent/v1_40.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] use alloc::vec::Vec; 2 | use core::ops::DerefMut; 3 | #[cfg(feature = "alloc")] use core::ptr; 4 | 5 | use easy_ext::ext; 6 | 7 | use crate::inherent::Sealed; 8 | 9 | #[ext] 10 | pub impl Option 11 | where Self: Sealed> 12 | { 13 | fn as_deref_mut(&mut self) -> Option<&mut T::Target> { 14 | self.as_mut().map(|t| t.deref_mut()) 15 | } 16 | 17 | fn as_deref(&self) -> Option<&T::Target> { 18 | self.as_ref().map(|t| t.deref()) 19 | } 20 | } 21 | 22 | #[ext] 23 | pub impl Option> 24 | where Self: Sealed>> 25 | { 26 | fn flatten(self) -> Option { 27 | self.and_then(core::convert::identity) 28 | } 29 | } 30 | 31 | #[ext] 32 | pub impl f32 33 | where Self: Sealed 34 | { 35 | fn to_be_bytes(self) -> [u8; 4] { 36 | self.to_bits().to_be_bytes() 37 | } 38 | 39 | fn to_le_bytes(self) -> [u8; 4] { 40 | self.to_bits().to_le_bytes() 41 | } 42 | 43 | fn to_ne_bytes(self) -> [u8; 4] { 44 | self.to_bits().to_ne_bytes() 45 | } 46 | 47 | fn from_be_bytes(bytes: [u8; 4]) -> Self { 48 | Self::from_bits(u32::from_be_bytes(bytes)) 49 | } 50 | 51 | fn from_le_bytes(bytes: [u8; 4]) -> Self { 52 | Self::from_bits(u32::from_le_bytes(bytes)) 53 | } 54 | 55 | fn from_ne_bytes(bytes: [u8; 4]) -> Self { 56 | Self::from_bits(u32::from_ne_bytes(bytes)) 57 | } 58 | } 59 | 60 | #[ext] 61 | pub impl f64 62 | where Self: Sealed 63 | { 64 | fn to_be_bytes(self) -> [u8; 8] { 65 | self.to_bits().to_be_bytes() 66 | } 67 | 68 | fn to_le_bytes(self) -> [u8; 8] { 69 | self.to_bits().to_le_bytes() 70 | } 71 | 72 | fn to_ne_bytes(self) -> [u8; 8] { 73 | self.to_bits().to_ne_bytes() 74 | } 75 | 76 | fn from_be_bytes(bytes: [u8; 8]) -> Self { 77 | Self::from_bits(u64::from_be_bytes(bytes)) 78 | } 79 | 80 | fn from_le_bytes(bytes: [u8; 8]) -> Self { 81 | Self::from_bits(u64::from_le_bytes(bytes)) 82 | } 83 | 84 | fn from_ne_bytes(bytes: [u8; 8]) -> Self { 85 | Self::from_bits(u64::from_ne_bytes(bytes)) 86 | } 87 | } 88 | 89 | #[cfg(feature = "alloc")] 90 | #[ext] 91 | pub impl [T] 92 | where Self: Sealed<[T]> 93 | { 94 | fn repeat(&self, n: usize) -> Vec { 95 | if n == 0 { 96 | return Vec::new(); 97 | } 98 | 99 | let mut buf = Vec::with_capacity(self.len().checked_mul(n).expect("capacity overflow")); 100 | 101 | buf.extend(self); 102 | { 103 | let mut m = n >> 1; 104 | while m > 0 { 105 | unsafe { 106 | ptr::copy_nonoverlapping( 107 | buf.as_ptr(), 108 | (buf.as_mut_ptr() as *mut T).add(buf.len()), 109 | buf.len(), 110 | ); 111 | let buf_len = buf.len(); 112 | buf.set_len(buf_len * 2); 113 | } 114 | 115 | m >>= 1; 116 | } 117 | } 118 | 
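// After the doubling loop above, `buf` holds the largest power-of-two number of
// repetitions of `self` that does not exceed `n`; the remainder handling below
// copies the final partial run so that all `self.len() * n` elements are
// initialized.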
119 | let rem_len = self.len() * n - buf.len(); 120 | if rem_len > 0 { 121 | unsafe { 122 | ptr::copy_nonoverlapping( 123 | buf.as_ptr(), 124 | (buf.as_mut_ptr() as *mut T).add(buf.len()), 125 | rem_len, 126 | ); 127 | let buf_cap = buf.capacity(); 128 | buf.set_len(buf_cap); 129 | } 130 | } 131 | buf 132 | } 133 | } 134 | -------------------------------------------------------------------------------- /src/inherent/v1_44.rs: -------------------------------------------------------------------------------- 1 | #![allow(deprecated)] 2 | 3 | use core::alloc::{Layout, LayoutErr}; 4 | use core::cmp; 5 | use core::mem::{self, transmute}; 6 | #[cfg(feature = "std")] use std::ffi::OsString; 7 | #[cfg(feature = "std")] use std::path::PathBuf; 8 | 9 | use easy_ext::ext; 10 | 11 | use crate::inherent::Sealed; 12 | 13 | #[cfg(feature = "std")] 14 | #[ext] 15 | pub impl PathBuf 16 | where Self: Sealed 17 | { 18 | fn with_capacity(capacity: usize) -> PathBuf { 19 | OsString::with_capacity(capacity).into() 20 | } 21 | 22 | fn capacity(&self) -> usize { 23 | unsafe { transmute::<_, &OsString>(self) }.capacity() 24 | } 25 | 26 | fn clear(&mut self) { 27 | unsafe { transmute::<_, &mut OsString>(self) }.clear() 28 | } 29 | 30 | fn reserve(&mut self, additional: usize) { 31 | unsafe { transmute::<_, &mut OsString>(self) }.reserve(additional) 32 | } 33 | 34 | fn reserve_exact(&mut self, additional: usize) { 35 | unsafe { transmute::<_, &mut OsString>(self) }.reserve_exact(additional) 36 | } 37 | 38 | fn shrink_to_fit(&mut self) { 39 | unsafe { transmute::<_, &mut OsString>(self) }.shrink_to_fit() 40 | } 41 | } 42 | 43 | #[ext] 44 | pub impl Layout 45 | where Self: Sealed 46 | { 47 | fn align_to(&self, align: usize) -> Result { 48 | Layout::from_size_align(self.size(), cmp::max(self.align(), align)) 49 | } 50 | 51 | fn pad_to_align(&self) -> Layout { 52 | let pad = padding_needed_for(self, self.align()); 53 | let new_size = self.size() + pad; 54 | Layout::from_size_align(new_size, self.align()).unwrap() 55 | } 56 | 57 | fn array(n: usize) -> Result { 58 | repeat(&Layout::new::(), n).map(|(k, offs)| { 59 | debug_assert!(offs == mem::size_of::()); 60 | k 61 | }) 62 | } 63 | 64 | fn extend(&self, next: Self) -> Result<(Layout, usize), LayoutErr> { 65 | let new_align = cmp::max(self.align(), next.align()); 66 | let pad = padding_needed_for(self, next.align()); 67 | 68 | let offset = self.size().checked_add(pad).ok_or(layout_err())?; 69 | let new_size = offset.checked_add(next.size()).ok_or(layout_err())?; 70 | 71 | let layout = Layout::from_size_align(new_size, new_align)?; 72 | Ok((layout, offset)) 73 | } 74 | } 75 | 76 | fn padding_needed_for(zelf: &Layout, align: usize) -> usize { 77 | let len = zelf.size(); 78 | let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1); 79 | len_rounded_up.wrapping_sub(len) 80 | } 81 | 82 | fn repeat(zelf: &Layout, n: usize) -> Result<(Layout, usize), LayoutErr> { 83 | let padded_size = zelf.size() + padding_needed_for(zelf, zelf.align()); 84 | let alloc_size = padded_size.checked_mul(n).ok_or(layout_err())?; 85 | 86 | unsafe { Ok((Layout::from_size_align_unchecked(alloc_size, zelf.align()), padded_size)) } 87 | } 88 | 89 | fn layout_err() -> LayoutErr { 90 | unsafe { transmute(()) } 91 | } 92 | 93 | mod sealed { 94 | #[allow(unreachable_pub)] 95 | pub trait FloatToInt { 96 | unsafe fn to_int_unchecked(self) -> Int; 97 | } 98 | 99 | macro_rules! 
impl_float_to_int { 100 | ($float:ident => $($int:ident)+) => {$( 101 | impl FloatToInt<$int> for $float { 102 | unsafe fn to_int_unchecked(self) -> $int { 103 | self as $int 104 | } 105 | } 106 | )+} 107 | } 108 | 109 | impl_float_to_int!(f32 => u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize); 110 | impl_float_to_int!(f64 => u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize); 111 | } 112 | 113 | #[ext] 114 | pub impl f32 115 | where Self: Sealed 116 | { 117 | unsafe fn to_int_unchecked(self) -> Int 118 | where f32: sealed::FloatToInt { 119 | sealed::FloatToInt::to_int_unchecked(self) 120 | } 121 | } 122 | 123 | #[ext] 124 | pub impl f64 125 | where Self: Sealed 126 | { 127 | unsafe fn to_int_unchecked(self) -> Int 128 | where f64: sealed::FloatToInt { 129 | sealed::FloatToInt::to_int_unchecked(self) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Conduct 2 | 3 | - We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic. 4 | - Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all. 5 | - Please be kind and courteous. There's no need to be mean or rude. 6 | - Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer. 7 | - Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works. 8 | - We will exclude you from interaction if you insult, demean, or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the [Citizen Code of Conduct]; if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don't tolerate behavior that excludes people in socially marginalized groups. 9 | - Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or a member of the [moderation team] immediately. Whether you're a regular contributor or a newcomer, we care about making this community a safe place for you and we've got your back. 10 | - Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome. 11 | 12 | # Moderation 13 | 14 | These are the policies for upholding our community's standards of conduct. If you feel that a thread needs moderation, please contact a member of the [moderation team]. 15 | 16 | 1. Remarks that violate the Rust standards of conduct, including hateful, hurtful, oppressive, or exclusionary remarks, are not allowed. (Cursing is allowed, but never targeting another user, and never in a hateful manner.) 17 | 1. Remarks that moderators find inappropriate, whether listed in the code of conduct or not, are also not allowed. 18 | 1. Moderators will first respond to such remarks with a warning. 19 | 1. If the warning is unheeded, the user will be "kicked", i.e., kicked out of the communication channel to cool off. 20 | 1. 
If the user comes back and continues to make trouble, they will be banned, i.e., indefinitely excluded. 21 | 1. Moderators may choose at their discretion to un-ban the user if it was a first offense and they offer the offended party a genuine apology. 22 | 1. If a moderator bans someone and you think it was unjustified, please take it up with that moderator, or with a different moderator, **in private**. Complaints about bans in-channel are not allowed. 23 | 1. Moderators are held to a higher standard than other community members. If a moderator creates an inappropriate situation, they should expect less leeway than others. 24 | 25 | In the Rust community we strive to go the extra step to look out for each other. Don't just aim to be technically unimpeachable, try to be your best self. In particular, avoid flirting with offensive or sensitive issues, particularly if they're off-topic; this all too often leads to unnecessary fights, hurt feelings, and damaged trust; worse, it can drive people away from the community entirely. 26 | 27 | And if someone takes issue with something you said or did, resist the urge to be defensive. Just stop doing what it was they complained about and apologize. Even if you feel you were misinterpreted or unfairly accused, chances are good there was something you could've communicated better — remember that it's your responsibility to make your fellow Rustaceans comfortable. Everyone wants to get along and we are all here first and foremost because we want to talk about cool technology. You will find that people will be eager to assume good intent and forgive as long as you earn their trust. 28 | 29 | The enforcement policies listed above apply to all official Rust venues; including GitHub repositories and Matrix. 30 | 31 | _Adopted from the [Rust Code of Conduct]._ 32 | 33 | [Citizen Code of Conduct]: http://citizencodeofconduct.org/ 34 | [moderation team]: https://github.com/orgs/time-rs/teams/moderators/members 35 | [Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct 36 | -------------------------------------------------------------------------------- /src/inherent/v1_51.rs: -------------------------------------------------------------------------------- 1 | mod slice; 2 | mod str; 3 | 4 | #[cfg(feature = "alloc")] use alloc::sync::Arc; 5 | use core::iter::Peekable; 6 | #[cfg(feature = "alloc")] use core::mem; 7 | use core::task::Poll; 8 | #[cfg(feature = "std")] 9 | use std::io::{Seek, SeekFrom}; 10 | 11 | use easy_ext::ext; 12 | 13 | use crate::inherent::Sealed; 14 | use crate::pattern::Pattern; 15 | 16 | #[cfg(feature = "alloc")] 17 | #[ext] 18 | pub impl Arc 19 | where Self: Sealed> 20 | { 21 | unsafe fn decrement_strong_count(ptr: *const T) { 22 | drop(Arc::from_raw(ptr)); 23 | } 24 | 25 | unsafe fn increment_strong_count(ptr: *const T) { 26 | let arc = mem::ManuallyDrop::new(Arc::::from_raw(ptr)); 27 | let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); 28 | } 29 | } 30 | 31 | #[ext] 32 | pub impl Peekable 33 | where Self: Sealed> 34 | { 35 | fn next_if(&mut self, func: impl FnOnce(&I::Item) -> bool) -> Option { 36 | if func(self.peek()?) 
{ self.next() } else { None } 37 | } 38 | 39 | fn next_if_eq(&mut self, expected: &T) -> Option 40 | where 41 | T: ?Sized, 42 | I::Item: PartialEq, 43 | { 44 | self.next_if(|next| next == expected) 45 | } 46 | } 47 | 48 | #[cfg(feature = "std")] 49 | #[ext] 50 | pub impl T 51 | where Self: Sealed 52 | { 53 | fn stream_position(&mut self) -> std::io::Result { 54 | self.seek(SeekFrom::Current(0)) 55 | } 56 | } 57 | 58 | #[ext] 59 | pub impl [T] 60 | where Self: Sealed<[T]> 61 | { 62 | fn fill_with(&mut self, mut f: F) 63 | where F: FnMut() -> T { 64 | for el in self { 65 | *el = f(); 66 | } 67 | } 68 | 69 | fn split_inclusive_mut(&mut self, pred: F) -> slice::SplitInclusiveMut<'_, T, F> 70 | where F: FnMut(&T) -> bool { 71 | slice::SplitInclusiveMut::new(self, pred) 72 | } 73 | 74 | fn split_inclusive(&self, pred: F) -> slice::SplitInclusive<'_, T, F> 75 | where F: FnMut(&T) -> bool { 76 | slice::SplitInclusive::new(self, pred) 77 | } 78 | 79 | #[must_use = "returns the subslice without modifying the original"] 80 | fn strip_prefix(&self, prefix: &[T]) -> Option<&[T]> 81 | where T: PartialEq { 82 | let n = prefix.len(); 83 | if n <= self.len() { 84 | let (head, tail) = self.split_at(n); 85 | if head == prefix { 86 | return Some(tail); 87 | } 88 | } 89 | None 90 | } 91 | 92 | #[must_use = "returns the subslice without modifying the original"] 93 | fn strip_suffix(&self, suffix: &[T]) -> Option<&[T]> 94 | where T: PartialEq { 95 | let (len, n) = (self.len(), suffix.len()); 96 | if n <= len { 97 | let (head, tail) = self.split_at(len - n); 98 | if tail == suffix { 99 | return Some(head); 100 | } 101 | } 102 | None 103 | } 104 | } 105 | 106 | macro_rules! impl_integer { 107 | ($($int:ty => $uint:ty)+) => {$( 108 | #[ext] 109 | pub impl $int where Self: Sealed<$int>, { 110 | fn unsigned_abs(self) -> $uint { 111 | self.wrapping_abs() as $uint 112 | } 113 | } 114 | )+}; 115 | } 116 | 117 | impl_integer! 
{ 118 | i8 => u8 119 | i16 => u16 120 | i32 => u32 121 | i64 => u64 122 | i128 => u128 123 | } 124 | 125 | #[ext] 126 | pub impl Poll> 127 | where Self: Sealed>> 128 | { 129 | fn map_ok(self, f: F) -> Poll> 130 | where F: FnOnce(T) -> U { 131 | match self { 132 | Poll::Ready(Ok(t)) => Poll::Ready(Ok(f(t))), 133 | Poll::Ready(Err(e)) => Poll::Ready(Err(e)), 134 | Poll::Pending => Poll::Pending, 135 | } 136 | } 137 | 138 | fn map_err(self, f: F) -> Poll> 139 | where F: FnOnce(E) -> U { 140 | match self { 141 | Poll::Ready(Ok(t)) => Poll::Ready(Ok(t)), 142 | Poll::Ready(Err(e)) => Poll::Ready(Err(f(e))), 143 | Poll::Pending => Poll::Pending, 144 | } 145 | } 146 | } 147 | 148 | #[ext] 149 | pub impl str 150 | where Self: Sealed 151 | { 152 | fn split_inclusive<'a, P: Pattern<'a>>(&'a self, pat: P) -> str::SplitInclusive<'a, P> { 153 | str::SplitInclusive(str::SplitInternal { 154 | start: 0, 155 | end: self.len(), 156 | matcher: pat.into_searcher(self), 157 | allow_trailing_empty: false, 158 | finished: false, 159 | }) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /src/inherent/v1_51/slice.rs: -------------------------------------------------------------------------------- 1 | use core::iter::FusedIterator; 2 | use core::{fmt, mem}; 3 | 4 | pub struct SplitInclusiveMut<'a, T, P> 5 | where P: FnMut(&T) -> bool 6 | { 7 | v: &'a mut [T], 8 | pred: P, 9 | finished: bool, 10 | } 11 | 12 | impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitInclusiveMut<'a, T, P> { 13 | pub(super) fn new(slice: &'a mut [T], pred: P) -> Self { 14 | Self { v: slice, pred, finished: false } 15 | } 16 | } 17 | 18 | impl fmt::Debug for SplitInclusiveMut<'_, T, P> 19 | where P: FnMut(&T) -> bool 20 | { 21 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 22 | f.debug_struct("SplitInclusiveMut") 23 | .field("v", &self.v) 24 | .field("finished", &self.finished) 25 | .finish() 26 | } 27 | } 28 | 29 | impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P> 30 | where P: FnMut(&T) -> bool 31 | { 32 | type Item = &'a mut [T]; 33 | 34 | fn next(&mut self) -> Option<&'a mut [T]> { 35 | if self.finished { 36 | return None; 37 | } 38 | 39 | let idx_opt = { 40 | let pred = &mut self.pred; 41 | self.v.iter().position(|x| (*pred)(x)) 42 | }; 43 | let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len()); 44 | if idx == self.v.len() { 45 | self.finished = true; 46 | } 47 | let tmp = mem::replace(&mut self.v, &mut []); 48 | let (head, tail) = tmp.split_at_mut(idx); 49 | self.v = tail; 50 | Some(head) 51 | } 52 | 53 | fn size_hint(&self) -> (usize, Option) { 54 | if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) } 55 | } 56 | } 57 | 58 | impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P> 59 | where P: FnMut(&T) -> bool 60 | { 61 | fn next_back(&mut self) -> Option<&'a mut [T]> { 62 | if self.finished { 63 | return None; 64 | } 65 | 66 | let idx_opt = if self.v.is_empty() { 67 | None 68 | } else { 69 | let pred = &mut self.pred; 70 | let remainder = &self.v[..(self.v.len() - 1)]; 71 | remainder.iter().rposition(|x| (*pred)(x)) 72 | }; 73 | let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0); 74 | if idx == 0 { 75 | self.finished = true; 76 | } 77 | let tmp = mem::replace(&mut self.v, &mut []); 78 | let (head, tail) = tmp.split_at_mut(idx); 79 | self.v = head; 80 | Some(tail) 81 | } 82 | } 83 | 84 | impl FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {} 85 | 86 | pub struct SplitInclusive<'a, T, P> 87 | where P: FnMut(&T) 
-> bool 88 | { 89 | v: &'a [T], 90 | pred: P, 91 | finished: bool, 92 | } 93 | 94 | impl<'a, T: 'a, P: FnMut(&T) -> bool> SplitInclusive<'a, T, P> { 95 | pub(super) fn new(slice: &'a [T], pred: P) -> Self { 96 | Self { v: slice, pred, finished: false } 97 | } 98 | } 99 | 100 | impl fmt::Debug for SplitInclusive<'_, T, P> 101 | where P: FnMut(&T) -> bool 102 | { 103 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 104 | f.debug_struct("SplitInclusive") 105 | .field("v", &self.v) 106 | .field("finished", &self.finished) 107 | .finish() 108 | } 109 | } 110 | 111 | impl Clone for SplitInclusive<'_, T, P> 112 | where P: Clone + FnMut(&T) -> bool 113 | { 114 | fn clone(&self) -> Self { 115 | SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished } 116 | } 117 | } 118 | 119 | impl<'a, T, P> Iterator for SplitInclusive<'a, T, P> 120 | where P: FnMut(&T) -> bool 121 | { 122 | type Item = &'a [T]; 123 | 124 | fn next(&mut self) -> Option<&'a [T]> { 125 | if self.finished { 126 | return None; 127 | } 128 | 129 | let idx = 130 | self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len()); 131 | if idx == self.v.len() { 132 | self.finished = true; 133 | } 134 | let ret = Some(&self.v[..idx]); 135 | self.v = &self.v[idx..]; 136 | ret 137 | } 138 | 139 | fn size_hint(&self) -> (usize, Option) { 140 | if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) } 141 | } 142 | } 143 | 144 | impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P> 145 | where P: FnMut(&T) -> bool 146 | { 147 | fn next_back(&mut self) -> Option<&'a [T]> { 148 | if self.finished { 149 | return None; 150 | } 151 | 152 | let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] }; 153 | let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0); 154 | if idx == 0 { 155 | self.finished = true; 156 | } 157 | let ret = Some(&self.v[idx..]); 158 | self.v = &self.v[..idx]; 159 | ret 160 | } 161 | } 162 | 163 | impl FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {} 164 | -------------------------------------------------------------------------------- /src/free/v1_59/available_parallelism.rs: -------------------------------------------------------------------------------- 1 | use core::num::NonZeroUsize; 2 | use std::io; 3 | 4 | #[cfg(windows)] 5 | pub fn available_parallelism() -> io::Result { 6 | use core::ffi::c_void; 7 | use std::os::raw::c_ulong; 8 | 9 | #[link(name = "kernel32")] 10 | extern "system" { 11 | fn GetSystemInfo(lpSystemInfo: *mut SYSTEM_INFO); 12 | } 13 | 14 | #[repr(C)] 15 | #[allow(non_snake_case)] 16 | struct SYSTEM_INFO { 17 | wProcessorArchitecture: u16, 18 | wReserved: u16, 19 | dwPageSize: c_ulong, 20 | lpMinimumApplicationAddress: *mut c_void, 21 | lpMaximumApplicationAddress: *mut c_void, 22 | dwActiveProcessorMask: usize, 23 | dwNumberOfProcessors: c_ulong, 24 | dwProcessorType: c_ulong, 25 | dwAllocationGranularity: c_ulong, 26 | wProcessorLevel: u16, 27 | wProcessorRevision: u16, 28 | } 29 | 30 | let res = unsafe { 31 | let mut sysinfo: SYSTEM_INFO = core::mem::zeroed(); 32 | GetSystemInfo(&mut sysinfo); 33 | sysinfo.dwNumberOfProcessors as usize 34 | }; 35 | match res { 36 | 0 => Err(io::Error::new( 37 | io::ErrorKind::NotFound, 38 | "The number of hardware threads is not known for the target platform", 39 | )), 40 | cpus => Ok(unsafe { NonZeroUsize::new_unchecked(cpus) }), 41 | } 42 | } 43 | 44 | #[cfg(any(target_os = "freebsd", target_os = 
"dragonfly", target_os = "netbsd"))] 45 | pub fn available_parallelism() -> io::Result { 46 | use core::ptr; 47 | 48 | let mut cpus: libc::c_uint = 0; 49 | let mut cpus_size = core::mem::size_of_val(&cpus); 50 | 51 | unsafe { 52 | cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as libc::c_uint; 53 | } 54 | 55 | if cpus < 1 { 56 | let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; 57 | let res = unsafe { 58 | libc::sysctl( 59 | mib.as_mut_ptr(), 60 | 2, 61 | &mut cpus as *mut _ as *mut _, 62 | &mut cpus_size as *mut _ as *mut _, 63 | ptr::null_mut(), 64 | 0, 65 | ) 66 | }; 67 | 68 | if res == -1 { 69 | return Err(io::Error::last_os_error()); 70 | } else if cpus == 0 { 71 | return Err(io::Error::new( 72 | io::ErrorKind::NotFound, 73 | "The number of hardware threads is not known for the target platform", 74 | )); 75 | } 76 | } 77 | Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) 78 | } 79 | 80 | #[cfg(any( 81 | target_os = "android", 82 | target_os = "emscripten", 83 | target_os = "fuchsia", 84 | target_os = "ios", 85 | target_os = "linux", 86 | target_os = "macos", 87 | target_os = "solaris", 88 | target_os = "illumos", 89 | ))] 90 | pub fn available_parallelism() -> io::Result { 91 | #[cfg(any(target_os = "android", target_os = "linux"))] 92 | { 93 | let mut set: libc::cpu_set_t = unsafe { core::mem::zeroed() }; 94 | if unsafe { libc::sched_getaffinity(0, core::mem::size_of::(), &mut set) } 95 | == 0 96 | { 97 | let count = unsafe { libc::CPU_COUNT(&set) }; 98 | return Ok(unsafe { NonZeroUsize::new_unchecked(count as usize) }); 99 | } 100 | } 101 | match unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) } { 102 | -1 => Err(io::Error::last_os_error()), 103 | 0 => Err(io::Error::new( 104 | io::ErrorKind::NotFound, 105 | "The number of hardware threads is not known for the target platform", 106 | )), 107 | cpus => Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }), 108 | } 109 | } 110 | 111 | #[cfg(target_os = "openbsd")] 112 | pub fn available_parallelism() -> io::Result { 113 | use crate::ptr; 114 | 115 | let mut cpus: libc::c_uint = 0; 116 | let mut cpus_size = crate::mem::size_of_val(&cpus); 117 | let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; 118 | 119 | let res = unsafe { 120 | libc::sysctl( 121 | mib.as_mut_ptr(), 122 | 2, 123 | &mut cpus as *mut _ as *mut _, 124 | &mut cpus_size as *mut _ as *mut _, 125 | ptr::null_mut(), 126 | 0, 127 | ) 128 | }; 129 | 130 | if res == -1 { 131 | Err(io::Error::last_os_error()) 132 | } else if cpus == 0 { 133 | Err(io::Error::new( 134 | io::ErrorKind::NotFound, 135 | "The number of hardware threads is not known for the target platform", 136 | )) 137 | } else { 138 | Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) 139 | } 140 | } 141 | 142 | #[cfg(target_os = "haiku")] 143 | pub fn available_parallelism() -> io::Result { 144 | unsafe { 145 | let mut sinfo: libc::system_info = crate::mem::zeroed(); 146 | let res = libc::get_system_info(&mut sinfo); 147 | 148 | if res != libc::B_OK { 149 | return Err(io::Error::new( 150 | io::ErrorKind::NotFound, 151 | "The number of hardware threads is not known for the target platform", 152 | )); 153 | } 154 | 155 | Ok(NonZeroUsize::new_unchecked(sinfo.cpu_count as usize)) 156 | } 157 | } 158 | 159 | #[cfg(not(any( 160 | windows, 161 | target_os = "freebsd", 162 | target_os = "dragonfly", 163 | target_os = "netbsd", 164 | target_os = "android", 165 | target_os = "emscripten", 166 | target_os = "fuchsia", 167 | target_os = "ios", 168 | target_os = "linux", 169 | target_os = "macos", 170 
| target_os = "solaris", 171 | target_os = "illumos", 172 | target_os = "openbsd", 173 | target_os = "haiku", 174 | )))] 175 | pub fn available_parallelism() -> io::Result { 176 | Err(io::Error::new( 177 | io::ErrorKind::Unsupported, 178 | "Getting the number of hardware threads is not supported on the target platform", 179 | )) 180 | } 181 | -------------------------------------------------------------------------------- /src/inherent/v1_53.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] use alloc::rc::Rc; 2 | #[cfg(feature = "alloc")] use alloc::vec::Vec; 3 | use core::cmp::Ordering; 4 | #[cfg(feature = "std")] use core::mem::transmute; 5 | use core::num::FpCategory; 6 | #[cfg(feature = "alloc")] use core::ops; 7 | use core::time::Duration; 8 | use core::{mem, u64}; 9 | #[cfg(feature = "std")] 10 | use std::ffi::{OsStr, OsString}; 11 | 12 | use easy_ext::ext; 13 | 14 | use crate::inherent::Sealed; 15 | 16 | #[ext] 17 | pub impl Ordering 18 | where Self: Sealed 19 | { 20 | #[must_use] 21 | fn is_eq(self) -> bool { 22 | self == Ordering::Equal 23 | } 24 | #[must_use] 25 | fn is_ne(self) -> bool { 26 | self != Ordering::Equal 27 | } 28 | #[must_use] 29 | fn is_lt(self) -> bool { 30 | self == Ordering::Less 31 | } 32 | #[must_use] 33 | fn is_gt(self) -> bool { 34 | self == Ordering::Greater 35 | } 36 | #[must_use] 37 | fn is_le(self) -> bool { 38 | self != Ordering::Greater 39 | } 40 | #[must_use] 41 | fn is_ge(self) -> bool { 42 | self != Ordering::Less 43 | } 44 | } 45 | 46 | #[ext] 47 | pub impl Option 48 | where Self: Sealed> 49 | { 50 | fn insert(&mut self, value: T) -> &mut T { 51 | *self = Some(value); 52 | 53 | match self { 54 | Some(v) => v, 55 | None => unsafe { core::hint::unreachable_unchecked() }, 56 | } 57 | } 58 | } 59 | 60 | #[ext] 61 | pub impl f32 62 | where Self: Sealed 63 | { 64 | fn is_subnormal(self) -> bool { 65 | self.classify() == FpCategory::Subnormal 66 | } 67 | } 68 | 69 | #[ext] 70 | pub impl f64 71 | where Self: Sealed 72 | { 73 | fn is_subnormal(self) -> bool { 74 | self.classify() == FpCategory::Subnormal 75 | } 76 | } 77 | 78 | #[ext] 79 | pub impl Duration 80 | where Self: Sealed 81 | { 82 | const ZERO: Self = Self::from_nanos(0); 83 | 84 | fn is_zero(&self) -> bool { 85 | *self == Self::ZERO 86 | } 87 | fn saturating_add(self, rhs: Self) -> Self { 88 | match self.checked_add(rhs) { 89 | Some(res) => res, 90 | None => Duration::from_secs(u64::MAX) + Duration::from_nanos(999_999_999), 91 | } 92 | } 93 | fn saturating_sub(self, rhs: Self) -> Self { 94 | match self.checked_sub(rhs) { 95 | Some(res) => res, 96 | None => Self::ZERO, 97 | } 98 | } 99 | fn saturating_mul(self, rhs: u32) -> Self { 100 | match self.checked_mul(rhs) { 101 | Some(res) => res, 102 | None => Duration::from_secs(u64::MAX) + Duration::from_nanos(999_999_999), 103 | } 104 | } 105 | } 106 | 107 | macro_rules! 
impl_integer { 108 | ($($t:ty)+) => {$( 109 | #[ext] 110 | pub impl $t where Self: Sealed<$t>, { 111 | const BITS: u32 = mem::size_of::<$t>() as u32 * 8; 112 | } 113 | )+}; 114 | } 115 | 116 | impl_integer![u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize]; 117 | 118 | #[cfg(feature = "alloc")] 119 | #[ext] 120 | pub impl Rc 121 | where Self: Sealed> 122 | { 123 | unsafe fn increment_strong_count(ptr: *const T) { 124 | let rc = mem::ManuallyDrop::new(Rc::::from_raw(ptr)); 125 | let _rc_clone = rc.clone(); 126 | } 127 | unsafe fn decrement_strong_count(ptr: *const T) { 128 | drop(Rc::from_raw(ptr)); 129 | } 130 | } 131 | 132 | #[cfg(feature = "std")] 133 | #[ext] 134 | pub impl OsStr 135 | where Self: Sealed 136 | { 137 | fn make_ascii_lowercase(&mut self) { 138 | unsafe { transmute::<_, &mut [u8]>(self).make_ascii_lowercase() } 139 | } 140 | fn make_ascii_uppercase(&mut self) { 141 | unsafe { transmute::<_, &mut [u8]>(self).make_ascii_uppercase() } 142 | } 143 | fn to_ascii_lowercase(&self) -> OsString { 144 | unsafe { transmute(transmute::<_, &[u8]>(self).to_ascii_lowercase()) } 145 | } 146 | fn to_ascii_uppercase(&self) -> OsString { 147 | unsafe { transmute(transmute::<_, &[u8]>(self).to_ascii_uppercase()) } 148 | } 149 | fn is_ascii(&self) -> bool { 150 | unsafe { transmute::<_, &[u8]>(self).is_ascii() } 151 | } 152 | fn eq_ignore_ascii_case>(&self, other: S) -> bool { 153 | unsafe { transmute::<_, &[u8]>(self).eq_ignore_ascii_case(transmute(other.as_ref())) } 154 | } 155 | } 156 | 157 | #[cfg(feature = "alloc")] 158 | #[ext] 159 | pub impl Vec 160 | where Self: Sealed> 161 | { 162 | fn extend_from_within< 163 | R: ops::RangeBounds + core::slice::SliceIndex<[T], Output = [T]>, 164 | >( 165 | &mut self, 166 | src: R, 167 | ) { 168 | let start = match src.start_bound() { 169 | ops::Bound::Included(&start) => start, 170 | ops::Bound::Excluded(start) => { 171 | start.checked_add(1).expect("attempted to index slice from after maximum usize") 172 | } 173 | ops::Bound::Unbounded => 0, 174 | }; 175 | let end = match src.end_bound() { 176 | ops::Bound::Included(end) => { 177 | end.checked_add(1).expect("attempted to index slice up to maximum usize") 178 | } 179 | ops::Bound::Excluded(&end) => end, 180 | ops::Bound::Unbounded => self.len(), 181 | }; 182 | if start > end { 183 | panic!("slice index starts at {} but ends at {}", start, end); 184 | } 185 | if end > self.len() { 186 | panic!("range end index {} out of range for slice of length {}", end, self.len()); 187 | } 188 | self.reserve(end - start); 189 | 190 | let ptr = self.as_mut_ptr(); 191 | let spare_ptr = unsafe { ptr.add(self.len()) } as _; 192 | 193 | let this = unsafe { core::slice::from_raw_parts_mut(ptr, self.len()) }; 194 | let spare = 195 | unsafe { core::slice::from_raw_parts_mut(spare_ptr, self.capacity() - self.len()) }; 196 | 197 | unsafe { this.get_unchecked(src) } 198 | .iter() 199 | .cloned() 200 | .zip(spare.iter_mut()) 201 | .map(|(src, dst)| *dst = core::mem::MaybeUninit::new(src)) 202 | .for_each(|_| unsafe { self.set_len(self.len() + 1) }); 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /src/inherent/v1_54.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "alloc")] 2 | use alloc::collections::btree_map; 3 | #[cfg(feature = "alloc")] 4 | use alloc::collections::VecDeque; 5 | #[cfg(feature = "alloc")] use core::cmp::Ordering; 6 | #[cfg(feature = "alloc")] 7 | use core::iter::FusedIterator; 8 | #[cfg(feature 
= "std")] 9 | use std::collections::hash_map; 10 | 11 | #[cfg(feature = "alloc")] use easy_ext::ext; 12 | 13 | #[cfg(feature = "alloc")] 14 | use crate::inherent::Sealed; 15 | 16 | #[cfg(feature = "alloc")] 17 | #[ext] 18 | pub impl btree_map::BTreeMap 19 | where Self: Sealed> 20 | { 21 | fn into_keys(self) -> BTreeMapIntoKeys { 22 | BTreeMapIntoKeys { inner: self.into_iter() } 23 | } 24 | fn into_values(self) -> BTreeMapIntoValues { 25 | BTreeMapIntoValues { inner: self.into_iter() } 26 | } 27 | } 28 | 29 | #[cfg(feature = "alloc")] 30 | #[derive(Debug)] 31 | pub struct BTreeMapIntoKeys { 32 | inner: btree_map::IntoIter, 33 | } 34 | 35 | #[cfg(feature = "alloc")] 36 | #[derive(Debug)] 37 | pub struct BTreeMapIntoValues { 38 | inner: btree_map::IntoIter, 39 | } 40 | 41 | #[cfg(feature = "alloc")] 42 | impl Iterator for BTreeMapIntoKeys { 43 | type Item = K; 44 | 45 | fn next(&mut self) -> Option { 46 | self.inner.next().map(|(k, _)| k) 47 | } 48 | fn size_hint(&self) -> (usize, Option) { 49 | self.inner.size_hint() 50 | } 51 | fn last(mut self) -> Option { 52 | self.next_back() 53 | } 54 | fn min(mut self) -> Option { 55 | self.next() 56 | } 57 | fn max(mut self) -> Option { 58 | self.next_back() 59 | } 60 | } 61 | 62 | #[cfg(feature = "alloc")] 63 | impl DoubleEndedIterator for BTreeMapIntoKeys { 64 | fn next_back(&mut self) -> Option { 65 | self.inner.next_back().map(|(k, _)| k) 66 | } 67 | } 68 | 69 | #[cfg(feature = "alloc")] 70 | impl ExactSizeIterator for BTreeMapIntoKeys { 71 | fn len(&self) -> usize { 72 | self.inner.len() 73 | } 74 | } 75 | 76 | #[cfg(feature = "alloc")] 77 | impl FusedIterator for BTreeMapIntoKeys {} 78 | 79 | #[cfg(feature = "alloc")] 80 | impl Iterator for BTreeMapIntoValues { 81 | type Item = V; 82 | 83 | fn next(&mut self) -> Option { 84 | self.inner.next().map(|(_, v)| v) 85 | } 86 | fn size_hint(&self) -> (usize, Option) { 87 | self.inner.size_hint() 88 | } 89 | fn last(mut self) -> Option { 90 | self.next_back() 91 | } 92 | } 93 | 94 | #[cfg(feature = "alloc")] 95 | impl DoubleEndedIterator for BTreeMapIntoValues { 96 | fn next_back(&mut self) -> Option { 97 | self.inner.next_back().map(|(_, v)| v) 98 | } 99 | } 100 | 101 | #[cfg(feature = "alloc")] 102 | impl ExactSizeIterator for BTreeMapIntoValues { 103 | fn len(&self) -> usize { 104 | self.inner.len() 105 | } 106 | } 107 | 108 | #[cfg(feature = "alloc")] 109 | impl FusedIterator for BTreeMapIntoValues {} 110 | 111 | #[cfg(feature = "std")] 112 | #[ext] 113 | pub impl hash_map::HashMap 114 | where Self: Sealed> 115 | { 116 | fn into_keys(self) -> HashMapIntoKeys { 117 | HashMapIntoKeys { inner: self.into_iter() } 118 | } 119 | fn into_values(self) -> HashMapIntoValues { 120 | HashMapIntoValues { inner: self.into_iter() } 121 | } 122 | } 123 | 124 | #[cfg(feature = "std")] 125 | #[derive(Debug)] 126 | pub struct HashMapIntoKeys { 127 | inner: hash_map::IntoIter, 128 | } 129 | 130 | #[cfg(feature = "std")] 131 | #[derive(Debug)] 132 | pub struct HashMapIntoValues { 133 | inner: hash_map::IntoIter, 134 | } 135 | 136 | #[cfg(feature = "std")] 137 | impl Iterator for HashMapIntoKeys { 138 | type Item = K; 139 | 140 | fn next(&mut self) -> Option { 141 | self.inner.next().map(|(k, _)| k) 142 | } 143 | fn size_hint(&self) -> (usize, Option) { 144 | self.inner.size_hint() 145 | } 146 | } 147 | #[cfg(feature = "std")] 148 | impl ExactSizeIterator for HashMapIntoKeys { 149 | fn len(&self) -> usize { 150 | self.inner.len() 151 | } 152 | } 153 | #[cfg(feature = "std")] 154 | impl FusedIterator for HashMapIntoKeys 
{} 155 | #[cfg(feature = "std")] 156 | impl Iterator for HashMapIntoValues { 157 | type Item = V; 158 | 159 | fn next(&mut self) -> Option { 160 | self.inner.next().map(|(_, v)| v) 161 | } 162 | fn size_hint(&self) -> (usize, Option) { 163 | self.inner.size_hint() 164 | } 165 | } 166 | #[cfg(feature = "std")] 167 | impl ExactSizeIterator for HashMapIntoValues { 168 | fn len(&self) -> usize { 169 | self.inner.len() 170 | } 171 | } 172 | #[cfg(feature = "std")] 173 | impl FusedIterator for HashMapIntoValues {} 174 | 175 | #[cfg(feature = "alloc")] 176 | #[ext] 177 | pub impl VecDeque 178 | where Self: Sealed> 179 | { 180 | fn binary_search(&self, x: &T) -> Result 181 | where T: Ord { 182 | self.binary_search_by(|e| e.cmp(x)) 183 | } 184 | fn binary_search_by<'a, F: FnMut(&'a T) -> Ordering>( 185 | &'a self, 186 | mut f: F, 187 | ) -> Result 188 | where 189 | T: 'a, 190 | { 191 | let (front, back) = self.as_slices(); 192 | let cmp_back = back.first().map(|elem| f(elem)); 193 | 194 | if let Some(Ordering::Equal) = cmp_back { 195 | Ok(front.len()) 196 | } else if let Some(Ordering::Less) = cmp_back { 197 | back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len()) 198 | } else { 199 | front.binary_search_by(f) 200 | } 201 | } 202 | fn binary_search_by_key<'a, B: Ord, F: FnMut(&'a T) -> B>( 203 | &'a self, 204 | b: &B, 205 | mut f: F, 206 | ) -> Result 207 | where 208 | T: 'a, 209 | { 210 | self.binary_search_by(|k| f(k).cmp(b)) 211 | } 212 | fn partition_point bool>(&self, mut pred: P) -> usize { 213 | let (front, back) = self.as_slices(); 214 | 215 | if let Some(true) = back.first().map(|v| pred(v)) { 216 | partition_point(back, pred) + front.len() 217 | } else { 218 | partition_point(front, pred) 219 | } 220 | } 221 | } 222 | 223 | #[cfg(feature = "alloc")] 224 | fn partition_point bool>(this: &[T], mut pred: P) -> usize { 225 | let mut left = 0; 226 | let mut right = this.len(); 227 | 228 | while left != right { 229 | let mid = left + (right - left) / 2; 230 | let value = unsafe { this.get_unchecked(mid) }; 231 | if pred(value) { 232 | left = mid + 1; 233 | } else { 234 | right = mid; 235 | } 236 | } 237 | 238 | left 239 | } 240 | -------------------------------------------------------------------------------- /src/inherent/v1_38.rs: -------------------------------------------------------------------------------- 1 | use core::time::Duration; 2 | 3 | use easy_ext::ext; 4 | 5 | use crate::inherent::Sealed; 6 | 7 | #[ext] 8 | pub impl *const T 9 | where Self: Sealed<*const T> 10 | { 11 | fn cast(self) -> *const U { 12 | self as _ 13 | } 14 | } 15 | 16 | #[ext] 17 | pub impl *mut T 18 | where Self: Sealed<*mut T> 19 | { 20 | fn cast(self) -> *mut U { 21 | self as _ 22 | } 23 | } 24 | 25 | #[ext] 26 | pub impl Duration 27 | where Self: Sealed 28 | { 29 | fn as_secs_f32(&self) -> f32 { 30 | (self.as_secs() as f32) + (self.subsec_nanos() as f32) / 1_000_000_000. 31 | } 32 | 33 | fn as_secs_f64(&self) -> f64 { 34 | (self.as_secs() as f64) + (self.subsec_nanos() as f64) / 1_000_000_000. 
35 | } 36 | 37 | fn div_f32(&self, rhs: f32) -> Self { 38 | Self::from_secs_f32(self.as_secs_f32() / rhs) 39 | } 40 | 41 | fn div_f64(&self, rhs: f64) -> Self { 42 | Self::from_secs_f64(self.as_secs_f64() / rhs) 43 | } 44 | 45 | fn from_secs_f32(secs: f32) -> Self { 46 | const MAX_NANOS_F32: f32 = ((u64::max_value() as u128 + 1) * 1_000_000_000) as f32; 47 | let nanos = secs * 1_000_000_000.; 48 | if !nanos.is_finite() { 49 | panic!("got non-finite value when converting float to duration"); 50 | } 51 | if nanos >= MAX_NANOS_F32 { 52 | panic!("overflow when converting float to duration"); 53 | } 54 | if nanos < 0.0 { 55 | panic!("underflow when converting float to duration"); 56 | } 57 | let nanos = nanos as u128; 58 | Self::new((nanos / 1_000_000_000) as u64, (nanos % 1_000_000_000) as u32) 59 | } 60 | 61 | fn from_secs_f64(secs: f64) -> Self { 62 | const MAX_NANOS_F64: f64 = ((u64::max_value() as u128 + 1) * 1_000_000_000) as f64; 63 | let nanos = secs * 1_000_000_000.; 64 | if !nanos.is_finite() { 65 | panic!("got non-finite value when converting float to duration"); 66 | } 67 | if nanos >= MAX_NANOS_F64 { 68 | panic!("overflow when converting float to duration"); 69 | } 70 | if nanos < 0.0 { 71 | panic!("underflow when converting float to duration"); 72 | } 73 | let nanos = nanos as u128; 74 | Self::new((nanos / 1_000_000_000) as u64, (nanos % 1_000_000_000) as u32) 75 | } 76 | 77 | fn mul_f32(&self, rhs: f32) -> Self { 78 | Self::from_secs_f32(rhs * self.as_secs_f32()) 79 | } 80 | 81 | fn mul_f64(&self, rhs: f64) -> Self { 82 | Self::from_secs_f64(rhs * self.as_secs_f64()) 83 | } 84 | } 85 | 86 | macro_rules! impl_euclid_for_signed { 87 | ($($type:ty)+) => {$( 88 | #[ext] 89 | pub impl $type where Self: Sealed<$type>, { 90 | #[must_use = "this returns the result of the operation, without modifying the original"] 91 | fn rem_euclid(self, rhs: Self) -> Self { 92 | let r = self % rhs; 93 | if r < 0 { 94 | if rhs < 0 { 95 | r - rhs 96 | } else { 97 | r + rhs 98 | } 99 | } else { 100 | r 101 | } 102 | } 103 | 104 | #[must_use = "this returns the result of the operation, without modifying the original"] 105 | fn checked_rem_euclid(self, rhs: Self) -> Option<$type> { 106 | if rhs == 0 || (self == Self::min_value() && rhs == -1) { 107 | None 108 | } else { 109 | Some(self.rem_euclid(rhs)) 110 | } 111 | } 112 | 113 | #[must_use = "this returns the result of the operation, without modifying the original"] 114 | fn wrapping_rem_euclid(self, rhs: Self) -> Self { 115 | self.overflowing_rem_euclid(rhs).0 116 | } 117 | 118 | #[must_use = "this returns the result of the operation, without modifying the original"] 119 | fn overflowing_rem_euclid(self, rhs: Self) -> ($type, bool) { 120 | if self == Self::min_value() && rhs == -1 { 121 | (0, true) 122 | } else { 123 | (self.rem_euclid(rhs), false) 124 | } 125 | } 126 | 127 | #[must_use = "this returns the result of the operation, without modifying the original"] 128 | fn div_euclid(self, rhs: Self) -> Self { 129 | let q = self / rhs; 130 | if self % rhs < 0 { 131 | return if rhs > 0 { q - 1 } else { q + 1 }; 132 | } 133 | q 134 | } 135 | 136 | #[must_use = "this returns the result of the operation, without modifying the original"] 137 | fn checked_div_euclid(self, rhs: Self) -> Option<$type> { 138 | if rhs == 0 || (self == Self::min_value() && rhs == -1) { 139 | None 140 | } else { 141 | Some(self.div_euclid(rhs)) 142 | } 143 | } 144 | 145 | #[must_use = "this returns the result of the operation, without modifying the original"] 146 | fn 
wrapping_div_euclid(self, rhs: Self) -> Self { 147 | self.overflowing_div_euclid(rhs).0 148 | } 149 | 150 | #[must_use = "this returns the result of the operation, without modifying the original"] 151 | fn overflowing_div_euclid(self, rhs: Self) -> ($type, bool) { 152 | if self == Self::min_value() && rhs == -1 { 153 | (self, true) 154 | } else { 155 | (self.div_euclid(rhs), false) 156 | } 157 | } 158 | } 159 | )+}; 160 | } 161 | 162 | impl_euclid_for_signed![i8 i16 i32 i64 i128 isize]; 163 | 164 | macro_rules! impl_euclid_for_unsigned { 165 | ($($type:ty)+) => {$( 166 | #[ext] 167 | pub impl $type where Self: Sealed<$type>, { 168 | #[must_use = "this returns the result of the operation, without modifying the original"] 169 | fn rem_euclid(self, rhs: Self) -> Self { 170 | self % rhs 171 | } 172 | 173 | #[must_use = "this returns the result of the operation, without modifying the original"] 174 | fn checked_rem_euclid(self, rhs: Self) -> Option<$type> { 175 | if rhs == 0 { 176 | None 177 | } else { 178 | Some(self.rem_euclid(rhs)) 179 | } 180 | } 181 | 182 | #[must_use = "this returns the result of the operation, without modifying the original"] 183 | fn wrapping_rem_euclid(self, rhs: Self) -> Self { 184 | self % rhs 185 | } 186 | 187 | #[must_use = "this returns the result of the operation, without modifying the original"] 188 | fn overflowing_rem_euclid(self, rhs: Self) -> ($type, bool) { 189 | (self % rhs, false) 190 | } 191 | 192 | #[must_use = "this returns the result of the operation, without modifying the original"] 193 | fn div_euclid(self, rhs: Self) -> Self { 194 | self / rhs 195 | } 196 | 197 | #[must_use = "this returns the result of the operation, without modifying the original"] 198 | fn checked_div_euclid(self, rhs: Self) -> Option<$type> { 199 | if rhs == 0 { 200 | None 201 | } else { 202 | Some(self.div_euclid(rhs)) 203 | } 204 | } 205 | 206 | #[must_use = "this returns the result of the operation, without modifying the original"] 207 | fn wrapping_div_euclid(self, rhs: Self) -> Self { 208 | self / rhs 209 | } 210 | 211 | #[must_use = "this returns the result of the operation, without modifying the original"] 212 | fn overflowing_div_euclid(self, rhs: Self) -> ($type, bool) { 213 | (self / rhs, false) 214 | } 215 | } 216 | )+}; 217 | } 218 | 219 | impl_euclid_for_unsigned![u8 u16 u32 u64 u128 usize]; 220 | 221 | macro_rules! euclid_float { 222 | ($($type:ty)+) => {$( 223 | #[cfg(feature = "std")] 224 | #[ext] 225 | pub impl $type where Self: Sealed<$type>, { 226 | #[must_use = "method returns a new number and does not mutate the original value"] 227 | fn rem_euclid(self, rhs: $type) -> $type { 228 | let r = self % rhs; 229 | if r < 0.0 { r + rhs.abs() } else { r } 230 | } 231 | 232 | #[must_use = "method returns a new number and does not mutate the original value"] 233 | fn div_euclid(self, rhs: $type) -> $type { 234 | let q = (self / rhs).trunc(); 235 | if self % rhs < 0.0 { 236 | return if rhs > 0.0 { q - 1.0 } else { q + 1.0 }; 237 | } 238 | q 239 | } 240 | } 241 | )+}; 242 | } 243 | euclid_float![f32 f64]; 244 | -------------------------------------------------------------------------------- /LICENSE-Apache: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2021 Jacob Pratt 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /src/inherent/v1_49/sort.rs: -------------------------------------------------------------------------------- 1 | use core::mem::MaybeUninit; 2 | use core::{cmp, mem, ptr}; 3 | 4 | fn maybe_uninit_slice_as_mut_ptr(this: &mut [MaybeUninit]) -> *mut T { 5 | this as *mut [MaybeUninit] as *mut T 6 | } 7 | 8 | struct CopyOnDrop { 9 | src: *mut T, 10 | dest: *mut T, 11 | } 12 | 13 | impl Drop for CopyOnDrop { 14 | fn drop(&mut self) { 15 | unsafe { 16 | ptr::copy_nonoverlapping(self.src, self.dest, 1); 17 | } 18 | } 19 | } 20 | 21 | fn shift_tail(v: &mut [T], is_less: &mut F) 22 | where F: FnMut(&T, &T) -> bool { 23 | let len = v.len(); 24 | unsafe { 25 | if len >= 2 && is_less(v.get_unchecked(len - 1), v.get_unchecked(len - 2)) { 26 | let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1))); 27 | let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.get_unchecked_mut(len - 2) }; 28 | ptr::copy_nonoverlapping(v.get_unchecked(len - 2), v.get_unchecked_mut(len - 1), 1); 29 | 30 | for i in (0..len - 2).rev() { 31 | if !is_less(&*tmp, v.get_unchecked(i)) { 32 | break; 33 | } 34 | 35 | ptr::copy_nonoverlapping(v.get_unchecked(i), v.get_unchecked_mut(i + 1), 1); 36 | hole.dest = v.get_unchecked_mut(i); 37 | } 38 | } 39 | } 40 | } 41 | 42 | fn insertion_sort(v: &mut [T], is_less: &mut F) 43 | where F: FnMut(&T, &T) -> bool { 44 | for i in 1..v.len() { 45 | shift_tail(&mut v[..i + 1], is_less); 46 | } 47 | } 48 | 49 | fn partition_in_blocks(v: &mut [T], pivot: &T, is_less: &mut F) -> usize 50 | where F: FnMut(&T, &T) -> bool { 51 | const BLOCK: usize = 128; 52 | 53 | let mut l = v.as_mut_ptr(); 54 | let mut block_l = BLOCK; 55 | let mut start_l = ptr::null_mut(); 56 | let mut end_l = ptr::null_mut(); 57 | let mut offsets_l = [MaybeUninit::::uninit(); BLOCK]; 58 | 59 | let mut r = unsafe { l.add(v.len()) }; 60 | let mut block_r = BLOCK; 61 | let mut start_r = ptr::null_mut(); 62 | let mut end_r = ptr::null_mut(); 63 | let mut offsets_r = [MaybeUninit::::uninit(); BLOCK]; 64 | 65 | fn width(l: *mut T, r: *mut T) -> usize { 66 | assert!(mem::size_of::() > 0); 67 | (r as usize - l as usize) / mem::size_of::() 68 | } 69 | 70 | loop { 71 | let is_done = width(l, r) <= 2 * BLOCK; 72 | 73 | if is_done { 74 | let mut rem = width(l, r); 75 | if start_l < end_l || start_r < 
end_r { 76 | rem -= BLOCK; 77 | } 78 | 79 | if start_l < end_l { 80 | block_r = rem; 81 | } else if start_r < end_r { 82 | block_l = rem; 83 | } else { 84 | block_l = rem / 2; 85 | block_r = rem - block_l; 86 | } 87 | debug_assert!(block_l <= BLOCK && block_r <= BLOCK); 88 | debug_assert!(width(l, r) == block_l + block_r); 89 | } 90 | 91 | if start_l == end_l { 92 | start_l = maybe_uninit_slice_as_mut_ptr(&mut offsets_l); 93 | end_l = maybe_uninit_slice_as_mut_ptr(&mut offsets_l); 94 | let mut elem = l; 95 | 96 | for i in 0..block_l { 97 | unsafe { 98 | *end_l = i as u8; 99 | end_l = end_l.offset(!is_less(&*elem, pivot) as isize); 100 | elem = elem.offset(1); 101 | } 102 | } 103 | } 104 | 105 | if start_r == end_r { 106 | start_r = maybe_uninit_slice_as_mut_ptr(&mut offsets_r); 107 | end_r = maybe_uninit_slice_as_mut_ptr(&mut offsets_r); 108 | let mut elem = r; 109 | 110 | for i in 0..block_r { 111 | unsafe { 112 | elem = elem.offset(-1); 113 | *end_r = i as u8; 114 | end_r = end_r.offset(is_less(&*elem, pivot) as isize); 115 | } 116 | } 117 | } 118 | 119 | let count = cmp::min(width(start_l, end_l), width(start_r, end_r)); 120 | 121 | if count > 0 { 122 | macro_rules! left { 123 | () => { 124 | l.offset(*start_l as isize) 125 | }; 126 | } 127 | macro_rules! right { 128 | () => { 129 | r.offset(-(*start_r as isize) - 1) 130 | }; 131 | } 132 | 133 | unsafe { 134 | let tmp = ptr::read(left!()); 135 | ptr::copy_nonoverlapping(right!(), left!(), 1); 136 | 137 | for _ in 1..count { 138 | start_l = start_l.offset(1); 139 | ptr::copy_nonoverlapping(left!(), right!(), 1); 140 | start_r = start_r.offset(1); 141 | ptr::copy_nonoverlapping(right!(), left!(), 1); 142 | } 143 | 144 | ptr::copy_nonoverlapping(&tmp, right!(), 1); 145 | mem::forget(tmp); 146 | start_l = start_l.offset(1); 147 | start_r = start_r.offset(1); 148 | } 149 | } 150 | 151 | if start_l == end_l { 152 | l = unsafe { l.add(block_l) }; 153 | } 154 | 155 | if start_r == end_r { 156 | r = unsafe { r.offset(-(block_r as isize)) }; 157 | } 158 | 159 | if is_done { 160 | break; 161 | } 162 | } 163 | 164 | if start_l < end_l { 165 | debug_assert_eq!(width(l, r), block_l); 166 | while start_l < end_l { 167 | unsafe { 168 | end_l = end_l.offset(-1); 169 | ptr::swap(l.offset(*end_l as isize), r.offset(-1)); 170 | r = r.offset(-1); 171 | } 172 | } 173 | width(v.as_mut_ptr(), r) 174 | } else if start_r < end_r { 175 | debug_assert_eq!(width(l, r), block_r); 176 | while start_r < end_r { 177 | unsafe { 178 | end_r = end_r.offset(-1); 179 | ptr::swap(l, r.offset(-(*end_r as isize) - 1)); 180 | l = l.offset(1); 181 | } 182 | } 183 | width(v.as_mut_ptr(), l) 184 | } else { 185 | width(v.as_mut_ptr(), l) 186 | } 187 | } 188 | 189 | fn partition(v: &mut [T], pivot: usize, is_less: &mut F) -> (usize, bool) 190 | where F: FnMut(&T, &T) -> bool { 191 | let (mid, was_partitioned) = { 192 | v.swap(0, pivot); 193 | let (pivot, v) = v.split_at_mut(1); 194 | let pivot = &mut pivot[0]; 195 | 196 | let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); 197 | let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot }; 198 | let pivot = &*tmp; 199 | 200 | let mut l = 0; 201 | let mut r = v.len(); 202 | 203 | unsafe { 204 | while l < r && is_less(v.get_unchecked(l), pivot) { 205 | l += 1; 206 | } 207 | 208 | while l < r && !is_less(v.get_unchecked(r - 1), pivot) { 209 | r -= 1; 210 | } 211 | } 212 | 213 | (l + partition_in_blocks(&mut v[l..r], pivot, is_less), l >= r) 214 | }; 215 | 216 | v.swap(0, mid); 217 | 218 | (mid, was_partitioned) 219 | } 
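// `partition_equal` moves every element that is not greater than the pivot to the front of the slice and returns the size of that group (pivot included); the quickselect loop further below calls it when the pivot equals an already-placed predecessor, so runs of duplicate keys are skipped in one pass.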
220 | 221 | fn partition_equal(v: &mut [T], pivot: usize, is_less: &mut F) -> usize 222 | where F: FnMut(&T, &T) -> bool { 223 | v.swap(0, pivot); 224 | let (pivot, v) = v.split_at_mut(1); 225 | let pivot = &mut pivot[0]; 226 | 227 | let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); 228 | let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot }; 229 | let pivot = &*tmp; 230 | 231 | let mut l = 0; 232 | let mut r = v.len(); 233 | loop { 234 | unsafe { 235 | while l < r && !is_less(pivot, v.get_unchecked(l)) { 236 | l += 1; 237 | } 238 | 239 | while l < r && is_less(pivot, v.get_unchecked(r - 1)) { 240 | r -= 1; 241 | } 242 | 243 | if l >= r { 244 | break; 245 | } 246 | 247 | r -= 1; 248 | ptr::swap(v.get_unchecked_mut(l), v.get_unchecked_mut(r)); 249 | l += 1; 250 | } 251 | } 252 | 253 | l + 1 254 | } 255 | 256 | fn choose_pivot(v: &mut [T], is_less: &mut F) -> (usize, bool) 257 | where F: FnMut(&T, &T) -> bool { 258 | const SHORTEST_MEDIAN_OF_MEDIANS: usize = 50; 259 | const MAX_SWAPS: usize = 4 * 3; 260 | 261 | let len = v.len(); 262 | 263 | let mut a = len / 4; 264 | let mut b = len / 4 * 2; 265 | let mut c = len / 4 * 3; 266 | 267 | let mut swaps = 0; 268 | 269 | if len >= 8 { 270 | let mut sort2 = |a: &mut usize, b: &mut usize| unsafe { 271 | if is_less(v.get_unchecked(*b), v.get_unchecked(*a)) { 272 | ptr::swap(a, b); 273 | swaps += 1; 274 | } 275 | }; 276 | 277 | let mut sort3 = |a: &mut usize, b: &mut usize, c: &mut usize| { 278 | sort2(a, b); 279 | sort2(b, c); 280 | sort2(a, b); 281 | }; 282 | 283 | if len >= SHORTEST_MEDIAN_OF_MEDIANS { 284 | let mut sort_adjacent = |a: &mut usize| { 285 | let tmp = *a; 286 | sort3(&mut (tmp - 1), a, &mut (tmp + 1)); 287 | }; 288 | 289 | sort_adjacent(&mut a); 290 | sort_adjacent(&mut b); 291 | sort_adjacent(&mut c); 292 | } 293 | 294 | sort3(&mut a, &mut b, &mut c); 295 | } 296 | 297 | if swaps < MAX_SWAPS { 298 | (b, swaps == 0) 299 | } else { 300 | v.reverse(); 301 | (len - 1 - b, true) 302 | } 303 | } 304 | 305 | fn partition_at_index_loop<'a, T, F>( 306 | mut v: &'a mut [T], 307 | mut index: usize, 308 | is_less: &mut F, 309 | mut pred: Option<&'a T>, 310 | ) where 311 | F: FnMut(&T, &T) -> bool, 312 | { 313 | loop { 314 | const MAX_INSERTION: usize = 10; 315 | if v.len() <= MAX_INSERTION { 316 | insertion_sort(v, is_less); 317 | return; 318 | } 319 | 320 | let (pivot, _) = choose_pivot(v, is_less); 321 | 322 | if let Some(p) = pred { 323 | if !is_less(p, &v[pivot]) { 324 | let mid = partition_equal(v, pivot, is_less); 325 | 326 | if mid > index { 327 | return; 328 | } 329 | 330 | v = &mut v[mid..]; 331 | index -= mid; 332 | pred = None; 333 | continue; 334 | } 335 | } 336 | 337 | let (mid, _) = partition(v, pivot, is_less); 338 | 339 | let (left, right) = { v }.split_at_mut(mid); 340 | let (pivot, right) = right.split_at_mut(1); 341 | let pivot = &pivot[0]; 342 | 343 | match mid.cmp(&index) { 344 | cmp::Ordering::Less => { 345 | v = right; 346 | index = index - mid - 1; 347 | pred = Some(pivot); 348 | } 349 | cmp::Ordering::Greater => { 350 | v = left; 351 | } 352 | cmp::Ordering::Equal => { 353 | return; 354 | } 355 | } 356 | } 357 | } 358 | 359 | pub(crate) fn partition_at_index( 360 | v: &mut [T], 361 | index: usize, 362 | mut is_less: F, 363 | ) -> (&mut [T], &mut T, &mut [T]) 364 | where 365 | F: FnMut(&T, &T) -> bool, 366 | { 367 | use self::cmp::Ordering::{Greater, Less}; 368 | 369 | if index >= v.len() { 370 | panic!("partition_at_index index {} greater than length of slice {}", index, v.len()); 371 | } 372 | 
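// Fast paths before the general loop: zero-sized elements need no rearranging, selecting the last position is a single max scan, and selecting position 0 is a single min scan; every other index goes through `partition_at_index_loop`.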
373 | if mem::size_of::<T>() == 0 { 374 | } else if index == v.len() - 1 { 375 | let (max_index, _) = v 376 | .iter() 377 | .enumerate() 378 | .max_by(|&(_, x), &(_, y)| if is_less(x, y) { Less } else { Greater }) 379 | .unwrap(); 380 | v.swap(max_index, index); 381 | } else if index == 0 { 382 | let (min_index, _) = v 383 | .iter() 384 | .enumerate() 385 | .min_by(|&(_, x), &(_, y)| if is_less(x, y) { Less } else { Greater }) 386 | .unwrap(); 387 | v.swap(min_index, index); 388 | } else { 389 | partition_at_index_loop(v, index, &mut is_less, None); 390 | } 391 | 392 | let (left, right) = v.split_at_mut(index); 393 | let (pivot, right) = right.split_at_mut(1); 394 | let pivot = &mut pivot[0]; 395 | (left, pivot, right) 396 | } 397 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(feature = "std"), no_std)] 2 | #![deny(rust_2018_idioms, unused_qualifications)] 3 | #![warn(unreachable_pub)] 4 | #![allow(non_camel_case_types, unstable_name_collisions, clippy::all)] 5 | 6 | /*! 7 | Standback backports a number of methods, structs, and macros that have been stabilized in the Rust 8 | standard library since 1.36.0. This allows crate authors to depend on Standback rather than forcing 9 | downstream users to upgrade their compiler (or not use the new feature at all). 10 | 11 | Due to a variety of restrictions in the Rust language, it is not possible to implement everything that has 12 | been stabilized. 13 | 14 | # Usage 15 | 16 | For most cases, importing the shims should suffice. 17 | 18 | ```rust,no_run 19 | use standback::shim::*; 20 | ``` 21 | 22 | If you are using anything that would normally have to be imported, just use the `standback` crate 23 | instead of `core`, `alloc`, or `std`. 24 | 25 | ```rust,no_run 26 | use standback::mem::take; 27 | ``` 28 | 29 | It is _highly_ recommended to use `#![allow(unstable_name_collisions)]`, as that's the whole point 30 | of this crate. Just be extra careful not to do this for anything that _can't_ be backported. 31 | 32 | # `#![no_std]` support 33 | 34 | By default, the standard library is used where necessary. If support for `#![no_std]` is required, 35 | use `default-features = false`. 36 | 37 | Items that require an allocator are gated under the `alloc` feature, which is enabled by default via 38 | the `std` feature. 39 | 40 | # Minimum supported Rust version 41 | 42 | By default, this crate has a minimum supported Rust version of 1.36. If you do not require an MSRV 43 | this low, you can raise it by using `default-features = false` (be sure to re-enable `std` or 44 | `alloc` if you need it) and enabling a `msrv-1.XX` feature flag, substituting the appropriate 45 | version; the standback crate is not guaranteed to work (let alone produce a reasonable result) if no 46 | MSRV is declared when `default-features = false`. All compiler versions up to the most recent stable release 47 | are supported. 48 | 49 | Note that items stabilized prior to the declared MSRV _will not_ be re-exported. 50 | 51 | # Inherent items 52 | 53 | The following methods and constants are available via the shim. For brevity, `i*` is `i8`, `i16`, 54 | `i32`, `i64`, `i128`, and `isize`; `u*` is `u8`, `u16`, `u32`, `u64`, `u128`, and `usize`.
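As a quick illustration of what "available via the shim" means (a minimal sketch, not part of the crate documentation; it assumes the listed items are either shimmed or re-exported for the compiler in use):

```rust
// On old compilers these calls resolve to Standback's extension traits; on new
// ones the real inherent methods win, hence the `unstable_name_collisions` allow.
#![allow(unstable_name_collisions)]
use standback::shim::*;

fn main() {
    let x = 2.5_f32.clamp(0.0, 1.0); // f32::clamp, listed under 1.50
    let d = 3_u8.abs_diff(10); // u8::abs_diff, listed under 1.60
    let kv = "key=value".split_once('='); // str::split_once, listed under 1.52
    println!("{} {} {:?}", x, d, kv);
}
```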
55 | 56 | ## 1.60 57 | 58 | ```text 59 | u8::escape_ascii 60 | Vec::spare_capacity_mut 61 | MaybeUninit::assume_init_drop 62 | MaybeUninit::assume_init_read 63 | {i*, u*}::abs_diff 64 | ``` 65 | 66 | ## 1.59 67 | 68 | ```text 69 | Result::copied 70 | Result::cloned 71 | NonZeroU*::is_power_of_two 72 | ``` 73 | 74 | ## 1.58 75 | 76 | ```text 77 | Metadata::is_symlink 78 | Path::is_symlink 79 | {i*, u*}::saturating_div 80 | Option::unwrap_unchecked 81 | Result::unwrap_unchecked 82 | Result::unwrap_err_unchecked 83 | NonZeroU*::is_power_of_two 84 | File::options 85 | ``` 86 | 87 | ## 1.57 88 | 89 | ```text 90 | Iterator::map_while 91 | ``` 92 | 93 | ## 1.56 94 | 95 | None :( 96 | 97 | ## 1.55 98 | 99 | ```text 100 | prelude::rust_2015 101 | prelude::rust_2018 102 | prelude::rust_2021 103 | Bound::cloned 104 | MaybeUninit::assume_init_mut 105 | MaybeUninit::assume_init_ref 106 | MaybeUninit::write 107 | ``` 108 | 109 | ## 1.54 110 | 111 | ```text 112 | BTreeMap::into_keys 113 | BTreeMap::into_values 114 | HashMap::into_keys 115 | HashMap::into_values 116 | VecDeque::binary_search 117 | VecDeque::binary_search_by 118 | VecDeque::binary_search_by_key 119 | VecDeque::partition_point 120 | ``` 121 | 122 | ## 1.53 123 | 124 | ```text 126 | Duration::ZERO 127 | Duration::is_zero 128 | Duration::saturating_add 129 | Duration::saturating_mul 130 | Duration::saturating_sub 131 | Option::insert 132 | Ordering::is_eq 133 | Ordering::is_ge 134 | Ordering::is_gt 135 | Ordering::is_le 136 | Ordering::is_lt 137 | Ordering::is_ne 138 | OsStr::make_ascii_lowercase 139 | OsStr::make_ascii_uppercase 140 | OsStr::to_ascii_lowercase 141 | OsStr::to_ascii_uppercase 142 | OsStr::is_ascii 143 | OsStr::eq_ignore_ascii_case 144 | Rc::decrement_strong_count 145 | Rc::increment_strong_count 146 | Vec::extend_from_within 147 | {f32, f64}::is_subnormal 148 | {i*, u*}::BITS 149 | ``` 150 | 151 | ## 1.52 152 | 153 | ```text 154 | char::MAX 155 | char::REPLACEMENT_CHARACTER 156 | char::UNICODE_VERSION 157 | char::decode_utf16 158 | char::from_digit 159 | char::from_u32_unchecked 160 | char::from_u32 161 | slice::partition_point 162 | str::rsplit_once 163 | str::split_once 164 | ``` 165 | 166 | ## 1.51 167 | 168 | ```text 169 | Arc::decrement_strong_count 170 | Arc::increment_strong_count 171 | Peekable::next_if_eq 172 | Peekable::next_if 173 | Seek::stream_position 174 | slice::fill_with 175 | slice::split_inclusive_mut 176 | slice::split_inclusive 177 | slice::strip_prefix 178 | slice::strip_suffix 179 | str::split_inclusive 180 | i*::unsigned_abs 181 | Poll::map_ok 182 | Poll::map_err 183 | ``` 184 | 185 | ## 1.50 186 | 187 | ```text 188 | bool::then 189 | btree_map::Entry::or_insert_with_key 190 | hash_map::Entry::or_insert_with_key 191 | {f32, f64}::clamp 192 | Ord::clamp 193 | RefCell::take 194 | slice::fill 195 | UnsafeCell::get_mut 196 | ``` 197 | 198 | ## 1.49 199 | 200 | ```text 201 | slice::select_nth_unstable 202 | slice::select_nth_unstable_by 203 | slice::select_nth_unstable_by_key 204 | ``` 205 | 206 | ## 1.48 207 | 208 | ```text 209 | slice::as_ptr_range 210 | slice::as_mut_ptr_range 211 | ``` 212 | 213 | ## 1.47 214 | 215 | ```text 216 | Range::is_empty 217 | Result::as_deref 218 | Result::as_deref_mut 219 | Vec::leak 220 | f32::TAU 221 | f64::TAU 222 | ``` 223 | 224 | ## 1.46 225 | 226 | ```text 227 | {i*, u*}::leading_ones 228 | {i*, u*}::trailing_ones 229 | Option::zip 230 | ``` 231 | 232 | ## 1.45 233 | 234 | ```text 235 | i*::saturating_abs 236 | i*::saturating_neg 237 | 
str::strip_prefix 238 | str::strip_suffix 239 | ``` 240 | 241 | ## 1.44 242 | 243 | ```text 244 | PathBuf::with_capacity 245 | PathBuf::capacity 246 | PathBuf::clear 247 | PathBuf::reserve 248 | PathBuf::reserve_exact 249 | PathBuf::shrink_to_fit 250 | Layout::align_to 251 | Layout::pad_to_align 252 | Layout::array 253 | Layout::extend 254 | {f32, f64}::to_int_unchecked 255 | ``` 256 | 257 | ## 1.43 258 | 259 | ```text 260 | {f32, f64}::RADIX 261 | {f32, f64}::MANTISSA_DIGITS 262 | {f32, f64}::DIGITS 263 | {f32, f64}::EPSILON 264 | {f32, f64}::MIN 265 | {f32, f64}::MIN_POSITIVE 266 | {f32, f64}::MAX 267 | {f32, f64}::MIN_EXP 268 | {f32, f64}::MAX_EXP 269 | {f32, f64}::MIN_10_EXP 270 | {f32, f64}::MAX_10_EXP 271 | {f32, f64}::NAN 272 | {f32, f64}::INFINITY 273 | {f32, f64}::NEG_INFINITY 274 | {i*, u*}::MIN 275 | {i*, u*}::MAX 276 | ``` 277 | 278 | ## 1.42 279 | 280 | ```text 281 | Condvar::wait_while 282 | Condvar::wait_timeout_while 283 | ManuallyDrop::take 284 | ``` 285 | 286 | ## 1.41 287 | 288 | ```text 289 | Result::map_or 290 | Result::map_or_else 291 | ``` 292 | 293 | ## 1.40 294 | 295 | ```text 296 | Option::as_deref 297 | Option::as_deref_mut 298 | {f32, f64}::to_be_bytes 299 | {f32, f64}::to_le_bytes 300 | {f32, f64}::to_ne_bytes 301 | {f32, f64}::from_be_bytes 302 | {f32, f64}::from_le_bytes 303 | {f32, f64}::from_ne_bytes 304 | slice::repeat 305 | ``` 306 | 307 | ## 1.39 308 | 309 | None :( 310 | 311 | ## 1.38 312 | 313 | ```text 314 | <*const T>::cast 315 | <*mut T>::cast 316 | Duration::as_secs_f32 317 | Duration::as_secs_f64 318 | Duration::div_f32 319 | Duration::div_f64 320 | Duration::from_secs_f32 321 | Duration::from_secs_f64 322 | Duration::mul_f32 323 | Duration::mul_f64 324 | {i*, u*}::rem_euclid 325 | {i*, u*}::checked_rem_euclid 326 | {i*, u*}::wrapping_rem_euclid 327 | {i*, u*}::overflowing_rem_euclid 328 | {i*, u*}::div_euclid 329 | {i*, u*}::checked_div_euclid 330 | {i*, u*}::wrapping_div_euclid 331 | {i*, u*}::overflowing_div_euclid 332 | {f32, f64}::rem_euclid 333 | {f32, f64}::div_euclid 334 | ``` 335 | 336 | ## 1.37 337 | 338 | ```text 339 | Cell::from_mut 340 | Cell<[T]>::as_slice_of_cells 341 | DoubleEndedIterator::nth_back 342 | Option::xor 343 | slice::copy_within 344 | ``` 345 | 346 | # Free items 347 | 348 | ```text 349 | thread::available_parallelism // 1.59 350 | iter::zip // 1.59 351 | array::from_ref // 1.53 352 | array::from_mut // 1.53 353 | cmp::min_by // 1.53 354 | cmp::max_by // 1.53 355 | cmp::min_by_key // 1.53 356 | cmp::max_by_key // 1.53 357 | task::Wake // 1.51 358 | future::pending // 1.48 361 | future::ready // 1.48 362 | {f32, f64}::consts::TAU // 1.47 363 | char::UNICODE_VERSION // 1.45 364 | {f32, f64}::consts::LOG10_2 // 1.43 365 | {f32, f64}::consts::LOG2_10 // 1.43 366 | iter::once_with // 1.43 367 | mem::take // 1.40 368 | ``` 369 | 370 | # Prelude macros (located in `standback::shim`) 371 | 372 | ```text 373 | matches! // 1.42 374 | todo!
// 1.39 375 | ``` 376 | */ 377 | 378 | #[allow(unused_extern_crates)] 379 | #[cfg(feature = "alloc")] 380 | extern crate alloc; 381 | 382 | mod inherent; 383 | mod pattern; 384 | 385 | mod free { 386 | #[cfg(shim = "1.40")] pub(crate) mod v1_40; 387 | #[cfg(shim = "1.43")] pub(crate) mod v1_43; 388 | #[cfg(shim = "1.47")] pub(crate) mod v1_47; 389 | #[cfg(shim = "1.48")] pub(crate) mod v1_48; 390 | #[cfg(shim = "1.51")] pub(crate) mod v1_51; 391 | #[cfg(shim = "1.53")] pub(crate) mod v1_53; 392 | #[cfg(shim = "1.59")] pub(crate) mod v1_59; 393 | } 394 | 395 | #[doc(hidden)] 396 | pub mod shim { 397 | #[cfg(shim = "1.39")] 398 | pub use core::unimplemented as todo; 399 | 400 | pub use crate::inherent::*; 401 | #[cfg(shim = "1.42")] pub use crate::matches; 402 | } 403 | #[doc(hidden)] 404 | pub mod prelude { 405 | #[cfg(not(feature = "std"))] 406 | pub use core::prelude::v1 as rust_2015; 407 | #[cfg(feature = "std")] 408 | pub use std::prelude::v1 as rust_2015; 409 | 410 | pub use rust_2015 as rust_2018; 411 | 412 | pub mod rust_2021 { 413 | pub use core::convert::{TryFrom, TryInto}; 414 | pub use core::iter::FromIterator; 415 | 416 | pub use crate::prelude::rust_2015::*; 417 | } 418 | } 419 | #[doc(hidden)] 420 | pub mod mem { 421 | #[cfg(reexport = "1.40")] pub use core::mem::take; 422 | 423 | #[cfg(shim = "1.40")] 424 | pub use crate::free::v1_40::mem::*; 425 | } 426 | #[doc(hidden)] 427 | pub mod iter { 428 | #[cfg(reexport = "1.57")] 429 | pub use core::iter::MapWhile; 430 | #[cfg(reexport = "1.43")] 431 | pub use core::iter::{once_with, OnceWith}; 432 | #[cfg(reexport = "1.59")] 433 | pub use core::iter::{zip, Zip}; 434 | 435 | #[cfg(shim = "1.43")] 436 | pub use crate::free::v1_43::iter::*; 437 | #[cfg(shim = "1.59")] 438 | pub use crate::free::v1_59::{zip, Zip}; 439 | #[cfg(shim = "1.57")] 440 | pub use crate::inherent::MapWhile; 441 | } 442 | #[doc(hidden)] 443 | pub mod task { 444 | #[cfg(all(reexport = "1.51", feature = "alloc"))] 445 | pub use alloc::task::Wake; 446 | 447 | #[cfg(all(shim = "1.51", feature = "alloc"))] 448 | pub use crate::free::v1_51::task::*; 449 | } 450 | #[doc(hidden)] 451 | pub mod f32 { 452 | pub mod consts { 453 | #[cfg(reexport = "1.47")] 454 | pub use core::f32::consts::TAU; 455 | #[cfg(reexport = "1.43")] 456 | pub use core::f32::consts::{LOG10_2, LOG2_10}; 457 | 458 | #[cfg(shim = "1.43")] 459 | pub use crate::free::v1_43::f32::consts::*; 460 | #[cfg(shim = "1.47")] 461 | pub use crate::free::v1_47::f32::consts::*; 462 | } 463 | } 464 | #[doc(hidden)] 465 | pub mod f64 { 466 | pub mod consts { 467 | #[cfg(reexport = "1.47")] 468 | pub use core::f64::consts::TAU; 469 | #[cfg(reexport = "1.43")] 470 | pub use core::f64::consts::{LOG10_2, LOG2_10}; 471 | 472 | #[cfg(shim = "1.43")] 473 | pub use crate::free::v1_43::f64::consts::*; 474 | #[cfg(shim = "1.47")] 475 | pub use crate::free::v1_47::f64::consts::*; 476 | } 477 | } 478 | #[doc(hidden)] 479 | pub mod char { 480 | #[cfg(shim = "1.38")] 481 | pub const UNICODE_VERSION: (u8, u8, u8) = (11, 0, 0); 482 | #[cfg(all(reexport = "1.38", shim = "1.44"))] 483 | pub const UNICODE_VERSION: (u8, u8, u8) = (12, 1, 0); 484 | #[cfg(all(reexport = "1.44", shim = "1.45"))] 485 | pub const UNICODE_VERSION: (u8, u8, u8) = (13, 0, 0); 486 | #[cfg(reexport = "1.45")] 487 | pub use core::char::UNICODE_VERSION; 488 | } 489 | #[doc(hidden)] 490 | pub mod future { 491 | #[cfg(reexport = "1.48")] 492 | pub use core::future::{pending, ready, Pending, Ready}; 493 | 494 | #[cfg(shim = "1.48")] 495 | pub use 
crate::free::v1_48::future::*; 496 | } 497 | #[doc(hidden)] 498 | pub mod array { 499 | #[cfg(reexport = "1.53")] 500 | pub use core::array::{from_mut, from_ref}; 501 | 502 | #[cfg(shim = "1.53")] 503 | pub use crate::free::v1_53::array::*; 504 | } 505 | #[doc(hidden)] 506 | pub mod cmp { 507 | #[cfg(reexport = "1.53")] 508 | pub use core::cmp::{max_by, max_by_key, min_by, min_by_key}; 509 | 510 | #[cfg(shim = "1.53")] 511 | pub use crate::free::v1_53::cmp::*; 512 | } 513 | #[doc(hidden)] 514 | pub mod thread { 515 | #[cfg(all(feature = "std", reexport = "1.59"))] 516 | pub use std::thread::available_parallelism; 517 | 518 | #[cfg(all(feature = "std", shim = "1.59"))] 519 | pub use crate::free::v1_59::available_parallelism; 520 | } 521 | -------------------------------------------------------------------------------- /src/pattern/mod.rs: -------------------------------------------------------------------------------- 1 | mod memchr; 2 | 3 | #[allow(unused_imports)] 4 | #[cfg(feature = "alloc")] 5 | use alloc::string::String; 6 | use core::{cmp, fmt, usize}; 7 | 8 | mod sealed { 9 | use super::*; 10 | 11 | #[allow(unreachable_pub)] 12 | pub trait Sealed {} 13 | impl Sealed for &str {} 14 | impl Sealed for char {} 15 | impl Sealed for &[char] {} 16 | impl bool> Sealed for F {} 17 | impl Sealed for &&str {} 18 | #[cfg(feature = "alloc")] 19 | impl Sealed for &String {} 20 | 21 | impl Sealed for MultiCharEqSearcher<'_, C> {} 22 | impl Sealed for MultiCharEqPattern {} 23 | } 24 | 25 | #[allow(unreachable_pub)] 26 | pub trait Pattern<'a>: Sized + sealed::Sealed { 27 | type Searcher: Searcher<'a>; 28 | 29 | fn into_searcher(self, haystack: &'a str) -> Self::Searcher; 30 | 31 | fn is_contained_in(self, haystack: &'a str) -> bool { 32 | self.into_searcher(haystack).next_match().is_some() 33 | } 34 | 35 | fn is_prefix_of(self, haystack: &'a str) -> bool { 36 | match self.into_searcher(haystack).next() { 37 | SearchStep::Match(0, _) => true, 38 | _ => false, 39 | } 40 | } 41 | 42 | fn is_suffix_of(self, haystack: &'a str) -> bool 43 | where Self::Searcher: ReverseSearcher<'a> { 44 | match self.into_searcher(haystack).next_back() { 45 | SearchStep::Match(_, j) if haystack.len() == j => true, 46 | _ => false, 47 | } 48 | } 49 | 50 | fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> { 51 | if let SearchStep::Match(start, len) = self.into_searcher(haystack).next() { 52 | debug_assert_eq!( 53 | start, 0, 54 | "The first search step from Searcher must include the first character" 55 | ); 56 | unsafe { Some(haystack.get_unchecked(len..)) } 57 | } else { 58 | None 59 | } 60 | } 61 | 62 | fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> 63 | where Self::Searcher: ReverseSearcher<'a> { 64 | if let SearchStep::Match(start, end) = self.into_searcher(haystack).next_back() { 65 | debug_assert_eq!( 66 | end, 67 | haystack.len(), 68 | "The first search step from ReverseSearcher must include the last character" 69 | ); 70 | unsafe { Some(haystack.get_unchecked(..start)) } 71 | } else { 72 | None 73 | } 74 | } 75 | } 76 | 77 | #[allow(unreachable_pub)] 78 | #[derive(Copy, Clone, Eq, PartialEq, Debug)] 79 | pub enum SearchStep { 80 | Match(usize, usize), 81 | Reject(usize, usize), 82 | Done, 83 | } 84 | 85 | #[allow(unreachable_pub)] 86 | pub unsafe trait Searcher<'a> { 87 | fn haystack(&self) -> &'a str; 88 | fn next(&mut self) -> SearchStep; 89 | 90 | fn next_match(&mut self) -> Option<(usize, usize)> { 91 | loop { 92 | match self.next() { 93 | SearchStep::Match(a, b) => return Some((a, 
b)), 94 | SearchStep::Done => return None, 95 | _ => continue, 96 | } 97 | } 98 | } 99 | 100 | fn next_reject(&mut self) -> Option<(usize, usize)> { 101 | loop { 102 | match self.next() { 103 | SearchStep::Reject(a, b) => return Some((a, b)), 104 | SearchStep::Done => return None, 105 | _ => continue, 106 | } 107 | } 108 | } 109 | } 110 | 111 | #[allow(unreachable_pub)] 112 | pub unsafe trait ReverseSearcher<'a>: Searcher<'a> { 113 | fn next_back(&mut self) -> SearchStep; 114 | 115 | fn next_match_back(&mut self) -> Option<(usize, usize)> { 116 | loop { 117 | match self.next_back() { 118 | SearchStep::Match(a, b) => return Some((a, b)), 119 | SearchStep::Done => return None, 120 | _ => continue, 121 | } 122 | } 123 | } 124 | 125 | fn next_reject_back(&mut self) -> Option<(usize, usize)> { 126 | loop { 127 | match self.next_back() { 128 | SearchStep::Reject(a, b) => return Some((a, b)), 129 | SearchStep::Done => return None, 130 | _ => continue, 131 | } 132 | } 133 | } 134 | } 135 | 136 | #[allow(unreachable_pub)] 137 | #[derive(Clone, Debug)] 138 | pub struct CharSearcher<'a> { 139 | haystack: &'a str, 140 | finger: usize, 141 | finger_back: usize, 142 | needle: char, 143 | utf8_size: usize, 144 | utf8_encoded: [u8; 4], 145 | } 146 | 147 | unsafe impl<'a> Searcher<'a> for CharSearcher<'a> { 148 | fn haystack(&self) -> &'a str { 149 | self.haystack 150 | } 151 | fn next(&mut self) -> SearchStep { 152 | let old_finger = self.finger; 153 | let slice = unsafe { self.haystack.get_unchecked(old_finger..self.finger_back) }; 154 | let mut iter = slice.chars(); 155 | let old_len = iter.as_str().len(); 156 | if let Some(ch) = iter.next() { 157 | self.finger += old_len - iter.as_str().len(); 158 | if ch == self.needle { 159 | SearchStep::Match(old_finger, self.finger) 160 | } else { 161 | SearchStep::Reject(old_finger, self.finger) 162 | } 163 | } else { 164 | SearchStep::Done 165 | } 166 | } 167 | fn next_match(&mut self) -> Option<(usize, usize)> { 168 | loop { 169 | let bytes = self.haystack.as_bytes().get(self.finger..self.finger_back)?; 170 | let last_byte = unsafe { *self.utf8_encoded.get_unchecked(self.utf8_size - 1) }; 171 | if let Some(index) = memchr::memchr(last_byte, bytes) { 172 | self.finger += index + 1; 173 | if self.finger >= self.utf8_size { 174 | let found_char = self.finger - self.utf8_size; 175 | if let Some(slice) = self.haystack.as_bytes().get(found_char..self.finger) { 176 | if slice == &self.utf8_encoded[0..self.utf8_size] { 177 | return Some((found_char, self.finger)); 178 | } 179 | } 180 | } 181 | } else { 182 | self.finger = self.finger_back; 183 | return None; 184 | } 185 | } 186 | } 187 | } 188 | 189 | unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> { 190 | fn next_back(&mut self) -> SearchStep { 191 | let old_finger = self.finger_back; 192 | let slice = unsafe { self.haystack.get_unchecked(self.finger..old_finger) }; 193 | let mut iter = slice.chars(); 194 | let old_len = iter.as_str().len(); 195 | if let Some(ch) = iter.next_back() { 196 | self.finger_back -= old_len - iter.as_str().len(); 197 | if ch == self.needle { 198 | SearchStep::Match(self.finger_back, old_finger) 199 | } else { 200 | SearchStep::Reject(self.finger_back, old_finger) 201 | } 202 | } else { 203 | SearchStep::Done 204 | } 205 | } 206 | fn next_match_back(&mut self) -> Option<(usize, usize)> { 207 | let haystack = self.haystack.as_bytes(); 208 | loop { 209 | let bytes = haystack.get(self.finger..self.finger_back)?; 210 | let last_byte = unsafe { 
*self.utf8_encoded.get_unchecked(self.utf8_size - 1) }; 211 | if let Some(index) = memchr::memrchr(last_byte, bytes) { 212 | let index = self.finger + index; 213 | let shift = self.utf8_size - 1; 214 | if index >= shift { 215 | let found_char = index - shift; 216 | if let Some(slice) = haystack.get(found_char..(found_char + self.utf8_size)) { 217 | if slice == &self.utf8_encoded[0..self.utf8_size] { 218 | self.finger_back = found_char; 219 | return Some((self.finger_back, self.finger_back + self.utf8_size)); 220 | } 221 | } 222 | } 223 | self.finger_back = index; 224 | } else { 225 | self.finger_back = self.finger; 226 | return None; 227 | } 228 | } 229 | } 230 | } 231 | 232 | impl<'a> Pattern<'a> for char { 233 | type Searcher = CharSearcher<'a>; 234 | 235 | fn into_searcher(self, haystack: &'a str) -> Self::Searcher { 236 | let mut utf8_encoded = [0; 4]; 237 | let utf8_size = self.encode_utf8(&mut utf8_encoded).len(); 238 | CharSearcher { 239 | haystack, 240 | finger: 0, 241 | finger_back: haystack.len(), 242 | needle: self, 243 | utf8_size, 244 | utf8_encoded, 245 | } 246 | } 247 | 248 | fn is_contained_in(self, haystack: &'a str) -> bool { 249 | if (self as u32) < 128 { 250 | haystack.as_bytes().contains(&(self as u8)) 251 | } else { 252 | let mut buffer = [0u8; 4]; 253 | self.encode_utf8(&mut buffer).is_contained_in(haystack) 254 | } 255 | } 256 | 257 | fn is_prefix_of(self, haystack: &'a str) -> bool { 258 | self.encode_utf8(&mut [0u8; 4]).is_prefix_of(haystack) 259 | } 260 | 261 | fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> { 262 | self.encode_utf8(&mut [0u8; 4]).strip_prefix_of(haystack) 263 | } 264 | 265 | fn is_suffix_of(self, haystack: &'a str) -> bool 266 | where Self::Searcher: ReverseSearcher<'a> { 267 | self.encode_utf8(&mut [0u8; 4]).is_suffix_of(haystack) 268 | } 269 | 270 | fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> 271 | where Self::Searcher: ReverseSearcher<'a> { 272 | self.encode_utf8(&mut [0u8; 4]).strip_suffix_of(haystack) 273 | } 274 | } 275 | 276 | #[doc(hidden)] 277 | trait MultiCharEq { 278 | fn matches(&mut self, c: char) -> bool; 279 | } 280 | 281 | impl MultiCharEq for F 282 | where F: FnMut(char) -> bool 283 | { 284 | fn matches(&mut self, c: char) -> bool { 285 | (*self)(c) 286 | } 287 | } 288 | 289 | impl MultiCharEq for &[char] { 290 | fn matches(&mut self, c: char) -> bool { 291 | self.iter().any(|&m| m == c) 292 | } 293 | } 294 | 295 | struct MultiCharEqPattern(C); 296 | 297 | #[derive(Clone, Debug)] 298 | struct MultiCharEqSearcher<'a, C: MultiCharEq> { 299 | char_eq: C, 300 | haystack: &'a str, 301 | char_indices: core::str::CharIndices<'a>, 302 | } 303 | 304 | impl<'a, C: MultiCharEq> Pattern<'a> for MultiCharEqPattern { 305 | type Searcher = MultiCharEqSearcher<'a, C>; 306 | 307 | fn into_searcher(self, haystack: &'a str) -> MultiCharEqSearcher<'a, C> { 308 | MultiCharEqSearcher { haystack, char_eq: self.0, char_indices: haystack.char_indices() } 309 | } 310 | } 311 | 312 | unsafe impl<'a, C: MultiCharEq> Searcher<'a> for MultiCharEqSearcher<'a, C> { 313 | fn haystack(&self) -> &'a str { 314 | self.haystack 315 | } 316 | 317 | fn next(&mut self) -> SearchStep { 318 | let s = &mut self.char_indices; 319 | let pre_len = s.as_str().len(); 320 | if let Some((i, c)) = s.next() { 321 | let len = s.as_str().len(); 322 | let char_len = pre_len - len; 323 | if self.char_eq.matches(c) { 324 | return SearchStep::Match(i, i + char_len); 325 | } else { 326 | return SearchStep::Reject(i, i + char_len); 327 | } 328 | } 329 | 
SearchStep::Done 330 | } 331 | } 332 | 333 | unsafe impl<'a, C: MultiCharEq> ReverseSearcher<'a> for MultiCharEqSearcher<'a, C> { 334 | fn next_back(&mut self) -> SearchStep { 335 | let s = &mut self.char_indices; 336 | let pre_len = s.as_str().len(); 337 | if let Some((i, c)) = s.next_back() { 338 | let len = s.as_str().len(); 339 | let char_len = pre_len - len; 340 | if self.char_eq.matches(c) { 341 | return SearchStep::Match(i, i + char_len); 342 | } else { 343 | return SearchStep::Reject(i, i + char_len); 344 | } 345 | } 346 | SearchStep::Done 347 | } 348 | } 349 | 350 | macro_rules! pattern_methods { 351 | ($t:ty, $pmap:expr, $smap:expr) => { 352 | type Searcher = $t; 353 | 354 | #[allow(clippy::redundant_closure_call)] 355 | fn into_searcher(self, haystack: &'a str) -> $t { 356 | ($smap)(($pmap)(self).into_searcher(haystack)) 357 | } 358 | 359 | #[allow(clippy::redundant_closure_call)] 360 | fn is_contained_in(self, haystack: &'a str) -> bool { 361 | ($pmap)(self).is_contained_in(haystack) 362 | } 363 | 364 | #[allow(clippy::redundant_closure_call)] 365 | fn is_prefix_of(self, haystack: &'a str) -> bool { 366 | ($pmap)(self).is_prefix_of(haystack) 367 | } 368 | 369 | #[allow(clippy::redundant_closure_call)] 370 | fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> { 371 | ($pmap)(self).strip_prefix_of(haystack) 372 | } 373 | 374 | #[allow(clippy::redundant_closure_call)] 375 | fn is_suffix_of(self, haystack: &'a str) -> bool 376 | where $t: ReverseSearcher<'a> { 377 | ($pmap)(self).is_suffix_of(haystack) 378 | } 379 | 380 | #[allow(clippy::redundant_closure_call)] 381 | fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> 382 | where $t: ReverseSearcher<'a> { 383 | ($pmap)(self).strip_suffix_of(haystack) 384 | } 385 | }; 386 | } 387 | 388 | macro_rules! 
searcher_methods { 389 | (forward) => { 390 | fn haystack(&self) -> &'a str { 391 | self.0.haystack() 392 | } 393 | fn next(&mut self) -> SearchStep { 394 | self.0.next() 395 | } 396 | fn next_match(&mut self) -> Option<(usize, usize)> { 397 | self.0.next_match() 398 | } 399 | fn next_reject(&mut self) -> Option<(usize, usize)> { 400 | self.0.next_reject() 401 | } 402 | }; 403 | (reverse) => { 404 | fn next_back(&mut self) -> SearchStep { 405 | self.0.next_back() 406 | } 407 | fn next_match_back(&mut self) -> Option<(usize, usize)> { 408 | self.0.next_match_back() 409 | } 410 | fn next_reject_back(&mut self) -> Option<(usize, usize)> { 411 | self.0.next_reject_back() 412 | } 413 | }; 414 | } 415 | 416 | #[allow(unreachable_pub)] 417 | #[derive(Clone, Debug)] 418 | pub struct CharSliceSearcher<'a, 'b>( as Pattern<'a>>::Searcher); 419 | 420 | unsafe impl<'a, 'b> Searcher<'a> for CharSliceSearcher<'a, 'b> { 421 | searcher_methods!(forward); 422 | } 423 | 424 | unsafe impl<'a, 'b> ReverseSearcher<'a> for CharSliceSearcher<'a, 'b> { 425 | searcher_methods!(reverse); 426 | } 427 | 428 | impl<'a, 'b> Pattern<'a> for &'b [char] { 429 | pattern_methods!(CharSliceSearcher<'a, 'b>, MultiCharEqPattern, CharSliceSearcher); 430 | } 431 | 432 | #[allow(unreachable_pub)] 433 | #[derive(Clone)] 434 | pub struct CharPredicateSearcher<'a, F>( as Pattern<'a>>::Searcher) 435 | where F: FnMut(char) -> bool; 436 | 437 | impl fmt::Debug for CharPredicateSearcher<'_, F> 438 | where F: FnMut(char) -> bool 439 | { 440 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 441 | f.debug_struct("CharPredicateSearcher") 442 | .field("haystack", &self.0.haystack) 443 | .field("char_indices", &self.0.char_indices) 444 | .finish() 445 | } 446 | } 447 | unsafe impl<'a, F> Searcher<'a> for CharPredicateSearcher<'a, F> 448 | where F: FnMut(char) -> bool 449 | { 450 | searcher_methods!(forward); 451 | } 452 | 453 | unsafe impl<'a, F> ReverseSearcher<'a> for CharPredicateSearcher<'a, F> 454 | where F: FnMut(char) -> bool 455 | { 456 | searcher_methods!(reverse); 457 | } 458 | 459 | impl<'a, F> Pattern<'a> for F 460 | where F: FnMut(char) -> bool 461 | { 462 | pattern_methods!(CharPredicateSearcher<'a, F>, MultiCharEqPattern, CharPredicateSearcher); 463 | } 464 | 465 | impl<'a, 'b, 'c> Pattern<'a> for &'c &'b str { 466 | pattern_methods!(StrSearcher<'a, 'b>, |&s| s, |s| s); 467 | } 468 | 469 | impl<'a, 'b> Pattern<'a> for &'b str { 470 | type Searcher = StrSearcher<'a, 'b>; 471 | 472 | fn into_searcher(self, haystack: &'a str) -> StrSearcher<'a, 'b> { 473 | StrSearcher::new(haystack, self) 474 | } 475 | 476 | fn is_prefix_of(self, haystack: &'a str) -> bool { 477 | haystack.as_bytes().starts_with(self.as_bytes()) 478 | } 479 | 480 | fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> { 481 | if self.is_prefix_of(haystack) { 482 | unsafe { Some(haystack.get_unchecked(self.as_bytes().len()..)) } 483 | } else { 484 | None 485 | } 486 | } 487 | 488 | fn is_suffix_of(self, haystack: &'a str) -> bool { 489 | haystack.as_bytes().ends_with(self.as_bytes()) 490 | } 491 | 492 | fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> { 493 | if self.is_suffix_of(haystack) { 494 | let i = haystack.len() - self.as_bytes().len(); 495 | unsafe { Some(haystack.get_unchecked(..i)) } 496 | } else { 497 | None 498 | } 499 | } 500 | } 501 | 502 | #[allow(unreachable_pub)] 503 | #[derive(Clone, Debug)] 504 | pub struct StrSearcher<'a, 'b> { 505 | haystack: &'a str, 506 | needle: &'b str, 507 | 508 | searcher: 
StrSearcherImpl, 509 | } 510 | 511 | #[derive(Clone, Debug)] 512 | enum StrSearcherImpl { 513 | Empty(EmptyNeedle), 514 | TwoWay(TwoWaySearcher), 515 | } 516 | 517 | #[derive(Clone, Debug)] 518 | struct EmptyNeedle { 519 | position: usize, 520 | end: usize, 521 | is_match_fw: bool, 522 | is_match_bw: bool, 523 | } 524 | 525 | impl<'a, 'b> StrSearcher<'a, 'b> { 526 | fn new(haystack: &'a str, needle: &'b str) -> StrSearcher<'a, 'b> { 527 | if needle.is_empty() { 528 | StrSearcher { 529 | haystack, 530 | needle, 531 | searcher: StrSearcherImpl::Empty(EmptyNeedle { 532 | position: 0, 533 | end: haystack.len(), 534 | is_match_fw: true, 535 | is_match_bw: true, 536 | }), 537 | } 538 | } else { 539 | StrSearcher { 540 | haystack, 541 | needle, 542 | searcher: StrSearcherImpl::TwoWay(TwoWaySearcher::new( 543 | needle.as_bytes(), 544 | haystack.len(), 545 | )), 546 | } 547 | } 548 | } 549 | } 550 | 551 | unsafe impl<'a, 'b> Searcher<'a> for StrSearcher<'a, 'b> { 552 | fn haystack(&self) -> &'a str { 553 | self.haystack 554 | } 555 | 556 | fn next(&mut self) -> SearchStep { 557 | match self.searcher { 558 | StrSearcherImpl::Empty(ref mut searcher) => { 559 | let is_match = searcher.is_match_fw; 560 | searcher.is_match_fw = !searcher.is_match_fw; 561 | let pos = searcher.position; 562 | match self.haystack[pos..].chars().next() { 563 | _ if is_match => SearchStep::Match(pos, pos), 564 | None => SearchStep::Done, 565 | Some(ch) => { 566 | searcher.position += ch.len_utf8(); 567 | SearchStep::Reject(pos, searcher.position) 568 | } 569 | } 570 | } 571 | StrSearcherImpl::TwoWay(ref mut searcher) => { 572 | if searcher.position == self.haystack.len() { 573 | return SearchStep::Done; 574 | } 575 | let is_long = searcher.memory == usize::MAX; 576 | match searcher.next::( 577 | self.haystack.as_bytes(), 578 | self.needle.as_bytes(), 579 | is_long, 580 | ) { 581 | SearchStep::Reject(a, mut b) => { 582 | while !self.haystack.is_char_boundary(b) { 583 | b += 1; 584 | } 585 | searcher.position = cmp::max(b, searcher.position); 586 | SearchStep::Reject(a, b) 587 | } 588 | otherwise => otherwise, 589 | } 590 | } 591 | } 592 | } 593 | 594 | fn next_match(&mut self) -> Option<(usize, usize)> { 595 | match self.searcher { 596 | StrSearcherImpl::Empty(..) => loop { 597 | match self.next() { 598 | SearchStep::Match(a, b) => return Some((a, b)), 599 | SearchStep::Done => return None, 600 | SearchStep::Reject(..) 
=> {} 601 | } 602 | }, 603 | StrSearcherImpl::TwoWay(ref mut searcher) => { 604 | let is_long = searcher.memory == usize::MAX; 605 | if is_long { 606 | searcher.next::( 607 | self.haystack.as_bytes(), 608 | self.needle.as_bytes(), 609 | true, 610 | ) 611 | } else { 612 | searcher.next::( 613 | self.haystack.as_bytes(), 614 | self.needle.as_bytes(), 615 | false, 616 | ) 617 | } 618 | } 619 | } 620 | } 621 | } 622 | 623 | unsafe impl<'a, 'b> ReverseSearcher<'a> for StrSearcher<'a, 'b> { 624 | fn next_back(&mut self) -> SearchStep { 625 | match self.searcher { 626 | StrSearcherImpl::Empty(ref mut searcher) => { 627 | let is_match = searcher.is_match_bw; 628 | searcher.is_match_bw = !searcher.is_match_bw; 629 | let end = searcher.end; 630 | match self.haystack[..end].chars().next_back() { 631 | _ if is_match => SearchStep::Match(end, end), 632 | None => SearchStep::Done, 633 | Some(ch) => { 634 | searcher.end -= ch.len_utf8(); 635 | SearchStep::Reject(searcher.end, end) 636 | } 637 | } 638 | } 639 | StrSearcherImpl::TwoWay(ref mut searcher) => { 640 | if searcher.end == 0 { 641 | return SearchStep::Done; 642 | } 643 | let is_long = searcher.memory == usize::MAX; 644 | match searcher.next_back::( 645 | self.haystack.as_bytes(), 646 | self.needle.as_bytes(), 647 | is_long, 648 | ) { 649 | SearchStep::Reject(mut a, b) => { 650 | while !self.haystack.is_char_boundary(a) { 651 | a -= 1; 652 | } 653 | searcher.end = cmp::min(a, searcher.end); 654 | SearchStep::Reject(a, b) 655 | } 656 | otherwise => otherwise, 657 | } 658 | } 659 | } 660 | } 661 | 662 | fn next_match_back(&mut self) -> Option<(usize, usize)> { 663 | match self.searcher { 664 | StrSearcherImpl::Empty(..) => loop { 665 | match self.next_back() { 666 | SearchStep::Match(a, b) => return Some((a, b)), 667 | SearchStep::Done => return None, 668 | SearchStep::Reject(..) 
=> {} 669 | } 670 | }, 671 | StrSearcherImpl::TwoWay(ref mut searcher) => { 672 | let is_long = searcher.memory == usize::MAX; 673 | if is_long { 674 | searcher.next_back::( 675 | self.haystack.as_bytes(), 676 | self.needle.as_bytes(), 677 | true, 678 | ) 679 | } else { 680 | searcher.next_back::( 681 | self.haystack.as_bytes(), 682 | self.needle.as_bytes(), 683 | false, 684 | ) 685 | } 686 | } 687 | } 688 | } 689 | } 690 | 691 | #[derive(Clone, Debug)] 692 | struct TwoWaySearcher { 693 | crit_pos: usize, 694 | crit_pos_back: usize, 695 | period: usize, 696 | byteset: u64, 697 | position: usize, 698 | end: usize, 699 | memory: usize, 700 | memory_back: usize, 701 | } 702 | 703 | impl TwoWaySearcher { 704 | fn new(needle: &[u8], end: usize) -> TwoWaySearcher { 705 | let (crit_pos_false, period_false) = TwoWaySearcher::maximal_suffix(needle, false); 706 | let (crit_pos_true, period_true) = TwoWaySearcher::maximal_suffix(needle, true); 707 | 708 | let (crit_pos, period) = if crit_pos_false > crit_pos_true { 709 | (crit_pos_false, period_false) 710 | } else { 711 | (crit_pos_true, period_true) 712 | }; 713 | 714 | if needle[..crit_pos] == needle[period..period + crit_pos] { 715 | let crit_pos_back = needle.len() 716 | - cmp::max( 717 | TwoWaySearcher::reverse_maximal_suffix(needle, period, false), 718 | TwoWaySearcher::reverse_maximal_suffix(needle, period, true), 719 | ); 720 | 721 | TwoWaySearcher { 722 | crit_pos, 723 | crit_pos_back, 724 | period, 725 | byteset: Self::byteset_create(&needle[..period]), 726 | 727 | position: 0, 728 | end, 729 | memory: 0, 730 | memory_back: needle.len(), 731 | } 732 | } else { 733 | TwoWaySearcher { 734 | crit_pos, 735 | crit_pos_back: crit_pos, 736 | period: cmp::max(crit_pos, needle.len() - crit_pos) + 1, 737 | byteset: Self::byteset_create(needle), 738 | 739 | position: 0, 740 | end, 741 | memory: usize::MAX, 742 | memory_back: usize::MAX, 743 | } 744 | } 745 | } 746 | 747 | fn byteset_create(bytes: &[u8]) -> u64 { 748 | bytes.iter().fold(0, |a, &b| (1 << (b & 0x3f)) | a) 749 | } 750 | 751 | fn byteset_contains(&self, byte: u8) -> bool { 752 | (self.byteset >> ((byte & 0x3f) as usize)) & 1 != 0 753 | } 754 | 755 | fn next(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) -> S::Output 756 | where S: TwoWayStrategy { 757 | let old_pos = self.position; 758 | let needle_last = needle.len() - 1; 759 | 'search: loop { 760 | let tail_byte = match haystack.get(self.position + needle_last) { 761 | Some(&b) => b, 762 | None => { 763 | self.position = haystack.len(); 764 | return S::rejecting(old_pos, self.position); 765 | } 766 | }; 767 | 768 | if S::use_early_reject() && old_pos != self.position { 769 | return S::rejecting(old_pos, self.position); 770 | } 771 | 772 | if !self.byteset_contains(tail_byte) { 773 | self.position += needle.len(); 774 | if !long_period { 775 | self.memory = 0; 776 | } 777 | continue 'search; 778 | } 779 | 780 | let start = 781 | if long_period { self.crit_pos } else { cmp::max(self.crit_pos, self.memory) }; 782 | for i in start..needle.len() { 783 | if needle[i] != haystack[self.position + i] { 784 | self.position += i - self.crit_pos + 1; 785 | if !long_period { 786 | self.memory = 0; 787 | } 788 | continue 'search; 789 | } 790 | } 791 | 792 | let start = if long_period { 0 } else { self.memory }; 793 | for i in (start..self.crit_pos).rev() { 794 | if needle[i] != haystack[self.position + i] { 795 | self.position += self.period; 796 | if !long_period { 797 | self.memory = needle.len() - self.period; 798 | } 799 | continue 
'search; 800 | } 801 | } 802 | 803 | let match_pos = self.position; 804 | 805 | self.position += needle.len(); 806 | if !long_period { 807 | self.memory = 0; 808 | } 809 | 810 | return S::matching(match_pos, match_pos + needle.len()); 811 | } 812 | } 813 | 814 | fn next_back(&mut self, haystack: &[u8], needle: &[u8], long_period: bool) -> S::Output 815 | where S: TwoWayStrategy { 816 | let old_end = self.end; 817 | 'search: loop { 818 | let front_byte = match haystack.get(self.end.wrapping_sub(needle.len())) { 819 | Some(&b) => b, 820 | None => { 821 | self.end = 0; 822 | return S::rejecting(0, old_end); 823 | } 824 | }; 825 | 826 | if S::use_early_reject() && old_end != self.end { 827 | return S::rejecting(self.end, old_end); 828 | } 829 | 830 | if !self.byteset_contains(front_byte) { 831 | self.end -= needle.len(); 832 | if !long_period { 833 | self.memory_back = needle.len(); 834 | } 835 | continue 'search; 836 | } 837 | 838 | let crit = if long_period { 839 | self.crit_pos_back 840 | } else { 841 | cmp::min(self.crit_pos_back, self.memory_back) 842 | }; 843 | for i in (0..crit).rev() { 844 | if needle[i] != haystack[self.end - needle.len() + i] { 845 | self.end -= self.crit_pos_back - i; 846 | if !long_period { 847 | self.memory_back = needle.len(); 848 | } 849 | continue 'search; 850 | } 851 | } 852 | 853 | let needle_end = if long_period { needle.len() } else { self.memory_back }; 854 | for i in self.crit_pos_back..needle_end { 855 | if needle[i] != haystack[self.end - needle.len() + i] { 856 | self.end -= self.period; 857 | if !long_period { 858 | self.memory_back = self.period; 859 | } 860 | continue 'search; 861 | } 862 | } 863 | 864 | let match_pos = self.end - needle.len(); 865 | self.end -= needle.len(); 866 | if !long_period { 867 | self.memory_back = needle.len(); 868 | } 869 | 870 | return S::matching(match_pos, match_pos + needle.len()); 871 | } 872 | } 873 | 874 | fn maximal_suffix(arr: &[u8], order_greater: bool) -> (usize, usize) { 875 | let mut left = 0; 876 | let mut right = 1; 877 | let mut offset = 0; 878 | let mut period = 1; 879 | 880 | while let Some(&a) = arr.get(right + offset) { 881 | let b = arr[left + offset]; 882 | if (a < b && !order_greater) || (a > b && order_greater) { 883 | right += offset + 1; 884 | offset = 0; 885 | period = right - left; 886 | } else if a == b { 887 | if offset + 1 == period { 888 | right += offset + 1; 889 | offset = 0; 890 | } else { 891 | offset += 1; 892 | } 893 | } else { 894 | left = right; 895 | right += 1; 896 | offset = 0; 897 | period = 1; 898 | } 899 | } 900 | (left, period) 901 | } 902 | 903 | fn reverse_maximal_suffix(arr: &[u8], known_period: usize, order_greater: bool) -> usize { 904 | let mut left = 0; 905 | let mut right = 1; 906 | let mut offset = 0; 907 | let mut period = 1; 908 | let n = arr.len(); 909 | 910 | while right + offset < n { 911 | let a = arr[n - (1 + right + offset)]; 912 | let b = arr[n - (1 + left + offset)]; 913 | if (a < b && !order_greater) || (a > b && order_greater) { 914 | right += offset + 1; 915 | offset = 0; 916 | period = right - left; 917 | } else if a == b { 918 | if offset + 1 == period { 919 | right += offset + 1; 920 | offset = 0; 921 | } else { 922 | offset += 1; 923 | } 924 | } else { 925 | left = right; 926 | right += 1; 927 | offset = 0; 928 | period = 1; 929 | } 930 | if period == known_period { 931 | break; 932 | } 933 | } 934 | debug_assert!(period <= known_period); 935 | left 936 | } 937 | } 938 | 939 | trait TwoWayStrategy { 940 | type Output; 941 | fn use_early_reject() -> 
bool; 942 | fn rejecting(a: usize, b: usize) -> Self::Output; 943 | fn matching(a: usize, b: usize) -> Self::Output; 944 | } 945 | 946 | enum MatchOnly {} 947 | 948 | impl TwoWayStrategy for MatchOnly { 949 | type Output = Option<(usize, usize)>; 950 | 951 | fn use_early_reject() -> bool { 952 | false 953 | } 954 | fn rejecting(_a: usize, _b: usize) -> Self::Output { 955 | None 956 | } 957 | fn matching(a: usize, b: usize) -> Self::Output { 958 | Some((a, b)) 959 | } 960 | } 961 | 962 | enum RejectAndMatch {} 963 | 964 | impl TwoWayStrategy for RejectAndMatch { 965 | type Output = SearchStep; 966 | 967 | fn use_early_reject() -> bool { 968 | true 969 | } 970 | fn rejecting(a: usize, b: usize) -> Self::Output { 971 | SearchStep::Reject(a, b) 972 | } 973 | fn matching(a: usize, b: usize) -> Self::Output { 974 | SearchStep::Match(a, b) 975 | } 976 | } 977 | --------------------------------------------------------------------------------
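A closing usage sketch (illustrative only, not part of the repository): the partitioning code in `src/inherent/v1_49/sort.rs` and the `Pattern`/`Searcher` machinery in `src/pattern` exist to back shimmed methods such as `slice::select_nth_unstable` (1.49) and `str::strip_prefix`/`str::strip_suffix` (1.45), which downstream code calls as if they were inherent:

```rust
#![allow(unstable_name_collisions)]
use standback::shim::*;

fn main() {
    // Backed by the partition/quickselect code vendored in `inherent/v1_49/sort.rs`.
    let mut v = [5, 1, 4, 2, 3];
    let (_lower, nth, _upper) = v.select_nth_unstable(2);
    assert_eq!(*nth, 3);

    // Backed by the `Pattern`/`Searcher` implementation in `src/pattern`.
    assert_eq!("standback".strip_prefix("stand"), Some("back"));
    assert_eq!("standback".strip_suffix("back"), Some("stand"));
}
```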