├── static-tests ├── src │ └── lib.rs ├── Cargo.toml └── tests │ └── tests.rs ├── .gitignore ├── src ├── ext_indexmap_v1.rs ├── ext_indexmap_v2.rs ├── ext_uuid.rs ├── ext_regex.rs ├── ext_chrono.rs ├── context.rs ├── ext_smallvec.rs ├── utils.rs ├── ext_indexmap.rs ├── readable_unsized_impl.rs ├── endianness.rs ├── writer.rs ├── private.rs ├── error.rs ├── writable.rs ├── lib.rs ├── ext_glam.rs ├── varint.rs ├── circular_buffer.rs ├── readable_impl.rs ├── writable_impl.rs └── readable.rs ├── ci └── run_tests.sh ├── speedy-derive └── Cargo.toml ├── LICENSE-MIT ├── Cargo.toml ├── README.md ├── benches └── bench.rs └── LICENSE-APACHE /static-tests/src/lib.rs: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/*.rs.bk 2 | target 3 | Cargo.lock 4 | speedy-derive/target 5 | speedy-derive/Cargo.lock 6 | -------------------------------------------------------------------------------- /src/ext_indexmap_v1.rs: -------------------------------------------------------------------------------- 1 | use { 2 | indexmap_v1::{ 3 | IndexMap, 4 | IndexSet 5 | } 6 | }; 7 | 8 | include!("ext_indexmap.rs"); 9 | -------------------------------------------------------------------------------- /src/ext_indexmap_v2.rs: -------------------------------------------------------------------------------- 1 | use { 2 | indexmap_v2::{ 3 | IndexMap, 4 | IndexSet 5 | } 6 | }; 7 | 8 | include!("ext_indexmap.rs"); 9 | -------------------------------------------------------------------------------- /static-tests/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "static-tests" 3 | version = "0.1.0" 4 | authors = ["Jan Bujak "] 5 | edition = "2018" 6 | publish = false 7 | 8 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 9 | 10 | [dev-dependencies] 11 | static_test = "0.1" 12 | speedy = { path = ".." } 13 | -------------------------------------------------------------------------------- /ci/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | IFS=$'\n\t' 5 | 6 | export RUST_BACKTRACE=1 7 | export QUICKCHECK_TESTS=5000 8 | 9 | set +e 10 | echo "$(rustc --version)" | grep -q "nightly" 11 | if [ "$?" 
= "0" ]; then 12 | export IS_NIGHTLY=1 13 | else 14 | export IS_NIGHTLY=0 15 | fi 16 | set -e 17 | 18 | echo "Is Rust from nightly: $IS_NIGHTLY" 19 | 20 | cargo check --no-default-features 21 | cargo build 22 | 23 | if [ "$IS_NIGHTLY" = "1" ]; then 24 | cargo test --features external_doc 25 | cargo test -p static-tests --release 26 | else 27 | cargo test 28 | fi 29 | 30 | cd speedy-derive 31 | cargo test 32 | -------------------------------------------------------------------------------- /speedy-derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "speedy-derive" 3 | version = "0.8.7" 4 | authors = ["Jan Bujak "] 5 | repository = "https://github.com/koute/speedy" 6 | homepage = "https://github.com/koute/speedy" 7 | documentation = "https://docs.rs/speedy/*/speedy/" 8 | license = "MIT/Apache-2.0" 9 | keywords = ["serialization"] 10 | categories = ["encoding"] 11 | description = "A fast binary serialization framework, #[derive(Readable, Writable)] support" 12 | edition = "2018" 13 | 14 | [lib] 15 | proc-macro = true 16 | 17 | [dependencies] 18 | syn = { version = "2.0.2", features = ["parsing"] } 19 | quote = "1" 20 | proc-macro2 = "1" 21 | -------------------------------------------------------------------------------- /src/ext_uuid.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Context, 4 | Readable, 5 | Reader, 6 | Writable, 7 | Writer 8 | }, 9 | uuid::Uuid 10 | }; 11 | 12 | 13 | const UUID_SIZE: usize = 16; 14 | 15 | impl< 'a, C > Readable< 'a, C > for Uuid 16 | where C: Context 17 | { 18 | #[inline] 19 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 20 | let mut buffer = [0; UUID_SIZE]; 21 | reader.read_bytes(&mut buffer)?; 22 | Ok(Uuid::from_bytes(buffer)) 23 | } 24 | 25 | #[inline] 26 | fn minimum_bytes_needed() -> usize { 27 | UUID_SIZE 28 | } 29 | } 30 | 31 | 32 | impl< C > Writable< C > for Uuid 33 | where C: Context 34 | { 35 | #[inline] 36 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 37 | writer.write_bytes(self.as_bytes()) 38 | } 39 | 40 | #[inline] 41 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 42 | Ok(UUID_SIZE) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017 Jan Bujak 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /src/ext_regex.rs: -------------------------------------------------------------------------------- 1 | use { 2 | regex::{ 3 | Regex 4 | }, 5 | crate::{ 6 | Context, 7 | Readable, 8 | Reader, 9 | Writable, 10 | Writer 11 | } 12 | }; 13 | 14 | 15 | impl< 'a, C > Readable< 'a, C > for Regex 16 | where C: Context 17 | { 18 | #[inline] 19 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 20 | let regex: std::borrow::Cow< str > = reader.read_value()?; 21 | Regex::new( ®ex ).map_err( |error| { 22 | crate::error::Error::custom( format!( "failed to read a regex: {}", error ) ).into() 23 | }) 24 | } 25 | 26 | #[inline] 27 | fn minimum_bytes_needed() -> usize { 28 | 4 29 | } 30 | } 31 | 32 | 33 | impl< C > Writable< C > for Regex 34 | where C: Context 35 | { 36 | #[inline] 37 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 38 | self.as_str().write_to( writer ) 39 | } 40 | 41 | #[inline] 42 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 43 | Writable::< C >::bytes_needed( self.as_str() ) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/ext_chrono.rs: -------------------------------------------------------------------------------- 1 | use { 2 | chrono::{ 3 | DateTime, 4 | TimeZone, 5 | Utc 6 | }, 7 | crate::{ 8 | Context, 9 | Readable, 10 | Reader, 11 | Writable, 12 | Writer 13 | } 14 | }; 15 | 16 | impl< 'a, C > Readable< 'a, C > for DateTime< Utc > 17 | where C: Context 18 | { 19 | #[inline] 20 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 21 | let seconds = reader.read_i64()?; 22 | let subsec_nanos = reader.read_u32()?; 23 | Ok( Utc.timestamp( seconds, subsec_nanos ) ) 24 | } 25 | 26 | #[inline] 27 | fn minimum_bytes_needed() -> usize { 28 | 12 29 | } 30 | } 31 | 32 | 33 | impl< C > Writable< C > for DateTime< Utc > 34 | where C: Context 35 | { 36 | #[inline] 37 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 38 | writer.write_i64( self.timestamp() )?; 39 | writer.write_u32( self.timestamp_subsec_nanos() ) 40 | } 41 | 42 | #[inline] 43 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 44 | Ok( 12 ) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/context.rs: -------------------------------------------------------------------------------- 1 | use crate::endianness::Endianness; 2 | 3 | pub trait Context { 4 | type Error: From< crate::Error > + crate::IsEof; 5 | fn endianness( &self ) -> Endianness; 6 | } 7 | 8 | impl Context for Endianness { 9 | type Error = crate::Error; 10 | 11 | #[inline(always)] 12 | fn endianness( &self ) -> Endianness { 13 | *self 14 | } 15 | } 16 | 17 | #[derive(Default)] 18 | pub struct LittleEndian {} 19 | 20 | #[derive(Default)] 21 | pub struct BigEndian {} 22 | 23 | impl Context for LittleEndian { 24 | type Error = crate::Error; 25 | 26 | #[inline(always)] 27 | fn endianness( &self ) -> Endianness { 28 | Endianness::LittleEndian 29 | } 30 | } 31 | 32 | impl Context for BigEndian { 33 | type Error = crate::Error; 34 | 
35 | #[inline(always)] 36 | fn endianness( &self ) -> Endianness { 37 | Endianness::BigEndian 38 | } 39 | } 40 | 41 | #[cfg(target_endian = "little")] 42 | pub use LittleEndian as NativeContext; 43 | 44 | #[cfg(target_endian = "big")] 45 | pub use BigEndian as NativeContext; 46 | 47 | pub trait DefaultContext { 48 | type Context; 49 | } 50 | 51 | impl< T > DefaultContext for T where T: ?Sized { 52 | type Context = LittleEndian; 53 | } 54 | -------------------------------------------------------------------------------- /src/ext_smallvec.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Context, 4 | Readable, 5 | Reader, 6 | Writable, 7 | Writer 8 | }, 9 | smallvec::{ 10 | Array, 11 | SmallVec 12 | } 13 | }; 14 | 15 | impl< 'a, C, A > Readable< 'a, C > for SmallVec< A > 16 | where C: Context, 17 | A: Array, 18 | ::Item: Readable< 'a, C > 19 | { 20 | #[inline] 21 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 22 | let length = crate::private::read_length( reader )?; 23 | // TODO: This can be more efficient if we directly read into the `SmallVec`. 24 | let value = reader.read_vec( length )?; 25 | Ok( value.into() ) 26 | } 27 | 28 | #[inline] 29 | fn minimum_bytes_needed() -> usize { 30 | 4 31 | } 32 | } 33 | 34 | 35 | impl< C, A > Writable< C > for SmallVec< A > 36 | where C: Context, 37 | A: Array, 38 | ::Item: Writable< C > 39 | { 40 | #[inline] 41 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 42 | self.as_slice().write_to( writer ) 43 | } 44 | 45 | #[inline] 46 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 47 | Writable::< C >::bytes_needed( self.as_slice() ) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "speedy" 3 | version = "0.8.7" 4 | authors = ["Jan Bujak "] 5 | repository = "https://github.com/koute/speedy" 6 | homepage = "https://github.com/koute/speedy" 7 | documentation = "https://docs.rs/speedy/*/speedy/" 8 | license = "MIT/Apache-2.0" 9 | readme = "README.md" 10 | keywords = ["serialization"] 11 | categories = ["encoding"] 12 | description = "A fast binary serialization framework" 13 | edition = "2018" 14 | 15 | [dependencies] 16 | memoffset = "0.9" 17 | speedy-derive = { version = "= 0.8.7", path = "speedy-derive", optional = true } 18 | chrono = { version = "0.4", optional = true } 19 | glam = { version = ">= 0.15, <= 0.29", optional = true } 20 | smallvec = { version = "1", optional = true } 21 | regex = { version = "1", optional = true, default-features = false } 22 | uuid = {version = "1", optional = true, default-features = false } 23 | indexmap_v1 = { package = "indexmap", version = "1", optional = true } 24 | indexmap_v2 = { package = "indexmap", version = "2", optional = true } 25 | 26 | [dev-dependencies] 27 | quickcheck = "1" 28 | paste = "1" 29 | tempfile = "3" 30 | 31 | [profile.release] 32 | panic = "abort" 33 | 34 | [workspace] 35 | members = [".", "speedy-derive", "static-tests"] 36 | 37 | [features] 38 | default = ["speedy-derive", "std"] 39 | external_doc = [] 40 | indexmap_v1 = ["dep:indexmap_v1"] 41 | indexmap_v2 = ["dep:indexmap_v2"] 42 | indexmap = ["indexmap_v1"] 43 | std = ["alloc"] 44 | alloc = [] 45 | 46 | [package.metadata.docs.rs] 47 | features = ["external_doc"] 48 | 
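The `indexmap_v1` / `indexmap_v2` entries above are optional integrations gated behind the features of the same names. A minimal usage sketch for a downstream crate, assuming the `indexmap_v2` feature is enabled on `speedy` and `indexmap` 2.x is also a direct dependency (the map type and key/value choices here are illustrative):

```rust
use indexmap::IndexMap;
use speedy::{Readable, Writable};

fn main() {
    let mut map: IndexMap< String, u32 > = IndexMap::new();
    map.insert( "answer".to_string(), 42 );

    // Serialized as `{length: u32, values: [K, V]}`, the same layout as `HashMap`.
    let bytes = map.write_to_vec().unwrap();
    let decoded: IndexMap< String, u32 > = IndexMap::read_from_buffer( &bytes ).unwrap();
    assert_eq!( map, decoded );
}
```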
-------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | macro_rules! unsafe_is_length { 2 | ($expr:expr) => { 3 | if $expr as u64 >= 0x7FFFFFFF_FFFFFFFF { 4 | // It's not physically possible to have a valid slice 5 | // which is bigger than 8 exabytes. No machine exists 6 | // which could have this much RAM, and you can't even 7 | // have this much virtual address space on any modern CPU. 8 | // 9 | // So in practice this should be totally harmless while 10 | // it will allow the LLVM optimizer to better do its job. 11 | // 12 | // It actually *does* affect optimization in practice 13 | // allowing LLVM to assume the length won't overflow 14 | // in certain cases. 15 | unsafe { core::hint::unreachable_unchecked() } 16 | } 17 | } 18 | } 19 | 20 | // TODO: Remove the T parameter once #![feature(trivial_bounds)] is stable. 21 | pub unsafe trait ZeroCopyable< C, T > where T: ?Sized {} 22 | unsafe impl< C, T > ZeroCopyable< C, T > for i8 {} 23 | unsafe impl< C, T > ZeroCopyable< C, T > for u8 {} 24 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for i16 {} 25 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for u16 {} 26 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for i32 {} 27 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for u32 {} 28 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for i64 {} 29 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for u64 {} 30 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for i128 {} 31 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for u128 {} 32 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for f32 {} 33 | unsafe impl< T > ZeroCopyable< crate::context::NativeContext, T > for f64 {} 34 | 35 | pub trait SwapBytes { 36 | fn swap_bytes( self ) -> Self; 37 | } 38 | 39 | impl SwapBytes for f32 { 40 | #[inline(always)] 41 | fn swap_bytes( self ) -> Self { 42 | union Union { 43 | float: f32, 44 | int: u32 45 | } 46 | 47 | unsafe { 48 | let mut u = Union { float: self }; 49 | u.int = u.int.swap_bytes(); 50 | u.float 51 | } 52 | } 53 | } 54 | 55 | impl SwapBytes for f64 { 56 | #[inline(always)] 57 | fn swap_bytes( self ) -> Self { 58 | union Union { 59 | float: f64, 60 | int: u64 61 | } 62 | 63 | unsafe { 64 | let mut u = Union { float: self }; 65 | u.int = u.int.swap_bytes(); 66 | u.float 67 | } 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/ext_indexmap.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Context, 4 | Readable, 5 | Reader, 6 | Writable, 7 | Writer, 8 | private::{ 9 | write_length 10 | } 11 | }, 12 | std::{ 13 | hash::{ 14 | BuildHasher, 15 | Hash 16 | } 17 | } 18 | }; 19 | 20 | impl< 'a, C, K, V, S > Readable< 'a, C > for IndexMap< K, V, S > 21 | where C: Context, 22 | K: Readable< 'a, C > + Eq + Hash, 23 | V: Readable< 'a, C >, 24 | S: BuildHasher + Default 25 | { 26 | #[inline] 27 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 28 | let length = crate::private::read_length( reader )?; 29 | reader.read_collection( length ) 30 | } 31 | 32 | #[inline] 33 | fn minimum_bytes_needed() -> usize { 34 | 4 35 | } 36 | } 37 | 38 | impl< 'a, C, T, S > Readable< 'a, C > for IndexSet< T, S > 39 | where 
C: Context, 40 | T: Readable< 'a, C > + Eq + Hash, 41 | S: BuildHasher + Default 42 | { 43 | #[inline] 44 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 45 | let length = crate::private::read_length( reader )?; 46 | reader.read_collection( length ) 47 | } 48 | 49 | #[inline] 50 | fn minimum_bytes_needed() -> usize { 51 | 4 52 | } 53 | } 54 | 55 | impl< C, K, V, S > Writable< C > for IndexMap< K, V, S > 56 | where C: Context, 57 | K: Writable< C >, 58 | V: Writable< C > 59 | { 60 | #[inline] 61 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 62 | write_length( self.len(), writer )?; 63 | writer.write_collection( self.iter() ) 64 | } 65 | 66 | #[inline] 67 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 68 | unsafe_is_length!( self.len() ); 69 | 70 | let mut count = std::mem::size_of::< u32 >(); 71 | for (key, value) in self { 72 | count += key.bytes_needed()? + value.bytes_needed()?; 73 | } 74 | 75 | Ok( count ) 76 | } 77 | } 78 | 79 | impl< C, T, S > Writable< C > for IndexSet< T, S > 80 | where C: Context, 81 | T: Writable< C > 82 | { 83 | #[inline] 84 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 85 | write_length( self.len(), writer )?; 86 | writer.write_collection( self.iter() ) 87 | } 88 | 89 | #[inline] 90 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 91 | unsafe_is_length!( self.len() ); 92 | 93 | let mut count = std::mem::size_of::< u32 >(); 94 | for value in self { 95 | count += value.bytes_needed()?; 96 | } 97 | 98 | Ok( count ) 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /src/readable_unsized_impl.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Readable, 4 | Reader, 5 | Context 6 | } 7 | }; 8 | 9 | 10 | #[cfg(feature = "alloc")] 11 | use alloc::string::String; 12 | 13 | impl< 'a, C: Context > Readable< 'a, C > for &'a str { 14 | #[inline] 15 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 16 | let length = crate::private::read_length( reader )?; 17 | let bytes = reader.read_bytes_borrowed( length ).ok_or_else( crate::error::error_unsized )??; 18 | let value = core::str::from_utf8( bytes ).map_err( crate::error::error_invalid_str_utf8 )?; 19 | Ok( value ) 20 | } 21 | 22 | #[cfg(all(not(feature = "std"), not(feature = "alloc")))] 23 | #[inline] 24 | fn minimum_bytes_needed() -> usize { 25 | 4 26 | } 27 | 28 | #[cfg(feature = "alloc")] 29 | #[inline] 30 | fn minimum_bytes_needed() -> usize { 31 | >::minimum_bytes_needed() 32 | } 33 | } 34 | 35 | impl< 'a, C: Context, T > Readable< 'a, C > for &'a [T] where T: crate::utils::ZeroCopyable< C, T > { 36 | #[inline] 37 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 38 | if core::mem::size_of::< T >() != 1 && reader.endianness().conversion_necessary() { 39 | return Err( crate::error::error_endianness_mismatch() ); 40 | } 41 | 42 | let length = crate::private::read_length( reader )?; 43 | let bytelength = length.checked_mul( core::mem::size_of::< T >() ).ok_or_else( crate::error::error_out_of_range_length )?; 44 | let bytes = reader.read_bytes_borrowed( bytelength ).ok_or_else( crate::error::error_unsized )??; 45 | let slice = unsafe { 46 | core::slice::from_raw_parts( bytes.as_ptr() as *const T, length ) 47 | }; 48 | Ok( slice ) 49 | } 50 | 51 | #[inline] 52 | fn minimum_bytes_needed() -> usize 
{ 53 | 4 54 | } 55 | } 56 | 57 | #[inline(always)] 58 | unsafe fn cast_slice< T, const N: usize >( slice: &[u8] ) -> &[T; N] { 59 | unsafe { 60 | &*(slice.as_ptr() as *const [T; N]) 61 | } 62 | } 63 | 64 | impl< 'a, C: Context, T, const N: usize > Readable< 'a, C > for &'a [T; N] where T: crate::utils::ZeroCopyable< C, T > { 65 | #[inline] 66 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 67 | if core::mem::size_of::< T >() != 1 && reader.endianness().conversion_necessary() { 68 | return Err( crate::error::error_endianness_mismatch() ); 69 | } 70 | 71 | let bytes = reader.read_bytes_borrowed( core::mem::size_of::< T >() * N ).ok_or_else( crate::error::error_unsized )??; 72 | let slice = unsafe { cast_slice( bytes ) }; 73 | Ok( slice ) 74 | } 75 | 76 | #[inline] 77 | fn minimum_bytes_needed() -> usize { 78 | core::mem::size_of::< T >() * N 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/endianness.rs: -------------------------------------------------------------------------------- 1 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] 2 | pub enum Endianness { 3 | LittleEndian, 4 | BigEndian 5 | } 6 | 7 | impl Endianness { 8 | #[cfg( target_endian = "little" )] 9 | pub const NATIVE: Endianness = Endianness::LittleEndian; 10 | 11 | #[cfg( target_endian = "big" )] 12 | pub const NATIVE: Endianness = Endianness::BigEndian; 13 | } 14 | 15 | impl Endianness { 16 | #[inline(always)] 17 | pub fn conversion_necessary( self ) -> bool { 18 | self != Endianness::NATIVE 19 | } 20 | 21 | #[inline(always)] 22 | pub fn swap_slice_u8( self, _: &mut [u8] ) {} 23 | 24 | #[inline(always)] 25 | pub fn swap_slice_i8( self, _: &mut [i8] ) {} 26 | } 27 | 28 | macro_rules! 
emit_wrapper { 29 | ($type:ty, $reader:ident, $swapper:ident, $slice_swapper:ident) => { 30 | impl Endianness { 31 | #[inline] 32 | pub fn $reader( self, slice: &[u8] ) -> $type { 33 | assert!( slice.len() == core::mem::size_of::< $type >() ); 34 | 35 | let mut value: $type = 0; 36 | unsafe { 37 | core::ptr::copy_nonoverlapping( 38 | slice.as_ptr(), 39 | &mut value as *mut $type as *mut u8, 40 | core::mem::size_of::< $type >() 41 | ); 42 | } 43 | 44 | if self.conversion_necessary() { 45 | value = value.swap_bytes(); 46 | } 47 | 48 | value 49 | } 50 | 51 | #[inline] 52 | pub fn $swapper( self, value: &mut $type ) { 53 | if self.conversion_necessary() { 54 | *value = value.swap_bytes(); 55 | } 56 | } 57 | 58 | #[inline] 59 | pub fn $slice_swapper( self, slice: &mut [$type] ) { 60 | if self.conversion_necessary() { 61 | for value in slice { 62 | *value = value.swap_bytes(); 63 | } 64 | } 65 | } 66 | } 67 | } 68 | } 69 | 70 | emit_wrapper!( u16, read_u16, swap_u16, swap_slice_u16 ); 71 | emit_wrapper!( u32, read_u32, swap_u32, swap_slice_u32 ); 72 | emit_wrapper!( u64, read_u64, swap_u64, swap_slice_u64 ); 73 | emit_wrapper!( u128, read_u128, swap_u128, swap_slice_u128 ); 74 | emit_wrapper!( i16, read_i16, swap_i16, swap_slice_i16 ); 75 | emit_wrapper!( i32, read_i32, swap_i32, swap_slice_i32 ); 76 | emit_wrapper!( i64, read_i64, swap_i64, swap_slice_i64 ); 77 | emit_wrapper!( i128, read_i128, swap_i128, swap_slice_i128 ); 78 | 79 | impl Endianness { 80 | #[inline] 81 | pub fn read_f32( self, slice: &[u8] ) -> f32 { 82 | f32::from_bits( self.read_u32( slice ) ) 83 | } 84 | 85 | #[inline] 86 | pub fn read_f64( self, slice: &[u8] ) -> f64 { 87 | f64::from_bits( self.read_u64( slice ) ) 88 | } 89 | 90 | #[inline] 91 | pub fn swap_f32( self, value: &mut f32 ) { 92 | let value = unsafe { 93 | &mut *(value as *mut f32 as *mut u32) 94 | }; 95 | 96 | self.swap_u32( value ); 97 | } 98 | 99 | #[inline] 100 | pub fn swap_f64( self, value: &mut f64 ) { 101 | let value = unsafe { 102 | &mut *(value as *mut f64 as *mut u64) 103 | }; 104 | 105 | self.swap_u64( value ); 106 | } 107 | 108 | #[inline] 109 | pub fn swap_slice_f32( self, slice: &mut [f32] ) { 110 | let slice = unsafe { 111 | core::slice::from_raw_parts_mut( slice.as_mut_ptr() as *mut u32, slice.len() ) 112 | }; 113 | 114 | self.swap_slice_u32( slice ); 115 | } 116 | 117 | #[inline] 118 | pub fn swap_slice_f64( self, slice: &mut [f64] ) { 119 | let slice = unsafe { 120 | core::slice::from_raw_parts_mut( slice.as_mut_ptr() as *mut u64, slice.len() ) 121 | }; 122 | 123 | self.swap_slice_u64( slice ); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/writer.rs: -------------------------------------------------------------------------------- 1 | use core::mem; 2 | 3 | use crate::context::Context; 4 | use crate::endianness::Endianness; 5 | use crate::writable::Writable; 6 | use crate::varint::VarInt64; 7 | 8 | pub trait Writer< C: Context > { 9 | fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error >; 10 | 11 | fn context( &self ) -> &C; 12 | fn context_mut( &mut self ) -> &mut C; 13 | 14 | #[inline(always)] 15 | fn can_write_at_least( &self, _size: usize ) -> Option< bool > { 16 | None 17 | } 18 | 19 | #[inline(always)] 20 | fn write_u8( &mut self, value: u8 ) -> Result< (), C::Error > { 21 | let slice = unsafe { core::slice::from_raw_parts( &value, 1 ) }; 22 | self.write_bytes( slice ) 23 | } 24 | 25 | #[inline(always)] 26 | fn write_u16( &mut self, mut value: u16 ) -> Result< (), 
C::Error > { 27 | self.context().endianness().swap_u16( &mut value ); 28 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u16 as *const u8, 2 ) }; 29 | self.write_bytes( slice ) 30 | } 31 | 32 | #[inline(always)] 33 | fn write_u32( &mut self, mut value: u32 ) -> Result< (), C::Error > { 34 | self.context().endianness().swap_u32( &mut value ); 35 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u32 as *const u8, 4 ) }; 36 | self.write_bytes( slice ) 37 | } 38 | 39 | #[inline(always)] 40 | fn write_u64( &mut self, mut value: u64 ) -> Result< (), C::Error > { 41 | self.context().endianness().swap_u64( &mut value ); 42 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 8 ) }; 43 | self.write_bytes( slice ) 44 | } 45 | 46 | #[inline(always)] 47 | fn write_u128( &mut self, mut value: u128 ) -> Result< (), C::Error > { 48 | self.context().endianness().swap_u128( &mut value ); 49 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u128 as *const u8, 16 ) }; 50 | self.write_bytes( slice ) 51 | } 52 | 53 | #[inline(always)] 54 | fn write_i8( &mut self, value: i8 ) -> Result< (), C::Error > { 55 | self.write_u8( value as u8 ) 56 | } 57 | 58 | #[inline(always)] 59 | fn write_i16( &mut self, value: i16 ) -> Result< (), C::Error > { 60 | self.write_u16( value as u16 ) 61 | } 62 | 63 | #[inline(always)] 64 | fn write_i32( &mut self, value: i32 ) -> Result< (), C::Error > { 65 | self.write_u32( value as u32 ) 66 | } 67 | 68 | #[inline(always)] 69 | fn write_i64( &mut self, value: i64 ) -> Result< (), C::Error > { 70 | self.write_u64( value as u64 ) 71 | } 72 | 73 | #[inline(always)] 74 | fn write_i128( &mut self, value: i128 ) -> Result< (), C::Error > { 75 | self.write_u128( value as u128 ) 76 | } 77 | 78 | #[inline(always)] 79 | fn write_f32( &mut self, value: f32 ) -> Result< (), C::Error > { 80 | self.write_u32( value.to_bits() ) 81 | } 82 | 83 | #[inline(always)] 84 | fn write_f64( &mut self, value: f64 ) -> Result< (), C::Error > { 85 | self.write_u64( value.to_bits() ) 86 | } 87 | 88 | #[inline(always)] 89 | fn endianness( &self ) -> Endianness { 90 | self.context().endianness() 91 | } 92 | 93 | #[inline(always)] 94 | fn write_value< T: Writable< C > >( &mut self, item: &T ) -> Result< (), C::Error > { 95 | item.write_to( self ) 96 | } 97 | 98 | #[inline] 99 | fn write_slice< T >( &mut self, slice: &[T] ) -> Result< (), C::Error > 100 | where T: Writable< C > 101 | { 102 | if T::speedy_is_primitive() && (mem::size_of::< T >() == 1 || !self.endianness().conversion_necessary()) { 103 | let bytes = unsafe { T::speedy_slice_as_bytes( slice ) }; 104 | self.write_bytes( bytes ) 105 | } else { 106 | for value in slice { 107 | self.write_value( value )?; 108 | } 109 | Ok(()) 110 | } 111 | } 112 | 113 | #[inline] 114 | fn write_collection< T >( &mut self, collection: impl IntoIterator< Item = T > ) -> Result< (), C::Error > 115 | where T: Writable< C > 116 | { 117 | for item in collection { 118 | item.write_to( self )?; 119 | } 120 | 121 | Ok(()) 122 | } 123 | 124 | #[inline] 125 | fn write_u64_varint( &mut self, value: u64 ) -> Result< (), C::Error > { 126 | VarInt64::from( value ).write_to( self ) 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/private.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Context, 4 | Readable, 5 | Reader, 6 | Writable, 7 | Writer, 8 | }, 9 | }; 10 | 11 | 
#[cfg(feature = "alloc")] 12 | use crate::{Error, error::{error_expected_constant, error_invalid_string_utf8}}; 13 | 14 | #[cfg(feature = "alloc")] 15 | use alloc::{borrow::Cow, string::String, vec::Vec}; 16 | 17 | pub use crate::varint::VarInt64; 18 | pub use crate::error::{ 19 | ErrorKind, 20 | 21 | error_length_is_not_the_same_as_length_attribute, 22 | error_out_of_range_length, 23 | error_invalid_enum_variant, 24 | error_invalid_str_utf8, 25 | error_unsized, 26 | error_endianness_mismatch, 27 | 28 | get_error_kind, 29 | }; 30 | pub use crate::utils::ZeroCopyable; 31 | 32 | pub use memoffset::offset_of; 33 | 34 | #[cfg(feature = "alloc")] 35 | #[inline] 36 | pub fn vec_to_string< E >( bytes: Vec< u8 > ) -> Result< String, E > where E: From< Error > { 37 | String::from_utf8( bytes ).map_err( error_invalid_string_utf8 ) 38 | } 39 | 40 | #[cfg(feature = "alloc")] 41 | #[inline] 42 | pub fn cow_bytes_to_cow_str< E >( bytes: Cow<'_, [u8] > ) -> Result< Cow<'_, str >, E > where E: From< Error > { 43 | match bytes { 44 | Cow::Borrowed( bytes ) => { 45 | core::str::from_utf8( bytes ) 46 | .map( Cow::Borrowed ) 47 | .map_err( error_invalid_str_utf8 ) 48 | }, 49 | Cow::Owned( bytes ) => { 50 | String::from_utf8( bytes ) 51 | .map( Cow::Owned ) 52 | .map_err( error_invalid_string_utf8 ) 53 | } 54 | } 55 | } 56 | 57 | #[inline] 58 | pub fn write_length_u64_varint< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 59 | where C: Context, 60 | W: ?Sized + Writer< C > 61 | { 62 | let length = VarInt64::from( length as u64 ); 63 | length.write_to( writer ) 64 | } 65 | 66 | #[inline] 67 | pub fn write_length_u64< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 68 | where C: Context, 69 | W: ?Sized + Writer< C > 70 | { 71 | writer.write_u64( length as u64 ) 72 | } 73 | 74 | #[inline] 75 | pub fn write_length_u32< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 76 | where C: Context, 77 | W: ?Sized + Writer< C > 78 | { 79 | if length as u64 > core::u32::MAX as u64 { 80 | return Err( error_out_of_range_length() ); 81 | } 82 | 83 | writer.write_u32( length as u32 ) 84 | } 85 | 86 | #[inline] 87 | pub fn write_length_u16< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 88 | where C: Context, 89 | W: ?Sized + Writer< C > 90 | { 91 | if length as u64 > core::u16::MAX as u64 { 92 | return Err( error_out_of_range_length() ); 93 | } 94 | 95 | writer.write_u16( length as u16 ) 96 | } 97 | 98 | #[inline] 99 | pub fn write_length_u8< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 100 | where C: Context, 101 | W: ?Sized + Writer< C > 102 | { 103 | if length as u64 > core::u8::MAX as u64 { 104 | return Err( error_out_of_range_length() ); 105 | } 106 | 107 | writer.write_u8( length as u8 ) 108 | } 109 | 110 | #[inline] 111 | pub fn write_length_u7< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 112 | where C: Context, 113 | W: ?Sized + Writer< C > 114 | { 115 | if length > 0b01111111 { 116 | return Err( error_out_of_range_length() ); 117 | } 118 | 119 | writer.write_u8( length as u8 ) 120 | } 121 | 122 | #[inline] 123 | pub fn write_length< C, W >( length: usize, writer: &mut W ) -> Result< (), C::Error > 124 | where C: Context, 125 | W: ?Sized + Writer< C > 126 | { 127 | write_length_u32( length, writer ) 128 | } 129 | 130 | #[inline] 131 | pub fn read_length_u64_varint< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 132 | where C: Context, 133 | R: Reader< 'a, C > 134 | { 135 | let length: u64 = 
VarInt64::read_from( reader )?.into(); 136 | if length > core::usize::MAX as u64 { 137 | return Err( error_out_of_range_length() ); 138 | } 139 | 140 | Ok( length as usize ) 141 | } 142 | 143 | #[inline] 144 | pub fn read_length_u64< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 145 | where C: Context, 146 | R: Reader< 'a, C > 147 | { 148 | let length = reader.read_u64()?; 149 | if length > core::usize::MAX as u64 { 150 | return Err( error_out_of_range_length() ); 151 | } 152 | 153 | Ok( length as usize ) 154 | } 155 | 156 | #[inline] 157 | pub fn read_length_u32< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 158 | where C: Context, 159 | R: Reader< 'a, C > 160 | { 161 | reader.read_u32().map( |value| value as usize ) 162 | } 163 | 164 | #[inline] 165 | pub fn read_length_u16< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 166 | where C: Context, 167 | R: Reader< 'a, C > 168 | { 169 | reader.read_u16().map( |value| value as usize ) 170 | } 171 | 172 | #[inline] 173 | pub fn read_length_u8< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 174 | where C: Context, 175 | R: Reader< 'a, C > 176 | { 177 | reader.read_u8().map( |value| value as usize ) 178 | } 179 | 180 | #[inline] 181 | pub fn read_length_u7< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 182 | where C: Context, 183 | R: Reader< 'a, C > 184 | { 185 | let length = reader.read_u8()?; 186 | if length > 0b01111111 { 187 | return Err( error_out_of_range_length() ); 188 | } 189 | Ok( length as usize ) 190 | } 191 | 192 | #[inline] 193 | pub fn read_length< 'a, C, R >( reader: &mut R ) -> Result< usize, C::Error > 194 | where C: Context, 195 | R: Reader< 'a, C > 196 | { 197 | read_length_u32( reader ) 198 | } 199 | 200 | pub trait IntoLength { 201 | fn into_length( self ) -> usize; 202 | } 203 | 204 | impl IntoLength for usize { 205 | fn into_length( self ) -> usize { self } 206 | } 207 | 208 | impl IntoLength for u32 { 209 | fn into_length( self ) -> usize { self as usize } 210 | } 211 | 212 | impl IntoLength for u16 { 213 | fn into_length( self ) -> usize { self as usize } 214 | } 215 | 216 | impl IntoLength for u8 { 217 | fn into_length( self ) -> usize { self as usize } 218 | } 219 | 220 | impl< 'a, T > IntoLength for &'a T where T: IntoLength + Copy { 221 | fn into_length( self ) -> usize { (*self).into_length() } 222 | } 223 | 224 | impl< 'a, T > IntoLength for &'a mut T where T: IntoLength + Copy { 225 | fn into_length( self ) -> usize { (*self).into_length() } 226 | } 227 | 228 | #[inline] 229 | pub fn are_lengths_the_same( lhs: usize, rhs: impl IntoLength ) -> bool { 230 | lhs == rhs.into_length() 231 | } 232 | 233 | #[cfg(feature = "alloc")] 234 | pub fn read_constant< 'a, C, R >( reader: &mut R, constant: &'static [u8] ) -> Result< (), C::Error > 235 | where C: Context, 236 | R: Reader< 'a, C > 237 | { 238 | let is_ok = 239 | if let Some( result ) = reader.read_bytes_borrowed( constant.len() ) { 240 | result? == constant 241 | } else { 242 | // TODO: Do this more efficiently for sufficiently small constants. 
243 | let data: Vec< u8 > = reader.read_vec( constant.len() )?; 244 | data == constant 245 | }; 246 | 247 | if !is_ok { 248 | let error = error_expected_constant( constant ); 249 | return Err( error ); 250 | } 251 | 252 | Ok(()) 253 | } 254 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug)] 2 | pub struct Error { 3 | kind: ErrorKind 4 | } 5 | 6 | #[derive(Debug)] 7 | pub enum ErrorKind { 8 | InvalidChar, 9 | InvalidEnumVariant, 10 | InvalidUtf8, 11 | InvalidSystemTime, 12 | ZeroNonZero, 13 | OutOfRangeLength, 14 | OutOfRangeUsize, 15 | UnexpectedEndOfInput, 16 | UnexpectedEndOfOutputBuffer, 17 | InputBufferIsTooSmall { 18 | actual_size: usize, 19 | expected_size: usize 20 | }, 21 | OutputBufferIsTooSmall { 22 | actual_size: usize, 23 | expected_size: usize 24 | }, 25 | 26 | LengthIsNotTheSameAsLengthAttribute { 27 | field_name: &'static str 28 | }, 29 | ExpectedConstant { 30 | constant: &'static [u8] 31 | }, 32 | 33 | Unsized, 34 | EndiannessMismatch, 35 | 36 | #[cfg(feature = "std")] 37 | IoError( std::io::Error ) 38 | } 39 | 40 | impl Error { 41 | #[inline] 42 | fn new( kind: ErrorKind ) -> Self { 43 | Error { kind } 44 | } 45 | 46 | #[cfg(feature = "std")] 47 | pub fn custom( message: impl core::fmt::Display ) -> Self { 48 | // The LLVM optimizer doesn't like us adding a new variant, 49 | // so instead we reuse the `IoError` one. 50 | Error { 51 | kind: ErrorKind::IoError( std::io::Error::new( std::io::ErrorKind::Other, message.to_string() ) ) 52 | } 53 | } 54 | 55 | #[cfg(feature = "std")] 56 | #[inline] 57 | pub(crate) fn from_io_error( error: std::io::Error ) -> Self { 58 | Error { 59 | kind: ErrorKind::IoError( error ) 60 | } 61 | } 62 | } 63 | 64 | #[cfg(feature = "std")] 65 | impl From< Error > for std::io::Error { 66 | fn from( error: Error ) -> Self { 67 | if let ErrorKind::IoError( error ) = error.kind { 68 | return error; 69 | } 70 | 71 | let is_eof = error.is_eof(); 72 | let kind = if is_eof { 73 | std::io::ErrorKind::UnexpectedEof 74 | } else { 75 | std::io::ErrorKind::InvalidData 76 | }; 77 | 78 | std::io::Error::new( kind, format!( "{}", error ) ) 79 | } 80 | } 81 | 82 | #[inline] 83 | pub fn get_error_kind( error: &Error ) -> &ErrorKind { 84 | &error.kind 85 | } 86 | 87 | #[cfg(feature = "alloc")] 88 | impl core::fmt::Display for Error { 89 | fn fmt( &self, fmt: &mut core::fmt::Formatter<'_> ) -> core::fmt::Result { 90 | match self.kind { 91 | ErrorKind::InvalidChar => write!( fmt, "out of range char" ), 92 | ErrorKind::InvalidEnumVariant => write!( fmt, "invalid enum variant" ), 93 | ErrorKind::InvalidUtf8 => write!( fmt, "encountered invalid utf-8" ), 94 | ErrorKind::InvalidSystemTime => write!( fmt, "encountered invalid system time object" ), 95 | ErrorKind::ZeroNonZero => write!( fmt, "a field which is supposed to be non-zero is zero" ), 96 | ErrorKind::OutOfRangeLength => write!( fmt, "out of range length" ), 97 | ErrorKind::OutOfRangeUsize => write!( fmt, "value cannot fit into an usize on this architecture" ), 98 | ErrorKind::UnexpectedEndOfInput => write!( fmt, "unexpected end of input" ), 99 | ErrorKind::UnexpectedEndOfOutputBuffer => write!( fmt, "unexpected end of output buffer" ), 100 | ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } => write!( fmt, "input buffer is too small; expected at least {} bytes, got {}", expected_size, actual_size ), 101 | ErrorKind::OutputBufferIsTooSmall { actual_size, 
expected_size } => write!( fmt, "output buffer is too small; expected at least {} bytes, got {}", expected_size, actual_size ), 102 | ErrorKind::LengthIsNotTheSameAsLengthAttribute { field_name } => write!( fmt, "the length of '{}' is not the same as its 'length' attribute", field_name ), 103 | ErrorKind::ExpectedConstant { constant } => write!( fmt, "expected a predefined {} bytes(s) long constant", constant.len() ), 104 | ErrorKind::Unsized => write!( fmt, "type is unsized hence requires zero-copy deserialization; use `read_from_buffer` or similar to deserialize it" ), 105 | ErrorKind::EndiannessMismatch => write!( fmt, "endianness mismatch" ), 106 | 107 | #[cfg(feature = "std")] 108 | ErrorKind::IoError( ref error ) => write!( fmt, "{}", error ), 109 | } 110 | } 111 | } 112 | 113 | #[cfg(feature = "std")] 114 | impl std::error::Error for Error { 115 | fn source( &self ) -> Option< &(dyn std::error::Error + 'static) > { 116 | match self.kind { 117 | ErrorKind::IoError( ref error ) => Some( error ), 118 | _ => None 119 | } 120 | } 121 | } 122 | 123 | pub trait IsEof { 124 | fn is_eof( &self ) -> bool; 125 | } 126 | 127 | impl IsEof for Error { 128 | fn is_eof( &self ) -> bool { 129 | match self.kind { 130 | ErrorKind::UnexpectedEndOfInput | 131 | ErrorKind::UnexpectedEndOfOutputBuffer => true, 132 | 133 | #[cfg(feature = "std")] 134 | ErrorKind::IoError( ref error ) => error.kind() == std::io::ErrorKind::UnexpectedEof, 135 | 136 | _ => false 137 | } 138 | } 139 | } 140 | 141 | #[cfg(feature = "alloc")] 142 | #[cold] 143 | pub fn error_invalid_string_utf8< T >( _: alloc::string::FromUtf8Error ) -> T where T: From< Error > { 144 | T::from( Error::new( ErrorKind::InvalidUtf8 ) ) 145 | } 146 | 147 | #[cold] 148 | pub fn error_invalid_str_utf8< T >( _: core::str::Utf8Error ) -> T where T: From< Error > { 149 | T::from( Error::new( ErrorKind::InvalidUtf8 ) ) 150 | } 151 | 152 | #[cold] 153 | pub fn error_length_is_not_the_same_as_length_attribute< T >( field_name: &'static str ) -> T where T: From< Error > { 154 | T::from( Error::new( ErrorKind::LengthIsNotTheSameAsLengthAttribute { field_name } ) ) 155 | } 156 | 157 | #[cold] 158 | pub fn error_out_of_range_length< T >() -> T where T: From< Error > { 159 | T::from( Error::new( ErrorKind::OutOfRangeLength ) ) 160 | } 161 | 162 | #[cold] 163 | pub fn error_invalid_enum_variant< T >() -> T where T: From< Error > { 164 | T::from( Error::new( ErrorKind::InvalidEnumVariant ) ) 165 | } 166 | 167 | #[cold] 168 | pub fn error_out_of_range_char< T >() -> T where T: From< Error > { 169 | T::from( Error::new( ErrorKind::InvalidChar ) ) 170 | } 171 | 172 | #[cold] 173 | pub fn error_too_big_usize_for_this_architecture< T >() -> T where T: From< Error > { 174 | T::from( Error::new( ErrorKind::OutOfRangeUsize ) ) 175 | } 176 | 177 | #[cold] 178 | pub fn error_end_of_input< T >() -> T where T: From< Error > { 179 | T::from( Error::new( ErrorKind::UnexpectedEndOfInput ) ) 180 | } 181 | 182 | #[cold] 183 | pub fn error_end_of_output_buffer< T >() -> T where T: From< Error > { 184 | T::from( Error::new( ErrorKind::UnexpectedEndOfOutputBuffer ) ) 185 | } 186 | 187 | #[cold] 188 | pub fn error_input_buffer_is_too_small< T >( actual_size: usize, expected_size: usize ) -> T where T: From< Error > { 189 | T::from( Error::new( ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } ) ) 190 | } 191 | 192 | #[cold] 193 | pub fn error_output_buffer_is_too_small< T >( actual_size: usize, expected_size: usize ) -> T where T: From< Error > { 194 | T::from( 
Error::new( ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } ) ) 195 | } 196 | 197 | #[cold] 198 | pub fn error_zero_non_zero< T >() -> T where T: From< Error > { 199 | T::from( Error::new( ErrorKind::ZeroNonZero ) ) 200 | } 201 | 202 | #[cfg(feature = "std")] 203 | #[cold] 204 | pub fn error_invalid_system_time< T >() -> T where T: From< Error > { 205 | T::from( Error::new( ErrorKind::InvalidSystemTime ) ) 206 | } 207 | 208 | #[cfg(feature = "alloc")] 209 | #[cold] 210 | pub fn error_expected_constant< T >( constant: &'static [u8] ) -> T where T: From< Error > { 211 | T::from( Error::new( ErrorKind::ExpectedConstant { constant } ) ) 212 | } 213 | 214 | #[cold] 215 | pub fn error_unsized< T >() -> T where T: From< Error > { 216 | T::from( Error::new( ErrorKind::Unsized ) ) 217 | } 218 | 219 | #[cold] 220 | pub fn error_endianness_mismatch< T >() -> T where T: From< Error > { 221 | T::from( Error::new( ErrorKind::EndiannessMismatch ) ) 222 | } 223 | -------------------------------------------------------------------------------- /src/writable.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "std")] 2 | use std::io::{ 3 | self, 4 | Write 5 | }; 6 | 7 | #[cfg(feature = "std")] 8 | use std::fs::File; 9 | 10 | #[cfg(feature = "std")] 11 | use std::path::Path; 12 | 13 | #[cfg(feature = "alloc")] 14 | use alloc::vec::Vec; 15 | 16 | use crate::writer::Writer; 17 | use crate::context::{Context, DefaultContext}; 18 | use crate::endianness::Endianness; 19 | 20 | #[cfg(feature = "std")] 21 | use crate::Error; 22 | 23 | use crate::error::{ 24 | error_end_of_output_buffer, 25 | error_output_buffer_is_too_small 26 | }; 27 | 28 | struct BufferCollector< 'a, C: Context > { 29 | context: &'a mut C, 30 | buffer: &'a mut [u8], 31 | position: usize 32 | } 33 | 34 | impl< 'a, C: Context > Writer< C > for BufferCollector< 'a, C > { 35 | #[inline] 36 | fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { 37 | let buffer = self.buffer.get_mut( self.position..self.position + slice.len() ).ok_or_else( error_end_of_output_buffer )?; 38 | buffer.copy_from_slice( slice ); 39 | self.position += slice.len(); 40 | Ok(()) 41 | } 42 | 43 | #[inline] 44 | fn context( &self ) -> &C { 45 | &self.context 46 | } 47 | 48 | #[inline] 49 | fn context_mut( &mut self ) -> &mut C { 50 | &mut self.context 51 | } 52 | 53 | #[inline(always)] 54 | fn can_write_at_least( &self, size: usize ) -> Option< bool > { 55 | Some( self.buffer.get( self.position..self.position + size ).is_some() ) 56 | } 57 | } 58 | 59 | 60 | #[cfg(feature = "std")] 61 | struct WritingCollector< C: Context, T: Write > { 62 | context: C, 63 | writer: T 64 | } 65 | 66 | #[cfg(feature = "std")] 67 | impl< C: Context, T: Write > Writer< C > for WritingCollector< C, T > { 68 | #[inline] 69 | fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { 70 | self.writer.write_all( slice ).map_err( |error| { 71 | let error = Error::from_io_error( error ); 72 | >::from( error ) 73 | }) 74 | } 75 | 76 | #[inline] 77 | fn context( &self ) -> &C { 78 | &self.context 79 | } 80 | 81 | #[inline] 82 | fn context_mut( &mut self ) -> &mut C { 83 | &mut self.context 84 | } 85 | } 86 | 87 | struct SizeCalculatorCollector { 88 | size: usize 89 | } 90 | 91 | impl< C: Context > Writer< C > for SizeCalculatorCollector { 92 | #[inline] 93 | fn write_bytes( &mut self, slice: &[u8] ) -> Result< (), C::Error > { 94 | self.size += slice.len(); 95 | Ok(()) 96 | } 97 | 98 | #[inline] 99 | fn 
write_u8( &mut self, _: u8 ) -> Result< (), C::Error > { 100 | self.size += 1; 101 | Ok(()) 102 | } 103 | 104 | #[inline] 105 | fn write_u16( &mut self, _: u16 ) -> Result< (), C::Error > { 106 | self.size += 2; 107 | Ok(()) 108 | } 109 | 110 | #[inline] 111 | fn write_u32( &mut self, _: u32 ) -> Result< (), C::Error > { 112 | self.size += 4; 113 | Ok(()) 114 | } 115 | 116 | #[inline] 117 | fn write_u64( &mut self, _: u64 ) -> Result< (), C::Error > { 118 | self.size += 8; 119 | Ok(()) 120 | } 121 | 122 | #[inline] 123 | fn write_u128( &mut self, _: u128 ) -> Result< (), C::Error > { 124 | self.size += 16; 125 | Ok(()) 126 | } 127 | 128 | #[inline] 129 | fn endianness( &self ) -> Endianness { 130 | Endianness::NATIVE 131 | } 132 | 133 | #[inline] 134 | fn context( &self ) -> &C { 135 | panic!(); 136 | } 137 | 138 | #[inline] 139 | fn context_mut( &mut self ) -> &mut C { 140 | panic!(); 141 | } 142 | } 143 | 144 | pub trait Writable< C: Context > { 145 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error >; 146 | 147 | #[inline] 148 | fn write_to_buffer( &self, buffer: &mut [u8] ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { 149 | self.write_to_buffer_with_ctx( Default::default(), buffer ) 150 | } 151 | 152 | #[cfg(feature = "alloc")] 153 | fn write_to_vec( &self ) -> Result< Vec< u8 >, C::Error > where Self: DefaultContext< Context = C >, C: Default { 154 | self.write_to_vec_with_ctx( Default::default() ) 155 | } 156 | 157 | #[cfg(feature = "std")] 158 | #[inline] 159 | fn write_to_stream< S: Write >( &self, stream: S ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { 160 | self.write_to_stream_with_ctx( Default::default(), stream ) 161 | } 162 | 163 | #[cfg(feature = "std")] 164 | #[inline] 165 | fn write_to_file( &self, path: impl AsRef< Path > ) -> Result< (), C::Error > where Self: DefaultContext< Context = C >, C: Default { 166 | self.write_to_file_with_ctx( Default::default(), path ) 167 | } 168 | 169 | #[inline] 170 | fn write_to_buffer_with_ctx( &self, mut context: C, buffer: &mut [u8] ) -> Result< (), C::Error > { 171 | self.write_to_buffer_with_ctx_mut( &mut context, buffer ) 172 | } 173 | 174 | #[inline] 175 | fn write_to_buffer_with_ctx_mut( &self, context: &mut C, buffer: &mut [u8] ) -> Result< (), C::Error > { 176 | let bytes_needed = self.bytes_needed()?; 177 | let buffer_length = buffer.len(); 178 | let buffer = buffer.get_mut( 0..bytes_needed ).ok_or_else( || error_output_buffer_is_too_small( buffer_length, bytes_needed ) )?; 179 | let mut writer = BufferCollector { 180 | context, 181 | buffer, 182 | position: 0 183 | }; 184 | 185 | self.write_to( &mut writer )?; 186 | Ok(()) 187 | } 188 | 189 | #[cfg(feature = "alloc")] 190 | #[inline] 191 | fn write_to_vec_with_ctx( &self, mut context: C ) -> Result< Vec< u8 >, C::Error > { 192 | self.write_to_vec_with_ctx_mut( &mut context ) 193 | } 194 | 195 | #[cfg(feature = "alloc")] 196 | #[inline] 197 | fn write_to_vec_with_ctx_mut( &self, context: &mut C ) -> Result< Vec< u8 >, C::Error > { 198 | let capacity = self.bytes_needed()?; 199 | let mut vec = Vec::with_capacity( capacity ); 200 | unsafe { 201 | vec.set_len( capacity ); 202 | } 203 | 204 | let mut writer = BufferCollector { 205 | context, 206 | buffer: vec.as_mut_slice(), 207 | position: 0 208 | }; 209 | 210 | self.write_to( &mut writer )?; 211 | 212 | let position = writer.position; 213 | unsafe { 214 | vec.set_len( position ); 215 | } 216 | 217 | debug_assert_eq!( 
position, capacity ); 218 | Ok( vec ) 219 | } 220 | 221 | #[cfg(feature = "std")] 222 | #[inline] 223 | fn write_to_stream_with_ctx< S: Write >( &self, context: C, stream: S ) -> Result< (), C::Error > { 224 | let mut writer = WritingCollector { 225 | context, 226 | writer: stream 227 | }; 228 | 229 | self.write_to( &mut writer ) 230 | } 231 | 232 | #[cfg(feature = "std")] 233 | #[inline] 234 | fn write_to_file_with_ctx( &self, context: C, path: impl AsRef< Path > ) -> Result< (), C::Error > { 235 | let stream = File::create( path ).map_err( |error| { 236 | let error = Error::from_io_error( error ); 237 | >::from( error ) 238 | })?; 239 | let stream = io::BufWriter::new( stream ); 240 | self.write_to_stream_with_ctx( context, stream ) 241 | } 242 | 243 | #[inline] 244 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 245 | let mut writer = SizeCalculatorCollector { 246 | size: 0 247 | }; 248 | 249 | self.write_to( &mut writer )?; 250 | Ok( writer.size ) 251 | } 252 | 253 | // Since specialization is not stable yet we do it this way. 254 | #[doc(hidden)] 255 | #[inline] 256 | fn speedy_is_primitive() -> bool { 257 | false 258 | } 259 | 260 | #[doc(hidden)] 261 | #[inline] 262 | unsafe fn speedy_slice_as_bytes( _: &[Self] ) -> &[u8] where Self: Sized { 263 | panic!(); 264 | } 265 | } 266 | -------------------------------------------------------------------------------- /static-tests/tests/tests.rs: -------------------------------------------------------------------------------- 1 | use { 2 | static_test::{ 3 | static_test 4 | }, 5 | speedy::{ 6 | Endianness, 7 | Readable, 8 | Writable, 9 | 10 | private::{ 11 | ErrorKind, 12 | get_error_kind 13 | } 14 | }, 15 | std::{ 16 | borrow::{ 17 | Cow 18 | } 19 | } 20 | }; 21 | 22 | #[static_test] 23 | fn read_u16_from_buffer_when_buffer_length_is_known_and_is_big_enough( slice: &[u8] ) { 24 | assume!( slice.len() == 2 ); 25 | static_assert!( u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() ); 26 | } 27 | 28 | #[static_test] 29 | fn read_u16_from_buffer_when_buffer_length_is_known_and_is_not_big_enough_1( slice: &[u8] ) { 30 | assume!( slice.len() == 1 ); 31 | static_assert!( u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() ); 32 | } 33 | 34 | #[static_test] 35 | fn read_u16_from_buffer_when_buffer_length_is_known_and_is_not_big_enough_2( slice: &[u8] ) { 36 | assume!( slice.len() == 1 ); 37 | match u16::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) { 38 | Ok( _ ) => static_unreachable!(), 39 | Err( error ) => { 40 | match get_error_kind( &error ) { 41 | ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } => { 42 | static_assert!( *actual_size == 1 ); 43 | static_assert!( *expected_size == 2 ); 44 | }, 45 | _ => static_unreachable!() 46 | } 47 | } 48 | } 49 | } 50 | 51 | #[static_test] 52 | fn read_vec_u8_from_buffer_when_buffer_length_is_known_and_is_not_big_enough( slice: &[u8] ) { 53 | assume!( slice.len() == 3 ); 54 | match Vec::< u8 >::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) { 55 | Ok( _ ) => {}, 56 | Err( error ) => { 57 | match get_error_kind( &error ) { 58 | ErrorKind::InputBufferIsTooSmall { actual_size, expected_size } => { 59 | static_assert!( *actual_size == 3 ); 60 | static_assert!( *expected_size == 4 ); 61 | }, 62 | _ => static_unreachable!() 63 | } 64 | } 65 | } 66 | } 67 | 68 | #[static_test] 69 | fn read_vec_u8_from_buffer_when_buffer_length_is_not_known( slice: &[u8] ) { 70 | match Vec::< u8 >::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) { 
71 | Ok( _ ) => {}, 72 | Err( error ) => { 73 | match get_error_kind( &error ) { 74 | ErrorKind::InputBufferIsTooSmall { expected_size, .. } => { 75 | static_assert!( *expected_size == 4 ); 76 | }, 77 | ErrorKind::UnexpectedEndOfInput => {}, 78 | _ => static_unreachable!() 79 | } 80 | } 81 | } 82 | } 83 | 84 | #[static_test] 85 | fn read_cow_u8_from_buffer_when_buffer_length_is_not_known( slice: &[u8] ) { 86 | match Cow::< [u8] >::read_from_buffer_with_ctx( Endianness::NATIVE, slice ) { 87 | Ok( _ ) => {}, 88 | Err( error ) => { 89 | match get_error_kind( &error ) { 90 | ErrorKind::InputBufferIsTooSmall { expected_size, .. } => { 91 | static_assert!( *expected_size == 4 ); 92 | }, 93 | ErrorKind::UnexpectedEndOfInput => {}, 94 | _ => static_unreachable!() 95 | } 96 | } 97 | } 98 | } 99 | 100 | #[static_test] 101 | fn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_enough_space( slice: &mut [u8], value: u16 ) { 102 | assume!( slice.len() == 2 ); 103 | static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() ); 104 | } 105 | 106 | #[static_test] 107 | fn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: u16 ) { 108 | assume!( slice.len() == 1 ); 109 | static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() ); 110 | } 111 | 112 | #[static_test] 113 | fn write_u16_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: u16 ) { 114 | assume!( slice.len() == 1 ); 115 | match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) { 116 | Ok( _ ) => static_unreachable!(), 117 | Err( error ) => { 118 | match get_error_kind( &error ) { 119 | ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } => { 120 | static_assert!( *actual_size == 1 ); 121 | static_assert!( *expected_size == 2 ); 122 | }, 123 | _ => static_unreachable!() 124 | } 125 | } 126 | } 127 | } 128 | 129 | #[static_test] 130 | fn write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_enough_space( slice: &mut [u8], value: Vec< u8 > ) { 131 | assume!( slice.len() == 5 ); 132 | assume!( value.len() == 1 ); 133 | static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_ok() ); 134 | } 135 | 136 | #[static_test] 137 | fn write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: Vec< u8 > ) { 138 | assume!( slice.len() == 5 ); 139 | assume!( value.len() == 2 ); 140 | static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() ); 141 | } 142 | 143 | #[static_test] 144 | fn write_vec_u8_to_buffer_when_both_lengths_are_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: Vec< u8 > ) { 145 | assume!( slice.len() == 5 ); 146 | assume!( value.len() == 2 ); 147 | match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) { 148 | Ok( _ ) => static_unreachable!(), 149 | Err( error ) => { 150 | match get_error_kind( &error ) { 151 | ErrorKind::OutputBufferIsTooSmall { actual_size, expected_size } => { 152 | static_assert!( *actual_size == 5 ); 153 | static_assert!( *expected_size == 6 ); 154 | }, 155 | _ => static_unreachable!() 156 | } 157 | } 158 | } 159 | } 160 | 161 | #[static_test] 162 | fn write_vec_u8_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_1( slice: &mut [u8], value: Vec< u8 > ) { 163 | assume!( slice.len() == 3 ); 164 | static_assert!( value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ).is_err() ); 165 | } 
166 | 167 | #[static_test] 168 | fn write_vec_u8_to_buffer_when_buffer_length_is_known_and_there_is_not_enough_space_2( slice: &mut [u8], value: Vec< u8 > ) { 169 | assume!( slice.len() == 3 ); 170 | match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) { 171 | Ok( _ ) => static_unreachable!(), 172 | Err( error ) => { 173 | match get_error_kind( &error ) { 174 | ErrorKind::OutputBufferIsTooSmall { actual_size, .. } => { 175 | static_assert!( *actual_size == 3 ); 176 | } 177 | _ => static_unreachable!() 178 | } 179 | } 180 | } 181 | } 182 | 183 | #[static_test] 184 | fn write_vec_u8_to_buffer_when_vec_length_is_known( slice: &mut [u8], value: Vec< u8 > ) { 185 | assume!( value.len() == 2 ); 186 | match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) { 187 | Ok( _ ) => {}, 188 | Err( error ) => { 189 | match get_error_kind( &error ) { 190 | ErrorKind::OutputBufferIsTooSmall { expected_size, .. } => { 191 | static_assert!( *expected_size == 6 ); 192 | } 193 | _ => static_unreachable!() 194 | } 195 | } 196 | } 197 | } 198 | 199 | #[static_test] 200 | fn write_vec_u8_to_buffer_when_no_lengths_are_known( slice: &mut [u8], value: Vec< u8 > ) { 201 | match value.write_to_buffer_with_ctx( Endianness::NATIVE, slice ) { 202 | Ok( _ ) => {}, 203 | Err( error ) => { 204 | match get_error_kind( &error ) { 205 | ErrorKind::OutOfRangeLength => {}, 206 | ErrorKind::OutputBufferIsTooSmall { .. } => {}, 207 | _ => static_unreachable!() 208 | } 209 | } 210 | } 211 | } 212 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # A fast binary serialization framework 2 | 3 | [![Documentation](https://docs.rs/speedy/badge.svg)](https://docs.rs/speedy/*/speedy/) 4 | 5 | The goal of this crate is to provide fast, simple and easy binary serialization. 6 | 7 | ## Benchmarks 8 | 9 | See [rust_serialization_benchmark](https://github.com/djkoloski/rust_serialization_benchmark) for benchmarks. 
10 | 11 | ## Example 12 | 13 | ```rust 14 | use std::borrow::Cow; 15 | use speedy::{Readable, Writable, Endianness}; 16 | 17 | #[derive(PartialEq, Debug, Readable, Writable)] 18 | enum Enum { 19 | A, 20 | B, 21 | C, 22 | } 23 | 24 | #[derive(PartialEq, Debug, Readable, Writable)] 25 | struct Struct< 'a > { 26 | number: u64, 27 | string: String, 28 | vector: Vec< u8 >, 29 | cow: Cow< 'a, [i64] >, 30 | float: f32, 31 | enumeration: Enum 32 | } 33 | 34 | fn main() { 35 | let original = Struct { 36 | number: 0x12345678ABCDEF00, 37 | string: "A totally pointless string".to_owned(), 38 | vector: vec![ 1, 2, 3 ], 39 | cow: Cow::Borrowed( &[ 4, 5, 6 ] ), 40 | float: 3.1415, 41 | enumeration: Enum::C 42 | }; 43 | 44 | let bytes = original.write_to_vec().unwrap(); 45 | let deserialized: Struct = 46 | Struct::read_from_buffer( &bytes ).unwrap(); 47 | 48 | assert_eq!( original, deserialized ); 49 | } 50 | ``` 51 | 52 | ## Supported types 53 | 54 | Out-of-box the following types are supported: 55 | 56 | | Type | Serialized as | 57 | | ----------------------- | ---------------------------------------- | 58 | | `u8` | as-is | 59 | | `u16` | as-is | 60 | | `u32` | as-is | 61 | | `u64` | as-is | 62 | | `usize` | `u64` | 63 | | `i8` | as-is | 64 | | `i16` | as-is | 65 | | `i32` | as-is | 66 | | `i64` | as-is | 67 | | `f32` | as-is | 68 | | `f64` | as-is | 69 | | `bool` | `u8`, either `0` or `1` | 70 | | `char` | `u32` | 71 | | `String` | `{length: u32, bytes: [u8]}` | 72 | | `Cow<'a, str>` | `{length: u32, bytes: [u8]}` | 73 | | `Vec` | `{length: u32, values: [T]}` | 74 | | `Cow<'a, [T]>` | `{length: u32, values: [T]}` | 75 | | `HashMap` | `{length: u32, values: [K, V]}` | 76 | | `BTreeMap` | `{length: u32, values: [K, V]}` | 77 | | `HashSet` | `{length: u32, values: [T]}` | 78 | | `BTreeSet` | `{length: u32, values: [T]}` | 79 | | `Range` | `(T, T)` | 80 | | `RangeInclusive` | `(T, T)` | 81 | | `Option` | `(1_u8, T)` or `0_u8` | 82 | | `Result` | `(1_u8, T)` or `(0_u8, E)` | 83 | | `()` | nothing | 84 | | `(T)` | as-is | 85 | | `(T, T)` | as-is | 86 | | `(T, .., T)` | as-is | 87 | | `enum`s | `{tag: u32, variant: T}` | 88 | | `AtomicU8` | `u8` | 89 | | `AtomicI8` | `i8` | 90 | | `AtomicU16` | `u16` | 91 | | `AtomicI16` | `i16` | 92 | | `AtomicU32` | `u32` | 93 | | `AtomicI32` | `i32` | 94 | | `AtomicU64` | `u64` | 95 | | `AtomicI64` | `i64` | 96 | | `NonZeroU32` | `u32` | 97 | | `std::net::Ipv4Addr` | `u32` | 98 | | `std::net::Ipv6Addr` | `u128` | 99 | | `std::net::IpAddr` | `{is_ipv4: u8, value: {u32 or u128}}` | 100 | | `std::time::Duration` | `{secs: u64, subsec_nanos: u32}` | 101 | | `std::time::SystemTime` | `std::time::Duration` since `UNIX_EPOCH` | 102 | | `uuid::Uuid` | `[u8; 16]` | 103 | 104 | These are stable and will not change in the future. 105 | 106 | ## Field attributes 107 | 108 | ### `#[speedy(length = $expr)]` 109 | 110 | Can be used on most standard containers to specify the field's length. 111 | Can refer to any of the previous fields. 112 | 113 | For example: 114 | 115 | ```rust 116 | use speedy::{Readable, Writable}; 117 | 118 | #[derive(Readable, Writable)] 119 | struct Struct { 120 | byte_count: u8, 121 | #[speedy(length = byte_count / 4)] 122 | data: Vec< u32 > 123 | } 124 | ``` 125 | 126 | Before serializing you need to make sure that whatever is set as `length` 127 | is equal to the `.len()` of the field; if it's not then you will get 128 | an error when trying to serialize it. 
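For instance, here is a minimal sketch of that failure mode (the concrete numbers are made up purely for illustration, and the exact error value returned is not specified here):

```rust
use speedy::{Readable, Writable};

#[derive(Readable, Writable)]
struct Struct {
    byte_count: u8,
    #[speedy(length = byte_count / 4)]
    data: Vec< u32 >
}

fn main() {
    // `byte_count / 4` evaluates to 1, but `data.len()` is 2, so serialization fails.
    let mismatched = Struct { byte_count: 4, data: vec![ 1, 2 ] };
    assert!( mismatched.write_to_vec().is_err() );

    // With a consistent length the very same struct serializes without problems.
    let consistent = Struct { byte_count: 8, data: vec![ 1, 2 ] };
    assert!( consistent.write_to_vec().is_ok() );
}
```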
129 | 130 | Setting this attribute changes the serialization format as follows: 131 | 132 | 133 | | Type | Serialized as | 134 | | ---------------- | ---------------------------- | 135 | | `Vec` | `[T]` | 136 | | `Cow<'a, [T]>` | `[T]` | 137 | | `String` | `[u8]` | 138 | | `Cow<'a, str>` | `[u8]` | 139 | | `HashMap` | `[K, V]` | 140 | | `BTreeMap` | `[K, V]` | 141 | | `HashSet` | `[T]` | 142 | | `BTreeSet` | `[T]` | 143 | 144 | ### `#[speedy(length_type = $ty)]` 145 | 146 | Can be used to specify the exact size of the implicit length field of a container 147 | as it is read or written. 148 | 149 | Possible values: 150 | - `u7` (same as u8, but restricted to 7 bits for `u64_varint` compatibility) 151 | - `u8` 152 | - `u16` 153 | - `u32` (default) 154 | - `u64_varint` 155 | 156 | ### `#[speedy(varint)]` 157 | 158 | Can be used only on `u64` fields. Forces the field to be serialized as a varint. 159 | 160 | ### `#[speedy(skip)]` 161 | 162 | Skips a given field when reading and writing. 163 | 164 | ### `#[speedy(default_on_eof)]` 165 | 166 | If an EOF is encountered when reading this field its value will be set 167 | to the default value for its type and the EOF will be ignored. 168 | 169 | ### `#[speedy(constant_prefix = $expr)]` 170 | 171 | Specifies a static string of bytes which will be written or has to be present 172 | when reading before a given field. 173 | 174 | ## Enum attributes 175 | 176 | ### `#[speedy(tag_type = $ty)]` 177 | 178 | Can be used to specify the exact size of the enum's tag as it is read or written. 179 | 180 | Possible values: 181 | - `u7` (same as u8, but restricted to 7 bits for `u64_varint` compatibility) 182 | - `u8` 183 | - `u16` 184 | - `u32` (default) 185 | - `u64_varint` 186 | 187 | ### `#[speedy(peek_tag)]` 188 | 189 | An enum marked with this attribute will not consume its tag value when reading 190 | from a stream, nor will it write its own tag when writing. 191 | 192 | ## Enum variant attributes 193 | 194 | ### `#[speedy(tag = $expr)]` 195 | 196 | Specifies a preset tag value to be used for a given enum variant. 197 | 198 | ## License 199 | 200 | Licensed under either of 201 | 202 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 203 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 204 | 205 | at your option. 206 | 207 | ### Contribution 208 | 209 | Unless you explicitly state otherwise, any contribution intentionally submitted 210 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be 211 | dual licensed as above, without any additional terms or conditions. 212 | -------------------------------------------------------------------------------- /benches/bench.rs: -------------------------------------------------------------------------------- 1 | #![feature(test)] 2 | 3 | extern crate test; 4 | 5 | use std::io::Write; 6 | use std::borrow::Cow; 7 | use test::{Bencher, black_box}; 8 | use speedy::{Context, Readable, Reader, Writable, Endianness}; 9 | 10 | #[bench] 11 | fn write_manual_megabyte_buffer( b: &mut Bencher ) { 12 | let mut buffer: Vec< u8 > = Vec::new(); 13 | buffer.resize( 1024 * 1024, 1 ); 14 | buffer = black_box( buffer ); 15 | b.iter( || { 16 | let mut output = Vec::new(); 17 | Write::write_all( &mut output, &buffer ).unwrap(); 18 | output 19 | }) 20 | } 21 | 22 | // These two benchmarks should have exactly the same speeds. 
23 | #[bench] 24 | fn write_speedy_megabyte_buffer_le( b: &mut Bencher ) { 25 | let mut buffer: Vec< u8 > = Vec::new(); 26 | buffer.resize( 1024 * 1024, 1 ); 27 | buffer = black_box( buffer ); 28 | b.iter( || { 29 | let mut output = Vec::new(); 30 | buffer.write_to_stream_with_ctx( Endianness::LittleEndian, &mut output ).unwrap(); 31 | output 32 | }) 33 | } 34 | 35 | #[bench] 36 | fn write_speedy_megabyte_buffer_be( b: &mut Bencher ) { 37 | let mut buffer: Vec< u8 > = Vec::new(); 38 | buffer.resize( 1024 * 1024, 1 ); 39 | buffer = black_box( buffer ); 40 | b.iter( || { 41 | let mut output = Vec::new(); 42 | buffer.write_to_stream_with_ctx( Endianness::BigEndian, &mut output ).unwrap(); 43 | output 44 | }) 45 | } 46 | 47 | #[bench] 48 | fn read_speedy_megabyte_buffer_cow_borrowed( b: &mut Bencher ) { 49 | let mut buffer: Vec< u8 > = Vec::new(); 50 | buffer.resize( 1024 * 1024, 1 ); 51 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 52 | 53 | buffer = black_box( buffer ); 54 | b.iter( || { 55 | let deserialized: Cow< [u8] > = Readable::read_from_buffer_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 56 | deserialized 57 | }) 58 | } 59 | 60 | #[bench] 61 | fn read_speedy_megabyte_buffer_cow_owned( b: &mut Bencher ) { 62 | let mut buffer: Vec< u8 > = Vec::new(); 63 | buffer.resize( 1024 * 1024, 1 ); 64 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 65 | 66 | buffer = black_box( buffer ); 67 | b.iter( || { 68 | let deserialized: Cow< [u8] > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 69 | deserialized 70 | }) 71 | } 72 | 73 | #[repr(transparent)] 74 | #[derive(Copy, Clone, Writable)] 75 | struct Byte( u8 ); 76 | 77 | impl< 'a, C: Context > Readable< 'a, C > for Byte { 78 | #[inline] 79 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 80 | Ok( Byte( reader.read_u8()? 
) ) 81 | } 82 | 83 | #[inline] 84 | fn minimum_bytes_needed() -> usize { 85 | 1 86 | } 87 | } 88 | 89 | #[bench] 90 | fn read_speedy_megabyte_buffer_vec_non_primitive( b: &mut Bencher ) { 91 | let mut buffer: Vec< Byte > = Vec::new(); 92 | buffer.resize( 1024 * 1024, Byte( 1 ) ); 93 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 94 | 95 | buffer = black_box( buffer ); 96 | b.iter( || { 97 | let deserialized: Vec< Byte > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 98 | deserialized 99 | }) 100 | } 101 | 102 | #[derive(Clone, Readable, Writable)] 103 | struct Dummy { 104 | a: u64, 105 | b: u32, 106 | c: u16, 107 | d: u8, 108 | e: f32, 109 | f: f64, 110 | g: bool 111 | } 112 | 113 | #[bench] 114 | fn read_speedy_many_small_structs( b: &mut Bencher ) { 115 | let mut buffer: Vec< Dummy > = Vec::new(); 116 | let dummy = Dummy { 117 | a: 1, 118 | b: 2, 119 | c: 3, 120 | d: 4, 121 | e: 5.0, 122 | f: 6.0, 123 | g: true 124 | }; 125 | buffer.resize( 1024 * 1024, dummy ); 126 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 127 | 128 | buffer = black_box( buffer ); 129 | b.iter( || { 130 | let deserialized: Vec< Dummy > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 131 | deserialized 132 | }) 133 | } 134 | 135 | #[bench] 136 | fn write_speedy_many_small_structs( b: &mut Bencher ) { 137 | let mut buffer: Vec< Dummy > = Vec::new(); 138 | let dummy = Dummy { 139 | a: 1, 140 | b: 2, 141 | c: 3, 142 | d: 4, 143 | e: 5.0, 144 | f: 6.0, 145 | g: true 146 | }; 147 | buffer.resize( 1024 * 1024, dummy ); 148 | 149 | buffer = black_box( buffer ); 150 | b.iter( || { 151 | buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap() 152 | }) 153 | } 154 | 155 | pub struct XorShift64 { 156 | a: u64, 157 | } 158 | 159 | impl XorShift64 { 160 | pub fn new( seed: u64 ) -> XorShift64 { 161 | XorShift64 { a: seed } 162 | } 163 | 164 | pub fn next( &mut self ) -> u64 { 165 | let mut x = self.a; 166 | x ^= x << 13; 167 | x ^= x >> 7; 168 | x ^= x << 17; 169 | self.a = x; 170 | x 171 | } 172 | } 173 | 174 | #[bench] 175 | fn read_varint_random( b: &mut Bencher ) { 176 | use speedy::private::VarInt64; 177 | let mut rng = XorShift64 { a: 1234 }; 178 | 179 | let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (1_u64 << (rng.next() % 63)).into() ).collect(); 180 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 181 | 182 | buffer = black_box( buffer ); 183 | b.iter( || { 184 | let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 185 | deserialized 186 | }) 187 | } 188 | 189 | #[bench] 190 | fn read_varint_always_one_byte( b: &mut Bencher ) { 191 | use speedy::private::VarInt64; 192 | let mut rng = XorShift64 { a: 1234 }; 193 | 194 | let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (rng.next() % 100).into() ).collect(); 195 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 196 | 197 | buffer = black_box( buffer ); 198 | b.iter( || { 199 | let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 200 | deserialized 201 | }) 202 | } 203 | 204 | #[bench] 205 | fn read_varint_always_eight_bytes( b: &mut Bencher ) { 206 | use speedy::private::VarInt64; 207 | let mut rng = XorShift64 { a: 1234 }; 208 | 209 | let buffer: Vec< VarInt64 > = (0..1024 
* 1024).into_iter().map( |_| ((rng.next() % 100) | (1 << 63)).into() ).collect(); 210 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 211 | 212 | buffer = black_box( buffer ); 213 | b.iter( || { 214 | let deserialized: Vec< VarInt64 > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::NATIVE, &buffer ).unwrap(); 215 | deserialized 216 | }) 217 | } 218 | 219 | #[bench] 220 | fn write_varint_random( b: &mut Bencher ) { 221 | use speedy::private::VarInt64; 222 | let mut rng = XorShift64 { a: 1234 }; 223 | 224 | let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (1_u64 << (rng.next() % 63)).into() ).collect(); 225 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 226 | 227 | buffer = black_box( buffer ); 228 | b.iter( || { 229 | buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap() 230 | }) 231 | } 232 | 233 | #[bench] 234 | fn write_varint_always_one_byte( b: &mut Bencher ) { 235 | use speedy::private::VarInt64; 236 | let mut rng = XorShift64 { a: 1234 }; 237 | 238 | let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| (rng.next() % 100).into() ).collect(); 239 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 240 | 241 | buffer = black_box( buffer ); 242 | b.iter( || { 243 | buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap() 244 | }) 245 | } 246 | 247 | #[bench] 248 | fn write_varint_always_eight_bytes( b: &mut Bencher ) { 249 | use speedy::private::VarInt64; 250 | let mut rng = XorShift64 { a: 1234 }; 251 | 252 | let buffer: Vec< VarInt64 > = (0..1024 * 1024).into_iter().map( |_| ((rng.next() % 100) | (1 << 63)).into() ).collect(); 253 | let mut buffer = buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 254 | 255 | buffer = black_box( buffer ); 256 | b.iter( || { 257 | buffer.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap() 258 | }) 259 | } 260 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(feature = "external_doc", doc = include_str!("../README.md"))] 2 | #![forbid(unsafe_op_in_unsafe_fn)] 3 | #![cfg_attr(all(not(test), not(feature = "std")), no_std)] 4 | 5 | #[doc(hidden)] 6 | #[cfg(feature = "alloc")] 7 | pub extern crate alloc; 8 | 9 | mod error; 10 | #[macro_use] 11 | mod utils; 12 | mod readable; 13 | mod readable_impl; 14 | mod readable_unsized_impl; 15 | mod reader; 16 | mod writable; 17 | mod writable_impl; 18 | mod writer; 19 | mod context; 20 | mod endianness; 21 | mod varint; 22 | 23 | #[cfg(feature = "std")] 24 | mod circular_buffer; 25 | 26 | #[cfg(feature = "chrono")] 27 | mod ext_chrono; 28 | 29 | #[cfg(feature = "glam")] 30 | mod ext_glam; 31 | 32 | #[cfg(feature = "smallvec")] 33 | mod ext_smallvec; 34 | 35 | #[cfg(feature = "regex")] 36 | mod ext_regex; 37 | 38 | #[cfg(feature = "indexmap_v1")] 39 | mod ext_indexmap_v1; 40 | 41 | #[cfg(feature = "indexmap_v2")] 42 | mod ext_indexmap_v2; 43 | 44 | #[cfg(feature = "uuid")] 45 | mod ext_uuid; 46 | 47 | #[doc(hidden)] 48 | pub mod private; 49 | 50 | #[cfg(feature = "speedy-derive")] 51 | pub use speedy_derive::{Readable, Writable}; 52 | 53 | pub use crate::readable::Readable; 54 | pub use crate::reader::Reader; 55 | 56 | pub use crate::writable::Writable; 57 | pub use crate::writer::Writer; 58 | 59 | pub use crate::endianness::Endianness; 60 | pub use crate::context::{BigEndian, Context, LittleEndian}; 61 | 62 | pub 
use crate::error::{Error, IsEof}; 63 | 64 | #[cfg(test)] 65 | mod tests { 66 | use std::io; 67 | use std::borrow::Cow; 68 | 69 | use super::{ 70 | Reader, 71 | Readable, 72 | Writer, 73 | Writable, 74 | Context, 75 | Endianness 76 | }; 77 | 78 | #[derive(PartialEq, Debug)] 79 | struct SimpleStruct { 80 | a: u8, 81 | b: u8, 82 | c: u8 83 | } 84 | 85 | impl< C: Context > Writable< C > for SimpleStruct { 86 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 87 | writer.write_value( &self.a )?; 88 | writer.write_value( &self.b )?; 89 | writer.write_value( &self.c )?; 90 | 91 | Ok(()) 92 | } 93 | } 94 | 95 | impl< 'a, C: Context > Readable< 'a, C > for SimpleStruct { 96 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 97 | let a = reader.read_u8()?; 98 | let b = reader.read_u8()?; 99 | let c = reader.read_u8()?; 100 | Ok( SimpleStruct { a, b, c } ) 101 | } 102 | } 103 | 104 | #[test] 105 | fn simple_write_to_vec() { 106 | let value = SimpleStruct { a: 1, b: 2, c: 3 }; 107 | let data = value.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 108 | assert_eq!( data, vec![ 1, 2, 3 ] ); 109 | } 110 | 111 | #[cfg(feature = "std")] 112 | #[test] 113 | fn simple_read_from_stream_unbuffered() { 114 | let data = vec![ 1, 2, 3 ]; 115 | let cursor = io::Cursor::new( data ); 116 | let value = SimpleStruct::read_from_stream_unbuffered_with_ctx( Endianness::NATIVE, cursor ).unwrap(); 117 | assert_eq!( value, SimpleStruct { a: 1, b: 2, c: 3 } ); 118 | } 119 | 120 | #[cfg(feature = "std")] 121 | #[test] 122 | fn simple_read_from_stream_buffered() { 123 | let data = vec![ 1, 2, 3 ]; 124 | let cursor = io::Cursor::new( data ); 125 | let value = SimpleStruct::read_from_stream_buffered_with_ctx( Endianness::NATIVE, cursor ).unwrap(); 126 | assert_eq!( value, SimpleStruct { a: 1, b: 2, c: 3 } ); 127 | } 128 | 129 | #[test] 130 | fn simple_read_from_buffer() { 131 | let data = vec![ 1, 2, 3 ]; 132 | let value = SimpleStruct::read_from_buffer_with_ctx( Endianness::NATIVE, &data ).unwrap(); 133 | assert_eq!( value, SimpleStruct { a: 1, b: 2, c: 3 } ); 134 | } 135 | 136 | #[test] 137 | fn simple_read_bytes_from_buffer_owned() { 138 | let data = vec![ 2, 0, 0, 0, 12, 34 ]; 139 | let value: Cow< [u8] > = Readable::read_from_buffer_copying_data_with_ctx( Endianness::LittleEndian, &data ).unwrap(); 140 | assert_eq!( &*value, &[12, 34] ); 141 | assert_ne!( value.as_ptr(), data[ 4.. ].as_ptr() ); 142 | } 143 | 144 | #[test] 145 | fn simple_read_bytes_from_buffer_borrowed() { 146 | let data = vec![ 2, 0, 0, 0, 12, 34 ]; 147 | let value: Cow< [u8] > = Readable::read_from_buffer_with_ctx( Endianness::LittleEndian, &data ).unwrap(); 148 | assert_eq!( &*value, &[12, 34] ); 149 | assert_eq!( value.as_ptr(), data[ 4.. 
].as_ptr() ); 150 | } 151 | 152 | #[test] 153 | fn read_from_buffer_copying_data_with_default_ctx() { 154 | let data = vec![ 2, 0 ]; 155 | let value = u16::read_from_buffer_copying_data( &data ).unwrap(); 156 | assert_eq!( value, 2 ); 157 | } 158 | 159 | #[test] 160 | fn read_from_buffer_borrowed_with_default_ctx() { 161 | let data = vec![ 2, 0 ]; 162 | let value = u16::read_from_buffer( &data ).unwrap(); 163 | assert_eq!( value, 2 ); 164 | } 165 | 166 | #[cfg(feature = "std")] 167 | #[test] 168 | fn read_from_stream_unbuffered_with_default_ctx() { 169 | let data = vec![ 2, 0 ]; 170 | let value = u16::read_from_stream_unbuffered( io::Cursor::new( data ) ).unwrap(); 171 | assert_eq!( value, 2 ); 172 | } 173 | 174 | #[cfg(feature = "std")] 175 | #[test] 176 | fn read_from_stream_buffered_with_default_ctx() { 177 | let data = vec![ 2, 0 ]; 178 | let value = u16::read_from_stream_buffered( io::Cursor::new( data ) ).unwrap(); 179 | assert_eq!( value, 2 ); 180 | } 181 | 182 | #[test] 183 | fn write_to_buffer_with_default_ctx() { 184 | let mut buffer = [0, 0]; 185 | 2_u16.write_to_buffer( &mut buffer ).unwrap(); 186 | assert_eq!( buffer, [2, 0] ); 187 | } 188 | 189 | #[test] 190 | fn write_to_vec_with_default_ctx() { 191 | let buffer = 2_u16.write_to_vec().unwrap(); 192 | assert_eq!( buffer, [2, 0] ); 193 | } 194 | 195 | #[cfg(feature = "std")] 196 | #[test] 197 | fn write_to_stream_with_default_ctx() { 198 | let mut buffer = [0, 0]; 199 | 2_u16.write_to_stream( io::Cursor::new( &mut buffer[..] ) ).unwrap(); 200 | assert_eq!( buffer, [2, 0] ); 201 | } 202 | 203 | #[test] 204 | fn read_write_u8_vec() { 205 | let original: Vec< u8 > = vec![ 1, 2, 3 ]; 206 | let serialized = original.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 207 | let deserialized: Vec< u8 > = Vec::< u8 >::read_from_buffer_with_ctx( Endianness::NATIVE, &serialized ).unwrap(); 208 | assert_eq!( original, deserialized ); 209 | } 210 | 211 | #[test] 212 | fn read_write_u64_vec() { 213 | let original: Vec< u64 > = vec![ 1, 2, 3 ]; 214 | let serialized = original.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 215 | let deserialized: Vec< u64 > = Vec::< u64 >::read_from_buffer_with_ctx( Endianness::NATIVE, &serialized ).unwrap(); 216 | assert_eq!( original, deserialized ); 217 | } 218 | 219 | #[test] 220 | fn read_write_string() { 221 | let original: String = "Hello world!".to_owned(); 222 | let serialized = original.write_to_vec_with_ctx( Endianness::NATIVE ).unwrap(); 223 | let deserialized: String = String::read_from_buffer_with_ctx( Endianness::NATIVE, &serialized ).unwrap(); 224 | assert_eq!( original, deserialized ); 225 | } 226 | 227 | #[cfg(feature = "std")] 228 | #[cfg(not(miri))] 229 | #[test] 230 | fn read_big_vector_of_vectors_from_stream_buffered() { 231 | const fn hash32( x: u32 ) -> u32 { 232 | let mut x = x.wrapping_mul( 0xa4d94a4f ); 233 | let a = x >> 16; 234 | let b = x >> 30; 235 | x ^= a >> b; 236 | x.wrapping_mul( 0xa4d94a4f ) 237 | } 238 | 239 | struct TestStream { 240 | buffer: Vec< u8 >, 241 | position: usize 242 | } 243 | 244 | impl io::Read for TestStream { 245 | fn read( &mut self, output: &mut [u8] ) -> Result< usize, io::Error > { 246 | if self.position >= self.buffer.len() || output.len() == 0 { 247 | return Ok(0); 248 | } 249 | 250 | let random = hash32( self.position as u32 + output.len() as u32 ) as usize; 251 | let length = std::cmp::min( random % output.len() + 1, self.buffer.len() - self.position ); 252 | output[ ..length ].copy_from_slice( &self.buffer[ self.position..self.position 
+ length ] ); 253 | self.position += length; 254 | 255 | Ok( length ) 256 | } 257 | } 258 | 259 | let mut original: Vec< Vec< u8 > > = Vec::new(); 260 | for nth in 0..10000 { 261 | let mut buffer = Vec::new(); 262 | let random = hash32( nth as u32 ); 263 | for byte in 0..(random % 128) as u8 { 264 | buffer.push( byte ); 265 | } 266 | 267 | original.push( buffer ); 268 | } 269 | let serialized = original.write_to_vec().unwrap(); 270 | let stream = TestStream { buffer: serialized, position: 0 }; 271 | let deserialized: Vec< Vec< u8 > > = Readable::read_from_stream_buffered( stream ).unwrap(); 272 | assert_eq!( original, deserialized ); 273 | } 274 | } 275 | -------------------------------------------------------------------------------- /src/ext_glam.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{Context, Readable, Reader, Writable, Writer}, 3 | glam::{ 4 | Affine2, Affine3A, BVec2, BVec3, BVec4, DAffine2, DAffine3, DMat2, DMat3, DMat4, DQuat, 5 | DVec2, DVec3, DVec4, IVec2, IVec3, IVec4, Mat2, Mat3, Mat3A, Mat4, Quat, UVec2, UVec3, 6 | UVec4, Vec2, Vec3, Vec3A, Vec4, 7 | }, 8 | }; 9 | 10 | macro_rules! impl_for_vec { 11 | ($T:ty, $ctor:ident, $comp_ty:ty, $comp_read_fn:ident, $comp_write_fn:ident, $($comp:ident),+) => { 12 | impl<'a, C> Readable<'a, C> for $T 13 | where 14 | C: Context, 15 | { 16 | #[inline] 17 | fn read_from>(reader: &mut R) -> Result { 18 | $( 19 | let $comp = reader.$comp_read_fn()?; 20 | )+ 21 | 22 | Ok(<$T>::$ctor($($comp),+)) 23 | } 24 | 25 | #[inline] 26 | fn minimum_bytes_needed() -> usize { 27 | let mut size = 0; 28 | $( 29 | let $comp = <$comp_ty as Readable::<'a, C>>::minimum_bytes_needed(); 30 | size += $comp; 31 | )+ 32 | size 33 | } 34 | } 35 | 36 | impl Writable for $T 37 | where 38 | C: Context, 39 | { 40 | #[inline] 41 | fn write_to>(&self, writer: &mut T) -> Result<(), C::Error> { 42 | $( 43 | writer.$comp_write_fn(self.$comp)?; 44 | )+ 45 | 46 | Ok(()) 47 | } 48 | 49 | #[inline] 50 | fn bytes_needed(&self) -> Result { 51 | let mut size = 0; 52 | $( 53 | size += Writable::::bytes_needed(&self.$comp)?; 54 | )+ 55 | Ok( size ) 56 | } 57 | } 58 | }; 59 | } 60 | 61 | impl_for_vec! {Vec2, new, f32, read_f32, write_f32, x, y} 62 | impl_for_vec! {Vec3, new, f32, read_f32, write_f32, x, y, z} 63 | impl_for_vec! {Vec3A, new, f32, read_f32, write_f32, x, y, z} 64 | impl_for_vec! {Vec4, new, f32, read_f32, write_f32, x, y, z, w} 65 | 66 | impl_for_vec! {DVec2, new, f64, read_f64, write_f64, x, y} 67 | impl_for_vec! {DVec3, new, f64, read_f64, write_f64, x, y, z} 68 | impl_for_vec! {DVec4, new, f64, read_f64, write_f64, x, y, z, w} 69 | 70 | impl_for_vec! {IVec2, new, i32, read_i32, write_i32, x, y} 71 | impl_for_vec! {IVec3, new, i32, read_i32, write_i32, x, y, z} 72 | impl_for_vec! {IVec4, new, i32, read_i32, write_i32, x, y, z, w} 73 | 74 | impl_for_vec! {UVec2, new, u32, read_u32, write_u32, x, y} 75 | impl_for_vec! {UVec3, new, u32, read_u32, write_u32, x, y, z} 76 | impl_for_vec! {UVec4, new, u32, read_u32, write_u32, x, y, z, w} 77 | 78 | impl_for_vec! {Quat, from_xyzw, f32, read_f32, write_f32, x, y, z, w} 79 | impl_for_vec! {DQuat, from_xyzw, f64, read_f64, write_f64, x, y, z, w} 80 | 81 | macro_rules! 
impl_for_bvec { 82 | ($T:ty, $($comp:ident),+) => { 83 | impl<'a, C> Readable<'a, C> for $T 84 | where 85 | C: Context, 86 | { 87 | #[inline] 88 | #[allow(unused_assignments)] 89 | fn read_from>(reader: &mut R) -> Result { 90 | let mask = reader.read_u8()?; 91 | let mut shift = 0; 92 | 93 | $( 94 | let $comp = (mask & (1 << shift)) != 0; 95 | shift += 1; 96 | )+ 97 | 98 | Ok(<$T>::new($($comp),+)) 99 | } 100 | 101 | #[inline] 102 | fn minimum_bytes_needed() -> usize { 103 | >::minimum_bytes_needed() 104 | } 105 | } 106 | 107 | impl Writable for $T 108 | where 109 | C: Context, 110 | { 111 | #[inline] 112 | fn write_to>(&self, writer: &mut T) -> Result<(), C::Error> { 113 | writer.write_u8(self.bitmask() as u8) 114 | } 115 | 116 | #[inline] 117 | fn bytes_needed(&self) -> Result { 118 | Writable::::bytes_needed(&0u8) 119 | } 120 | } 121 | }; 122 | } 123 | 124 | impl_for_bvec! {BVec2, x, y} 125 | impl_for_bvec! {BVec3, x, y, z} 126 | impl_for_bvec! {BVec4, x, y, z, w} 127 | 128 | macro_rules! impl_for_mat { 129 | ($T:ty, $comp_count:literal, $comp_ty:ty) => { 130 | impl<'a, C> Readable<'a, C> for $T 131 | where 132 | C: Context, 133 | { 134 | #[inline] 135 | fn read_from>(reader: &mut R) -> Result { 136 | let mut values = [Default::default(); $comp_count]; 137 | for v in &mut values { 138 | *v = reader.read_value()?; 139 | } 140 | Ok(<$T>::from_cols_array(&values)) 141 | } 142 | 143 | #[inline] 144 | fn minimum_bytes_needed() -> usize { 145 | <$comp_ty as Readable<'a, C>>::minimum_bytes_needed() * $comp_count 146 | } 147 | } 148 | 149 | impl Writable for $T 150 | where 151 | C: Context, 152 | { 153 | #[inline] 154 | fn write_to>(&self, writer: &mut T) -> Result<(), C::Error> { 155 | for comp in self.to_cols_array().iter() { 156 | writer.write_value(comp)? 157 | } 158 | 159 | Ok(()) 160 | } 161 | 162 | #[inline] 163 | fn bytes_needed(&self) -> Result { 164 | let mut size = 0; 165 | for comp in self.to_cols_array().iter() { 166 | size += Writable::::bytes_needed(comp)?; 167 | } 168 | Ok(size) 169 | } 170 | } 171 | }; 172 | } 173 | 174 | impl_for_mat! { Mat2, 4, f32 } 175 | impl_for_mat! { Mat3, 9, f32 } 176 | impl_for_mat! { Mat3A, 9, f32 } 177 | impl_for_mat! { Mat4, 16, f32 } 178 | 179 | impl_for_mat! { DMat2, 4, f64 } 180 | impl_for_mat! { DMat3, 9, f64 } 181 | impl_for_mat! { DMat4, 16, f64 } 182 | 183 | impl_for_mat! { Affine2, 6, f32 } 184 | impl_for_mat! { Affine3A, 12, f32 } 185 | 186 | impl_for_mat! { DAffine2, 6, f64 } 187 | impl_for_mat! { DAffine3, 12, f64 } 188 | 189 | #[test] 190 | fn test_glam() { 191 | use crate::endianness::Endianness; 192 | 193 | macro_rules! 
test_vec { 194 | ($T:ty, $ctor:ident, $($values:literal),+) => {{ 195 | let original = <$T>::$ctor($($values as _),+); 196 | let serialized = original.write_to_vec_with_ctx(Endianness::NATIVE).unwrap(); 197 | let deserialized = <$T>::read_from_buffer_with_ctx(Endianness::NATIVE, &serialized).unwrap(); 198 | assert_eq!(original, deserialized); 199 | }} 200 | } 201 | 202 | test_vec!(Vec2, new, 1, 2); 203 | test_vec!(Vec3, new, 1, 2, 3); 204 | test_vec!(Vec3A, new, 1, 2, 3); 205 | test_vec!(Vec4, new, 1, 2, 3, 4); 206 | 207 | test_vec!(DVec2, new, 1, 2); 208 | test_vec!(DVec3, new, 1, 2, 3); 209 | test_vec!(DVec4, new, 1, 2, 3, 4); 210 | 211 | test_vec!(IVec2, new, 1, 2); 212 | test_vec!(IVec3, new, 1, 2, 3); 213 | test_vec!(IVec4, new, 1, 2, 3, 4); 214 | 215 | test_vec!(UVec2, new, 1, 2); 216 | test_vec!(UVec3, new, 1, 2, 3); 217 | test_vec!(UVec4, new, 1, 2, 3, 4); 218 | 219 | test_vec!(Quat, from_xyzw, 1, 2, 3, 4); 220 | test_vec!(DQuat, from_xyzw, 1, 2, 3, 4); 221 | 222 | for a in [false, true] { 223 | for b in [false, true] { 224 | let original = BVec2::new(a, b); 225 | let serialized = original.write_to_vec_with_ctx(Endianness::NATIVE).unwrap(); 226 | let deserialized = 227 | BVec2::read_from_buffer_with_ctx(Endianness::NATIVE, &serialized).unwrap(); 228 | assert_eq!(original, deserialized); 229 | } 230 | } 231 | 232 | for a in [false, true] { 233 | for b in [false, true] { 234 | for c in [false, true] { 235 | let original = BVec3::new(a, b, c); 236 | let serialized = original.write_to_vec_with_ctx(Endianness::NATIVE).unwrap(); 237 | let deserialized = 238 | BVec3::read_from_buffer_with_ctx(Endianness::NATIVE, &serialized).unwrap(); 239 | assert_eq!(original, deserialized); 240 | } 241 | } 242 | } 243 | 244 | for a in [false, true] { 245 | for b in [false, true] { 246 | for c in [false, true] { 247 | for d in [false, true] { 248 | let original = BVec4::new(a, b, c, d); 249 | let serialized = original.write_to_vec_with_ctx(Endianness::NATIVE).unwrap(); 250 | let deserialized = 251 | BVec4::read_from_buffer_with_ctx(Endianness::NATIVE, &serialized).unwrap(); 252 | assert_eq!(original, deserialized); 253 | } 254 | } 255 | } 256 | } 257 | 258 | macro_rules! test_mat { 259 | ($T:ty) => {{ 260 | let mut cols = <$T>::IDENTITY.to_cols_array(); 261 | for (i, c) in cols.iter_mut().enumerate() { 262 | *c = (i + 1) as _; 263 | } 264 | 265 | let original = <$T>::from_cols_array(&cols); 266 | let serialized = original.write_to_vec_with_ctx(Endianness::NATIVE).unwrap(); 267 | let deserialized = 268 | <$T>::read_from_buffer_with_ctx(Endianness::NATIVE, &serialized).unwrap(); 269 | assert_eq!(original, deserialized); 270 | }}; 271 | } 272 | 273 | test_mat!(Mat2); 274 | test_mat!(Mat3); 275 | test_mat!(Mat3A); 276 | test_mat!(Mat4); 277 | 278 | test_mat!(DMat2); 279 | test_mat!(DMat3); 280 | test_mat!(DMat4); 281 | 282 | test_mat!(Affine2); 283 | test_mat!(Affine3A); 284 | 285 | test_mat!(DAffine2); 286 | test_mat!(DAffine3); 287 | } 288 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017 Jan Bujak 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/varint.rs: -------------------------------------------------------------------------------- 1 | use { 2 | crate::{ 3 | Context, 4 | Readable, 5 | Reader, 6 | Writable, 7 | Writer 8 | } 9 | }; 10 | 11 | // Encoding: 12 | // At most 7bit - 0xxxxxxx 13 | // At most 14bit - 10xxxxxx xxxxxxxx 14 | // At most 21bit - 110xxxxx xxxxxxxx xxxxxxxx 15 | // At most 28bit - 1110xxxx xxxxxxxx xxxxxxxx xxxxxxxx 16 | // At most 35bit - 11110xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx 17 | // At most 42bit - 111110xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx 18 | // At most 49bit - 1111110x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx 19 | // At most 56bit - 11111110 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx 20 | // At most 64bit - 11111111 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx 21 | // 22 | // The first byte always contains the most significant bits. The rest of the bytes are always 23 | // written in a little endian order. 24 | // 25 | // Signed integers are transformed with a zigzag transformation before being serialized. 26 | 27 | #[repr(transparent)] 28 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] 29 | pub struct VarInt64( u64 ); 30 | 31 | impl From< u64 > for VarInt64 { 32 | #[inline] 33 | fn from( value: u64 ) -> Self { 34 | VarInt64( value ) 35 | } 36 | } 37 | 38 | impl From< u32 > for VarInt64 { 39 | #[inline] 40 | fn from( value: u32 ) -> Self { 41 | VarInt64( value as u64 ) 42 | } 43 | } 44 | 45 | impl From< u16 > for VarInt64 { 46 | #[inline] 47 | fn from( value: u16 ) -> Self { 48 | VarInt64( value as u64 ) 49 | } 50 | } 51 | 52 | impl From< u8 > for VarInt64 { 53 | #[inline] 54 | fn from( value: u8 ) -> Self { 55 | VarInt64( value as u64 ) 56 | } 57 | } 58 | 59 | impl From< i64 > for VarInt64 { 60 | #[inline] 61 | fn from( value: i64 ) -> Self { 62 | let value = (value << 1) ^ (value >> 63); 63 | VarInt64( value as u64 ) 64 | } 65 | } 66 | 67 | impl From< i32 > for VarInt64 { 68 | #[inline] 69 | fn from( value: i32 ) -> Self { 70 | let value = (value << 1) ^ (value >> 31); 71 | VarInt64( value as u32 as u64 ) 72 | } 73 | } 74 | 75 | impl From< i16 > for VarInt64 { 76 | #[inline] 77 | fn from( value: i16 ) -> Self { 78 | let value = (value << 1) ^ (value >> 15); 79 | VarInt64( value as u16 as u64 ) 80 | } 81 | } 82 | 83 | impl From< i8 > for VarInt64 { 84 | #[inline] 85 | fn from( value: i8 ) -> Self { 86 | let value = (value << 1) ^ (value >> 7); 87 | VarInt64( value as u8 as u64 ) 88 | } 89 | } 90 | 91 | impl From< VarInt64 > for u64 { 92 | #[inline] 93 | fn from( value: VarInt64 ) -> Self { 94 | value.0 95 | } 96 | } 97 | 98 | impl From< VarInt64 > for i64 { 99 | #[inline] 100 | fn from( value: VarInt64 ) -> Self { 101 | let value = value.0; 102 | ((value >> 1) ^ (value << 15)) as i64 103 | } 104 | } 105 | 106 | macro_rules! 
impl_read { 107 | ($reader:ident, $read_u8:ident, $read_bytes:ident) => {{ 108 | let first_byte = $reader.$read_u8()?; 109 | let length = (!first_byte).leading_zeros(); 110 | 111 | let upper_mask = 0b11111111_u64 >> length; 112 | let upper_bits = (upper_mask & (first_byte as u64)).wrapping_shl( length * 8 ); 113 | 114 | macro_rules! read { 115 | ($count:expr) => {{ 116 | let mut value: u64 = 0; 117 | { 118 | let slice = unsafe { core::slice::from_raw_parts_mut( &mut value as *mut u64 as *mut u8, $count ) }; 119 | $reader.$read_bytes( slice )?; 120 | } 121 | value = value.to_le(); 122 | Ok( VarInt64( upper_bits | value ) ) 123 | }} 124 | } 125 | 126 | match length { 127 | 0 => read! { 0 }, 128 | 1 => read! { 1 }, 129 | 2 => read! { 2 }, 130 | 3 => read! { 3 }, 131 | 4 => read! { 4 }, 132 | 5 => read! { 5 }, 133 | 6 => read! { 6 }, 134 | 7 => read! { 7 }, 135 | 8 => read! { 8 }, 136 | _ => { 137 | if cfg!( debug_assertions ) { 138 | unreachable!() 139 | } else { 140 | unsafe { 141 | core::hint::unreachable_unchecked(); 142 | } 143 | } 144 | } 145 | } 146 | }} 147 | } 148 | 149 | impl< 'a, C: Context > Readable< 'a, C > for VarInt64 { 150 | #[inline] 151 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 152 | impl_read!( reader, read_u8, read_bytes ) 153 | } 154 | 155 | #[inline] 156 | fn minimum_bytes_needed() -> usize { 157 | 1 158 | } 159 | } 160 | 161 | impl VarInt64 { 162 | #[inline] 163 | pub(crate) fn peek_from< 'a, C, R >( reader: &mut R ) -> Result< Self, C::Error > 164 | where C: Context, 165 | R: Reader< 'a, C > 166 | { 167 | impl_read!( reader, peek_u8, peek_bytes ) 168 | } 169 | } 170 | 171 | #[cfg(test)] 172 | fn get_length_slow( leading_zeros: u32 ) -> u32 { 173 | let bits_required = 64 - leading_zeros; 174 | match bits_required { 175 | 0..=7 => 0, 176 | 8..=14 => 1, 177 | 15..=21 => 2, 178 | 22..=28 => 3, 179 | 29..=35 => 4, 180 | 36..=42 => 5, 181 | 43..=49 => 6, 182 | 50..=56 => 7, 183 | 57..=64 => 8, 184 | _ => unreachable!() 185 | } 186 | } 187 | 188 | #[inline] 189 | fn get_length( leading_zeros: u32 ) -> u32 { 190 | let bits_required = 64 - leading_zeros; 191 | let x = bits_required >> 3; 192 | ((x + bits_required) ^ x) >> 3 193 | } 194 | 195 | #[test] 196 | fn test_get_length() { 197 | for zeros in 0..=64 { 198 | assert_eq!( 199 | get_length( zeros ), 200 | get_length_slow( zeros ) 201 | ); 202 | } 203 | } 204 | 205 | impl< C: Context > Writable< C > for VarInt64 { 206 | #[inline] 207 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 208 | let mut value = self.0; 209 | let length = get_length( value.leading_zeros() ); 210 | 211 | if let Some( false ) = writer.can_write_at_least( length as usize + 1 ) { 212 | return Err( crate::error::error_end_of_output_buffer() ); 213 | } 214 | 215 | match length { 216 | 0 => writer.write_u8( value as u8 ), 217 | 1 => { 218 | writer.write_u8( 0b10000000 | (value >> 8) as u8 )?; 219 | value = value.to_le(); 220 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 1 ) }; 221 | writer.write_bytes( slice ) 222 | }, 223 | 2 => { 224 | writer.write_u8( 0b11000000 | (value >> 16) as u8 )?; 225 | value = value.to_le(); 226 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 2 ) }; 227 | writer.write_bytes( slice ) 228 | }, 229 | 3 => { 230 | writer.write_u8( 0b11100000 | (value >> 24) as u8 )?; 231 | value = value.to_le(); 232 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 
as *const u8, 3 ) }; 233 | writer.write_bytes( slice ) 234 | }, 235 | 4 => { 236 | writer.write_u8( 0b11110000 | (value >> 32) as u8 )?; 237 | value = value.to_le(); 238 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 4 ) }; 239 | writer.write_bytes( slice ) 240 | }, 241 | 5 => { 242 | writer.write_u8( 0b11111000 | (value >> 40) as u8 )?; 243 | value = value.to_le(); 244 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 5 ) }; 245 | writer.write_bytes( slice ) 246 | }, 247 | 6 => { 248 | writer.write_u8( 0b11111100 | (value >> 48) as u8 )?; 249 | value = value.to_le(); 250 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 6 ) }; 251 | writer.write_bytes( slice ) 252 | }, 253 | 7 => { 254 | writer.write_u8( 0b11111110 | (value >> 56) as u8 )?; 255 | value = value.to_le(); 256 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 7 ) }; 257 | writer.write_bytes( slice ) 258 | }, 259 | 8 => { 260 | writer.write_u8( 0b11111111 )?; 261 | value = value.to_le(); 262 | let slice = unsafe { core::slice::from_raw_parts( &value as *const u64 as *const u8, 8 ) }; 263 | writer.write_bytes( slice ) 264 | }, 265 | _ => { 266 | if cfg!( debug_assertions ) { 267 | unreachable!() 268 | } else { 269 | unsafe { 270 | core::hint::unreachable_unchecked(); 271 | } 272 | } 273 | } 274 | } 275 | } 276 | 277 | #[inline] 278 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 279 | Ok( get_length( self.0.leading_zeros() ) as usize + 1 ) 280 | } 281 | } 282 | 283 | #[test] 284 | fn test_varint64_serialization() { 285 | use crate::Endianness; 286 | 287 | macro_rules! check { 288 | ($($value:expr => $expected:expr),+) => { 289 | $( 290 | assert_eq!( 291 | VarInt64( $value ).write_to_vec_with_ctx( Endianness::LittleEndian ).unwrap(), 292 | &$expected 293 | ); 294 | )+ 295 | $( 296 | assert_eq!( 297 | VarInt64( $value ).write_to_vec_with_ctx( Endianness::BigEndian ).unwrap(), 298 | &$expected 299 | ); 300 | )+ 301 | } 302 | } 303 | 304 | check! 
{ 305 | 0b_00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000 => [0b00000000], 306 | 0b_00000000_00000000_00000000_00000000_00000000_00000000_00000000_01111111 => [0b01111111], 307 | 0b_00000000_00000000_00000000_00000000_00000000_00000000_00000000_10000000 => [0b10000000, 0b10000000], 308 | 0b_00000000_00000000_00000000_00000000_00000000_00000000_00111111_11111111 => [0b10111111, 0b11111111], 309 | 0b_00000000_00000000_00000000_00000000_00000000_00000000_01000000_00000000 => [0b11000000, 0b00000000, 0b01000000], 310 | 0b_00000000_00000000_00000000_00000000_00000000_00011111_11111111_11111111 => [0b11011111, 0b11111111, 0b11111111], 311 | 0b_00000000_00000000_00000000_00000000_00000000_00100000_00000000_00000000 => [0b11100000, 0b00000000, 0b00000000, 0b00100000], 312 | 0b_00000000_00000000_00000000_00000000_00001111_11111111_11111111_11111111 => [0b11101111, 0b11111111, 0b11111111, 0b11111111], 313 | 0b_00000000_00000000_00000000_00000000_00010000_00000000_00000000_00000000 => [0b11110000, 0b00000000, 0b00000000, 0b00000000, 0b00010000], 314 | 0b_00000000_00000000_00000000_00000111_11111111_11111111_11111111_11111111 => [0b11110111, 0b11111111, 0b11111111, 0b11111111, 0b11111111], 315 | 0b_00000000_00000000_00000000_00001000_00000000_00000000_00000000_00000000 => [0b11111000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00001000], 316 | 0b_00000000_00000000_00000011_11111111_11111111_11111111_11111111_11111111 => [0b11111011, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111], 317 | 0b_00000000_00000000_00000100_00000000_00000000_00000000_00000000_00000000 => [0b11111100, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000100], 318 | 0b_00000000_00000001_11111111_11111111_11111111_11111111_11111111_11111111 => [0b11111101, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111], 319 | 0b_00000000_00000010_00000000_00000000_00000000_00000000_00000000_00000000 => [0b11111110, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000010], 320 | 0b_00000000_11111111_11111111_11111111_11111111_11111111_11111111_11111111 => [0b11111110, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111], 321 | 0b_00000001_00000000_00000000_00000000_00000000_00000000_00000000_00000000 => [0b11111111, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000000, 0b00000001], 322 | 0b_11111111_11111111_11111111_11111111_11111111_11111111_11111111_11111111 => [0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111, 0b11111111] 323 | } 324 | } 325 | 326 | #[test] 327 | fn test_varint64_bruteforce() { 328 | const fn hash32( x: u32 ) -> u32 { 329 | let mut x = x.wrapping_mul( 0xa4d94a4f ); 330 | let a = x >> 16; 331 | let b = x >> 30; 332 | x ^= a >> b; 333 | x.wrapping_mul( 0xa4d94a4f ) 334 | } 335 | 336 | const fn hash64( x: u64 ) -> u64 { 337 | (hash32( x as u32 ) as u64) | ((hash32( (x.wrapping_mul( 0xaaaaaaaa ) ^ 0xaaaaaaaa) as u32 ) as u64) << 32) 338 | } 339 | 340 | for n in 0..64 { 341 | let values = [ 342 | 1 << n, 343 | (1 << n) - 1, 344 | ((1 << n) - 1) << 1, 345 | (1_u64 << n).wrapping_mul( 0b00000010 ), 346 | (1_u64 << n).wrapping_mul( 0b00000101 ), 347 | (1_u64 << n).wrapping_mul( 0b00001010 ), 348 | (1_u64 << n).wrapping_mul( 0b00010101 ), 349 | (1_u64 << n).wrapping_mul( 0b00101010 ), 350 | (1_u64 << n).wrapping_mul( 0b01010101 ), 351 | (1_u64 << n).wrapping_mul( 0b10101010 ), 352 | hash64( n ) 353 | ]; 354 | 355 | for 
&value in &values { 356 | let value = VarInt64( value ); 357 | let serialized_le = value.write_to_vec_with_ctx( crate::Endianness::LittleEndian ).unwrap(); 358 | let deserialized = VarInt64::read_from_buffer_with_ctx( crate::Endianness::LittleEndian, &serialized_le ).unwrap(); 359 | assert_eq!( deserialized, value ); 360 | 361 | let serialized_be = value.write_to_vec_with_ctx( crate::Endianness::BigEndian ).unwrap(); 362 | assert_eq!( serialized_be, serialized_le ); 363 | } 364 | } 365 | } 366 | -------------------------------------------------------------------------------- /src/circular_buffer.rs: -------------------------------------------------------------------------------- 1 | use { 2 | core::{ 3 | ops::{ 4 | Range 5 | } 6 | } 7 | }; 8 | 9 | pub struct CircularBuffer { 10 | buffer: Box< [u8] >, 11 | position: usize, 12 | length: usize 13 | } 14 | 15 | #[inline(always)] 16 | fn occupied( position: usize, length: usize, capacity: usize ) -> (Range< usize >, Option< Range< usize > >) { 17 | if position + length <= capacity { 18 | let a = position..position + length; 19 | 20 | debug_assert_eq!( a.len(), length ); 21 | (a, None) 22 | } else { 23 | let a = position..capacity; 24 | let b = 0..length - a.len(); 25 | 26 | debug_assert_eq!( a.len() + b.len(), length ); 27 | (a, Some( b )) 28 | } 29 | } 30 | 31 | fn empty( position: usize, length: usize, capacity: usize, mut max: usize ) -> (Range< usize >, Option< Range< usize > >) { 32 | if position == 0 { 33 | let mut a = length..capacity; 34 | debug_assert_eq!( a.len(), capacity - length ); 35 | 36 | let chunk_length = a.len(); 37 | if max < chunk_length { 38 | a.end = a.start + max; 39 | } 40 | 41 | (a, None) 42 | } else if position + length >= capacity { 43 | let right_chunk_length = capacity - position; 44 | let left_chunk_length = length - right_chunk_length; 45 | let mut a = left_chunk_length..capacity - right_chunk_length; 46 | debug_assert_eq!( a.len(), capacity - length ); 47 | 48 | let chunk_length = a.len(); 49 | if max < chunk_length { 50 | a.end = a.start + max; 51 | } 52 | 53 | (a, None) 54 | } else { 55 | let mut a = position + length..capacity; 56 | let mut b = 0..position; 57 | debug_assert_eq!( a.len() + b.len(), capacity - length ); 58 | 59 | let chunk_length = a.len(); 60 | if max <= chunk_length { 61 | a.end = a.start + max; 62 | return (a, None); 63 | } 64 | 65 | max -= chunk_length; 66 | let chunk_length = b.len(); 67 | if max <= chunk_length { 68 | b.end = b.start + max; 69 | } 70 | 71 | (a, Some( b )) 72 | } 73 | } 74 | 75 | #[test] 76 | fn test_circular_index_occupied() { 77 | // Empty. 78 | assert_eq!( 79 | occupied( 0, 0, 10 ), 80 | (0..0, None) 81 | ); 82 | 83 | // Empty with position in the middle. 84 | assert_eq!( 85 | occupied( 5, 0, 10 ), 86 | (5..5, None) 87 | ); 88 | 89 | // Fully occupied. 90 | assert_eq!( 91 | occupied( 0, 10, 10 ), 92 | (0..10, None) 93 | ); 94 | 95 | // Occupied only in the left half. 96 | assert_eq!( 97 | occupied( 0, 5, 10 ), 98 | (0..5, None) 99 | ); 100 | 101 | // Occupied only in the right half. 102 | assert_eq!( 103 | occupied( 5, 5, 10 ), 104 | (5..10, None) 105 | ); 106 | 107 | // Occupied only in the middle. 108 | assert_eq!( 109 | occupied( 1, 8, 10 ), 110 | (1..9, None) 111 | ); 112 | 113 | // Fully occupied overflowing the end. 114 | assert_eq!( 115 | occupied( 5, 10, 10 ), 116 | (5..10, Some(0..5)) 117 | ); 118 | } 119 | 120 | #[test] 121 | fn test_circular_index_empty() { 122 | // Empty. 
123 | assert_eq!( 124 | empty( 0, 0, 10, !0 ), 125 | (0..10, None) 126 | ); 127 | 128 | // Empty (with limit). 129 | assert_eq!( 130 | empty( 0, 0, 10, 1 ), 131 | (0..1, None) 132 | ); 133 | 134 | // Empty with position in the middle. 135 | assert_eq!( 136 | empty( 5, 0, 10, !0 ), 137 | (5..10, Some( 0..5 )) 138 | ); 139 | 140 | // Empty with position in the middle (with limit). 141 | assert_eq!( 142 | empty( 5, 0, 10, 1 ), 143 | (5..6, None) 144 | ); 145 | 146 | // Fully occupied. 147 | assert_eq!( 148 | empty( 0, 10, 10, !0 ), 149 | (10..10, None) 150 | ); 151 | 152 | // Fully occupied (with limit). 153 | assert_eq!( 154 | empty( 0, 10, 10, 1 ), 155 | (10..10, None) 156 | ); 157 | 158 | // Occupied only in the left half. 159 | assert_eq!( 160 | empty( 0, 5, 10, !0 ), 161 | (5..10, None) 162 | ); 163 | 164 | // Occupied only in the left half (with limit). 165 | assert_eq!( 166 | empty( 0, 5, 10, 1 ), 167 | (5..6, None) 168 | ); 169 | 170 | // Occupied only in the right half. 171 | assert_eq!( 172 | empty( 5, 5, 10, !0 ), 173 | (0..5, None) 174 | ); 175 | 176 | // Occupied only in the right half (with limit). 177 | assert_eq!( 178 | empty( 5, 5, 10, 1 ), 179 | (0..1, None) 180 | ); 181 | 182 | // Occupied only in the middle. 183 | assert_eq!( 184 | empty( 2, 6, 10, !0 ), 185 | (8..10, Some( 0..2 )) 186 | ); 187 | 188 | // Occupied only in the middle (with limit which prevents a boundary cross). 189 | assert_eq!( 190 | empty( 2, 6, 10, 2 ), 191 | (8..10, None ) 192 | ); 193 | 194 | // Occupied only in the middle (with limit which doesn't prevent a boundary cross). 195 | assert_eq!( 196 | empty( 2, 6, 10, 3 ), 197 | (8..10, Some( 0..1 )) 198 | ); 199 | 200 | // Fully occupied overflowing the end. 201 | assert_eq!( 202 | empty( 5, 10, 10, !0 ), 203 | (5..5, None) 204 | ); 205 | } 206 | 207 | impl CircularBuffer { 208 | #[cfg(test)] 209 | pub fn new() -> Self { 210 | CircularBuffer { 211 | buffer: Vec::new().into_boxed_slice(), 212 | position: 0, 213 | length: 0 214 | } 215 | } 216 | 217 | pub fn with_capacity( capacity: usize ) -> Self { 218 | let mut buffer = Vec::with_capacity( capacity ); 219 | unsafe { 220 | buffer.set_len( capacity ); 221 | if cfg!( debug_assertions ) { 222 | std::ptr::write_bytes( buffer.as_mut_ptr(), 0xFF, buffer.len() ); 223 | } 224 | } 225 | 226 | CircularBuffer { 227 | buffer: buffer.into_boxed_slice(), 228 | position: 0, 229 | length: 0 230 | } 231 | } 232 | 233 | #[cfg(test)] 234 | pub fn is_empty( &self ) -> bool { 235 | self.length == 0 236 | } 237 | 238 | #[inline(always)] 239 | pub fn len( &self ) -> usize { 240 | self.length 241 | } 242 | 243 | pub fn capacity( &self ) -> usize { 244 | self.buffer.len() 245 | } 246 | 247 | #[cfg(test)] 248 | pub fn reserve_exact( &mut self, size: usize ) { 249 | self.reserve_impl( size, true ) 250 | } 251 | 252 | pub fn reserve( &mut self, size: usize ) { 253 | self.reserve_impl( size, false ) 254 | } 255 | 256 | fn reserve_impl( &mut self, size: usize, is_exact: bool ) { 257 | let mut new_capacity = self.length + size; 258 | if !is_exact && self.buffer.len() >= new_capacity { 259 | return; 260 | } 261 | 262 | if is_exact && self.buffer.len() == new_capacity { 263 | return; 264 | } 265 | 266 | let mut new_buffer = Vec::new(); 267 | if !is_exact { 268 | new_buffer.reserve( new_capacity ); 269 | } else { 270 | new_buffer.reserve_exact( new_capacity ); 271 | } 272 | 273 | new_capacity = new_buffer.capacity(); 274 | unsafe { 275 | new_buffer.set_len( new_capacity ); 276 | } 277 | 278 | let (a, b) = occupied( self.position, 
self.length, self.buffer.len() ); 279 | new_buffer[ 0..a.len() ].copy_from_slice( &self.buffer[ a.clone() ] ); 280 | 281 | if let Some( b ) = b { 282 | new_buffer[ a.len()..self.length ].copy_from_slice( &self.buffer[ b ] ); 283 | } 284 | 285 | self.buffer = new_buffer.into_boxed_slice(); 286 | self.position = 0; 287 | } 288 | 289 | pub fn try_append_with< E >( &mut self, size: usize, callback: impl FnOnce( &mut [u8] ) -> Result< usize, E > ) -> Result< usize, E > { 290 | self.reserve( size ); 291 | 292 | let (range, _) = empty( self.position, self.length, self.buffer.len(), size ); 293 | let bytes_written = callback( &mut self.buffer[ range ] )?; 294 | 295 | self.length += bytes_written; 296 | Ok( bytes_written ) 297 | } 298 | 299 | #[inline(always)] 300 | pub fn as_slices( &self ) -> (&[u8], Option< &[u8] >) { 301 | let (a, b) = occupied( self.position, self.length, self.buffer.len() ); 302 | debug_assert_eq!( a.start, self.position ); 303 | (&self.buffer[ a ], b.map( |b| &self.buffer[ b ] )) 304 | } 305 | 306 | #[inline(always)] 307 | pub fn as_slices_of_length( &self, length: usize ) -> (&[u8], Option< &[u8] >) { 308 | assert!( length <= self.length ); 309 | let (a, b) = self.as_slices(); 310 | if length <= a.len() { 311 | (&a[ ..length ], None) 312 | } else { 313 | (a, b.map( |b| &b[ ..length - a.len() ] )) 314 | } 315 | } 316 | 317 | #[cfg(test)] 318 | pub fn to_vec( &self ) -> Vec< u8 > { 319 | let mut output = Vec::with_capacity( self.len() ); 320 | let (a, b) = self.as_slices(); 321 | output.extend_from_slice( a ); 322 | 323 | if let Some( b ) = b { 324 | output.extend_from_slice( b ); 325 | } 326 | 327 | output 328 | } 329 | 330 | #[cfg(test)] 331 | pub fn extend_from_slice( &mut self, slice: &[u8] ) { 332 | self.reserve( slice.len() ); 333 | let (range_1, range_2) = empty( self.position, self.length, self.buffer.len(), slice.len() ); 334 | 335 | self.buffer[ range_1.clone() ].copy_from_slice( &slice[ ..range_1.len() ] ); 336 | if let Some( range_2 ) = range_2 { 337 | self.buffer[ range_2.clone() ].copy_from_slice( &slice[ range_1.len().. ] ); 338 | } 339 | 340 | self.length += slice.len(); 341 | } 342 | 343 | #[inline(always)] 344 | pub fn consume( &mut self, length: usize ) { 345 | assert!( length <= self.length ); 346 | self.position = (self.position + length) % self.buffer.len(); 347 | self.length -= length; 348 | 349 | if self.length == 0 { 350 | self.position = 0; 351 | } 352 | } 353 | 354 | #[inline(always)] 355 | pub fn consume_into( &mut self, buffer: &mut [u8] ) { 356 | let length = core::cmp::min( self.length, buffer.len() ); 357 | if self.position + length < self.buffer.len() { 358 | buffer[ ..length ].copy_from_slice( &self.buffer[ self.position..self.position + length ] ); 359 | self.position += length; 360 | self.length -= length; 361 | if self.length == 0 { 362 | self.position = 0; 363 | } 364 | 365 | return; 366 | } 367 | 368 | self.consume_into_slow( buffer ) 369 | } 370 | 371 | #[inline(never)] 372 | fn consume_into_slow( &mut self, buffer: &mut [u8] ) { 373 | if buffer.is_empty() { 374 | return; 375 | } 376 | 377 | assert!( buffer.len() <= self.length ); 378 | let (a, b) = self.as_slices_of_length( buffer.len() ); 379 | buffer[ ..a.len() ].copy_from_slice( a ); 380 | 381 | if let Some( b ) = b { 382 | buffer[ a.len().. 
].copy_from_slice( b ); 383 | } 384 | 385 | self.consume( buffer.len() ); 386 | } 387 | } 388 | 389 | #[test] 390 | fn test_circular_buffer_basic() { 391 | let mut buf = CircularBuffer::new(); 392 | assert_eq!( buf.len(), 0 ); 393 | assert_eq!( buf.capacity(), 0 ); 394 | assert_eq!( buf.is_empty(), true ); 395 | 396 | buf.reserve_exact( 3 ); 397 | assert_eq!( buf.len(), 0 ); 398 | assert_eq!( buf.capacity(), 3 ); 399 | assert_eq!( buf.is_empty(), true ); 400 | 401 | buf.extend_from_slice( &[1, 2] ); 402 | assert_eq!( buf.len(), 2 ); 403 | assert_eq!( buf.capacity(), 3 ); 404 | assert_eq!( buf.is_empty(), false ); 405 | assert_eq!( buf.as_slices(), (&[1, 2][..], None) ); 406 | assert_eq!( buf.to_vec(), vec![1, 2] ); 407 | assert_eq!( buf.as_slices_of_length(0), (&[][..], None) ); 408 | assert_eq!( buf.as_slices_of_length(1), (&[1][..], None) ); 409 | assert_eq!( buf.as_slices_of_length(2), (&[1, 2][..], None) ); 410 | 411 | buf.extend_from_slice( &[3] ); 412 | assert_eq!( buf.len(), 3 ); 413 | assert_eq!( buf.capacity(), 3 ); 414 | assert_eq!( buf.as_slices(), (&[1, 2, 3][..], None) ); 415 | assert_eq!( buf.to_vec(), vec![1, 2, 3] ); 416 | assert_eq!( buf.as_slices_of_length(0), (&[][..], None) ); 417 | assert_eq!( buf.as_slices_of_length(1), (&[1][..], None) ); 418 | assert_eq!( buf.as_slices_of_length(2), (&[1, 2][..], None) ); 419 | assert_eq!( buf.as_slices_of_length(3), (&[1, 2, 3][..], None) ); 420 | 421 | buf.consume(1); 422 | assert_eq!( buf.len(), 2 ); 423 | assert_eq!( buf.capacity(), 3 ); 424 | assert_eq!( buf.as_slices(), (&[2, 3][..], None) ); 425 | assert_eq!( buf.to_vec(), vec![2, 3] ); 426 | 427 | buf.extend_from_slice( &[4] ); 428 | assert_eq!( buf.len(), 3 ); 429 | assert_eq!( buf.capacity(), 3 ); 430 | assert_eq!( buf.as_slices(), (&[2, 3][..], Some(&[4][..])) ); 431 | assert_eq!( buf.to_vec(), vec![2, 3, 4] ); 432 | assert_eq!( buf.as_slices_of_length(0), (&[][..], None) ); 433 | assert_eq!( buf.as_slices_of_length(1), (&[2][..], None) ); 434 | assert_eq!( buf.as_slices_of_length(2), (&[2, 3][..], None) ); 435 | assert_eq!( buf.as_slices_of_length(3), (&[2, 3][..], Some(&[4][..])) ); 436 | 437 | buf.extend_from_slice( &[5] ); 438 | assert_eq!( buf.len(), 4 ); 439 | assert_eq!( buf.as_slices(), (&[2, 3, 4, 5][..], None) ); 440 | assert_eq!( buf.to_vec(), vec![2, 3, 4, 5] ); 441 | 442 | let tmp = &mut [0, 0, 0, 0]; 443 | buf.consume_into( &mut tmp[..] ); 444 | assert_eq!( tmp, &[2, 3, 4, 5] ); 445 | } 446 | 447 | #[test] 448 | fn test_circular_buffer_partial_try_append_with() { 449 | let mut buf = CircularBuffer::new(); 450 | buf.reserve_exact( 3 ); 451 | buf.extend_from_slice( &[1, 2] ); 452 | buf.consume( 1 ); 453 | assert_eq!( buf.to_vec(), vec![2] ); 454 | buf.try_append_with( 2, |chunk| { 455 | assert_eq!( chunk.len(), 1 ); 456 | chunk[0] = 3; 457 | let result: Result< _, () > = Ok(1); 458 | result 459 | }).unwrap(); 460 | 461 | assert_eq!( buf.to_vec(), vec![2, 3] ); 462 | } 463 | 464 | #[test] 465 | fn test_circular_buffer_append_and_consume() { 466 | let mut buf = CircularBuffer::with_capacity( 1024 ); 467 | for _ in 0..2 { 468 | buf.try_append_with( 1, |output| { 469 | output[ 0 ] = 0; 470 | let result: Result< _, () > = Ok( output.len() ); 471 | result 472 | }).unwrap(); 473 | 474 | let mut actual = [0xaa]; 475 | buf.consume_into( &mut actual ); 476 | assert_eq!( actual[0], 0 ); 477 | } 478 | } 479 | 480 | #[cfg(not(miri))] 481 | #[cfg(test)] 482 | quickcheck::quickcheck! 
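// (The property below mirrors every append into a plain control `Vec< u8 >` and checks that the
// circular buffer reports the same length and contents after each chunk, including the case where
// `try_append_with` only receives the space left before the wrap-around and has to be called a
// second time for the remainder.)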
{ 483 | fn test_circular_buffer_quickcheck_try_append_with( data: Vec< Vec< u8 > > ) -> bool { 484 | let result = std::panic::catch_unwind( || { 485 | let mut buffer = CircularBuffer::with_capacity( 1024 ); 486 | let mut control_buffer = Vec::new(); 487 | for chunk in data { 488 | let written = buffer.try_append_with( chunk.len(), |output| { 489 | output.copy_from_slice( &chunk[ ..output.len() ] ); 490 | let result: Result< _, () > = Ok( output.len() ); 491 | result 492 | }).unwrap(); 493 | 494 | if written < chunk.len() { 495 | let remaining = chunk.len() - written; 496 | buffer.try_append_with( remaining, |output| { 497 | assert_eq!( output.len(), remaining ); 498 | output.copy_from_slice( &chunk[ written..written + output.len() ] ); 499 | let result: Result< _, () > = Ok( output.len() ); 500 | result 501 | }).unwrap(); 502 | } 503 | 504 | control_buffer.extend_from_slice( &chunk ); 505 | assert!( buffer.capacity() >= buffer.len() ); 506 | assert_eq!( buffer.len(), control_buffer.len() ); 507 | assert_eq!( buffer.to_vec(), control_buffer ); 508 | 509 | if buffer.len() > 0 { 510 | let expected = control_buffer.remove( 0 ); 511 | let mut actual = [!expected]; 512 | buffer.consume_into( &mut actual ); 513 | assert_eq!( actual[0], expected ); 514 | } 515 | } 516 | }); 517 | 518 | result.is_ok() 519 | } 520 | } 521 | -------------------------------------------------------------------------------- /src/readable_impl.rs: -------------------------------------------------------------------------------- 1 | use core::mem; 2 | use core::ops::{Range, RangeInclusive}; 3 | use core::mem::MaybeUninit; 4 | 5 | use crate::readable::Readable; 6 | use crate::reader::Reader; 7 | 8 | use crate::context::Context; 9 | use crate::utils::SwapBytes; 10 | use crate::endianness::Endianness; 11 | 12 | #[cfg(feature = "std")] 13 | use std::{ 14 | collections::{HashMap, HashSet}, 15 | hash::{BuildHasher, Hash}, 16 | }; 17 | 18 | #[cfg(feature = "alloc")] 19 | use alloc::{ 20 | borrow::{Cow, ToOwned}, 21 | boxed::Box, 22 | string::String, 23 | vec::Vec, 24 | collections::{BTreeMap, BTreeSet} 25 | }; 26 | 27 | #[cfg(feature = "alloc")] 28 | impl< 'a, C, K, V > Readable< 'a, C > for BTreeMap< K, V > 29 | where C: Context, 30 | K: Readable< 'a, C > + Ord, 31 | V: Readable< 'a, C >, 32 | { 33 | #[inline] 34 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 35 | let length = crate::private::read_length( reader )?; 36 | reader.read_key_value_collection( length ) 37 | } 38 | 39 | #[inline] 40 | fn minimum_bytes_needed() -> usize { 41 | 4 42 | } 43 | } 44 | 45 | #[cfg(feature = "alloc")] 46 | impl< 'a, C, T > Readable< 'a, C > for BTreeSet< T > 47 | where C: Context, 48 | T: Readable< 'a, C > + Ord 49 | { 50 | #[inline] 51 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 52 | let length = crate::private::read_length( reader )?; 53 | reader.read_collection( length ) 54 | } 55 | 56 | #[inline] 57 | fn minimum_bytes_needed() -> usize { 58 | 4 59 | } 60 | } 61 | 62 | #[cfg(feature = "std")] 63 | impl< 'a, C, K, V, S > Readable< 'a, C > for HashMap< K, V, S > 64 | where C: Context, 65 | K: Readable< 'a, C > + Eq + Hash, 66 | V: Readable< 'a, C >, 67 | S: BuildHasher + Default 68 | { 69 | #[inline] 70 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 71 | let length = crate::private::read_length( reader )?; 72 | reader.read_key_value_collection( length ) 73 | } 74 | 75 | #[inline] 76 | fn minimum_bytes_needed() -> usize { 77 | 4 
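// (The 4 above is the u32 length prefix that every variable-length collection in this file is
// serialized with; `read_length` pulls it back out before the elements themselves are read.
// A minimal round-trip sketch, assuming the same `*_with_ctx` helpers the varint test above uses -
// the concrete key/value types here are only illustrative:
//
//     let bytes = map.write_to_vec_with_ctx( crate::Endianness::LittleEndian )?;
//     let map: HashMap< u32, String > = HashMap::read_from_buffer_with_ctx( crate::Endianness::LittleEndian, &bytes )?;
// )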
78 | } 79 | } 80 | 81 | #[cfg(feature = "std")] 82 | impl< 'a, C, T, S > Readable< 'a, C > for HashSet< T, S > 83 | where C: Context, 84 | T: Readable< 'a, C > + Eq + Hash, 85 | S: BuildHasher + Default 86 | { 87 | #[inline] 88 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 89 | let length = crate::private::read_length( reader )?; 90 | reader.read_collection( length ) 91 | } 92 | 93 | #[inline] 94 | fn minimum_bytes_needed() -> usize { 95 | 4 96 | } 97 | } 98 | 99 | impl< 'a, C: Context > Readable< 'a, C > for bool { 100 | #[inline] 101 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 102 | let value = reader.read_u8()?; 103 | if value == 0 { 104 | Ok( false ) 105 | } else { 106 | Ok( true ) 107 | } 108 | } 109 | 110 | #[inline] 111 | fn minimum_bytes_needed() -> usize { 112 | 1 113 | } 114 | } 115 | 116 | impl< 'a, C: Context > Readable< 'a, C > for char { 117 | #[inline] 118 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 119 | let value = reader.read_u32()?; 120 | core::char::from_u32( value ).ok_or_else( crate::error::error_out_of_range_char ) 121 | } 122 | 123 | #[inline] 124 | fn minimum_bytes_needed() -> usize { 125 | 4 126 | } 127 | } 128 | 129 | macro_rules! impl_for_primitive { 130 | ($type:ty, $getter:ident, $endianness_swap:ident) => { 131 | impl< 'a, C: Context > Readable< 'a, C > for $type { 132 | #[inline(always)] 133 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 134 | reader.$getter() 135 | } 136 | 137 | #[inline] 138 | fn minimum_bytes_needed() -> usize { 139 | mem::size_of::< Self >() 140 | } 141 | 142 | #[doc(hidden)] 143 | #[inline(always)] 144 | fn speedy_is_primitive() -> bool { 145 | true 146 | } 147 | 148 | #[doc(hidden)] 149 | #[inline] 150 | unsafe fn speedy_slice_from_bytes( slice: &[u8] ) -> &[Self] { 151 | unsafe { 152 | core::slice::from_raw_parts( slice.as_ptr() as *const $type, slice.len() / mem::size_of::< Self >() ) 153 | } 154 | } 155 | 156 | #[doc(hidden)] 157 | #[inline(always)] 158 | unsafe fn speedy_flip_endianness( itself: *mut Self ) { 159 | unsafe { 160 | core::ptr::write_unaligned( itself, core::ptr::read_unaligned( itself ).swap_bytes() ); 161 | } 162 | } 163 | 164 | #[doc(hidden)] 165 | #[inline(always)] 166 | fn speedy_convert_slice_endianness( endianness: Endianness, slice: &mut [$type] ) { 167 | endianness.$endianness_swap( slice ); 168 | } 169 | } 170 | } 171 | } 172 | 173 | impl_for_primitive!( i8, read_i8, swap_slice_i8 ); 174 | impl_for_primitive!( i16, read_i16, swap_slice_i16 ); 175 | impl_for_primitive!( i32, read_i32, swap_slice_i32 ); 176 | impl_for_primitive!( i64, read_i64, swap_slice_i64 ); 177 | impl_for_primitive!( i128, read_i128, swap_slice_i128 ); 178 | impl_for_primitive!( u8, read_u8, swap_slice_u8 ); 179 | impl_for_primitive!( u16, read_u16, swap_slice_u16 ); 180 | impl_for_primitive!( u32, read_u32, swap_slice_u32 ); 181 | impl_for_primitive!( u64, read_u64, swap_slice_u64 ); 182 | impl_for_primitive!( u128, read_u128, swap_slice_u128 ); 183 | impl_for_primitive!( f32, read_f32, swap_slice_f32 ); 184 | impl_for_primitive!( f64, read_f64, swap_slice_f64 ); 185 | 186 | impl< 'a, C: Context > Readable< 'a, C > for usize { 187 | #[inline] 188 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 189 | let value = u64::read_from( reader )?; 190 | if value > core::usize::MAX as u64 { 191 | return Err( 
crate::error::error_too_big_usize_for_this_architecture() ); 192 | } 193 | Ok( value as usize ) 194 | } 195 | 196 | #[inline] 197 | fn minimum_bytes_needed() -> usize { 198 | <u64 as Readable< 'a, C >>::minimum_bytes_needed() 199 | } 200 | } 201 | 202 | #[cfg(feature = "alloc")] 203 | impl< 'a, C: Context > Readable< 'a, C > for String { 204 | #[inline] 205 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 206 | let bytes: Vec< u8 > = reader.read_value()?; 207 | let value = crate::private::vec_to_string( bytes )?; 208 | Ok( value ) 209 | } 210 | 211 | #[inline] 212 | fn minimum_bytes_needed() -> usize { 213 | <Vec< u8 > as Readable< 'a, C >>::minimum_bytes_needed() 214 | } 215 | } 216 | 217 | #[cfg(feature = "alloc")] 218 | impl< 'a, C: Context > Readable< 'a, C > for Cow< 'a, str > { 219 | #[inline] 220 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 221 | let length = crate::private::read_length( reader )?; 222 | let bytes: Cow< 'a, [u8] > = reader.read_cow( length )?; 223 | let value = crate::private::cow_bytes_to_cow_str( bytes )?; 224 | Ok( value ) 225 | } 226 | 227 | #[inline] 228 | fn minimum_bytes_needed() -> usize { 229 | <String as Readable< 'a, C >>::minimum_bytes_needed() 230 | } 231 | } 232 | 233 | #[cfg(feature = "alloc")] 234 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Vec< T > { 235 | #[inline] 236 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 237 | let length = crate::private::read_length( reader )?; 238 | reader.read_vec( length ) 239 | } 240 | 241 | #[inline] 242 | fn minimum_bytes_needed() -> usize { 243 | 4 244 | } 245 | } 246 | 247 | #[cfg(feature = "alloc")] 248 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Cow< 'a, [T] > where [T]: ToOwned< Owned = Vec< T > > { 249 | #[inline] 250 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 251 | let length = crate::private::read_length( reader )?; 252 | reader.read_cow( length ) 253 | } 254 | 255 | #[inline] 256 | fn minimum_bytes_needed() -> usize { 257 | <Vec< T > as Readable< 'a, C >>::minimum_bytes_needed() 258 | } 259 | } 260 | 261 | #[cfg(feature = "std")] 262 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Cow< 'a, HashSet< T > > where T: Readable< 'a, C > + Clone + Hash + Eq { 263 | #[inline] 264 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 265 | Ok( Cow::Owned( reader.read_value()? ) ) 266 | } 267 | 268 | #[inline] 269 | fn minimum_bytes_needed() -> usize { 270 | <HashSet< T > as Readable< 'a, C >>::minimum_bytes_needed() 271 | } 272 | } 273 | 274 | #[cfg(feature = "alloc")] 275 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Cow< 'a, BTreeSet< T > > where T: Readable< 'a, C > + Clone + Ord { 276 | #[inline] 277 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 278 | Ok( Cow::Owned( reader.read_value()? ) ) 279 | } 280 | 281 | #[inline] 282 | fn minimum_bytes_needed() -> usize { 283 | <BTreeSet< T > as Readable< 'a, C >>::minimum_bytes_needed() 284 | } 285 | } 286 | 287 | #[cfg(feature = "std")] 288 | impl< 'a, C: Context, K: Readable< 'a, C >, V: Readable< 'a, C > > Readable< 'a, C > for Cow< 'a, HashMap< K, V > > where K: Readable< 'a, C > + Clone + Hash + Eq, V: Readable< 'a, C > + Clone { 289 | #[inline] 290 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 291 | Ok( Cow::Owned( reader.read_value()?
) ) 292 | } 293 | 294 | #[inline] 295 | fn minimum_bytes_needed() -> usize { 296 | <HashMap< K, V > as Readable< 'a, C >>::minimum_bytes_needed() 297 | } 298 | } 299 | 300 | #[cfg(feature = "alloc")] 301 | impl< 'a, C: Context, K: Readable< 'a, C >, V: Readable< 'a, C > > Readable< 'a, C > for Cow< 'a, BTreeMap< K, V > > where K: Readable< 'a, C > + Clone + Ord, V: Readable< 'a, C > + Clone { 302 | #[inline] 303 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 304 | Ok( Cow::Owned( reader.read_value()? ) ) 305 | } 306 | 307 | #[inline] 308 | fn minimum_bytes_needed() -> usize { 309 | <BTreeMap< K, V > as Readable< 'a, C >>::minimum_bytes_needed() 310 | } 311 | } 312 | 313 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Range< T > { 314 | #[inline] 315 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 316 | let start = reader.read_value()?; 317 | let end = reader.read_value()?; 318 | Ok( start..end ) 319 | } 320 | 321 | #[inline] 322 | fn minimum_bytes_needed() -> usize { 323 | <T as Readable< 'a, C >>::minimum_bytes_needed() * 2 324 | } 325 | } 326 | 327 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for RangeInclusive< T > { 328 | #[inline] 329 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 330 | let start = reader.read_value()?; 331 | let end = reader.read_value()?; 332 | Ok( start..=end ) 333 | } 334 | 335 | #[inline] 336 | fn minimum_bytes_needed() -> usize { 337 | <T as Readable< 'a, C >>::minimum_bytes_needed() * 2 338 | } 339 | } 340 | 341 | impl< 'a, C: Context, T: Readable< 'a, C > > Readable< 'a, C > for Option< T > { 342 | #[inline] 343 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 344 | let flag = reader.read_value()?; 345 | let value = if flag { 346 | Some( reader.read_value()? ) 347 | } else { 348 | None 349 | }; 350 | 351 | Ok( value ) 352 | } 353 | 354 | #[inline] 355 | fn minimum_bytes_needed() -> usize { 356 | 1 357 | } 358 | } 359 | 360 | impl< 'a, C: Context, T: Readable< 'a, C >, E: Readable< 'a, C > > Readable< 'a, C > for Result< T, E > { 361 | #[inline] 362 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 363 | let flag = reader.read_value()?; 364 | let value = if flag { 365 | Ok( reader.read_value()? ) 366 | } else { 367 | Err( reader.read_value()? ) 368 | }; 369 | 370 | Ok( value ) 371 | } 372 | 373 | #[inline] 374 | fn minimum_bytes_needed() -> usize { 375 | 1 376 | } 377 | } 378 | 379 | impl< 'a, C: Context > Readable< 'a, C > for () { 380 | #[inline] 381 | fn read_from< R: Reader< 'a, C > >( _: &mut R ) -> Result< Self, C::Error > { 382 | Ok(()) 383 | } 384 | 385 | #[inline] 386 | fn minimum_bytes_needed() -> usize { 387 | 0 388 | } 389 | } 390 | 391 | macro_rules!
impl_for_tuple { 392 | ($($name:ident),+) => { 393 | impl< 'a, C: Context, $($name: Readable< 'a, C >),+ > Readable< 'a, C > for ($($name,)+) { 394 | #[inline] 395 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 396 | $( 397 | #[allow(non_snake_case)] 398 | let $name = reader.read_value()?; 399 | )+ 400 | 401 | Ok( ($($name,)+) ) 402 | } 403 | 404 | #[inline] 405 | fn minimum_bytes_needed() -> usize { 406 | let mut size = 0; 407 | $( 408 | size += <$name as Readable< 'a, C >>::minimum_bytes_needed(); 409 | )+ 410 | size 411 | } 412 | } 413 | } 414 | } 415 | 416 | impl_for_tuple!( A0 ); 417 | impl_for_tuple!( A0, A1 ); 418 | impl_for_tuple!( A0, A1, A2 ); 419 | impl_for_tuple!( A0, A1, A2, A3 ); 420 | impl_for_tuple!( A0, A1, A2, A3, A4 ); 421 | impl_for_tuple!( A0, A1, A2, A3, A4, A5 ); 422 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6 ); 423 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7 ); 424 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8 ); 425 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8, A9 ); 426 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10 ); 427 | 428 | impl< 'a, C: Context > Readable< 'a, C > for Endianness { 429 | #[inline] 430 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 431 | let value = reader.read_u8()?; 432 | match value { 433 | 0 => Ok( Endianness::LittleEndian ), 434 | 1 => Ok( Endianness::BigEndian ), 435 | _ => Err( crate::error::error_invalid_enum_variant() ) 436 | } 437 | } 438 | 439 | #[inline] 440 | fn minimum_bytes_needed() -> usize { 441 | 1 442 | } 443 | } 444 | 445 | macro_rules! impl_for_non_zero { 446 | ($type:ident, $base_type:ty) => { 447 | impl< 'a, C: Context > Readable< 'a, C > for core::num::$type { 448 | #[inline] 449 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 450 | let value: $base_type = reader.read_value()?; 451 | core::num::$type::new( value ).ok_or_else( crate::error::error_zero_non_zero ) 452 | } 453 | 454 | #[inline] 455 | fn minimum_bytes_needed() -> usize { 456 | mem::size_of::< $base_type >() 457 | } 458 | } 459 | } 460 | } 461 | 462 | impl_for_non_zero!( NonZeroU8, u8 ); 463 | impl_for_non_zero!( NonZeroU16, u16 ); 464 | impl_for_non_zero!( NonZeroU32, u32 ); 465 | impl_for_non_zero!( NonZeroU64, u64 ); 466 | impl_for_non_zero!( NonZeroI8, i8 ); 467 | impl_for_non_zero!( NonZeroI16, i16 ); 468 | impl_for_non_zero!( NonZeroI32, i32 ); 469 | impl_for_non_zero!( NonZeroI64, i64 ); 470 | 471 | macro_rules! 
impl_for_atomic { 472 | ($type:ident, $base_type:ty) => { 473 | impl< 'a, C: Context > Readable< 'a, C > for core::sync::atomic::$type { 474 | #[inline(always)] 475 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 476 | let value: $base_type = reader.read_value()?; 477 | Ok( value.into() ) 478 | } 479 | 480 | #[inline] 481 | fn minimum_bytes_needed() -> usize { 482 | mem::size_of::< $base_type >() 483 | } 484 | } 485 | } 486 | } 487 | 488 | #[cfg(target_has_atomic = "8")] 489 | impl_for_atomic!( AtomicI8, i8 ); 490 | 491 | #[cfg(target_has_atomic = "16")] 492 | impl_for_atomic!( AtomicI16, i16 ); 493 | 494 | #[cfg(target_has_atomic = "32")] 495 | impl_for_atomic!( AtomicI32, i32 ); 496 | 497 | #[cfg(target_has_atomic = "64")] 498 | impl_for_atomic!( AtomicI64, i64 ); 499 | 500 | #[cfg(target_has_atomic = "8")] 501 | impl_for_atomic!( AtomicU8, u8 ); 502 | 503 | #[cfg(target_has_atomic = "16")] 504 | impl_for_atomic!( AtomicU16, u16 ); 505 | 506 | #[cfg(target_has_atomic = "32")] 507 | impl_for_atomic!( AtomicU32, u32 ); 508 | 509 | #[cfg(target_has_atomic = "64")] 510 | impl_for_atomic!( AtomicU64, u64 ); 511 | 512 | impl< 'a, C > Readable< 'a, C > for core::net::Ipv4Addr where C: Context { 513 | #[inline] 514 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 515 | let value = reader.read_u32()?; 516 | Ok( value.into() ) 517 | } 518 | 519 | #[inline] 520 | fn minimum_bytes_needed() -> usize { 521 | 4 522 | } 523 | } 524 | 525 | impl< 'a, C > Readable< 'a, C > for core::net::Ipv6Addr where C: Context { 526 | #[inline] 527 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 528 | let mut octets = [0; 16]; 529 | reader.read_bytes( &mut octets )?; 530 | if !reader.endianness().conversion_necessary() { 531 | octets.reverse(); 532 | } 533 | 534 | Ok( octets.into() ) 535 | } 536 | 537 | #[inline] 538 | fn minimum_bytes_needed() -> usize { 539 | 16 540 | } 541 | } 542 | 543 | impl< 'a, C > Readable< 'a, C > for core::net::IpAddr where C: Context { 544 | #[inline] 545 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 546 | let kind = reader.read_u8()?; 547 | match kind { 548 | 0 => Ok( core::net::IpAddr::V4( reader.read_value()? ) ), 549 | 1 => Ok( core::net::IpAddr::V6( reader.read_value()? 
) ), 550 | _ => Err( crate::error::error_invalid_enum_variant() ) 551 | } 552 | } 553 | 554 | #[inline] 555 | fn minimum_bytes_needed() -> usize { 556 | 5 557 | } 558 | } 559 | 560 | impl< 'a, C > Readable< 'a, C > for core::time::Duration where C: Context { 561 | #[inline] 562 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 563 | let secs = reader.read_u64()?; 564 | let nanos = reader.read_u32()?; 565 | Ok( core::time::Duration::new( secs, nanos ) ) 566 | } 567 | 568 | #[inline] 569 | fn minimum_bytes_needed() -> usize { 570 | 12 571 | } 572 | } 573 | 574 | #[cfg(feature = "std")] 575 | impl< 'a, C > Readable< 'a, C > for std::time::SystemTime where C: Context { 576 | #[inline] 577 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error > { 578 | let duration = std::time::Duration::read_from( reader )?; 579 | std::time::SystemTime::UNIX_EPOCH.checked_add( duration ).ok_or_else( crate::error::error_invalid_system_time ) 580 | } 581 | 582 | #[inline] 583 | fn minimum_bytes_needed() -> usize { 584 | 12 585 | } 586 | } 587 | 588 | impl< 'a, C, T, const N: usize > Readable< 'a, C > for [T; N] where C: Context, T: Readable< 'a, C > { 589 | #[inline(always)] 590 | fn read_from< R >( reader: &mut R ) -> Result< Self, C::Error > where R: Reader< 'a, C > { 591 | if T::speedy_is_primitive() { 592 | let mut slice: MaybeUninit< [T; N] > = MaybeUninit::uninit(); 593 | unsafe { 594 | reader.read_bytes_into_ptr( slice.as_mut_ptr().cast::< u8 >(), N * core::mem::size_of::< T >() )?; 595 | T::speedy_convert_slice_endianness( reader.endianness(), slice.assume_init_mut() ); 596 | return Ok( slice.assume_init() ); 597 | } 598 | } 599 | 600 | struct State< 'a, T, const N: usize > { 601 | count: usize, 602 | slice: &'a mut [MaybeUninit< T >; N] 603 | } 604 | 605 | impl< 'a, T, const N: usize > State< 'a, T, N > { 606 | fn new( slice: &'a mut MaybeUninit< [T; N] > ) -> Self { 607 | let slice: *mut [T; N] = slice.as_mut_ptr(); 608 | let slice: *mut [MaybeUninit< T >; N] = slice.cast(); 609 | State { 610 | count: 0, 611 | slice: unsafe { &mut *slice }, 612 | } 613 | } 614 | } 615 | 616 | impl< 'a, T, const N: usize > Drop for State< 'a, T, N > { 617 | fn drop( &mut self ) { 618 | if !core::mem::needs_drop::< T >() { 619 | return; 620 | } 621 | 622 | for item in &mut self.slice[ ..self.count ] { 623 | unsafe { 624 | item.assume_init_drop(); 625 | } 626 | } 627 | } 628 | } 629 | 630 | let mut slice: MaybeUninit< [T; N] > = MaybeUninit::uninit(); 631 | let mut state = State::new( &mut slice ); 632 | 633 | while state.count < N { 634 | state.slice[ state.count ] = MaybeUninit::new( reader.read_value()? ); 635 | state.count += 1; 636 | } 637 | 638 | core::mem::forget( state ); 639 | unsafe { 640 | Ok( slice.assume_init() ) 641 | } 642 | } 643 | 644 | #[inline] 645 | fn minimum_bytes_needed() -> usize { 646 | T::minimum_bytes_needed() * N 647 | } 648 | } 649 | 650 | #[cfg(feature = "alloc")] 651 | impl< 'a, C, T > Readable< 'a, C > for Box< T > 652 | where C: Context, 653 | T: Readable< 'a, C > 654 | { 655 | #[inline] 656 | fn read_from< R >( reader: &mut R ) -> Result< Self, C::Error > where R: Reader< 'a, C > { 657 | Ok( Box::new( T::read_from( reader )?
) ) 658 | } 659 | 660 | #[inline] 661 | fn minimum_bytes_needed() -> usize { 662 | T::minimum_bytes_needed() 663 | } 664 | } 665 | 666 | #[cfg(feature = "alloc")] 667 | impl< 'a, C, T > Readable< 'a, C > for Box< [T] > 668 | where C: Context, 669 | T: Readable< 'a, C > 670 | { 671 | #[inline] 672 | fn read_from< R >( reader: &mut R ) -> Result< Self, C::Error > where R: Reader< 'a, C > { 673 | let data = Vec::< T >::read_from( reader )?; 674 | Ok( data.into() ) 675 | } 676 | 677 | #[inline] 678 | fn minimum_bytes_needed() -> usize { 679 | Vec::< T >::minimum_bytes_needed() 680 | } 681 | } 682 | 683 | #[cfg(feature = "alloc")] 684 | impl< 'a, C > Readable< 'a, C > for Box< str > 685 | where C: Context 686 | { 687 | #[inline] 688 | fn read_from< R >( reader: &mut R ) -> Result< Self, C::Error > where R: Reader< 'a, C > { 689 | let data = String::read_from( reader )?; 690 | Ok( data.into() ) 691 | } 692 | 693 | #[inline] 694 | fn minimum_bytes_needed() -> usize { 695 | <String as Readable< 'a, C >>::minimum_bytes_needed() 696 | } 697 | } 698 | -------------------------------------------------------------------------------- /src/writable_impl.rs: -------------------------------------------------------------------------------- 1 | use core::mem; 2 | use core::ops::{Range, RangeInclusive}; 3 | 4 | use crate::endianness::Endianness; 5 | use crate::writable::Writable; 6 | use crate::writer::Writer; 7 | 8 | use crate::context::Context; 9 | 10 | use crate::private::write_length; 11 | 12 | #[cfg(feature = "std")] 13 | use std::{ 14 | collections::{HashMap, HashSet}, 15 | hash::Hash, 16 | }; 17 | 18 | #[cfg(feature = "alloc")] 19 | use alloc::{ 20 | borrow::{Cow, ToOwned}, 21 | boxed::Box, 22 | string::String, 23 | vec::Vec, 24 | collections::{BTreeMap, BTreeSet} 25 | }; 26 | 27 | #[cfg(feature = "alloc")] 28 | impl< C, K, V > Writable< C > for BTreeMap< K, V > 29 | where C: Context, 30 | K: Writable< C >, 31 | V: Writable< C > 32 | { 33 | #[inline] 34 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 35 | write_length( self.len(), writer )?; 36 | writer.write_collection( self.iter() ) 37 | } 38 | 39 | #[inline] 40 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 41 | unsafe_is_length!( self.len() ); 42 | 43 | let mut count = mem::size_of::< u32 >(); 44 | for (key, value) in self { 45 | count += key.bytes_needed()?
+ value.bytes_needed()?; 46 | } 47 | 48 | Ok( count ) 49 | } 50 | } 51 | 52 | #[cfg(feature = "alloc")] 53 | impl< C, T > Writable< C > for BTreeSet< T > 54 | where C: Context, 55 | T: Writable< C > 56 | { 57 | #[inline] 58 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 59 | write_length( self.len(), writer )?; 60 | writer.write_collection( self.iter() ) 61 | } 62 | 63 | #[inline] 64 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 65 | unsafe_is_length!( self.len() ); 66 | 67 | let mut count = mem::size_of::< u32 >(); 68 | for value in self { 69 | count += value.bytes_needed()?; 70 | } 71 | 72 | Ok( count ) 73 | } 74 | } 75 | 76 | #[cfg(feature = "std")] 77 | impl< C, K, V, S > Writable< C > for HashMap< K, V, S > 78 | where C: Context, 79 | K: Writable< C >, 80 | V: Writable< C > 81 | { 82 | #[inline] 83 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 84 | write_length( self.len(), writer )?; 85 | writer.write_collection( self.iter() ) 86 | } 87 | 88 | #[inline] 89 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 90 | unsafe_is_length!( self.len() ); 91 | 92 | let mut count = mem::size_of::< u32 >(); 93 | for (key, value) in self { 94 | count += key.bytes_needed()? + value.bytes_needed()?; 95 | } 96 | 97 | Ok( count ) 98 | } 99 | } 100 | 101 | #[cfg(feature = "std")] 102 | impl< C, T, S > Writable< C > for HashSet< T, S > 103 | where C: Context, 104 | T: Writable< C > 105 | { 106 | #[inline] 107 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 108 | write_length( self.len(), writer )?; 109 | writer.write_collection( self.iter() ) 110 | } 111 | 112 | #[inline] 113 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 114 | unsafe_is_length!( self.len() ); 115 | 116 | let mut count = mem::size_of::< u32 >(); 117 | for value in self { 118 | count += value.bytes_needed()?; 119 | } 120 | 121 | Ok( count ) 122 | } 123 | } 124 | 125 | macro_rules! 
impl_for_primitive { 126 | ($type:ty, $write_name:ident) => { 127 | impl< C: Context > Writable< C > for $type { 128 | #[inline(always)] 129 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 130 | writer.$write_name( *self ) 131 | } 132 | 133 | #[inline] 134 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 135 | Ok( mem::size_of::< Self >() ) 136 | } 137 | 138 | #[doc(hidden)] 139 | #[inline(always)] 140 | fn speedy_is_primitive() -> bool { 141 | true 142 | } 143 | 144 | #[doc(hidden)] 145 | #[inline(always)] 146 | unsafe fn speedy_slice_as_bytes( slice: &[Self] ) -> &[u8] where Self: Sized { 147 | unsafe { 148 | core::slice::from_raw_parts( slice.as_ptr() as *const u8, slice.len() * mem::size_of::< Self >() ) 149 | } 150 | } 151 | } 152 | } 153 | } 154 | 155 | impl_for_primitive!( i8, write_i8 ); 156 | impl_for_primitive!( i16, write_i16 ); 157 | impl_for_primitive!( i32, write_i32 ); 158 | impl_for_primitive!( i64, write_i64 ); 159 | impl_for_primitive!( i128, write_i128 ); 160 | impl_for_primitive!( u8, write_u8 ); 161 | impl_for_primitive!( u16, write_u16 ); 162 | impl_for_primitive!( u32, write_u32 ); 163 | impl_for_primitive!( u64, write_u64 ); 164 | impl_for_primitive!( u128, write_u128 ); 165 | impl_for_primitive!( f32, write_f32 ); 166 | impl_for_primitive!( f64, write_f64 ); 167 | 168 | impl< C: Context > Writable< C > for usize { 169 | #[inline] 170 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 171 | writer.write_u64( *self as u64 ) 172 | } 173 | 174 | #[inline] 175 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 176 | Ok( mem::size_of::< u64 >() ) 177 | } 178 | } 179 | 180 | impl< C: Context > Writable< C > for bool { 181 | #[inline] 182 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 183 | writer.write_u8( if *self { 1 } else { 0 } ) 184 | } 185 | 186 | #[inline] 187 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 188 | Ok( 1 ) 189 | } 190 | } 191 | 192 | impl< C: Context > Writable< C > for char { 193 | #[inline] 194 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 195 | writer.write_u32( *self as u32 ) 196 | } 197 | 198 | #[inline] 199 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 200 | Ok( mem::size_of::< u32 >() ) 201 | } 202 | } 203 | 204 | #[cfg(feature = "alloc")] 205 | impl< C: Context > Writable< C > for String { 206 | #[inline] 207 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 208 | self.as_bytes().write_to( writer ) 209 | } 210 | 211 | #[inline] 212 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 213 | Writable::< C >::bytes_needed( self.as_bytes() ) 214 | } 215 | } 216 | 217 | impl< C: Context > Writable< C > for str { 218 | #[inline] 219 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 220 | self.as_bytes().write_to( writer ) 221 | } 222 | 223 | #[inline] 224 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 225 | Writable::< C >::bytes_needed( self.as_bytes() ) 226 | } 227 | } 228 | 229 | impl< 'a, C: Context > Writable< C > for &'a str { 230 | #[inline] 231 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 232 | self.as_bytes().write_to( writer ) 233 | } 234 | 235 | #[inline] 236 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 237 | Writable::< C >::bytes_needed( self.as_bytes() ) 
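// (All three string impls above delegate to the `[u8]` slice impl further down, so a string costs
// its u32 length prefix plus its raw UTF-8 bytes; the matching read side in readable_impl.rs
// re-validates those bytes through `vec_to_string` / `cow_bytes_to_cow_str`.)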
238 | } 239 | } 240 | 241 | #[cfg(feature = "alloc")] 242 | impl< 'r, C: Context > Writable< C > for Cow< 'r, str > { 243 | #[inline] 244 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 245 | self.as_bytes().write_to( writer ) 246 | } 247 | 248 | #[inline] 249 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 250 | Writable::< C >::bytes_needed( self.as_bytes() ) 251 | } 252 | } 253 | 254 | impl< C: Context, T: Writable< C > > Writable< C > for [T] { 255 | #[inline] 256 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 257 | write_length( self.len(), writer )?; 258 | writer.write_slice( self ) 259 | } 260 | 261 | #[inline] 262 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 263 | unsafe_is_length!( self.len() ); 264 | 265 | if T::speedy_is_primitive() { 266 | return Ok( 4 + self.len() * mem::size_of::< T >() ); 267 | } 268 | 269 | let mut sum = 4; 270 | for element in self { 271 | sum += element.bytes_needed()?; 272 | } 273 | 274 | Ok( sum ) 275 | } 276 | } 277 | 278 | #[cfg(feature = "alloc")] 279 | impl< 'r, C: Context, T: Writable< C > > Writable< C > for Cow< 'r, [T] > where [T]: ToOwned { 280 | #[inline] 281 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 282 | self.as_ref().write_to( writer ) 283 | } 284 | 285 | #[inline] 286 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 287 | Writable::< C >::bytes_needed( self.as_ref() ) 288 | } 289 | } 290 | 291 | #[cfg(feature = "alloc")] 292 | impl< 'a, C: Context, T: Writable< C > > Writable< C > for &'a [T] where [T]: ToOwned { 293 | #[inline] 294 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 295 | <[T] as Writable< C >>::write_to( self, writer ) 296 | } 297 | 298 | #[inline] 299 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 300 | <[T] as Writable::< C >>::bytes_needed( self.as_ref() ) 301 | } 302 | } 303 | 304 | #[cfg(feature = "std")] 305 | impl< 'r, C, T > Writable< C > for Cow< 'r, HashSet< T > > where C: Context, T: Writable< C > + Clone + Hash + Eq { 306 | #[inline] 307 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 308 | (&**self).write_to( writer ) 309 | } 310 | 311 | #[inline] 312 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 313 | Writable::< C >::bytes_needed( &**self ) 314 | } 315 | } 316 | 317 | #[cfg(feature = "alloc")] 318 | impl< 'r, C, T > Writable< C > for Cow< 'r, BTreeSet< T > > where C: Context, T: Writable< C > + Clone + Ord { 319 | #[inline] 320 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 321 | (&**self).write_to( writer ) 322 | } 323 | 324 | #[inline] 325 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 326 | Writable::< C >::bytes_needed( &**self ) 327 | } 328 | } 329 | 330 | #[cfg(feature = "std")] 331 | impl< 'r, C, K, V > Writable< C > for Cow< 'r, HashMap< K, V > > where C: Context, K: Writable< C > + Clone + Hash + Eq, V: Writable< C > + Clone { 332 | #[inline] 333 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 334 | (&**self).write_to( writer ) 335 | } 336 | 337 | #[inline] 338 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 339 | Writable::< C >::bytes_needed( &**self ) 340 | } 341 | } 342 | 343 | #[cfg(feature = "alloc")] 344 | impl< 'r, C, K, V > Writable< C > for Cow< 'r, BTreeMap< K, V > > where C: Context, K: 
Writable< C > + Clone + Ord, V: Writable< C > + Clone { 345 | #[inline] 346 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 347 | (&**self).write_to( writer ) 348 | } 349 | 350 | #[inline] 351 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 352 | Writable::< C >::bytes_needed( &**self ) 353 | } 354 | } 355 | 356 | #[cfg(feature = "alloc")] 357 | impl< C: Context, T: Writable< C > > Writable< C > for Vec< T > { 358 | #[inline] 359 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 360 | self.as_slice().write_to( writer ) 361 | } 362 | 363 | #[inline] 364 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 365 | Writable::< C >::bytes_needed( self.as_slice() ) 366 | } 367 | } 368 | 369 | impl< C: Context, T: Writable< C > > Writable< C > for Range< T > { 370 | #[inline] 371 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 372 | self.start.write_to( writer )?; 373 | self.end.write_to( writer ) 374 | } 375 | 376 | #[inline] 377 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 378 | Ok( Writable::< C >::bytes_needed( &self.start )? + Writable::< C >::bytes_needed( &self.end )? ) 379 | } 380 | } 381 | 382 | impl< C: Context, T: Writable< C > > Writable< C > for RangeInclusive< T > { 383 | #[inline] 384 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 385 | self.start().write_to( writer )?; 386 | self.end().write_to( writer ) 387 | } 388 | 389 | #[inline] 390 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 391 | Ok( Writable::< C >::bytes_needed( self.start() )? + Writable::< C >::bytes_needed( self.end() )? ) 392 | } 393 | } 394 | 395 | impl< C: Context, T: Writable< C > > Writable< C > for Option< T > { 396 | #[inline] 397 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 398 | if let Some( ref value ) = *self { 399 | writer.write_u8( 1 )?; 400 | value.write_to( writer ) 401 | } else { 402 | writer.write_u8( 0 ) 403 | } 404 | } 405 | 406 | #[inline] 407 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 408 | if let Some( ref value ) = *self { 409 | Ok( 1 + Writable::< C >::bytes_needed( value )? ) 410 | } else { 411 | Ok( 1 ) 412 | } 413 | } 414 | } 415 | 416 | impl< C: Context, T: Writable< C >, E: Writable< C > > Writable< C > for Result< T, E > { 417 | #[inline] 418 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 419 | match *self { 420 | Ok( ref value ) => { 421 | writer.write_u8( 1 )?; 422 | value.write_to( writer ) 423 | }, 424 | Err( ref value ) => { 425 | writer.write_u8( 0 )?; 426 | value.write_to( writer ) 427 | } 428 | } 429 | } 430 | 431 | #[inline] 432 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 433 | match *self { 434 | Ok( ref value ) => { 435 | Ok( 1 + Writable::< C >::bytes_needed( value )? ) 436 | }, 437 | Err( ref value ) => { 438 | Ok( 1 + Writable::< C >::bytes_needed( value )? ) 439 | } 440 | } 441 | } 442 | } 443 | 444 | impl< C: Context > Writable< C > for () { 445 | #[inline] 446 | fn write_to< W: ?Sized + Writer< C > >( &self, _: &mut W ) -> Result< (), C::Error > { 447 | Ok(()) 448 | } 449 | 450 | #[inline] 451 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 452 | Ok( 0 ) 453 | } 454 | } 455 | 456 | macro_rules! 
impl_for_tuple { 457 | ($($name:ident),+) => { 458 | impl< C: Context, $($name: Writable< C >),+ > Writable< C > for ($($name,)+) { 459 | #[inline] 460 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 461 | #[allow(non_snake_case)] 462 | let &($(ref $name,)+) = self; 463 | $( 464 | $name.write_to( writer )?; 465 | )+ 466 | Ok(()) 467 | } 468 | 469 | #[inline] 470 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 471 | #[allow(non_snake_case)] 472 | let &($(ref $name,)+) = self; 473 | let mut size = 0; 474 | $( 475 | size += Writable::< C >::bytes_needed( $name )?; 476 | )+ 477 | Ok( size ) 478 | } 479 | } 480 | } 481 | } 482 | 483 | impl_for_tuple!( A0 ); 484 | impl_for_tuple!( A0, A1 ); 485 | impl_for_tuple!( A0, A1, A2 ); 486 | impl_for_tuple!( A0, A1, A2, A3 ); 487 | impl_for_tuple!( A0, A1, A2, A3, A4 ); 488 | impl_for_tuple!( A0, A1, A2, A3, A4, A5 ); 489 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6 ); 490 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7 ); 491 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8 ); 492 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8, A9 ); 493 | impl_for_tuple!( A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10 ); 494 | 495 | impl< C: Context > Writable< C > for Endianness { 496 | #[inline] 497 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 498 | let value = match *self { 499 | Endianness::LittleEndian => 0, 500 | Endianness::BigEndian => 1 501 | }; 502 | 503 | writer.write_u8( value ) 504 | } 505 | 506 | #[inline] 507 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 508 | Ok( 1 ) 509 | } 510 | } 511 | 512 | impl< 'a, C, T > Writable< C > for &'a T where C: Context, T: Writable< C > { 513 | #[inline(always)] 514 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 515 | (**self).write_to( writer ) 516 | } 517 | 518 | #[inline(always)] 519 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 520 | (**self).bytes_needed() 521 | } 522 | } 523 | 524 | impl< 'a, C, T > Writable< C > for &'a mut T where C: Context, T: Writable< C > { 525 | #[inline(always)] 526 | fn write_to< W: ?Sized + Writer< C > >( &self, writer: &mut W ) -> Result< (), C::Error > { 527 | (**self).write_to( writer ) 528 | } 529 | 530 | #[inline(always)] 531 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 532 | (**self).bytes_needed() 533 | } 534 | } 535 | 536 | macro_rules! impl_for_non_zero { 537 | ($type:ident, $base_type:ty) => { 538 | impl< C > Writable< C > for core::num::$type where C: Context { 539 | #[inline] 540 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 541 | self.get().write_to( writer ) 542 | } 543 | 544 | #[inline] 545 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 546 | Ok( mem::size_of::< $base_type >() ) 547 | } 548 | } 549 | } 550 | } 551 | 552 | impl_for_non_zero!( NonZeroU8, u8 ); 553 | impl_for_non_zero!( NonZeroU16, u16 ); 554 | impl_for_non_zero!( NonZeroU32, u32 ); 555 | impl_for_non_zero!( NonZeroU64, u64 ); 556 | impl_for_non_zero!( NonZeroI8, i8 ); 557 | impl_for_non_zero!( NonZeroI16, i16 ); 558 | impl_for_non_zero!( NonZeroI32, i32 ); 559 | impl_for_non_zero!( NonZeroI64, i64 ); 560 | 561 | macro_rules! 
impl_for_atomic { 562 | ($type:ident, $base_type:ty) => { 563 | impl< C: Context > Writable< C > for core::sync::atomic::$type { 564 | #[inline(always)] 565 | fn write_to< T: ?Sized + Writer< C > >( &self, writer: &mut T ) -> Result< (), C::Error > { 566 | writer.write_value( &self.load( core::sync::atomic::Ordering::SeqCst ) ) 567 | } 568 | 569 | #[inline] 570 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 571 | Ok( mem::size_of::< $base_type >() ) 572 | } 573 | } 574 | } 575 | } 576 | 577 | #[cfg(target_has_atomic = "8")] 578 | impl_for_atomic!( AtomicI8, i8 ); 579 | 580 | #[cfg(target_has_atomic = "16")] 581 | impl_for_atomic!( AtomicI16, i16 ); 582 | 583 | #[cfg(target_has_atomic = "32")] 584 | impl_for_atomic!( AtomicI32, i32 ); 585 | 586 | #[cfg(target_has_atomic = "64")] 587 | impl_for_atomic!( AtomicI64, i64 ); 588 | 589 | #[cfg(target_has_atomic = "8")] 590 | impl_for_atomic!( AtomicU8, u8 ); 591 | 592 | #[cfg(target_has_atomic = "16")] 593 | impl_for_atomic!( AtomicU16, u16 ); 594 | 595 | #[cfg(target_has_atomic = "32")] 596 | impl_for_atomic!( AtomicU32, u32 ); 597 | 598 | #[cfg(target_has_atomic = "64")] 599 | impl_for_atomic!( AtomicU64, u64 ); 600 | 601 | impl< C > Writable< C > for core::net::Ipv4Addr where C: Context { 602 | #[inline] 603 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 604 | let raw: u32 = (*self).into(); 605 | writer.write_u32( raw ) 606 | } 607 | 608 | #[inline] 609 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 610 | Ok( 4 ) 611 | } 612 | } 613 | 614 | impl< C > Writable< C > for core::net::Ipv6Addr where C: Context { 615 | #[inline] 616 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 617 | let mut octets = self.octets(); 618 | if !writer.endianness().conversion_necessary() { 619 | octets.reverse(); 620 | } 621 | 622 | writer.write_bytes( &octets ) 623 | } 624 | 625 | #[inline] 626 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 627 | Ok( 16 ) 628 | } 629 | } 630 | 631 | impl< C > Writable< C > for core::net::IpAddr where C: Context { 632 | #[inline] 633 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 634 | match self { 635 | core::net::IpAddr::V4( address ) => { 636 | writer.write_u8( 0 )?; 637 | address.write_to( writer ) 638 | }, 639 | core::net::IpAddr::V6( address ) => { 640 | writer.write_u8( 1 )?; 641 | address.write_to( writer ) 642 | } 643 | } 644 | } 645 | 646 | #[inline] 647 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 648 | match self { 649 | core::net::IpAddr::V4( address ) => Writable::< C >::bytes_needed( address ).map( |count| count + 1 ), 650 | core::net::IpAddr::V6( address ) => Writable::< C >::bytes_needed( address ).map( |count| count + 1 ) 651 | } 652 | } 653 | } 654 | 655 | impl< C > Writable< C > for core::time::Duration where C: Context { 656 | #[inline] 657 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 658 | writer.write_u64( self.as_secs() )?; 659 | writer.write_u32( self.subsec_nanos() ) 660 | } 661 | 662 | #[inline] 663 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 664 | Ok( 12 ) 665 | } 666 | } 667 | 668 | #[cfg(feature = "std")] 669 | impl< C > Writable< C > for std::time::SystemTime where C: Context { 670 | #[inline] 671 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 672 | let duration = 
self.duration_since( std::time::SystemTime::UNIX_EPOCH ).map_err( |_| crate::error::error_invalid_system_time() )?; 673 | writer.write_value( &duration ) 674 | } 675 | 676 | #[inline] 677 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 678 | Ok( 12 ) 679 | } 680 | } 681 | 682 | impl< C, T, const N: usize > Writable< C > for [T; N] where C: Context, T: Writable< C > { 683 | #[inline] 684 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 685 | for item in self { 686 | item.write_to( writer )?; 687 | } 688 | Ok(()) 689 | } 690 | 691 | #[inline] 692 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 693 | let mut size = 0; 694 | for item in self { 695 | size += Writable::< C >::bytes_needed( item )?; 696 | } 697 | Ok( size ) 698 | } 699 | } 700 | 701 | #[cfg(feature = "alloc")] 702 | impl< C, T > Writable< C > for Box< T > 703 | where C: Context, 704 | T: Writable< C > 705 | { 706 | #[inline] 707 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 708 | (**self).write_to( writer ) 709 | } 710 | 711 | #[inline] 712 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 713 | (**self).bytes_needed() 714 | } 715 | } 716 | 717 | #[cfg(feature = "alloc")] 718 | impl< C, T > Writable< C > for Box< [T] > 719 | where C: Context, 720 | T: Writable< C > 721 | { 722 | #[inline] 723 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 724 | (**self).write_to( writer ) 725 | } 726 | 727 | #[inline] 728 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 729 | (**self).bytes_needed() 730 | } 731 | } 732 | 733 | #[cfg(feature = "alloc")] 734 | impl< C > Writable< C > for Box< str > 735 | where C: Context 736 | { 737 | #[inline] 738 | fn write_to< W >( &self, writer: &mut W ) -> Result< (), C::Error > where W: ?Sized + Writer< C > { 739 | let value: &str = &**self; 740 | value.write_to( writer ) 741 | } 742 | 743 | #[inline] 744 | fn bytes_needed( &self ) -> Result< usize, C::Error > { 745 | let value: &str = &**self; 746 | Writable::< C >::bytes_needed( value ) 747 | } 748 | } 749 | -------------------------------------------------------------------------------- /src/readable.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "std")] 2 | use std::io::{ 3 | Read 4 | }; 5 | 6 | #[cfg(feature = "std")] 7 | use std::fs::File; 8 | 9 | #[cfg(feature = "std")] 10 | use std::path::Path; 11 | use core::marker::PhantomData; 12 | 13 | use crate::reader::Reader; 14 | use crate::context::{Context, DefaultContext}; 15 | use crate::endianness::Endianness; 16 | 17 | #[cfg(feature = "std")] 18 | use crate::Error; 19 | 20 | #[cfg(feature = "std")] 21 | use crate::circular_buffer::CircularBuffer; 22 | 23 | use crate::error::{ 24 | error_end_of_input, 25 | error_input_buffer_is_too_small 26 | }; 27 | 28 | struct BufferReader< 'a, C > where C: Context { 29 | context: C, 30 | ptr: *const u8, 31 | end: *const u8, 32 | phantom: PhantomData< &'a [u8] > 33 | } 34 | 35 | impl< 'a, C > BufferReader< 'a, C > where C: Context { 36 | #[inline] 37 | fn new( context: C, buffer: &'a [u8] ) -> Self { 38 | BufferReader { 39 | context, 40 | ptr: buffer.as_ptr(), 41 | end: unsafe { buffer.as_ptr().add( buffer.len() ) }, 42 | phantom: PhantomData 43 | } 44 | } 45 | } 46 | 47 | impl< 'a, C: Context > Reader< 'a, C > for BufferReader< 'a, C > { 48 | #[inline(always)] 49 | fn read_bytes( &mut self, output: &mut [u8] ) -> 
Result< (), C::Error > { 50 | let length = output.len(); 51 | if self.can_read_at_least( length ) == Some( false ) { 52 | return Err( error_end_of_input() ); 53 | } 54 | 55 | unsafe { 56 | core::ptr::copy_nonoverlapping( self.ptr, output.as_mut_ptr(), length ); 57 | self.ptr = self.ptr.add( length ); 58 | } 59 | 60 | Ok(()) 61 | } 62 | 63 | #[inline(always)] 64 | unsafe fn read_bytes_into_ptr( &mut self, output: *mut u8, length: usize ) -> Result< (), C::Error > { 65 | if self.can_read_at_least( length ) == Some( false ) { 66 | return Err( error_end_of_input() ); 67 | } 68 | 69 | unsafe { 70 | core::ptr::copy_nonoverlapping( self.ptr, output, length ); 71 | self.ptr = self.ptr.add( length ); 72 | } 73 | 74 | Ok(()) 75 | } 76 | 77 | #[inline(always)] 78 | fn peek_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error > { 79 | let length = output.len(); 80 | if self.can_read_at_least( length ) == Some( false ) { 81 | return Err( error_end_of_input() ); 82 | } 83 | 84 | unsafe { 85 | core::ptr::copy_nonoverlapping( self.ptr, output.as_mut_ptr(), length ); 86 | } 87 | 88 | Ok(()) 89 | } 90 | 91 | #[inline(always)] 92 | unsafe fn peek_bytes_into_ptr( &mut self, output: *mut u8, length: usize ) -> Result< (), C::Error > { 93 | if self.can_read_at_least( length ) == Some( false ) { 94 | return Err( error_end_of_input() ); 95 | } 96 | 97 | unsafe { 98 | core::ptr::copy_nonoverlapping( self.ptr, output, length ); 99 | } 100 | Ok(()) 101 | } 102 | 103 | #[inline(always)] 104 | fn skip_bytes( &mut self, length: usize ) -> Result< (), C::Error > { 105 | if self.can_read_at_least( length ) == Some( false ) { 106 | return Err( error_end_of_input() ); 107 | } 108 | 109 | unsafe { 110 | self.ptr = self.ptr.add( length ); 111 | } 112 | Ok(()) 113 | } 114 | 115 | #[inline(always)] 116 | fn read_bytes_borrowed( &mut self, length: usize ) -> Option< Result< &'a [u8], C::Error > > { 117 | if self.can_read_at_least( length ) == Some( false ) { 118 | return Some( Err( error_end_of_input() ) ); 119 | } 120 | 121 | let slice; 122 | unsafe { 123 | slice = core::slice::from_raw_parts( self.ptr, length ); 124 | self.ptr = self.ptr.add( length ); 125 | } 126 | 127 | Some( Ok( slice ) ) 128 | } 129 | 130 | #[inline(always)] 131 | fn read_bytes_borrowed_from_reader< 'r >( &'r mut self, length: usize ) -> Option< Result< &'r [u8], C::Error > > { 132 | if self.can_read_at_least( length ) == Some( false ) { 133 | return Some( Err( error_end_of_input() ) ); 134 | } 135 | 136 | let slice; 137 | unsafe { 138 | slice = core::slice::from_raw_parts( self.ptr, length ); 139 | self.ptr = self.ptr.add( length ); 140 | } 141 | 142 | Some( Ok( slice ) ) 143 | } 144 | 145 | #[inline(always)] 146 | fn read_bytes_borrowed_until_eof( &mut self ) -> Option< &'a [u8] > { 147 | let length = self.end as usize - self.ptr as usize; 148 | let slice; 149 | unsafe { 150 | slice = core::slice::from_raw_parts( self.ptr, length ); 151 | self.ptr = self.ptr.add( length ); 152 | } 153 | 154 | Some( slice ) 155 | } 156 | 157 | #[inline(always)] 158 | fn can_read_at_least( &self, size: usize ) -> Option< bool > { 159 | Some( (self.end as usize - self.ptr as usize) >= size ) 160 | } 161 | 162 | #[inline(always)] 163 | fn context( &self ) -> &C { 164 | &self.context 165 | } 166 | 167 | #[inline(always)] 168 | fn context_mut( &mut self ) -> &mut C { 169 | &mut self.context 170 | } 171 | } 172 | 173 | struct CopyingBufferReader< 'ctx, 'a, C > where C: Context { 174 | context: &'ctx mut C, 175 | ptr: *const u8, 176 | end: *const u8, 177 | phantom: 
PhantomData< &'a [u8] > 178 | } 179 | 180 | impl< 'ctx, 'a, C > CopyingBufferReader< 'ctx, 'a, C > where C: Context { 181 | #[inline] 182 | fn new( context: &'ctx mut C, buffer: &'a [u8] ) -> Self { 183 | CopyingBufferReader { 184 | context, 185 | ptr: buffer.as_ptr(), 186 | end: unsafe { buffer.as_ptr().add( buffer.len() ) }, 187 | phantom: PhantomData 188 | } 189 | } 190 | } 191 | 192 | impl< 'ctx, 'r, 'a, C: Context > Reader< 'r, C > for CopyingBufferReader< 'ctx, 'a, C > { 193 | #[inline(always)] 194 | fn read_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error > { 195 | let length = output.len(); 196 | if self.can_read_at_least( length ) == Some( false ) { 197 | return Err( error_end_of_input() ); 198 | } 199 | 200 | unsafe { 201 | core::ptr::copy_nonoverlapping( self.ptr, output.as_mut_ptr(), length ); 202 | self.ptr = self.ptr.add( length ); 203 | } 204 | 205 | Ok(()) 206 | } 207 | 208 | #[inline(always)] 209 | unsafe fn read_bytes_into_ptr( &mut self, output: *mut u8, length: usize ) -> Result< (), C::Error > { 210 | if self.can_read_at_least( length ) == Some( false ) { 211 | return Err( error_end_of_input() ); 212 | } 213 | 214 | unsafe { 215 | core::ptr::copy_nonoverlapping( self.ptr, output, length ); 216 | self.ptr = self.ptr.add( length ); 217 | } 218 | 219 | Ok(()) 220 | } 221 | 222 | #[inline(always)] 223 | fn peek_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error > { 224 | let length = output.len(); 225 | if self.can_read_at_least( length ) == Some( false ) { 226 | return Err( error_end_of_input() ); 227 | } 228 | 229 | unsafe { 230 | core::ptr::copy_nonoverlapping( self.ptr, output.as_mut_ptr(), length ); 231 | } 232 | 233 | Ok(()) 234 | } 235 | 236 | #[inline(always)] 237 | unsafe fn peek_bytes_into_ptr( &mut self, output: *mut u8, length: usize ) -> Result< (), C::Error > { 238 | if self.can_read_at_least( length ) == Some( false ) { 239 | return Err( error_end_of_input() ); 240 | } 241 | 242 | unsafe { 243 | core::ptr::copy_nonoverlapping( self.ptr, output, length ); 244 | } 245 | Ok(()) 246 | } 247 | 248 | #[inline(always)] 249 | fn skip_bytes( &mut self, length: usize ) -> Result< (), C::Error > { 250 | if self.can_read_at_least( length ) == Some( false ) { 251 | return Err( error_end_of_input() ); 252 | } 253 | 254 | unsafe { 255 | self.ptr = self.ptr.add( length ); 256 | } 257 | Ok(()) 258 | } 259 | 260 | #[inline(always)] 261 | fn read_bytes_borrowed_from_reader< 'reader >( &'reader mut self, length: usize ) -> Option< Result< &'reader [u8], C::Error > > { 262 | if self.can_read_at_least( length ) == Some( false ) { 263 | return Some( Err( error_end_of_input() ) ); 264 | } 265 | 266 | let slice; 267 | unsafe { 268 | slice = core::slice::from_raw_parts( self.ptr, length ); 269 | self.ptr = self.ptr.add( length ); 270 | } 271 | 272 | Some( Ok( slice ) ) 273 | } 274 | 275 | #[inline(always)] 276 | fn can_read_at_least( &self, size: usize ) -> Option< bool > { 277 | Some( (self.end as usize - self.ptr as usize) >= size ) 278 | } 279 | 280 | #[inline(always)] 281 | fn context( &self ) -> &C { 282 | &self.context 283 | } 284 | 285 | #[inline(always)] 286 | fn context_mut( &mut self ) -> &mut C { 287 | &mut self.context 288 | } 289 | } 290 | 291 | #[cfg(feature = "std")] 292 | struct StreamReader< C: Context, S: Read > { 293 | context: C, 294 | reader: S, 295 | buffer: CircularBuffer, 296 | is_buffering: bool 297 | } 298 | 299 | #[cfg(feature = "std")] 300 | impl< 'a, C, S > StreamReader< C, S > where C: Context, S: Read { 301 | #[inline(never)] 302 
| fn read_bytes_slow( &mut self, mut output: &mut [u8] ) -> Result< (), C::Error > { 303 | if self.is_buffering && output.len() < self.buffer.capacity() { 304 | let reader = &mut self.reader; 305 | while self.buffer.len() < self.buffer.capacity() { 306 | let bytes_written = self.buffer.try_append_with( self.buffer.capacity() - self.buffer.len(), |chunk| { 307 | reader.read( chunk ) 308 | }).map_err( |error| { 309 | let error = Error::from_io_error( error ); 310 | <C::Error as From< Error >>::from( error ) 311 | })?; 312 | 313 | if bytes_written == 0 { 314 | if self.buffer.len() < output.len() { 315 | return Err( error_end_of_input() ); 316 | } else { 317 | break; 318 | } 319 | } 320 | 321 | if self.buffer.len() >= output.len() { 322 | break; 323 | } 324 | } 325 | } 326 | 327 | if self.buffer.len() > 0 { 328 | let length = core::cmp::min( self.buffer.len(), output.len() ); 329 | self.buffer.consume_into( &mut output[ ..length ] ); 330 | output = &mut output[ length.. ]; 331 | } 332 | 333 | if output.is_empty() { 334 | return Ok(()); 335 | } 336 | 337 | self.reader.read_exact( output ).map_err( |error| { 338 | let error = Error::from_io_error( error ); 339 | <C::Error as From< Error >>::from( error ) 340 | }) 341 | } 342 | } 343 | 344 | #[cfg(feature = "std")] 345 | impl< 'a, C: Context, S: Read > Reader< 'a, C > for StreamReader< C, S > { 346 | #[inline(always)] 347 | fn read_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error > { 348 | if self.buffer.len() >= output.len() { 349 | self.buffer.consume_into( output ); 350 | return Ok(()); 351 | } 352 | 353 | self.read_bytes_slow( output ) 354 | } 355 | 356 | fn peek_bytes( &mut self, output: &mut [u8] ) -> Result< (), C::Error > { 357 | if output.len() > self.buffer.len() { 358 | let reader = &mut self.reader; 359 | while self.buffer.len() < output.len() { 360 | let mut chunk_size = output.len() - self.buffer.len(); 361 | if self.is_buffering { 362 | chunk_size = core::cmp::max( chunk_size, self.buffer.capacity() - self.buffer.len() ); 363 | } 364 | 365 | let bytes_written = self.buffer.try_append_with( chunk_size, |chunk| { 366 | reader.read( chunk ) 367 | }).map_err( |error| { 368 | let error = Error::from_io_error( error ); 369 | <C::Error as From< Error >>::from( error ) 370 | })?; 371 | 372 | if bytes_written == 0 { 373 | return Err( error_end_of_input() ); 374 | } 375 | } 376 | } 377 | 378 | let (a, b) = self.buffer.as_slices_of_length( output.len() ); 379 | output[ ..a.len() ].copy_from_slice( a ); 380 | 381 | if let Some( b ) = b { 382 | output[ a.len()..
].copy_from_slice( b ); 383 | } 384 | 385 | Ok(()) 386 | } 387 | 388 | #[inline(always)] 389 | fn context( &self ) -> &C { 390 | &self.context 391 | } 392 | 393 | #[inline(always)] 394 | fn context_mut( &mut self ) -> &mut C { 395 | &mut self.context 396 | } 397 | } 398 | 399 | #[cfg(feature = "std")] 400 | impl< C: Context, S: Read > StreamReader< C, S > { 401 | #[inline] 402 | fn deserialize< 'a, T: Readable< 'a, C > >( context: C, reader: S, is_buffering: bool ) -> Result< T, C::Error > { 403 | let capacity = if is_buffering { 404 | 8 * 1024 405 | } else { 406 | 0 407 | }; 408 | 409 | let mut reader = StreamReader { 410 | context, 411 | reader, 412 | buffer: CircularBuffer::with_capacity( capacity ), 413 | is_buffering 414 | }; 415 | 416 | T::read_from( &mut reader ) 417 | } 418 | } 419 | 420 | pub trait Readable< 'a, C: Context >: Sized { 421 | fn read_from< R: Reader< 'a, C > >( reader: &mut R ) -> Result< Self, C::Error >; 422 | 423 | #[inline] 424 | fn minimum_bytes_needed() -> usize { 425 | 0 426 | } 427 | 428 | /// Deserializes from a given buffer. 429 | /// 430 | /// This performs zero-copy deserialization when possible. 431 | #[inline] 432 | fn read_from_buffer( buffer: &'a [u8] ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default { 433 | Self::read_from_buffer_with_ctx( Default::default(), buffer ) 434 | } 435 | 436 | /// Deserializes from a given buffer while also returning the amount of bytes consumed. 437 | /// 438 | /// This performs zero-copy deserialization when possible. 439 | #[inline] 440 | fn read_with_length_from_buffer( buffer: &'a [u8] ) -> (Result< Self, C::Error >, usize) where Self: DefaultContext< Context = C >, C: Default { 441 | Self::read_with_length_from_buffer_with_ctx( Default::default(), buffer ) 442 | } 443 | 444 | /// Deserializes from a given buffer. 445 | /// 446 | /// This never performs zero-copy deserialization. 447 | #[inline] 448 | fn read_from_buffer_copying_data( buffer: &[u8] ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default { 449 | Self::read_from_buffer_copying_data_with_ctx( Default::default(), buffer ) 450 | } 451 | 452 | /// Deserializes from a given buffer while also returning the amount of bytes consumed. 453 | /// 454 | /// This never performs zero-copy deserialization. 455 | #[inline] 456 | fn read_with_length_from_buffer_copying_data( buffer: &[u8] ) -> (Result< Self, C::Error >, usize) where Self: DefaultContext< Context = C >, C: Default { 457 | Self::read_with_length_from_buffer_copying_data_with_ctx( Default::default(), buffer ) 458 | } 459 | 460 | /// Reads from a given stream without any buffering. 461 | /// 462 | /// This will only read what is necessary from the stream to deserialize 463 | /// a given type, but is going to be slow. 464 | /// 465 | /// Use [`read_from_stream_buffered`](Readable::read_from_stream_buffered) if you need 466 | /// to read from a stream and you don't care about not overreading. 467 | #[cfg(feature = "std")] 468 | #[inline] 469 | fn read_from_stream_unbuffered( stream: impl Read ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default { 470 | Self::read_from_stream_unbuffered_with_ctx( Default::default(), stream ) 471 | } 472 | 473 | /// Reads from a given stream with internal buffering. 
474 | /// 475 | /// This will read more data from the stream than is necessary to deserialize 476 | /// a given type, however it should be orders of magnitude faster than unbuffered streaming, 477 | /// especially when reading relatively complex objects. 478 | /// 479 | /// Use the slower [`read_from_stream_unbuffered`](Readable::read_from_stream_unbuffered) if you want 480 | /// to avoid overreading. 481 | #[cfg(feature = "std")] 482 | #[inline] 483 | fn read_from_stream_buffered( stream: impl Read ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default { 484 | Self::read_from_stream_buffered_with_ctx( Default::default(), stream ) 485 | } 486 | 487 | #[cfg(feature = "std")] 488 | #[inline] 489 | fn read_from_file( path: impl AsRef< Path > ) -> Result< Self, C::Error > where Self: DefaultContext< Context = C >, C: Default { 490 | Self::read_from_file_with_ctx( Default::default(), path ) 491 | } 492 | 493 | #[inline] 494 | fn read_from_buffer_with_ctx( context: C, buffer: &'a [u8] ) -> Result< Self, C::Error > { 495 | Self::read_with_length_from_buffer_with_ctx( context, buffer ).0 496 | } 497 | 498 | #[inline] 499 | fn read_with_length_from_buffer_with_ctx( context: C, buffer: &'a [u8] ) -> (Result< Self, C::Error >, usize) { 500 | let bytes_needed = Self::minimum_bytes_needed(); 501 | let buffer_length = buffer.len(); 502 | if buffer_length < bytes_needed { 503 | return (Err( error_input_buffer_is_too_small( buffer_length, bytes_needed ) ), 0); 504 | } 505 | 506 | let mut reader = BufferReader::new( context, buffer ); 507 | let value = Self::read_from( &mut reader ); 508 | let bytes_read = reader.ptr as usize - buffer.as_ptr() as usize; 509 | (value, bytes_read) 510 | } 511 | 512 | #[inline] 513 | fn read_from_buffer_copying_data_with_ctx( context: C, buffer: &[u8] ) -> Result< Self, C::Error > { 514 | Self::read_with_length_from_buffer_copying_data_with_ctx( context, buffer ).0 515 | } 516 | 517 | #[inline] 518 | fn read_with_length_from_buffer_copying_data_with_ctx( mut context: C, buffer: &[u8] ) -> (Result< Self, C::Error >, usize) { 519 | Self::read_with_length_from_buffer_copying_data_with_ctx_mut( &mut context, buffer ) 520 | } 521 | 522 | #[inline] 523 | fn read_with_length_from_buffer_copying_data_with_ctx_mut( context: &mut C, buffer: &[u8] ) -> (Result< Self, C::Error >, usize) { 524 | let bytes_needed = Self::minimum_bytes_needed(); 525 | let buffer_length = buffer.len(); 526 | if buffer_length < bytes_needed { 527 | return (Err( error_input_buffer_is_too_small( buffer_length, bytes_needed ) ), 0); 528 | } 529 | 530 | let mut reader = CopyingBufferReader::new( context, buffer ); 531 | let value = Self::read_from( &mut reader ); 532 | let bytes_read = reader.ptr as usize - buffer.as_ptr() as usize; 533 | (value, bytes_read) 534 | } 535 | 536 | #[cfg(feature = "std")] 537 | #[inline] 538 | fn read_from_stream_unbuffered_with_ctx< S: Read >( context: C, stream: S ) -> Result< Self, C::Error > { 539 | StreamReader::deserialize( context, stream, false ) 540 | } 541 | 542 | #[cfg(feature = "std")] 543 | #[inline] 544 | fn read_from_stream_buffered_with_ctx< S: Read >( context: C, stream: S ) -> Result< Self, C::Error > { 545 | StreamReader::deserialize( context, stream, true ) 546 | } 547 | 548 | #[cfg(feature = "std")] 549 | #[inline] 550 | fn read_from_file_with_ctx( context: C, path: impl AsRef< Path > ) -> Result< Self, C::Error > { 551 | let stream = File::open( path ).map_err( |error| { 552 | let error = Error::from_io_error( error ); 553 | <C::Error as From< Error >>::from(
error ) 554 | })?; 555 | 556 | #[cfg(not(all(target_os = "linux", target_arch = "x86_64")))] 557 | { 558 | Self::read_from_stream_buffered_with_ctx( context, stream ) 559 | } 560 | 561 | #[cfg(all(target_os = "linux", target_arch = "x86_64"))] 562 | { 563 | use std::os::unix::io::AsRawFd; 564 | 565 | // Define our own bindings to avoid extra dependencies. 566 | extern "C" { 567 | fn mmap( 568 | addr: *mut std::ffi::c_void, 569 | len: usize, 570 | prot: i32, 571 | flags: i32, 572 | fd: i32, 573 | offset: i64 574 | ) -> *mut std::ffi::c_void; 575 | 576 | fn madvise( 577 | addr: *mut std::ffi::c_void, 578 | len: usize, 579 | advice: i32 580 | ) -> i32; 581 | 582 | fn munmap( 583 | addr: *mut std::ffi::c_void, 584 | len: usize 585 | ) -> i32; 586 | } 587 | 588 | const MAP_PRIVATE: i32 = 0x0002; 589 | const PROT_READ: i32 = 1; 590 | const MAP_FAILED: *mut std::ffi::c_void = !0 as *mut std::ffi::c_void; 591 | const MADV_SEQUENTIAL: i32 = 2; 592 | const MADV_WILLNEED: i32 = 3; 593 | static EMPTY: &[u8] = &[]; 594 | 595 | struct Mmap( *mut std::ffi::c_void, usize ); 596 | impl Mmap { 597 | fn open( fp: &std::fs::File ) -> Result< Self, Error > { 598 | let size = fp.metadata().map_err( Error::from_io_error )?.len(); 599 | if size > std::usize::MAX as u64 { 600 | return Err( crate::error::error_too_big_usize_for_this_architecture() ); 601 | } 602 | 603 | if size == 0 { 604 | return Ok( Mmap( EMPTY.as_ptr() as _, 0 ) ); 605 | } 606 | 607 | let size = size as usize; 608 | let pointer = unsafe { mmap( std::ptr::null_mut(), size, PROT_READ, MAP_PRIVATE, fp.as_raw_fd(), 0 ) }; 609 | if pointer == MAP_FAILED { 610 | Err( Error::from_io_error( std::io::Error::last_os_error() ) ) 611 | } else { 612 | Ok( Mmap( pointer, size ) ) 613 | } 614 | } 615 | 616 | unsafe fn madvise( &mut self, advice: i32 ) -> Result< (), Error > { 617 | if self.1 == 0 { 618 | return Ok(()); 619 | } 620 | 621 | if unsafe { madvise( self.0, self.1, advice ) } < 0 { 622 | Err( Error::from_io_error( std::io::Error::last_os_error() ) ) 623 | } else { 624 | Ok(()) 625 | } 626 | } 627 | } 628 | 629 | impl std::ops::Deref for Mmap { 630 | type Target = [u8]; 631 | #[inline] 632 | fn deref( &self ) -> &Self::Target { 633 | unsafe { 634 | std::slice::from_raw_parts( self.0.cast::< u8 >(), self.1 ) 635 | } 636 | } 637 | } 638 | 639 | impl Drop for Mmap { 640 | fn drop( &mut self ) { 641 | if self.1 != 0 { 642 | unsafe { 643 | munmap( self.0, self.1 ); 644 | } 645 | } 646 | } 647 | } 648 | 649 | let mut mmap = Mmap::open( &stream )?; 650 | unsafe { 651 | mmap.madvise( MADV_SEQUENTIAL )?; 652 | mmap.madvise( MADV_WILLNEED )?; 653 | } 654 | 655 | Self::read_from_buffer_copying_data_with_ctx( context, &mmap ) 656 | } 657 | } 658 | 659 | // Since specialization is not stable yet we do it this way. 
660 | #[doc(hidden)] 661 | #[inline(always)] 662 | fn speedy_is_primitive() -> bool { 663 | false 664 | } 665 | 666 | #[doc(hidden)] 667 | #[inline] 668 | unsafe fn speedy_slice_from_bytes( _: &[u8] ) -> &[Self] { 669 | panic!(); 670 | } 671 | 672 | #[doc(hidden)] 673 | #[inline] 674 | unsafe fn speedy_flip_endianness( _: *mut Self ) { 675 | panic!(); 676 | } 677 | 678 | #[doc(hidden)] 679 | #[inline] 680 | fn speedy_convert_slice_endianness( _: Endianness, _: &mut [Self] ) { 681 | panic!() 682 | } 683 | } 684 | 685 | #[test] 686 | fn test_peek() { 687 | let value: &[f64] = &[2.0, 123.0]; 688 | let data = unsafe { 689 | std::slice::from_raw_parts( value.as_ptr() as *const u8, 16 ) 690 | }; 691 | 692 | let mut ctx = crate::LittleEndian {}; 693 | 694 | macro_rules! test { 695 | ($peek:ident, $read:ident) => { 696 | let mut reader = CopyingBufferReader::new( &mut ctx, data ); 697 | let value = reader.$peek().unwrap(); 698 | for _ in 0..8 { 699 | assert_eq!( value, reader.$peek().unwrap() ); 700 | } 701 | assert_eq!( value, reader.$read().unwrap() ); 702 | } 703 | } 704 | 705 | test!( peek_f64, read_f64 ); 706 | test!( peek_f32, read_f32 ); 707 | test!( peek_u128, read_u128 ); 708 | test!( peek_u64, read_u64 ); 709 | test!( peek_u32, read_u32 ); 710 | test!( peek_u16, read_u16 ); 711 | test!( peek_u8, read_u8 ); 712 | test!( peek_i128, read_i128 ); 713 | test!( peek_i64, read_i64 ); 714 | test!( peek_i32, read_i32 ); 715 | test!( peek_i16, read_i16 ); 716 | test!( peek_i8, read_i8 ); 717 | test!( peek_u64_varint, read_u64_varint ); 718 | 719 | let mut reader = CopyingBufferReader::new( &mut ctx, data ); 720 | reader.peek_u8().unwrap(); 721 | assert_eq!( reader.read_f64().unwrap(), 2.0 ); 722 | } 723 | --------------------------------------------------------------------------------
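For orientation, here is a minimal usage sketch of the entry points defined above, assuming the #[derive(Readable, Writable)] macros from speedy-derive, the default little-endian context, and the write_to_vec helper from the Writable trait; the Packet type and its field values are purely illustrative and not part of the repository sources.

use speedy::{Readable, Writable};

// Hypothetical example type, not part of the crate's sources.
#[derive(PartialEq, Debug, Readable, Writable)]
struct Packet {
    id: u32,
    payload: Vec<u8>,
}

fn main() -> Result<(), speedy::Error> {
    let packet = Packet { id: 7, payload: vec![1, 2, 3] };

    // Serialize with the default context (little-endian).
    let bytes = packet.write_to_vec()?;

    // Deserialize from a borrowed buffer; zero-copy where the type allows it.
    let decoded = Packet::read_from_buffer(&bytes)?;
    assert_eq!(decoded, packet);

    // The same data read through the buffered stream path; &[u8] implements Read.
    let streamed = Packet::read_from_stream_buffered(&bytes[..])?;
    assert_eq!(streamed, packet);

    Ok(())
}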