├── .gitignore
├── derive
│   ├── .gitignore
│   ├── Cargo.toml
│   └── src
│       ├── lib.rs
│       └── shared.rs
├── benchmarks
│   ├── rust-toolchain.toml
│   ├── .gitignore
│   ├── src
│   │   ├── basic
│   │   │   ├── mod.rs
│   │   │   ├── full.rs
│   │   │   ├── mutate.rs
│   │   │   ├── apply.rs
│   │   │   └── generate.rs
│   │   ├── large
│   │   │   ├── mod.rs
│   │   │   ├── full.rs
│   │   │   ├── mutate.rs
│   │   │   ├── apply.rs
│   │   │   └── generate.rs
│   │   └── lib.rs
│   ├── benches
│   │   ├── basic.rs
│   │   └── large.rs
│   └── Cargo.toml
├── src
│   ├── collections
│   │   ├── mod.rs
│   │   ├── rope
│   │   │   └── mod.rs
│   │   ├── unordered_map_like.rs
│   │   ├── unordered_map_like_recursive.rs
│   │   └── unordered_array_like.rs
│   └── lib.rs
├── justfile
├── LICENSE-MIT
├── Cargo.toml
├── tests
│   ├── derives.rs
│   ├── enums.rs
│   ├── expose.rs
│   ├── types.rs
│   └── integration.rs
├── README.md
├── .github
│   └── workflows
│       └── rust.yml
└── LICENSE-APACHE
/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | /.vscode --------------------------------------------------------------------------------
/derive/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | --------------------------------------------------------------------------------
/benchmarks/rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "nightly" --------------------------------------------------------------------------------
/benchmarks/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | flamegraph.svg 4 | perf.data* --------------------------------------------------------------------------------
/benchmarks/src/basic/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod apply; 2 | pub mod full; 3 | pub mod generate; 4 | pub mod mutate; 5 | --------------------------------------------------------------------------------
/benchmarks/src/large/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod apply; 2 | pub mod full; 3 | pub mod generate; 4 | pub mod mutate; 5 | --------------------------------------------------------------------------------
/src/collections/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod rope; 2 | 3 | pub mod unordered_array_like; 4 | pub mod unordered_map_like; 5 | pub mod unordered_map_like_recursive; 6 | 7 | pub mod ordered_array_like; 8 | --------------------------------------------------------------------------------
/justfile: -------------------------------------------------------------------------------- 1 | fmt: 2 | cargo fmt && cargo fmt --manifest-path ./derive/Cargo.toml 3 | clippy: 4 | cargo clippy --all-features && cargo clippy --all-features --manifest-path ./derive/Cargo.toml 5 | test: 6 | cargo test && cargo test --all-features 7 | --------------------------------------------------------------------------------
/benchmarks/benches/basic.rs: -------------------------------------------------------------------------------- 1 | use criterion::criterion_main; 2 | 3 | extern crate structdiff_benchmarks; 4 | 5 | criterion_main!( 6 | structdiff_benchmarks::basic::apply::benches, 7 | structdiff_benchmarks::basic::generate::benches, 8 | structdiff_benchmarks::basic::mutate::benches, 9 | structdiff_benchmarks::basic::full::benches, 10 | ); 11 | --------------------------------------------------------------------------------
/benchmarks/benches/large.rs: -------------------------------------------------------------------------------- 1 | use criterion::criterion_main; 2 | 3 | extern crate structdiff_benchmarks; 4 | 5 | criterion_main!( 6 | structdiff_benchmarks::large::apply::benches, 7 | structdiff_benchmarks::large::generate::benches, 8 | structdiff_benchmarks::large::mutate::benches, 9 | structdiff_benchmarks::large::full::benches, 10 | ); 11 | -------------------------------------------------------------------------------- /derive/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "structdiff-derive" 3 | version = "0.7.4" 4 | authors = ["Makepad ", "Fedor ", "Kirk proc_macro::TokenStream { 17 | let input = parse::parse_data(input); 18 | 19 | let ts = match &input { 20 | parse::Data::Struct(struct_) if struct_.named => derive_struct_diff_struct(struct_), 21 | parse::Data::Enum(enum_) => derive_struct_diff_enum(enum_), 22 | _ => unimplemented!("Only structs and enums are supported"), 23 | }; 24 | 25 | ts 26 | } 27 | -------------------------------------------------------------------------------- /benchmarks/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "structdiff-benchmarks" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | assert_unordered = "0.3.5" 10 | structdiff = { path = "..", features = ["serde", "debug_diffs"] } 11 | nanorand = { version = "0.7.0" } 12 | diff-struct = { version = "0.5.3", optional = true} 13 | serde = { version = "^1.0.0", features = ["derive"] } 14 | serde-diff = { version = "0.4.1", optional = true} 15 | bincode = { version = "1.3.3" } 16 | criterion = "0.5.1" 17 | 18 | [features] 19 | default = ["compare"] 20 | compare = ["dep:serde-diff", "dep:diff-struct"] 21 | 22 | [profile.release] 23 | lto = "fat" 24 | opt-level = 3 25 | debug = true 26 | 27 | [profile.bench] 28 | lto = "fat" 29 | opt-level = 3 30 | debug = true 31 | 32 | [[bench]] 33 | name = "basic" 34 | harness = false 35 | 36 | [[bench]] 37 | name = "large" 38 | harness = false 39 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 
24 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "structdiff" 3 | version = "0.7.4" 4 | edition = "2021" 5 | license = "Apache-2.0 OR MIT" 6 | repository = "https://github.com/knickish/structdiff" 7 | description = """zero-dependency crate for generating and applying partial diffs between struct instances""" 8 | keywords = ["delta-compression", "difference"] 9 | categories = ["compression"] 10 | rust-version = "1.82.0" 11 | 12 | [dependencies] 13 | nanoserde = { version = "^0.1.37", optional = true } 14 | rustc-hash = { version = "1.1.0", optional = true } 15 | serde = { version = "^1.0.0", optional = true, features = ["derive"] } 16 | structdiff-derive = { path = "derive", version = "=0.7.4" } 17 | 18 | [features] 19 | "default" = [] 20 | "nanoserde" = ["dep:nanoserde", "structdiff-derive/nanoserde"] 21 | "serde" = ["dep:serde", "structdiff-derive/serde"] 22 | "debug_diffs" = ["structdiff-derive/debug_diffs"] 23 | "generated_setters" = ["structdiff-derive/generated_setters"] 24 | "rustc_hash" = ["dep:rustc-hash"] 25 | "debug_asserts" = [] 26 | 27 | [dev-dependencies] 28 | bincode = "1.3.3" 29 | assert_unordered = "0.3.5" 30 | nanorand = "0.7.0" 31 | pretty_assertions = "1.4.1" 32 | 33 | [lints.rust] 34 | unexpected_cfgs = { level = "warn", check-cfg = ['cfg(unused)'] } 35 | -------------------------------------------------------------------------------- /tests/derives.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports, clippy::type_complexity)] 2 | 3 | use std::{ 4 | collections::{BTreeMap, BTreeSet, HashMap, HashSet}, 5 | fmt::Debug, 6 | num::Wrapping, 7 | }; 8 | 9 | use structdiff::{Difference, StructDiff}; 10 | 11 | // Trying to come up with all the edge cases that might be relevant 12 | #[allow(dead_code)] 13 | #[cfg(not(any(feature = "serde", feature = "nanoserde")))] 14 | #[derive(Difference)] 15 | // #[difference(setters)] 16 | pub struct TestDeriveAll< 17 | 'a, 18 | 'b: 'a, 19 | A: PartialEq + 'static, 20 | const C: usize, 21 | B, 22 | D, 23 | LM: Ord = Option, 24 | const N: usize = 4, 25 | > where 26 | A: core::hash::Hash + std::cmp::Eq + Default, 27 | LM: Ord + IntoIterator, 28 | [A; N]: Default, 29 | [B; C]: Default, 30 | [i32; N]: Default, 31 | [B; N]: Default, 32 | dyn Fn(&B): PartialEq + Clone + core::fmt::Debug, 33 | dyn core::fmt::Debug + Send + 'static: Debug, 34 | { 35 | f1: (), 36 | f2: [A; N], 37 | f3: [i32; N], 38 | f4: BTreeMap::Item>>, 39 | f5: Option<(A, Option<&'a ::Item>)>, 40 | f6: HashMap>, 41 | f7: Box<(Vec, HashSet, [i128; u8::MIN as usize])>, 42 | f8: BTreeSet>, 43 | #[difference(skip)] 44 | f9: [B; C], 45 | f10: [B; N], 46 | r#f11: Option<&'b Option>, 47 | #[difference(skip)] 48 | f12: Option>, 49 | #[difference(skip)] 50 | f13: Vec !>, 51 | #[difference(skip)] 52 | f14: Vec Box i32>>>, 53 | #[difference(skip)] 54 | f15: Vec, 55 | } 56 | -------------------------------------------------------------------------------- /tests/enums.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "serde")] 2 | use serde::{Deserialize, Serialize}; 3 | #[allow(unused_imports)] 4 | use std::{ 5 | collections::{BTreeMap, BTreeSet, HashMap}, 6 | fmt::Debug, 7 | num::Wrapping, 8 | }; 9 | use structdiff::Difference; 10 | 11 | #[allow(dead_code)] 12 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 
13 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 14 | #[difference(setters)] 15 | pub struct Test { 16 | pub test1: i32, 17 | pub test2: String, 18 | pub test3: Vec, 19 | pub test4: f32, 20 | pub test5: Option, 21 | } 22 | 23 | #[allow(dead_code)] 24 | #[derive(Debug, PartialEq, Clone, Difference)] 25 | #[difference(setters)] 26 | pub struct TestSkip 27 | where 28 | A: PartialEq, 29 | { 30 | pub test1: A, 31 | pub test2: String, 32 | #[difference(skip)] 33 | pub test3skip: Vec, 34 | pub test4: f32, 35 | } 36 | 37 | #[allow(unused)] 38 | #[cfg(not(any(feature = "serde", feature = "nanoserde")))] 39 | #[derive(PartialEq, Difference, Clone, Debug)] 40 | // #[derive(PartialEq, Clone, Debug)] 41 | pub enum TestDeriveAllEnum< 42 | 'a, 43 | 'b: 'a, 44 | A: PartialEq + 'static, 45 | const C: usize, 46 | B: PartialEq, 47 | D, 48 | LM: Ord = Option, 49 | const N: usize = 4, 50 | > where 51 | A: core::hash::Hash + std::cmp::Eq + Default, 52 | LM: Ord + IntoIterator, 53 | [A; N]: Default, 54 | [B; C]: Default, 55 | [i32; N]: Default, 56 | [B; N]: Default, 57 | dyn Fn(&B): PartialEq + Clone + core::fmt::Debug, 58 | (dyn std::cmp::PartialEq + Send + 'static): Debug + Clone + PartialEq, 59 | { 60 | F1(()), 61 | F2([A; N]), 62 | F3([i32; N]), 63 | F4(BTreeMap::Item>>), 64 | F5(Option<(A, Option<&'a ::Item>)>), 65 | F6(HashMap>), 66 | F8(BTreeSet>, BTreeSet>), 67 | F9 {}, 68 | F10 { subfield1: u64, subfield2: Test }, 69 | r#F11(Option<&'b Option>), 70 | F12(TestSkip, TestSkip), 71 | F13((TestSkip, TestSkip)), 72 | } 73 | -------------------------------------------------------------------------------- /tests/expose.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports)] 2 | 3 | use assert_unordered::{assert_eq_unordered, assert_eq_unordered_sort}; 4 | 5 | use std::f64::consts::PI; 6 | use std::hash::Hash; 7 | use std::{ 8 | collections::{BTreeMap, BTreeSet, HashMap, HashSet, LinkedList}, 9 | fmt::Debug, 10 | num::Wrapping, 11 | }; 12 | use structdiff::{Difference, StructDiff}; 13 | 14 | #[cfg(feature = "serde")] 15 | use serde::{Deserialize, Serialize}; 16 | 17 | #[cfg(feature = "nanoserde")] 18 | use nanoserde::{DeBin, SerBin}; 19 | 20 | #[test] 21 | fn test_expose() { 22 | #[derive(Debug, PartialEq, Clone, Difference)] 23 | #[difference(expose)] 24 | struct Example { 25 | field1: f64, 26 | } 27 | 28 | let first = Example { field1: 0.0 }; 29 | 30 | let second = Example { field1: PI }; 31 | 32 | for diff in first.diff(&second) { 33 | match diff { 34 | ExampleStructDiffEnum::field1(v) => { 35 | dbg!(&v); 36 | } 37 | } 38 | } 39 | 40 | for diff in first.diff_ref(&second) { 41 | match diff { 42 | ExampleStructDiffEnumRef::field1(v) => { 43 | dbg!(&v); 44 | } 45 | } 46 | } 47 | } 48 | 49 | #[test] 50 | fn test_expose_rename() { 51 | #[derive(Debug, PartialEq, Clone, Difference)] 52 | #[difference(expose = "Cheese")] 53 | struct Example { 54 | field1: f64, 55 | } 56 | 57 | let first = Example { field1: 0.0 }; 58 | 59 | let second = Example { field1: PI }; 60 | 61 | for diff in first.diff(&second) { 62 | match diff { 63 | Cheese::field1(_v) => {} 64 | } 65 | } 66 | 67 | for diff in first.diff_ref(&second) { 68 | match diff { 69 | CheeseRef::field1(_v) => {} 70 | } 71 | } 72 | } 73 | 74 | #[test] 75 | fn test_expose_enum() { 76 | #[derive(Debug, Clone, PartialEq, Difference)] 77 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 78 | #[cfg_attr(feature = "nanoserde", derive(SerBin, DeBin))] 79 | #[difference(expose)] 80 | pub enum 
Test { 81 | A, 82 | B(u32), 83 | } 84 | 85 | let first = Test::A; 86 | let second = Test::B(1); 87 | 88 | for diff in first.diff(&second) { 89 | match diff { 90 | TestStructDiffEnum::Replace(_) => {} 91 | } 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /derive/src/shared.rs: -------------------------------------------------------------------------------- 1 | macro_rules! l { 2 | ($target:ident, $line:expr) => { 3 | $target.push_str($line) 4 | }; 5 | 6 | ($target:ident, $line:expr, $($param:expr),*) => { 7 | $target.push_str(&::alloc::format!($line, $($param,)*)) 8 | }; 9 | } 10 | 11 | #[derive(Debug, Default)] 12 | pub enum MapStrategy { 13 | KeyOnly, 14 | #[default] 15 | KeyAndValue, 16 | } 17 | 18 | #[derive(Debug)] 19 | pub enum CollectionStrategy { 20 | OrderedArrayLike, 21 | UnorderedArrayLikeHash, 22 | UnorderedMapLikeHash(MapStrategy), 23 | } 24 | 25 | #[cfg(feature = "generated_setters")] 26 | pub fn attrs_setter(attributes: &[crate::parse::Attribute]) -> (bool, bool, Option<&str>) { 27 | let skip = attributes 28 | .iter() 29 | .any(|attr| attr.tokens.len() == 1 && attr.tokens[0] == "skip_setter"); 30 | let local = attributes 31 | .iter() 32 | .any(|attr| attr.tokens.len() == 1 && attr.tokens[0] == "setter"); 33 | 34 | let Some(name_override) = attributes.iter().find_map(|attr| { 35 | if attr.tokens.len() == 2 && attr.tokens[0] == "setter_name" { 36 | Some(&attr.tokens[1]) 37 | } else { 38 | None 39 | } 40 | }) else { 41 | return (local, skip, None); 42 | }; 43 | 44 | (local, skip, Some(name_override)) 45 | } 46 | 47 | #[cfg(feature = "generated_setters")] 48 | pub fn attrs_all_setters(attributes: &[crate::parse::Attribute]) -> bool { 49 | attributes 50 | .iter() 51 | .any(|attr| attr.tokens.len() == 1 && attr.tokens[0] == "setters") 52 | } 53 | 54 | pub fn attrs_recurse(attributes: &[crate::parse::Attribute]) -> bool { 55 | attributes 56 | .iter() 57 | .any(|attr| attr.tokens.len() == 1 && attr.tokens[0] == "recurse") 58 | } 59 | 60 | pub fn attrs_skip(attributes: &[crate::parse::Attribute]) -> bool { 61 | attributes 62 | .iter() 63 | .any(|attr| attr.tokens.len() == 1 && attr.tokens[0] == "skip") 64 | } 65 | 66 | pub fn attrs_collection_type(attributes: &[crate::parse::Attribute]) -> Option { 67 | attributes.iter().find_map(|attr| { 68 | if attr.tokens.len() == 2 && attr.tokens[0] == "collection_strategy" { 69 | let strategy = match attr.tokens[1].clone().as_str() { 70 | "ordered_array_like" => CollectionStrategy::OrderedArrayLike, 71 | "unordered_array_like" => CollectionStrategy::UnorderedArrayLikeHash, 72 | "unordered_map_like" => { 73 | let map_compare_type = attrs_map_strategy(attributes).unwrap_or_default(); 74 | CollectionStrategy::UnorderedMapLikeHash(map_compare_type) 75 | } 76 | _ => { 77 | return None; 78 | } 79 | }; 80 | Some(strategy) 81 | } else { 82 | None 83 | } 84 | }) 85 | } 86 | 87 | pub fn attrs_map_strategy(attributes: &[crate::parse::Attribute]) -> Option { 88 | attributes.iter().find_map(|attr| { 89 | if attr.tokens.len() == 2 && attr.tokens[0] == "map_equality" { 90 | let strategy = match attr.tokens[1].as_str() { 91 | "key_only" => MapStrategy::KeyOnly, 92 | "key_and_value" => MapStrategy::KeyAndValue, 93 | _ => { 94 | return None; 95 | } 96 | }; 97 | Some(strategy) 98 | } else { 99 | None 100 | } 101 | }) 102 | } 103 | 104 | pub fn attrs_expose(attributes: &[crate::parse::Attribute]) -> Option> { 105 | attributes.iter().find_map(|attr| match attr.tokens.len() { 106 | 1 if 
attr.tokens[0].starts_with("expose") => Some(None), 107 | 2.. if attr.tokens[0] == "expose" => Some(Some(attr.tokens[1].as_str())), 108 | _ => None, 109 | }) 110 | } 111 | -------------------------------------------------------------------------------- /benchmarks/src/basic/full.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | 9 | const SAMPLE_SIZE: usize = 1000; 10 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 11 | const SEED: u64 = 42; 12 | 13 | #[cfg(feature = "compare")] 14 | criterion_group!( 15 | benches, 16 | full, 17 | diff_struct_bench::bench, 18 | serde_diff_bench::bench 19 | ); 20 | #[cfg(not(feature = "compare"))] 21 | criterion_group!(benches, full); 22 | 23 | const GROUP_NAME: &str = "full"; 24 | 25 | fn full(c: &mut Criterion) { 26 | const BENCH_NAME: &str = "full"; 27 | let mut rng = WyRand::new_seed(SEED); 28 | let mut first = black_box(TestBench::generate_random(&mut rng)); 29 | 30 | let second = black_box(TestBench::generate_random(&mut rng)); 31 | let mut diff: Vec<::Diff> = Vec::new(); 32 | let mut group = c.benchmark_group(GROUP_NAME); 33 | group 34 | .sample_size(SAMPLE_SIZE) 35 | .measurement_time(MEASUREMENT_TIME); 36 | group.bench_function(BENCH_NAME, |b| { 37 | b.iter(|| { 38 | diff = black_box(StructDiff::diff(&first, &second)); 39 | black_box(first.apply_mut(diff.clone())); 40 | }) 41 | }); 42 | group.finish(); 43 | first.assert_eq(second, &diff); 44 | } 45 | 46 | #[cfg(feature = "compare")] 47 | mod diff_struct_bench { 48 | use super::{ 49 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 50 | }; 51 | use diff::Diff; 52 | 53 | pub(super) fn bench(c: &mut Criterion) { 54 | const BENCH_NAME: &str = "diff_struct_full"; 55 | 56 | let mut rng = WyRand::new_seed(SEED); 57 | let mut first = black_box(TestBench::generate_random(&mut rng)); 58 | let second = black_box(TestBench::generate_random(&mut rng)); 59 | let mut group = c.benchmark_group(GROUP_NAME); 60 | group 61 | .sample_size(SAMPLE_SIZE) 62 | .measurement_time(MEASUREMENT_TIME); 63 | group.bench_function(BENCH_NAME, |b| { 64 | b.iter(|| { 65 | let diff = black_box(Diff::diff(&first, &second)); 66 | black_box(Diff::apply(&mut first, &diff)) 67 | }) 68 | }); 69 | group.finish(); 70 | assert_eq!(first.b, second.b); 71 | } 72 | } 73 | 74 | #[cfg(feature = "compare")] 75 | mod serde_diff_bench { 76 | use super::{ 77 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 78 | }; 79 | use bincode::Options; 80 | 81 | pub(super) fn bench(c: &mut Criterion) { 82 | const BENCH_NAME: &str = "serde_diff_full"; 83 | 84 | let mut rng = WyRand::new_seed(SEED); 85 | let mut first = black_box(TestBench::generate_random(&mut rng)); 86 | let second = black_box(TestBench::generate_random(&mut rng)); 87 | let options = bincode::DefaultOptions::new() 88 | .with_fixint_encoding() 89 | .allow_trailing_bytes(); 90 | let mut group = c.benchmark_group(GROUP_NAME); 91 | group 92 | .sample_size(SAMPLE_SIZE) 93 | .measurement_time(MEASUREMENT_TIME); 94 | group.bench_function(BENCH_NAME, |b| { 95 | b.iter(|| { 96 | let mut diff = black_box( 97 | options 98 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 99 | .unwrap(), 100 | ); 101 | let mut deserializer = 102 | black_box(bincode::Deserializer::from_slice(&mut 
diff[..], options)); 103 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 104 | }) 105 | }); 106 | group.finish(); 107 | assert_eq!(first.b, second.b); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /benchmarks/src/large/full.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | const SAMPLE_SIZE: usize = 1000; 9 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 10 | const SEED: u64 = 42; 11 | 12 | #[cfg(feature = "compare")] 13 | criterion_group!( 14 | benches, 15 | full, 16 | diff_struct_bench::bench, 17 | serde_diff_bench::bench 18 | ); 19 | #[cfg(not(feature = "compare"))] 20 | criterion_group!(benches, full); 21 | 22 | const GROUP_NAME: &str = "large_full"; 23 | 24 | fn full(c: &mut Criterion) { 25 | const BENCH_NAME: &str = "owned"; 26 | let mut rng = WyRand::new_seed(SEED); 27 | let mut first = black_box(TestBench::generate_random_large(&mut rng)); 28 | 29 | let second = black_box(TestBench::generate_random_large(&mut rng)); 30 | let mut diff: Vec<::Diff> = Vec::new(); 31 | let mut group = c.benchmark_group(GROUP_NAME); 32 | group 33 | .sample_size(SAMPLE_SIZE) 34 | .measurement_time(MEASUREMENT_TIME); 35 | group.bench_function(BENCH_NAME, |b| { 36 | b.iter(|| { 37 | diff = black_box(StructDiff::diff(&first, &second)); 38 | black_box(first.apply_mut(diff.clone())); 39 | }) 40 | }); 41 | group.finish(); 42 | first.assert_eq(second, &diff); 43 | } 44 | 45 | #[cfg(feature = "compare")] 46 | mod diff_struct_bench { 47 | use super::{ 48 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 49 | }; 50 | use diff::Diff; 51 | 52 | pub(super) fn bench(c: &mut Criterion) { 53 | const BENCH_NAME: &str = "diff_struct_full"; 54 | let mut rng = WyRand::new_seed(SEED); 55 | let mut first = black_box(TestBench::generate_random_large(&mut rng)); 56 | let second = black_box(TestBench::generate_random_large(&mut rng)); 57 | let mut group = c.benchmark_group(GROUP_NAME); 58 | group 59 | .sample_size(SAMPLE_SIZE) 60 | .measurement_time(MEASUREMENT_TIME); 61 | group.bench_function(BENCH_NAME, |b| { 62 | b.iter(|| { 63 | let diff = black_box(Diff::diff(&first, &second)); 64 | black_box(Diff::apply(&mut first, &diff)) 65 | }) 66 | }); 67 | group.finish(); 68 | assert_eq!(first.b, second.b); 69 | } 70 | } 71 | 72 | #[cfg(feature = "compare")] 73 | mod serde_diff_bench { 74 | use super::{ 75 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 76 | }; 77 | use bincode::Options; 78 | 79 | pub(super) fn bench(c: &mut Criterion) { 80 | const BENCH_NAME: &str = "serde_diff_full"; 81 | let mut rng = WyRand::new_seed(SEED); 82 | let mut first = black_box(TestBench::generate_random_large(&mut rng)); 83 | let second = black_box(TestBench::generate_random_large(&mut rng)); 84 | let options = bincode::DefaultOptions::new() 85 | .with_fixint_encoding() 86 | .allow_trailing_bytes(); 87 | let mut group = c.benchmark_group(GROUP_NAME); 88 | group 89 | .sample_size(SAMPLE_SIZE) 90 | .measurement_time(MEASUREMENT_TIME); 91 | group.bench_function(BENCH_NAME, |b| { 92 | b.iter(|| { 93 | let mut diff = black_box( 94 | options 95 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 96 | .unwrap(), 97 | ); 98 | let mut deserializer = 99 | 
black_box(bincode::Deserializer::from_slice(&mut diff[..], options)); 100 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 101 | }) 102 | }); 103 | group.finish(); 104 | assert_eq!(first.b, second.b); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # structdiff 2 | 3 | A lightweight, zero-dependency struct diffing library which allows changed fields to be collected and applied. Derive `Difference` on a struct, then use the `StructDiff` trait to make and apply diffs. Supports optional serialization of the generated diff types with `serde` or `nanoserde` for ease of use. 4 | 5 | [![Crates.io][crates_img]][crates_lnk] 6 | 7 | [crates_img]: https://img.shields.io/crates/v/structdiff.svg 8 | [crates_lnk]: https://crates.io/crates/structdiff 9 | 10 | ## Example: 11 | 12 | ```rust 13 | use structdiff::{Difference, StructDiff}; 14 | 15 | #[derive(Debug, PartialEq, Clone, Difference)] 16 | struct Example { 17 | field1: f64, 18 | #[difference(skip)] 19 | field2: Vec, 20 | #[difference(collection_strategy="unordered_array_like")] 21 | field3: BTreeSet, 22 | } 23 | 24 | let first = Example { 25 | field1: 0.0, 26 | field2: vec![], 27 | field3: vec![1, 2, 3].into_iter().collect(), 28 | }; 29 | 30 | let second = Example { 31 | field1: 3.14, 32 | field2: vec![1], 33 | field3: vec![2, 3, 4].into_iter().collect(), 34 | }; 35 | 36 | let diffs = first.diff(&second); 37 | // diffs is now a Vec of differences between the two instances, 38 | // with length equal to number of changed/unskipped fields 39 | assert_eq!(diffs.len(), 2); 40 | 41 | let diffed = first.apply(diffs); 42 | // diffed is now equal to second, except for skipped field 43 | assert_eq!(diffed.field1, second.field1); 44 | assert_eq!(&diffed.field3, &second.field3); 45 | assert_ne!(diffed, second); 46 | ``` 47 | 48 | For more examples take a look at [integration tests](/tests) 49 | 50 | ## Derive macro attributes 51 | - Field level 52 | - `#[difference(skip)]` - Do not consider this field when creating a diff 53 | - `#[difference(recurse)]` - Generate a StructDiff for this field when creating a diff 54 | - `#[difference(collection_strategy = {})]` 55 | - `"ordered_array_like"` - Generates a minimal changeset for ordered, array-like collections of items which implement `PartialEq`. (uses levenshtein difference) 56 | - `"unordered_array_like"` - Generates a minimal changeset for unordered, array-like collections of items which implement `Hash + Eq`. 57 | - `"unordered_map_like"` - Generates a minimal changeset for unordered, map-like collections for which the key implements `Hash + Eq`. 58 | - `#[difference(map_equality = {})]` - Used with `unordered_map_like` 59 | - `"key_only"` - only replace a key-value pair for which the key has changed 60 | - `"key_and_value"` - replace a key-value pair if either the key or value has changed 61 | - `#[difference(setter)]` - Generate setters for this struct field 62 | - `#[difference(setter_name = {})]` - Use this name instead of the default value when generating a setter for this field (used on field) 63 | - Struct Level 64 | - `#[difference(setters)]` - Generate setters for all fields in the struct 65 | - Example: for the `field1` of the `Example` struct used above, a function with the signature `set_field1_with_diff(&mut self, value: Option) -> Option<::Diff>` will be generated. 
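As a rough illustration only (a sketch, not code taken from this repository), the snippet below assumes the `generated_setters` feature is enabled and that the generated method mutates the field and returns the single-field diff when the value actually changes; the exact generated signature may differ from what is shown here.

```rust
use structdiff::{Difference, StructDiff};

#[derive(Debug, PartialEq, Clone, Difference)]
#[difference(setters)]
struct Example {
    field1: f64,
    field2: String,
}

fn main() {
    let mut local = Example { field1: 0.0, field2: "unchanged".into() };
    let remote = Example { field1: 0.0, field2: "unchanged".into() };

    // Assumed behavior: the generated setter writes the new value and returns
    // Some(diff) only if `field1` actually changed (the real signature may
    // instead take an `Option<f64>` or differ in other details).
    if let Some(diff) = local.set_field1_with_diff(3.14) {
        // Only the one changed field travels in the diff; the other fields
        // are never compared or serialized.
        let remote = remote.apply(vec![diff]);
        assert_eq!(remote, local);
    }
}
```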
Useful when a single field will be changed in a struct with many fields, as it saves the comparison of all other fields. 66 | - `#[difference(expose)]`/`#[difference(expose = "MyDiffTypeName")]` - expose the generated difference type (optionally, with the specified name) 67 | 68 | ## Optional features 69 | - [`nanoserde`, `serde`] - Serialization of `Difference` derived associated types. Allows diffs to easily be sent over network. 70 | - `debug_diffs` - Derive `Debug` on the generated diff type 71 | - `generated_setters` - Enable generation of setters for struct fields. These setters automatically return a diff if a field's value is changed by the assignment. 72 | - `rustc_hash` - Use the (non-cryptographic) hash implementation from the `rustc-hash` crate instead of the default hasher. Much faster diff generation for collections at the cost of a dependency. 73 | 74 | ### Development status 75 | This is being used actively for my own projects, although it's mostly working now. PRs will be accepted for either more tests or functionality. -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: CI 4 | 5 | jobs: 6 | rustfmt: 7 | name: Rustfmt 8 | runs-on: ${{ matrix.config.os }} 9 | strategy: 10 | fail-fast: true 11 | matrix: 12 | config: 13 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: actions-rs/toolchain@v1 17 | with: 18 | toolchain: stable 19 | target: ${{ matrix.config.target }} 20 | override: true 21 | - uses: actions-rs/cargo@v1 22 | with: 23 | command: fmt 24 | args: --check 25 | 26 | clippy: 27 | name: Clippy 28 | runs-on: ${{ matrix.config.os }} 29 | strategy: 30 | fail-fast: false 31 | matrix: 32 | config: 33 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 34 | steps: 35 | - uses: actions/checkout@v2 36 | - uses: actions-rs/toolchain@v1 37 | with: 38 | toolchain: stable 39 | target: ${{ matrix.config.target }} 40 | override: true 41 | - uses: actions-rs/cargo@v1 42 | with: 43 | command: clippy 44 | 45 | test_all: 46 | name: TestAll 47 | runs-on: ${{ matrix.config.os }} 48 | strategy: 49 | fail-fast: false 50 | matrix: 51 | config: 52 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 53 | - { os: macos-latest, target: 'x86_64-apple-darwin' } 54 | 55 | steps: 56 | - uses: actions/checkout@v2 57 | - uses: actions-rs/toolchain@v1 58 | with: 59 | toolchain: stable 60 | target: ${{ matrix.config.target }} 61 | override: true 62 | - uses: actions-rs/cargo@v1 63 | with: 64 | command: test 65 | args: --all-features 66 | 67 | test_serde: 68 | name: TestSerde 69 | runs-on: ${{ matrix.config.os }} 70 | strategy: 71 | fail-fast: false 72 | matrix: 73 | config: 74 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 75 | - { os: macos-latest, target: 'x86_64-apple-darwin' } 76 | 77 | steps: 78 | - uses: actions/checkout@v2 79 | - uses: actions-rs/toolchain@v1 80 | with: 81 | toolchain: stable 82 | target: ${{ matrix.config.target }} 83 | override: true 84 | - uses: actions-rs/cargo@v1 85 | with: 86 | command: test 87 | args: --features serde 88 | 89 | test_setters: 90 | name: TestSetters 91 | runs-on: ${{ matrix.config.os }} 92 | strategy: 93 | fail-fast: false 94 | matrix: 95 | config: 96 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 97 | - { os: macos-latest, target: 'x86_64-apple-darwin' } 98 | 
99 | steps: 100 | - uses: actions/checkout@v2 101 | - uses: actions-rs/toolchain@v1 102 | with: 103 | toolchain: stable 104 | target: ${{ matrix.config.target }} 105 | override: true 106 | - uses: actions-rs/cargo@v1 107 | with: 108 | command: test 109 | args: --features generated_setters 110 | 111 | test_min: 112 | name: TestMin 113 | runs-on: ${{ matrix.config.os }} 114 | strategy: 115 | fail-fast: false 116 | matrix: 117 | config: 118 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 119 | - { os: macos-latest, target: 'x86_64-apple-darwin' } 120 | 121 | steps: 122 | - uses: actions/checkout@v2 123 | - uses: actions-rs/toolchain@v1 124 | with: 125 | toolchain: stable 126 | target: ${{ matrix.config.target }} 127 | override: true 128 | - uses: actions-rs/cargo@v1 129 | with: 130 | command: test 131 | 132 | build_release: 133 | name: BuildRelease 134 | runs-on: ${{ matrix.config.os }} 135 | strategy: 136 | fail-fast: false 137 | matrix: 138 | config: 139 | - { os: ubuntu-latest, target: 'x86_64-unknown-linux-gnu' } 140 | - { os: macos-latest, target: 'x86_64-apple-darwin' } 141 | 142 | steps: 143 | - uses: actions/checkout@v2 144 | - uses: actions-rs/toolchain@v1 145 | with: 146 | toolchain: stable 147 | target: ${{ matrix.config.target }} 148 | override: true 149 | - uses: actions-rs/cargo@v1 150 | with: 151 | command: build 152 | args: --release --all-features 153 | -------------------------------------------------------------------------------- /benchmarks/src/basic/mutate.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | 9 | const SAMPLE_SIZE: usize = 1000; 10 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 11 | const SEED: u64 = 42; 12 | 13 | #[cfg(feature = "compare")] 14 | criterion_group!( 15 | benches, 16 | mutate, 17 | diff_struct_bench::mutate, 18 | serde_diff_bench::mutate 19 | ); 20 | #[cfg(not(feature = "compare"))] 21 | criterion_group!(benches, mutate); 22 | 23 | const GROUP_NAME: &str = "mutate"; 24 | 25 | fn mutate(c: &mut Criterion) { 26 | const BENCH_NAME: &str = "mutate"; 27 | let mut group = c.benchmark_group(GROUP_NAME); 28 | group 29 | .sample_size(SAMPLE_SIZE) 30 | .measurement_time(MEASUREMENT_TIME); 31 | group.bench_function(BENCH_NAME, |b| { 32 | b.iter_batched( 33 | || { 34 | let mut rng = WyRand::new_seed(SEED); 35 | let first = TestBench::generate_random(&mut rng); 36 | let second = first.clone().random_mutate(&mut rng); 37 | (first, second) 38 | }, 39 | |(first, second)| { 40 | let diff = black_box(StructDiff::diff(&first, &second)); 41 | black_box(StructDiff::apply(first, diff)); 42 | }, 43 | BatchSize::LargeInput, 44 | ) 45 | }); 46 | group.finish(); 47 | } 48 | 49 | #[cfg(feature = "compare")] 50 | mod diff_struct_bench { 51 | use super::{ 52 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 53 | }; 54 | use criterion::BatchSize; 55 | use diff::Diff; 56 | 57 | pub(super) fn mutate(c: &mut Criterion) { 58 | const BENCH_NAME: &str = "diff_struct_mutate"; 59 | 60 | let mut group = c.benchmark_group(GROUP_NAME); 61 | group 62 | .sample_size(SAMPLE_SIZE) 63 | .measurement_time(MEASUREMENT_TIME); 64 | group.bench_function(BENCH_NAME, |b| { 65 | b.iter_batched( 66 | || { 67 | let mut rng = WyRand::new_seed(SEED); 68 | let first = 
black_box(TestBench::generate_random(&mut rng)); 69 | let second = black_box(first.clone().random_mutate(&mut rng)); 70 | (first, second) 71 | }, 72 | |(mut first, second)| { 73 | let diff = black_box(Diff::diff(&first, &second)); 74 | black_box(Diff::apply(&mut first, &diff)) 75 | }, 76 | BatchSize::LargeInput, 77 | ) 78 | }); 79 | group.finish(); 80 | } 81 | } 82 | 83 | #[cfg(feature = "compare")] 84 | mod serde_diff_bench { 85 | use super::{ 86 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 87 | }; 88 | use bincode::Options; 89 | use criterion::BatchSize; 90 | 91 | pub(super) fn mutate(c: &mut Criterion) { 92 | const BENCH_NAME: &str = "serde_diff_mutate"; 93 | let mut group = c.benchmark_group(GROUP_NAME); 94 | group 95 | .sample_size(SAMPLE_SIZE) 96 | .measurement_time(MEASUREMENT_TIME); 97 | group.bench_function(BENCH_NAME, |b| { 98 | b.iter_batched( 99 | || { 100 | let mut rng = WyRand::new_seed(SEED); 101 | let first = black_box(TestBench::generate_random(&mut rng)); 102 | let second = black_box(first.clone().random_mutate(&mut rng)); 103 | let options = bincode::DefaultOptions::new() 104 | .with_fixint_encoding() 105 | .allow_trailing_bytes(); 106 | (first, second, options) 107 | }, 108 | |(mut first, second, options)| { 109 | let mut diff = black_box( 110 | options 111 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 112 | .unwrap(), 113 | ); 114 | let mut deserializer = 115 | black_box(bincode::Deserializer::from_slice(&mut diff[..], options)); 116 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 117 | }, 118 | BatchSize::LargeInput, 119 | ) 120 | }); 121 | group.finish(); 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /benchmarks/src/large/mutate.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | const SAMPLE_SIZE: usize = 1000; 9 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 10 | const SEED: u64 = 42; 11 | 12 | #[cfg(feature = "compare")] 13 | criterion_group!( 14 | benches, 15 | mutate, 16 | diff_struct_bench::mutate, 17 | serde_diff_bench::mutate 18 | ); 19 | #[cfg(not(feature = "compare"))] 20 | criterion_group!(benches, mutate); 21 | 22 | const GROUP_NAME: &str = "large_mutate"; 23 | 24 | fn mutate(c: &mut Criterion) { 25 | const BENCH_NAME: &str = "mutate"; 26 | let mut group = c.benchmark_group(GROUP_NAME); 27 | group 28 | .sample_size(SAMPLE_SIZE) 29 | .measurement_time(MEASUREMENT_TIME); 30 | group.bench_function(BENCH_NAME, |b| { 31 | b.iter_batched( 32 | || { 33 | let mut rng = WyRand::new_seed(SEED); 34 | let first = TestBench::generate_random_large(&mut rng); 35 | let second = first.clone().random_mutate_large(&mut rng); 36 | (first, second) 37 | }, 38 | |(first, second)| { 39 | let diff = black_box(StructDiff::diff(&first, &second)); 40 | black_box(StructDiff::apply(first, diff)); 41 | }, 42 | BatchSize::LargeInput, 43 | ) 44 | }); 45 | group.finish(); 46 | } 47 | 48 | #[cfg(feature = "compare")] 49 | mod diff_struct_bench { 50 | use super::{ 51 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 52 | }; 53 | use criterion::BatchSize; 54 | use diff::Diff; 55 | 56 | pub(super) fn mutate(c: &mut Criterion) { 57 | const BENCH_NAME: &str = 
"diff_struct_mutate"; 58 | 59 | let mut group = c.benchmark_group(GROUP_NAME); 60 | group 61 | .sample_size(SAMPLE_SIZE) 62 | .measurement_time(MEASUREMENT_TIME); 63 | group.bench_function(BENCH_NAME, |b| { 64 | b.iter_batched( 65 | || { 66 | let mut rng = WyRand::new_seed(SEED); 67 | let first = black_box(TestBench::generate_random_large(&mut rng)); 68 | let second = black_box(first.clone().random_mutate_large(&mut rng)); 69 | (first, second) 70 | }, 71 | |(mut first, second)| { 72 | let diff = black_box(Diff::diff(&first, &second)); 73 | black_box(Diff::apply(&mut first, &diff)) 74 | }, 75 | BatchSize::LargeInput, 76 | ) 77 | }); 78 | group.finish(); 79 | } 80 | } 81 | 82 | #[cfg(feature = "compare")] 83 | mod serde_diff_bench { 84 | use super::{ 85 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 86 | }; 87 | use bincode::Options; 88 | use criterion::BatchSize; 89 | 90 | pub(super) fn mutate(c: &mut Criterion) { 91 | const BENCH_NAME: &str = "serde_diff_mutate"; 92 | let mut group = c.benchmark_group(GROUP_NAME); 93 | group 94 | .sample_size(SAMPLE_SIZE) 95 | .measurement_time(MEASUREMENT_TIME); 96 | group.bench_function(BENCH_NAME, |b| { 97 | b.iter_batched( 98 | || { 99 | let mut rng = WyRand::new_seed(SEED); 100 | let first = black_box(TestBench::generate_random_large(&mut rng)); 101 | let second = black_box(first.clone().random_mutate_large(&mut rng)); 102 | let options = bincode::DefaultOptions::new() 103 | .with_fixint_encoding() 104 | .allow_trailing_bytes(); 105 | (first, second, options) 106 | }, 107 | |(mut first, second, options)| { 108 | let mut diff = black_box( 109 | options 110 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 111 | .unwrap(), 112 | ); 113 | let mut deserializer = 114 | black_box(bincode::Deserializer::from_slice(&mut diff[..], options)); 115 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 116 | }, 117 | BatchSize::LargeInput, 118 | ) 119 | }); 120 | group.finish(); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /benchmarks/src/basic/apply.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | 9 | const SAMPLE_SIZE: usize = 1000; 10 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 11 | const SEED: u64 = 42; 12 | 13 | #[cfg(feature = "compare")] 14 | criterion_group!( 15 | benches, 16 | mutate_application, 17 | diff_struct_bench::mutate, 18 | serde_diff_bench::mutate 19 | ); 20 | #[cfg(not(feature = "compare"))] 21 | criterion_group!(benches, mutate_application); 22 | 23 | const GROUP_NAME: &str = "application"; 24 | 25 | fn mutate_application(c: &mut Criterion) { 26 | const BENCH_NAME: &str = "mutate_application"; 27 | 28 | let mut group = c.benchmark_group(GROUP_NAME); 29 | group 30 | .sample_size(SAMPLE_SIZE) 31 | .measurement_time(MEASUREMENT_TIME); 32 | group.bench_function(BENCH_NAME, |b| { 33 | b.iter_batched( 34 | || { 35 | let mut rng = WyRand::new_seed(SEED); 36 | let first = TestBench::generate_random(&mut rng); 37 | let second = first.clone().random_mutate(&mut rng); 38 | let diff = StructDiff::diff(&first, &second); 39 | (first, diff) 40 | }, 41 | |(first, diff)| { 42 | black_box(StructDiff::apply(first, diff)); 43 | }, 44 | BatchSize::LargeInput, 45 | ) 46 | }); 47 | 
group.finish(); 48 | } 49 | 50 | #[cfg(feature = "compare")] 51 | mod diff_struct_bench { 52 | use super::{ 53 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 54 | }; 55 | use criterion::BatchSize; 56 | use diff::Diff; 57 | 58 | pub(super) fn mutate(c: &mut Criterion) { 59 | const BENCH_NAME: &str = "diff_struct_mutate_application"; 60 | let mut group = c.benchmark_group(GROUP_NAME); 61 | group 62 | .sample_size(SAMPLE_SIZE) 63 | .measurement_time(MEASUREMENT_TIME); 64 | group.bench_function(BENCH_NAME, |b| { 65 | b.iter_batched( 66 | || { 67 | let mut rng = WyRand::new_seed(SEED); 68 | let first = black_box(TestBench::generate_random(&mut rng)); 69 | let second = black_box(first.clone().random_mutate(&mut rng)); 70 | let diff = Diff::diff(&first, &second); 71 | (first, diff) 72 | }, 73 | |(mut first, diff)| { 74 | black_box(Diff::apply(&mut first, &diff)); 75 | }, 76 | BatchSize::LargeInput, 77 | ) 78 | }); 79 | group.finish(); 80 | } 81 | } 82 | 83 | #[cfg(feature = "compare")] 84 | mod serde_diff_bench { 85 | use super::{ 86 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 87 | }; 88 | use bincode::Options; 89 | use criterion::BatchSize; 90 | 91 | pub(super) fn mutate(c: &mut Criterion) { 92 | const BENCH_NAME: &str = "serde_diff_mutate_application"; 93 | 94 | let mut group = c.benchmark_group(GROUP_NAME); 95 | group 96 | .sample_size(SAMPLE_SIZE) 97 | .measurement_time(MEASUREMENT_TIME); 98 | group.bench_function(BENCH_NAME, |b| { 99 | b.iter_batched( 100 | || { 101 | let mut rng = WyRand::new_seed(SEED); 102 | let first = black_box(TestBench::generate_random(&mut rng)); 103 | let second = black_box(first.clone().random_mutate(&mut rng)); 104 | let options = bincode::DefaultOptions::new() 105 | .with_fixint_encoding() 106 | .allow_trailing_bytes(); 107 | let diff = black_box( 108 | options 109 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 110 | .unwrap(), 111 | ); 112 | (first, diff, options) 113 | }, 114 | |(mut first, mut diff, options)| { 115 | let mut deserializer = 116 | black_box(bincode::Deserializer::from_slice(&mut diff[..], options)); 117 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 118 | }, 119 | BatchSize::LargeInput, 120 | ) 121 | }); 122 | group.finish(); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /benchmarks/src/large/apply.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | const SAMPLE_SIZE: usize = 1000; 9 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 10 | const SEED: u64 = 42; 11 | 12 | #[cfg(feature = "compare")] 13 | criterion_group!( 14 | benches, 15 | mutate_application, 16 | diff_struct_bench::mutate, 17 | serde_diff_bench::mutate 18 | ); 19 | #[cfg(not(feature = "compare"))] 20 | criterion_group!(benches, mutate_application); 21 | 22 | const GROUP_NAME: &str = "large_application"; 23 | 24 | fn mutate_application(c: &mut Criterion) { 25 | const BENCH_NAME: &str = "mutate_application"; 26 | 27 | let mut group = c.benchmark_group(GROUP_NAME); 28 | group 29 | .sample_size(SAMPLE_SIZE) 30 | .measurement_time(MEASUREMENT_TIME); 31 | group.bench_function(BENCH_NAME, |b| { 32 | b.iter_batched( 33 | || { 34 | let mut rng = WyRand::new_seed(SEED); 
35 | let first = TestBench::generate_random_large(&mut rng); 36 | let second = first.clone().random_mutate_large(&mut rng); 37 | let diff = StructDiff::diff(&first, &second); 38 | (first, diff) 39 | }, 40 | |(first, diff)| { 41 | black_box(StructDiff::apply(first, diff)); 42 | }, 43 | BatchSize::LargeInput, 44 | ) 45 | }); 46 | group.finish(); 47 | } 48 | 49 | #[cfg(feature = "compare")] 50 | mod diff_struct_bench { 51 | use super::{ 52 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 53 | }; 54 | use criterion::BatchSize; 55 | use diff::Diff; 56 | 57 | pub(super) fn mutate(c: &mut Criterion) { 58 | const BENCH_NAME: &str = "diff_struct_mutate"; 59 | let mut group = c.benchmark_group(GROUP_NAME); 60 | group 61 | .sample_size(SAMPLE_SIZE) 62 | .measurement_time(MEASUREMENT_TIME); 63 | group.bench_function(BENCH_NAME, |b| { 64 | b.iter_batched( 65 | || { 66 | let mut rng = WyRand::new_seed(SEED); 67 | let first = black_box(TestBench::generate_random_large(&mut rng)); 68 | let second = black_box(first.clone().random_mutate_large(&mut rng)); 69 | let diff = Diff::diff(&first, &second); 70 | (first, diff) 71 | }, 72 | |(mut first, diff)| { 73 | black_box(Diff::apply(&mut first, &diff)); 74 | }, 75 | BatchSize::LargeInput, 76 | ) 77 | }); 78 | group.finish(); 79 | } 80 | } 81 | 82 | #[cfg(feature = "compare")] 83 | mod serde_diff_bench { 84 | use super::{ 85 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 86 | }; 87 | use bincode::Options; 88 | use criterion::BatchSize; 89 | 90 | pub(super) fn mutate(c: &mut Criterion) { 91 | const BENCH_NAME: &str = "serde_diff_mutate"; 92 | 93 | let mut group = c.benchmark_group(GROUP_NAME); 94 | group 95 | .sample_size(SAMPLE_SIZE) 96 | .measurement_time(MEASUREMENT_TIME); 97 | group.bench_function(BENCH_NAME, |b| { 98 | b.iter_batched( 99 | || { 100 | let mut rng = WyRand::new_seed(SEED); 101 | let first = black_box(TestBench::generate_random_large(&mut rng)); 102 | let second = black_box(first.clone().random_mutate_large(&mut rng)); 103 | let options = bincode::DefaultOptions::new() 104 | .with_fixint_encoding() 105 | .allow_trailing_bytes(); 106 | let diff = black_box( 107 | options 108 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 109 | .unwrap(), 110 | ); 111 | (first, diff, options) 112 | }, 113 | |(mut first, mut diff, options)| { 114 | let mut deserializer = 115 | black_box(bincode::Deserializer::from_slice(&mut diff[..], options)); 116 | serde_diff::Apply::apply(&mut deserializer, &mut first).unwrap(); 117 | }, 118 | BatchSize::LargeInput, 119 | ) 120 | }); 121 | group.finish(); 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /benchmarks/src/basic/generate.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | 9 | const SAMPLE_SIZE: usize = 1000; 10 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 11 | const SEED: u64 = 42; 12 | 13 | #[cfg(feature = "compare")] 14 | criterion_group!( 15 | benches, 16 | mutate_generation_ref, 17 | mutate_generation_owned, 18 | diff_struct_bench::mutate, 19 | serde_diff_bench::mutate 20 | ); 21 | #[cfg(not(feature = "compare"))] 22 | criterion_group!(benches, mutate_generation_ref, mutate_generation_owned); 23 | 24 | const 
GROUP_NAME: &str = "generation"; 25 | 26 | fn mutate_generation_ref(c: &mut Criterion) { 27 | const BENCH_NAME: &str = "mutate_ref"; 28 | let mut group = c.benchmark_group(GROUP_NAME); 29 | group 30 | .sample_size(SAMPLE_SIZE) 31 | .measurement_time(MEASUREMENT_TIME); 32 | group.bench_function(BENCH_NAME, |b| { 33 | b.iter_batched( 34 | || { 35 | let mut rng = WyRand::new_seed(SEED); 36 | let first = TestBench::generate_random(&mut rng); 37 | let second = first.clone().random_mutate(&mut rng); 38 | (first, second) 39 | }, 40 | |(first, second)| { 41 | black_box(StructDiff::diff_ref(&first, &second)); 42 | }, 43 | BatchSize::LargeInput, 44 | ) 45 | }); 46 | group.finish(); 47 | } 48 | 49 | fn mutate_generation_owned(c: &mut Criterion) { 50 | const BENCH_NAME: &str = "mutate_owned"; 51 | let mut group = c.benchmark_group(GROUP_NAME); 52 | group 53 | .sample_size(SAMPLE_SIZE) 54 | .measurement_time(MEASUREMENT_TIME); 55 | group.bench_function(BENCH_NAME, |b| { 56 | b.iter_batched( 57 | || { 58 | let mut rng = WyRand::new_seed(SEED); 59 | let first = TestBench::generate_random(&mut rng); 60 | let second = first.clone().random_mutate(&mut rng); 61 | (first, second) 62 | }, 63 | |(first, second)| { 64 | black_box(StructDiff::diff(&first, &second)); 65 | }, 66 | BatchSize::LargeInput, 67 | ) 68 | }); 69 | group.finish(); 70 | } 71 | 72 | #[cfg(feature = "compare")] 73 | mod diff_struct_bench { 74 | use super::{ 75 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 76 | }; 77 | use criterion::BatchSize; 78 | use diff::Diff; 79 | 80 | pub(super) fn mutate(c: &mut Criterion) { 81 | const BENCH_NAME: &str = "diff_struct_mutate"; 82 | let mut group = c.benchmark_group(GROUP_NAME); 83 | group 84 | .sample_size(SAMPLE_SIZE) 85 | .measurement_time(MEASUREMENT_TIME); 86 | group.bench_function(BENCH_NAME, |b| { 87 | b.iter_batched( 88 | || { 89 | let mut rng = WyRand::new_seed(SEED); 90 | let first = TestBench::generate_random(&mut rng); 91 | let second = first.clone().random_mutate(&mut rng); 92 | (first, second) 93 | }, 94 | |(first, second)| { 95 | black_box(Diff::diff(&first, &second)); 96 | }, 97 | BatchSize::LargeInput, 98 | ) 99 | }); 100 | group.finish(); 101 | } 102 | } 103 | 104 | #[cfg(feature = "compare")] 105 | mod serde_diff_bench { 106 | use super::{ 107 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 108 | }; 109 | use bincode::Options; 110 | use criterion::BatchSize; 111 | 112 | pub(super) fn mutate(c: &mut Criterion) { 113 | const BENCH_NAME: &str = "serde_diff_mutate"; 114 | 115 | let mut group = c.benchmark_group(GROUP_NAME); 116 | group 117 | .sample_size(SAMPLE_SIZE) 118 | .measurement_time(MEASUREMENT_TIME); 119 | group.bench_function(BENCH_NAME, |b| { 120 | b.iter_batched( 121 | || { 122 | let mut rng = WyRand::new_seed(SEED); 123 | let first = TestBench::generate_random(&mut rng); 124 | let second = first.clone().random_mutate(&mut rng); 125 | let options = bincode::DefaultOptions::new() 126 | .with_fixint_encoding() 127 | .allow_trailing_bytes(); 128 | 129 | (first, second, options) 130 | }, 131 | |(first, second, options)| { 132 | black_box( 133 | options 134 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 135 | .unwrap(), 136 | ); 137 | }, 138 | BatchSize::LargeInput, 139 | ) 140 | }); 141 | group.finish(); 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /benchmarks/src/large/generate.rs: 
-------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use criterion::{black_box, criterion_group, BatchSize, Criterion}; 4 | use nanorand::WyRand; 5 | use structdiff::StructDiff; 6 | 7 | use crate::TestBench; 8 | const SAMPLE_SIZE: usize = 1000; 9 | const MEASUREMENT_TIME: Duration = Duration::from_secs(25); 10 | const SEED: u64 = 42; 11 | 12 | #[cfg(feature = "compare")] 13 | criterion_group!( 14 | benches, 15 | mutate_generation_ref, 16 | mutate_generation_owned, 17 | diff_struct_bench::mutate, 18 | serde_diff_bench::mutate 19 | ); 20 | #[cfg(not(feature = "compare"))] 21 | criterion_group!(benches, mutate_generation_ref, mutate_generation_owned); 22 | 23 | const GROUP_NAME: &str = "large_generation"; 24 | 25 | fn mutate_generation_ref(c: &mut Criterion) { 26 | const BENCH_NAME: &str = "mutate_ref"; 27 | let mut group = c.benchmark_group(GROUP_NAME); 28 | group 29 | .sample_size(SAMPLE_SIZE) 30 | .measurement_time(MEASUREMENT_TIME); 31 | group.bench_function(BENCH_NAME, |b| { 32 | b.iter_batched( 33 | || { 34 | let mut rng = WyRand::new_seed(SEED); 35 | let first = TestBench::generate_random_large(&mut rng); 36 | let second = first.clone().random_mutate_large(&mut rng); 37 | (first, second) 38 | }, 39 | |(first, second)| { 40 | black_box(StructDiff::diff_ref(&first, &second)); 41 | }, 42 | BatchSize::LargeInput, 43 | ) 44 | }); 45 | group.finish(); 46 | } 47 | 48 | fn mutate_generation_owned(c: &mut Criterion) { 49 | const BENCH_NAME: &str = "mutate_owned"; 50 | let mut group = c.benchmark_group(GROUP_NAME); 51 | group 52 | .sample_size(SAMPLE_SIZE) 53 | .measurement_time(MEASUREMENT_TIME); 54 | group.bench_function(BENCH_NAME, |b| { 55 | b.iter_batched( 56 | || { 57 | let mut rng = WyRand::new_seed(SEED); 58 | let first = TestBench::generate_random_large(&mut rng); 59 | let second = first.clone().random_mutate_large(&mut rng); 60 | (first, second) 61 | }, 62 | |(first, second)| { 63 | black_box(StructDiff::diff(&first, &second)); 64 | }, 65 | BatchSize::LargeInput, 66 | ) 67 | }); 68 | group.finish(); 69 | } 70 | 71 | #[cfg(feature = "compare")] 72 | mod diff_struct_bench { 73 | use super::{ 74 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 75 | }; 76 | use criterion::BatchSize; 77 | use diff::Diff; 78 | 79 | pub(super) fn mutate(c: &mut Criterion) { 80 | const BENCH_NAME: &str = "diff_struct_mutate"; 81 | let mut group = c.benchmark_group(GROUP_NAME); 82 | group 83 | .sample_size(SAMPLE_SIZE) 84 | .measurement_time(MEASUREMENT_TIME); 85 | group.bench_function(BENCH_NAME, |b| { 86 | b.iter_batched( 87 | || { 88 | let mut rng = WyRand::new_seed(SEED); 89 | let first = TestBench::generate_random_large(&mut rng); 90 | let second = first.clone().random_mutate_large(&mut rng); 91 | (first, second) 92 | }, 93 | |(first, second)| { 94 | black_box(Diff::diff(&first, &second)); 95 | }, 96 | BatchSize::LargeInput, 97 | ) 98 | }); 99 | group.finish(); 100 | } 101 | } 102 | 103 | #[cfg(feature = "compare")] 104 | mod serde_diff_bench { 105 | use super::{ 106 | black_box, Criterion, TestBench, WyRand, GROUP_NAME, MEASUREMENT_TIME, SAMPLE_SIZE, SEED, 107 | }; 108 | use bincode::Options; 109 | use criterion::BatchSize; 110 | 111 | pub(super) fn mutate(c: &mut Criterion) { 112 | const BENCH_NAME: &str = "serde_diff_mutate"; 113 | 114 | let mut group = c.benchmark_group(GROUP_NAME); 115 | group 116 | .sample_size(SAMPLE_SIZE) 117 | .measurement_time(MEASUREMENT_TIME); 118 | 
group.bench_function(BENCH_NAME, |b| { 119 | b.iter_batched( 120 | || { 121 | let mut rng = WyRand::new_seed(SEED); 122 | let first = TestBench::generate_random_large(&mut rng); 123 | let second = first.clone().random_mutate_large(&mut rng); 124 | let options = bincode::DefaultOptions::new() 125 | .with_fixint_encoding() 126 | .allow_trailing_bytes(); 127 | 128 | (first, second, options) 129 | }, 130 | |(first, second, options)| { 131 | black_box( 132 | options 133 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 134 | .unwrap(), 135 | ); 136 | }, 137 | BatchSize::LargeInput, 138 | ) 139 | }); 140 | group.finish(); 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /tests/types.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use generators::{fill, rand_bool, rand_string}; 4 | use nanorand::{Rng, WyRand}; 5 | #[cfg(feature = "nanoserde")] 6 | use nanoserde::{DeBin, SerBin}; 7 | #[cfg(feature = "serde")] 8 | use serde::{Deserialize, Serialize}; 9 | #[cfg_attr(not(feature = "generated_setters"), expect(unused_imports))] 10 | use structdiff::{Difference, StructDiff}; 11 | 12 | pub trait RandValue 13 | where 14 | Self: Sized, 15 | { 16 | fn next() -> Self { 17 | let mut rng = WyRand::new(); 18 | Self::next_seeded(&mut rng) 19 | } 20 | 21 | fn next_seeded(rng: &mut WyRand) -> Self; 22 | } 23 | 24 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 25 | #[cfg_attr(feature = "nanoserde", derive(SerBin, DeBin))] 26 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 27 | #[difference(setters)] 28 | pub struct Test { 29 | pub test1: i32, 30 | pub test2: String, 31 | pub test3: Vec, 32 | pub test4: f32, 33 | pub test5: Option, 34 | } 35 | 36 | #[derive(Debug, PartialEq, Clone, Difference)] 37 | #[cfg_attr(feature = "nanoserde", derive(SerBin, DeBin))] 38 | #[difference(setters)] 39 | pub struct TestSkip 40 | where 41 | A: PartialEq, 42 | { 43 | pub test1: A, 44 | pub test2: String, 45 | #[difference(skip)] 46 | pub test3skip: Vec, 47 | pub test4: f32, 48 | } 49 | 50 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 51 | #[cfg_attr(feature = "nanoserde", derive(SerBin, DeBin))] 52 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 53 | pub enum TestEnum { 54 | #[default] 55 | F0, 56 | F1(bool), 57 | F2(String), 58 | F3 { 59 | field1: String, 60 | field2: bool, 61 | }, 62 | F4(Test), 63 | } 64 | 65 | impl RandValue for Test { 66 | fn next_seeded(rng: &mut WyRand) -> Self { 67 | Test { 68 | test1: rng.generate(), 69 | test2: rand_string(rng), 70 | test3: fill(rng), 71 | test4: match f32::from_bits(rng.generate::()) { 72 | val if val.is_nan() => 0.0, 73 | val => val, 74 | }, 75 | test5: match rng.generate::() { 76 | true => Some(rng.generate()), 77 | false => None, 78 | }, 79 | } 80 | } 81 | } 82 | 83 | impl RandValue for TestEnum { 84 | fn next_seeded(rng: &mut WyRand) -> Self { 85 | match rng.generate_range(0..5) { 86 | 0 => Self::F0, 87 | 1 => Self::F1(rand_bool(rng)), 88 | 2 => Self::F2(rand_string(rng)), 89 | 3 => Self::F3 { 90 | field1: rand_string(rng), 91 | field2: rand_bool(rng), 92 | }, 93 | _ => Self::F4(Test::next()), 94 | } 95 | } 96 | } 97 | 98 | #[derive(Difference, Default, PartialEq, Debug, Clone)] 99 | #[difference(setters)] 100 | #[allow(unused)] 101 | pub struct TestSetters { 102 | #[difference(setter_name = "testing123", recurse)] 103 | pub f0: Test, 104 | pub f1: Test, 105 | pub f2: TestEnum, 106 | 
#[difference(recurse)] 107 | pub f3: Option, 108 | #[difference(collection_strategy = "unordered_array_like")] 109 | pub f4: Vec, 110 | #[difference(collection_strategy = "unordered_map_like", map_equality = "key_only")] 111 | pub f5: BTreeMap, 112 | #[difference( 113 | collection_strategy = "unordered_map_like", 114 | map_equality = "key_and_value" 115 | )] 116 | pub f6: BTreeMap, 117 | } 118 | 119 | impl RandValue for TestSetters { 120 | fn next_seeded(rng: &mut WyRand) -> Self { 121 | TestSetters { 122 | f0: Test::next(), 123 | f1: Test::next(), 124 | f2: TestEnum::next(), 125 | f3: if rng.generate::() { 126 | Some(Test::next_seeded(rng)) 127 | } else { 128 | None 129 | }, 130 | f4: generators::fill(rng), 131 | f5: generators::fill::>(rng) 132 | .into_iter() 133 | .map(|x| (x, Test::next_seeded(rng))) 134 | .take(10) 135 | .collect(), 136 | f6: generators::fill::>(rng) 137 | .into_iter() 138 | .map(|x| (x, Test::next_seeded(rng))) 139 | .take(10) 140 | .collect(), 141 | } 142 | } 143 | } 144 | 145 | mod generators { 146 | use nanorand::{Rng, WyRand}; 147 | 148 | pub(super) fn rand_bool(rng: &mut WyRand) -> bool { 149 | let base = rng.generate::() as usize; 150 | base % 2 == 0 151 | } 152 | 153 | pub(super) fn rand_string(rng: &mut WyRand) -> String { 154 | let base = vec![(); rng.generate::() as usize]; 155 | base.into_iter() 156 | .map(|_| rng.generate::() as u32) 157 | .filter_map(char::from_u32) 158 | .collect::() 159 | } 160 | 161 | pub(super) fn fill(rng: &mut WyRand) -> T 162 | where 163 | V: nanorand::RandomGen, 164 | T: FromIterator, 165 | { 166 | let base = vec![(); rng.generate::() as usize]; 167 | base.into_iter().map(|_| rng.generate::()).collect::() 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "nanoserde")] 2 | use nanoserde::{DeBin, SerBin}; 3 | 4 | #[cfg(feature = "serde")] 5 | use serde::{de::DeserializeOwned, Serialize}; 6 | 7 | pub use structdiff_derive::Difference; 8 | 9 | pub mod collections; 10 | 11 | #[cfg(all(feature = "nanoserde", feature = "serde", feature = "debug_diffs"))] 12 | pub(crate) mod __private { 13 | use super::*; 14 | pub trait StructDiffOwnedBound: 15 | SerBin + DeBin + Serialize + DeserializeOwned + Clone + std::fmt::Debug 16 | { 17 | } 18 | impl 19 | StructDiffOwnedBound for T 20 | { 21 | } 22 | 23 | pub trait StructDiffRefBound: SerBin + Serialize + Clone + std::fmt::Debug {} 24 | impl StructDiffRefBound for T {} 25 | } 26 | 27 | #[cfg(all(feature = "nanoserde", not(feature = "serde"), feature = "debug_diffs"))] 28 | pub(crate) mod __private { 29 | use super::*; 30 | 31 | pub trait StructDiffOwnedBound: SerBin + DeBin + Clone + std::fmt::Debug {} 32 | impl StructDiffOwnedBound for T {} 33 | 34 | pub trait StructDiffRefBound: SerBin + Clone + std::fmt::Debug {} 35 | impl StructDiffRefBound for T {} 36 | } 37 | 38 | #[cfg(all(feature = "serde", not(feature = "nanoserde"), feature = "debug_diffs"))] 39 | pub(crate) mod __private { 40 | use super::*; 41 | 42 | pub trait StructDiffOwnedBound: Serialize + DeserializeOwned + Clone + std::fmt::Debug {} 43 | impl StructDiffOwnedBound for T {} 44 | 45 | pub trait StructDiffRefBound: Serialize + Clone + std::fmt::Debug {} 46 | impl StructDiffRefBound for T {} 47 | } 48 | 49 | #[cfg(all( 50 | not(feature = "serde"), 51 | not(feature = "nanoserde"), 52 | feature = "debug_diffs" 53 | ))] 54 | pub(crate) mod __private { 55 | use super::*; 56 
| 57 | pub trait StructDiffOwnedBound: Clone + std::fmt::Debug {} 58 | impl StructDiffOwnedBound for T {} 59 | 60 | pub trait StructDiffRefBound: Clone + std::fmt::Debug {} 61 | impl StructDiffRefBound for T {} 62 | } 63 | 64 | #[cfg(all(feature = "nanoserde", feature = "serde", not(feature = "debug_diffs")))] 65 | pub(crate) mod __private { 66 | use super::*; 67 | pub trait StructDiffOwnedBound: SerBin + DeBin + Serialize + DeserializeOwned + Clone {} 68 | impl StructDiffOwnedBound for T {} 69 | 70 | pub trait StructDiffRefBound: SerBin + Serialize + Clone {} 71 | impl StructDiffRefBound for T {} 72 | } 73 | 74 | #[cfg(all( 75 | feature = "nanoserde", 76 | not(feature = "serde"), 77 | not(feature = "debug_diffs") 78 | ))] 79 | pub(crate) mod __private { 80 | use super::*; 81 | 82 | pub trait StructDiffOwnedBound: SerBin + DeBin + Clone {} 83 | impl StructDiffOwnedBound for T {} 84 | 85 | pub trait StructDiffRefBound: SerBin + Clone {} 86 | impl StructDiffRefBound for T {} 87 | } 88 | 89 | #[cfg(all( 90 | feature = "serde", 91 | not(feature = "nanoserde"), 92 | not(feature = "debug_diffs") 93 | ))] 94 | pub(crate) mod __private { 95 | use super::*; 96 | 97 | pub trait StructDiffOwnedBound: Serialize + DeserializeOwned + Clone {} 98 | impl StructDiffOwnedBound for T {} 99 | 100 | pub trait StructDiffRefBound: Serialize + Clone {} 101 | impl StructDiffRefBound for T {} 102 | } 103 | 104 | #[cfg(all( 105 | not(feature = "serde"), 106 | not(feature = "nanoserde"), 107 | not(feature = "debug_diffs") 108 | ))] 109 | pub(crate) mod __private { 110 | 111 | pub trait StructDiffOwnedBound: Clone {} 112 | impl StructDiffOwnedBound for T {} 113 | 114 | pub trait StructDiffRefBound: Clone {} 115 | impl StructDiffRefBound for T {} 116 | } 117 | 118 | pub trait StructDiff { 119 | /// A generated type used to represent the difference 120 | /// between two instances of a struct which implements 121 | /// the StructDiff trait. 122 | type Diff: __private::StructDiffOwnedBound; 123 | 124 | /// A generated type used to represent the difference 125 | /// between two instances of a struct which implements 126 | /// the StructDiff trait (using references). 127 | type DiffRef<'target>: __private::StructDiffRefBound + Into 128 | where 129 | Self: 'target; 130 | 131 | /// Generate a diff between two instances of a struct. 132 | /// This diff may be serialized if one of the serialization 133 | /// features is enabled. 134 | /// 135 | /// ``` 136 | /// use structdiff::{Difference, StructDiff}; 137 | /// 138 | /// #[derive(Debug, PartialEq, Clone, Difference)] 139 | /// struct Example { 140 | /// field1: f64, 141 | /// } 142 | /// 143 | /// let first = Example { 144 | /// field1: 0.0, 145 | /// }; 146 | /// 147 | /// let second = Example { 148 | /// field1: 3.14, 149 | /// }; 150 | /// 151 | /// let diffs: Vec<::Diff> = first.diff(&second); 152 | /// 153 | /// let diffed = first.apply(diffs); 154 | /// assert_eq!(diffed, second); 155 | /// ``` 156 | fn diff(&self, updated: &Self) -> Vec; 157 | 158 | /// Generate a diff between two instances of a struct, for 159 | /// use in passing to serializer. Much more efficient for 160 | /// structs with large fields where the diff will not be stored. 
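    /// Each returned `DiffRef` borrows from both `self` and `updated`, so convert it
    /// with `Into<Self::Diff>` (as in the example below) if it must outlive those borrows.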
161 | /// 162 | /// ``` 163 | /// use structdiff::{Difference, StructDiff}; 164 | /// 165 | /// #[derive(Debug, PartialEq, Clone, Difference)] 166 | /// struct Example { 167 | /// field1: f64, 168 | /// } 169 | /// 170 | /// let first = Example { 171 | /// field1: 0.0, 172 | /// }; 173 | /// 174 | /// let second = Example { 175 | /// field1: 3.14, 176 | /// }; 177 | /// 178 | /// let diffs: Vec<::DiffRef<'_>> = first.diff_ref(&second); 179 | /// 180 | /// let diffed = first.clone().apply(diffs.into_iter().map(Into::into).collect()); 181 | /// assert_eq!(diffed, second); 182 | /// ``` 183 | fn diff_ref<'target>(&'target self, updated: &'target Self) -> Vec>; 184 | 185 | /// Apply a single-field diff to a mutable self ref 186 | fn apply_single(&mut self, diff: Self::Diff); 187 | 188 | /// Apply a full diff to an owned self 189 | fn apply(mut self, diffs: Vec) -> Self 190 | where 191 | Self: Sized, 192 | { 193 | for diff in diffs { 194 | self.apply_single(diff); 195 | } 196 | self 197 | } 198 | 199 | /// Apply a full diff to a self ref, returning a cloned version of self 200 | /// after diff is applied 201 | fn apply_ref(&self, diffs: Vec) -> Self 202 | where 203 | Self: Clone, 204 | { 205 | self.clone().apply(diffs) 206 | } 207 | 208 | /// Apply a full diff to a mutable self ref 209 | fn apply_mut(&mut self, diffs: Vec) { 210 | for diff in diffs { 211 | self.apply_single(diff); 212 | } 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2022 Kirk Nickish 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /tests/integration.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports)] 2 | 3 | mod derives; 4 | mod enums; 5 | mod expose; 6 | mod types; 7 | use assert_unordered::{assert_eq_unordered, assert_eq_unordered_sort}; 8 | pub use types::{RandValue, Test, TestEnum, TestSkip}; 9 | 10 | use std::f32::consts::PI; 11 | use std::hash::Hash; 12 | use std::{ 13 | collections::{BTreeMap, BTreeSet, HashMap, HashSet, LinkedList}, 14 | fmt::Debug, 15 | num::Wrapping, 16 | }; 17 | use structdiff::{Difference, StructDiff}; 18 | 19 | #[cfg(feature = "serde")] 20 | use serde::{Deserialize, Serialize}; 21 | 22 | #[cfg(feature = "nanoserde")] 23 | use nanoserde::{DeBin, SerBin}; 24 | 25 | macro_rules! nanoserde_ref_test { 26 | ($first:ident, $second:ident) => { 27 | #[cfg(feature = "nanoserde")] 28 | assert_eq!( 29 | nanoserde::SerBin::serialize_bin(&(&$first).diff(&$second)), 30 | nanoserde::SerBin::serialize_bin(&(&$first).diff_ref(&$second)) 31 | ) 32 | }; 33 | } 34 | 35 | #[test] 36 | /// This should match the code used in README.md 37 | fn test_example() { 38 | #[derive(Debug, PartialEq, Clone, Difference)] 39 | struct Example { 40 | field1: f64, 41 | #[difference(skip)] 42 | field2: Vec, 43 | #[difference(collection_strategy = "unordered_array_like")] 44 | field3: BTreeSet, 45 | } 46 | 47 | let first = Example { 48 | field1: 0.0, 49 | field2: vec![], 50 | field3: vec![1, 2, 3].into_iter().collect(), 51 | }; 52 | 53 | let second = Example { 54 | field1: PI as f64, 55 | field2: vec![1], 56 | field3: vec![2, 3, 4].into_iter().collect(), 57 | }; 58 | 59 | let diffs = first.diff(&second); 60 | // diffs is now a Vec of differences, with length 61 | // equal to number of changed/unskipped fields 62 | assert_eq!(diffs.len(), 2); 63 | 64 | let diffed = first.apply(diffs); 65 | // diffed is now equal to second, except for skipped field 66 | assert_eq!(diffed.field1, second.field1); 67 | assert_eq!(&diffed.field3, &second.field3); 68 | assert_ne!(diffed, second); 69 | } 70 | 71 | #[test] 72 | fn test_derive() { 73 | let first: Test = Test { 74 | test1: 0, 75 | test2: String::new(), 76 | test3: Vec::new(), 77 | test4: 0.0, 78 | test5: None, 79 | }; 80 | 81 | let second = Test { 82 | test1: first.test1, 83 | test2: String::from("Hello Diff"), 84 | test3: vec![1], 85 | test4: PI, 86 | test5: Some(12), 87 | }; 88 | 89 | let diffs = first.diff(&second); 90 | let diffed = first.clone().apply(diffs); 91 | 92 | assert_eq!(&diffed, &second); 93 | nanoserde_ref_test!(first, second); 94 | } 95 | 96 | #[test] 97 | fn test_derive_with_skip() { 98 | let first: TestSkip = TestSkip { 99 | test1: 0, 100 | test2: String::new(), 101 | test3skip: Vec::new(), 102 | test4: 0.0, 103 | }; 104 | 105 | let second: TestSkip = TestSkip { 106 | test1: first.test1, 107 | test2: String::from("Hello Diff"), 108 | test3skip: vec![1], 109 | test4: PI, 110 | }; 111 | 112 | let diffs = first.diff(&second); 113 | 114 | #[cfg(feature = "serde")] 115 | { 116 | let ser_diff = bincode::serialize(&diffs).unwrap(); 117 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 118 | let diffed_serde = first.clone().apply(deser_diff); 119 | 120 | assert_eq!(diffed_serde.test1, second.test1); 121 | assert_eq!(diffed_serde.test2, second.test2); 122 | assert_ne!(diffed_serde.test3skip, second.test3skip); 123 | assert_eq!(diffed_serde.test4, second.test4); 124 | } 125 | 126 | #[cfg(feature = "nanoserde")] 127 | { 
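        // Round-trip the diff through nanoserde's binary format before applying it,
        // mirroring the serde/bincode check above; the skipped field must still be left
        // untouched.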
128 | let ser = SerBin::serialize_bin(&diffs); 129 | let diffed_serde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 130 | 131 | assert_eq!(diffed_serde.test1, second.test1); 132 | assert_eq!(diffed_serde.test2, second.test2); 133 | assert_ne!(diffed_serde.test3skip, second.test3skip); 134 | assert_eq!(diffed_serde.test4, second.test4); 135 | } 136 | 137 | let diffed = first.clone().apply(diffs); 138 | 139 | //check that all except the skipped are changed 140 | assert_eq!(diffed.test1, second.test1); 141 | assert_eq!(diffed.test2, second.test2); 142 | assert_ne!(diffed.test3skip, second.test3skip); 143 | assert_eq!(diffed.test4, second.test4); 144 | 145 | nanoserde_ref_test!(first, second); 146 | } 147 | 148 | #[derive(Debug, PartialEq, Clone, Difference)] 149 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 150 | #[cfg_attr(feature = "nanoserde", derive(SerBin, DeBin))] 151 | #[difference(setters)] 152 | struct TestGenerics { 153 | test1: A, 154 | test2: B, 155 | test3: C, 156 | test4: HashMap, 157 | } 158 | 159 | #[test] 160 | fn test_generics() { 161 | type TestType = TestGenerics, String>; 162 | let first: TestType = TestGenerics { 163 | test1: 0, 164 | test2: 42, 165 | test3: Some(true), 166 | test4: [(String::from("test123"), 1)].into_iter().collect(), 167 | }; 168 | 169 | let second: TestType = TestGenerics { 170 | test1: 0, 171 | test2: 42, 172 | test3: None, 173 | test4: [(String::from("test1234"), 2)].into_iter().collect(), 174 | }; 175 | 176 | let diffs = first.diff(&second); 177 | 178 | #[cfg(feature = "serde")] 179 | { 180 | let ser_diff = bincode::serialize(&diffs).unwrap(); 181 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 182 | let diffed_serde = first.clone().apply(deser_diff); 183 | 184 | assert_eq!(diffed_serde, second); 185 | } 186 | 187 | #[cfg(feature = "nanoserde")] 188 | { 189 | let ser = SerBin::serialize_bin(&diffs); 190 | let diffed_serde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 191 | 192 | assert_eq!(&diffed_serde, &second); 193 | } 194 | 195 | let diffed = first.clone().apply(diffs); 196 | 197 | //check that all except the skipped are changed 198 | assert_eq!(diffed.test1, second.test1); 199 | assert_eq!(diffed.test2, second.test2); 200 | assert_eq!(diffed.test3, second.test3); 201 | assert_eq!(diffed.test4, second.test4); 202 | 203 | nanoserde_ref_test!(first, second) 204 | } 205 | 206 | #[derive(Debug, PartialEq, Clone, Difference)] 207 | #[difference(setters)] 208 | struct TestGenericsSkip { 209 | test1: A, 210 | test2: B, 211 | test3: C, 212 | #[difference(skip)] 213 | test4: HashMap, 214 | test5: HashMap, 215 | } 216 | 217 | #[test] 218 | fn test_generics_skip() { 219 | let first: TestGenericsSkip, String> = TestGenericsSkip { 220 | test1: 0, 221 | test2: 42, 222 | test3: Some(true), 223 | test4: [(String::from("test123"), 1)].into_iter().collect(), 224 | test5: [(String::from("test123"), 1)].into_iter().collect(), 225 | }; 226 | 227 | let second: TestGenericsSkip, String> = TestGenericsSkip { 228 | test1: 0, 229 | test2: 42, 230 | test3: None, 231 | test4: [(String::from("test1234"), 2)].into_iter().collect(), 232 | test5: [(String::from("test1234"), 2)].into_iter().collect(), 233 | }; 234 | 235 | let diffs = first.diff(&second); 236 | 237 | #[cfg(feature = "serde")] 238 | { 239 | let ser_diff = bincode::serialize(&diffs).unwrap(); 240 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 241 | let diffed_serde = first.clone().apply(deser_diff); 242 | 243 | 
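        // Only the non-skipped fields should match after applying the round-tripped diff;
        // `test4` is marked `#[difference(skip)]`, so it is expected to differ.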
assert_eq!(diffed_serde.test1, second.test1); 244 | assert_eq!(diffed_serde.test2, second.test2); 245 | assert_eq!(diffed_serde.test3, second.test3); 246 | assert_ne!(diffed_serde.test4, second.test4); 247 | assert_eq!(diffed_serde.test5, second.test5); 248 | } 249 | 250 | #[cfg(feature = "nanoserde")] 251 | { 252 | let ser = SerBin::serialize_bin(&diffs); 253 | let diffed_serde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 254 | 255 | assert_eq!(diffed_serde.test1, second.test1); 256 | assert_eq!(diffed_serde.test2, second.test2); 257 | assert_eq!(diffed_serde.test3, second.test3); 258 | assert_ne!(diffed_serde.test4, second.test4); 259 | assert_eq!(diffed_serde.test5, second.test5); 260 | } 261 | 262 | let diffed = first.clone().apply(diffs); 263 | 264 | //check that all except the skipped are changed 265 | assert_eq!(diffed.test1, second.test1); 266 | assert_eq!(diffed.test2, second.test2); 267 | assert_eq!(diffed.test3, second.test3); 268 | assert_ne!(diffed.test4, second.test4); 269 | assert_eq!(diffed.test5, second.test5); 270 | 271 | nanoserde_ref_test!(first, second); 272 | } 273 | 274 | #[test] 275 | fn test_enums() { 276 | let mut follower = TestEnum::next(); 277 | let mut leader: TestEnum; 278 | for _ in 0..100 { 279 | leader = TestEnum::next(); 280 | let diff = follower.diff(&leader); 281 | follower.apply_mut(diff); 282 | assert_eq!(&leader, &follower); 283 | nanoserde_ref_test!(leader, follower); 284 | } 285 | } 286 | 287 | mod derive_inner { 288 | use std::f32::consts::PI; 289 | 290 | use super::{StructDiff, Test}; 291 | //tests that the associated type does not need to be exported manually 292 | 293 | #[test] 294 | fn test_derive_inner() { 295 | let first: Test = Test { 296 | test1: 0, 297 | test2: String::new(), 298 | test3: Vec::new(), 299 | test4: 0.0, 300 | test5: None, 301 | }; 302 | 303 | let second = Test { 304 | test1: first.test1, 305 | test2: String::from("Hello Diff"), 306 | test3: vec![1], 307 | test4: PI, 308 | test5: Some(13), 309 | }; 310 | 311 | let diffs = first.diff(&second); 312 | let diffed = first.clone().apply(diffs); 313 | 314 | assert_eq!(diffed, second); 315 | nanoserde_ref_test!(first, second); 316 | } 317 | } 318 | 319 | #[test] 320 | fn test_recurse() { 321 | #[derive(Debug, PartialEq, Clone, Difference)] 322 | #[difference(setters)] 323 | struct TestRecurse { 324 | test1: i32, 325 | #[difference(recurse)] 326 | test2: Test, 327 | #[difference(recurse)] 328 | test3: Option, 329 | #[difference(recurse)] 330 | test4: Option, 331 | #[difference(recurse)] 332 | test5: Option, 333 | } 334 | 335 | let first = TestRecurse { 336 | test1: 0, 337 | test2: Test { 338 | test1: 0, 339 | test2: String::new(), 340 | test3: Vec::new(), 341 | test4: 0.0, 342 | test5: Some(14), 343 | }, 344 | test3: None, 345 | test4: Some(Test::default()), 346 | test5: Some(Test { 347 | test1: 0, 348 | test2: String::new(), 349 | test3: Vec::new(), 350 | test4: 0.0, 351 | test5: Some(14), 352 | }), 353 | }; 354 | 355 | let second = TestRecurse { 356 | test1: 1, 357 | test2: Test { 358 | test1: 2, 359 | test2: String::new(), 360 | test3: Vec::new(), 361 | test4: PI, 362 | test5: None, 363 | }, 364 | test3: Some(Test::default()), 365 | test4: Some(Test { 366 | test1: 0, 367 | test2: String::new(), 368 | test3: Vec::new(), 369 | test4: 0.0, 370 | test5: Some(14), 371 | }), 372 | test5: None, 373 | }; 374 | 375 | let diffs = first.diff(&second); 376 | assert_eq!(diffs.len(), 5); 377 | 378 | type TestRecurseFields = ::Diff; 379 | 380 | if let 
TestRecurseFields::test2(val) = &diffs[1] { 381 | assert_eq!(val.len(), 3); 382 | } else { 383 | panic!("Recursion failure"); 384 | } 385 | 386 | #[cfg(feature = "serde")] 387 | { 388 | let ser_diff = bincode::serialize(&diffs).unwrap(); 389 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 390 | let diffed_serde = first.clone().apply(deser_diff); 391 | 392 | assert_eq!(diffed_serde, second); 393 | } 394 | 395 | #[cfg(feature = "nanoserde")] 396 | { 397 | let ser = SerBin::serialize_bin(&diffs); 398 | let diffed_serde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 399 | 400 | assert_eq!(&diffed_serde, &second); 401 | } 402 | 403 | let diffed = first.clone().apply(diffs); 404 | 405 | assert_eq!(diffed, second); 406 | nanoserde_ref_test!(first, second); 407 | } 408 | 409 | #[test] 410 | fn test_collection_strategies() { 411 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 412 | struct TestCollection { 413 | #[difference(collection_strategy = "unordered_array_like")] 414 | test1: Vec, 415 | #[difference(collection_strategy = "unordered_array_like")] 416 | test2: HashSet, 417 | #[difference(collection_strategy = "unordered_array_like")] 418 | test3: LinkedList, 419 | } 420 | 421 | let first = TestCollection { 422 | test1: vec![10, 15, 20, 25, 30], 423 | test3: vec![10, 15, 17].into_iter().collect(), 424 | ..Default::default() 425 | }; 426 | 427 | let second = TestCollection { 428 | test1: Vec::default(), 429 | test2: vec![10].into_iter().collect(), 430 | test3: vec![10, 15, 17, 19].into_iter().collect(), 431 | }; 432 | 433 | let diffs = first.diff(&second); 434 | 435 | #[cfg(feature = "serde")] 436 | { 437 | let ser_diff = bincode::serialize(&diffs).unwrap(); 438 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 439 | let diffed_serde = first.clone().apply(deser_diff); 440 | 441 | use assert_unordered::assert_eq_unordered; 442 | assert_eq_unordered!(&diffed_serde.test1, &second.test1); 443 | assert_eq_unordered!(&diffed_serde.test2, &second.test2); 444 | assert_eq_unordered!(&diffed_serde.test3, &second.test3); 445 | } 446 | 447 | #[cfg(feature = "nanoserde")] 448 | { 449 | let ser = SerBin::serialize_bin(&diffs); 450 | let diffed_nserde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 451 | 452 | use assert_unordered::assert_eq_unordered; 453 | assert_eq_unordered!(&diffed_nserde.test1, &second.test1); 454 | assert_eq_unordered!(&diffed_nserde.test2, &second.test2); 455 | assert_eq_unordered!(&diffed_nserde.test3, &second.test3); 456 | } 457 | 458 | let diffed = first.clone().apply(diffs); 459 | 460 | use assert_unordered::assert_eq_unordered; 461 | assert_eq_unordered!(&diffed.test1, &second.test1); 462 | assert_eq_unordered!(&diffed.test2, &second.test2); 463 | assert_eq_unordered!(&diffed.test3, &second.test3); 464 | 465 | nanoserde_ref_test!(first, second); 466 | } 467 | 468 | #[test] 469 | fn test_key_value() { 470 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 471 | struct TestCollection { 472 | #[difference( 473 | collection_strategy = "unordered_map_like", 474 | map_equality = "key_and_value" 475 | )] 476 | test1: HashMap, 477 | } 478 | 479 | let first = TestCollection { 480 | test1: vec![(10, 0), (15, 2), (20, 0), (25, 0), (30, 15)] 481 | .into_iter() 482 | .collect(), 483 | }; 484 | 485 | let second = TestCollection { 486 | test1: vec![(10, 21), (15, 2), (20, 0), (25, 0), (30, 15)] 487 | .into_iter() 488 | .collect(), 489 | }; 490 | 491 | let diffs = first.diff(&second); 492 | 493 | #[cfg(feature = "serde")] 494 | 
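    // The only change between `first` and `second` is the value stored under key 10;
    // with `map_equality = "key_and_value"` it shows up in the diff and must survive a
    // serde round trip.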
{ 495 | let ser_diff = bincode::serialize(&diffs).unwrap(); 496 | let deser_diff = bincode::deserialize(&ser_diff).unwrap(); 497 | let diffed_serde = first.clone().apply(deser_diff); 498 | 499 | use assert_unordered::assert_eq_unordered; 500 | assert_eq_unordered!(&diffed_serde.test1, &second.test1); 501 | } 502 | 503 | #[cfg(feature = "nanoserde")] 504 | { 505 | let ser = SerBin::serialize_bin(&diffs); 506 | let diffed_serde = first.clone().apply(DeBin::deserialize_bin(&ser).unwrap()); 507 | 508 | use assert_unordered::assert_eq_unordered; 509 | assert_eq_unordered!(&diffed_serde.test1, &second.test1); 510 | } 511 | 512 | let diffed = first.clone().apply(diffs); 513 | 514 | use assert_unordered::assert_eq_unordered; 515 | assert_eq_unordered!(&diffed.test1, &second.test1); 516 | 517 | nanoserde_ref_test!(first, second); 518 | } 519 | 520 | #[cfg(feature = "generated_setters")] 521 | #[test] 522 | fn test_setters() { 523 | use types::TestSetters; 524 | let mut base = TestSetters::default(); 525 | let mut end = TestSetters::default(); 526 | let mut partial_diffs = vec![]; 527 | let mut full_diffs = vec![]; 528 | 529 | for _ in 0..100 { 530 | end = TestSetters::next(); 531 | let base_clone = base.clone(); 532 | partial_diffs.extend(base.testing123(end.f0.clone())); 533 | partial_diffs.extend(base.set_f1_with_diff(end.f1.clone())); 534 | partial_diffs.extend(base.set_f2_with_diff(end.f2.clone())); 535 | partial_diffs.extend(base.set_f3_with_diff(end.f3.clone())); 536 | partial_diffs.extend(base.set_f4_with_diff(end.f4.clone())); 537 | partial_diffs.extend(base.set_f5_with_diff(end.f5.clone())); 538 | partial_diffs.extend(base.set_f6_with_diff(end.f6.clone())); 539 | let tmp = base_clone.apply_ref(partial_diffs.clone()); 540 | assert_eq!(&tmp.f0, &end.f0); 541 | assert_eq!(&tmp.f1, &end.f1); 542 | assert_eq!(&tmp.f2, &end.f2); 543 | assert_eq!(&tmp.f3, &end.f3); 544 | assert_eq_unordered!(&tmp.f4, &end.f4); 545 | assert_eq_unordered!(&tmp.f5, &end.f5); 546 | assert_eq_unordered!(&tmp.f6, &end.f6); 547 | 548 | assert_eq!(&base.f0, &end.f0); 549 | assert_eq!(&base.f1, &end.f1); 550 | assert_eq!(&base.f2, &end.f2); 551 | assert_eq!(&base.f3, &end.f3); 552 | assert_eq_unordered!(&base.f4, &end.f4); 553 | assert_eq_unordered!(&base.f5, &end.f5); 554 | assert_eq_unordered!(&base.f6, &end.f6); 555 | full_diffs.extend(std::mem::take(&mut partial_diffs)); 556 | } 557 | 558 | let modified = TestSetters::default().apply(full_diffs); 559 | assert_eq!(modified.f0, end.f0); 560 | assert_eq!(modified.f1, end.f1); 561 | assert_eq!(modified.f2, end.f2); 562 | assert_eq!(modified.f3, end.f3); 563 | assert_eq_unordered_sort!(modified.f4, end.f4); 564 | assert_eq_unordered!(modified.f5, end.f5); 565 | assert_eq_unordered!(modified.f6, end.f6); 566 | } 567 | -------------------------------------------------------------------------------- /src/collections/rope/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cmp::Ordering::{Equal, Greater}, 3 | collections::VecDeque, 4 | fmt::Debug, 5 | num::NonZeroUsize, 6 | ops::{Index, IndexMut, Neg}, 7 | }; 8 | 9 | /// helper type for [`NodesWithCount::slot_mut_internal`] and [`NodesWithCount::remove_internal`] 10 | enum RetType { 11 | Further(usize), 12 | This(usize), 13 | } 14 | 15 | #[derive(Clone, Default)] 16 | enum Node { 17 | #[default] 18 | Empty, 19 | Single(T), 20 | Multiple(NodesWithCount), 21 | } 22 | 23 | impl Node { 24 | fn is_occupied(&self) -> bool { 25 | matches!( 26 | self, 27 | 
Node::Multiple(NodesWithCount { 28 | count: Some(..), 29 | .. 30 | }) | Node::Single(..) 31 | ) 32 | } 33 | } 34 | 35 | #[derive(Clone)] 36 | struct NodesWithCount { 37 | count: Option, 38 | nodes: VecDeque>, 39 | } 40 | 41 | impl Default for NodesWithCount { 42 | fn default() -> Self { 43 | Self { 44 | count: None, 45 | nodes: VecDeque::new(), 46 | } 47 | } 48 | } 49 | 50 | #[derive(Clone)] 51 | pub struct Rope(NodesWithCount); 52 | 53 | impl NodesWithCount { 54 | #[inline] 55 | fn shallow_len(&self) -> usize { 56 | self.count.map(NonZeroUsize::get).unwrap_or_default() 57 | } 58 | 59 | fn pop_front(&mut self) -> Option { 60 | let node_mut = self 61 | .nodes 62 | .iter_mut() 63 | .find(|v| matches!(v, Node::Single(..) | Node::Multiple(..)))?; 64 | match node_mut { 65 | Node::Empty => unreachable!(), 66 | Node::Single(_) => { 67 | let Node::Single(v) = std::mem::take(node_mut) else { 68 | unreachable!() 69 | }; 70 | self.count = match self 71 | .count 72 | .expect("we're removing a node, count should be >= 1") 73 | { 74 | NonZeroUsize::MIN => None, 75 | higher => Some(NonZeroUsize::new(higher.get() - 1).unwrap()), 76 | }; 77 | Some(v) 78 | } 79 | Node::Multiple(nodes) => { 80 | let v = nodes 81 | .pop_front() 82 | .expect("MUST be at least one value in a Node::Multiple"); 83 | self.count = match self 84 | .count 85 | .expect("we're removing a node, count should be >= 1") 86 | { 87 | NonZeroUsize::MIN => None, 88 | higher => Some(NonZeroUsize::new(higher.get() - 1).unwrap()), 89 | }; 90 | if nodes.shallow_len() == 0 { 91 | std::mem::take(node_mut); 92 | } 93 | Some(v) 94 | } 95 | } 96 | } 97 | 98 | fn index_internal(&self, idx: usize, mut last: Option) -> Option<&T> { 99 | for node in &self.nodes { 100 | match (node, &mut last) { 101 | (Node::Empty, _) => continue, 102 | (Node::Single(v), Some(last)) if *last == idx - 1 => return Some(v), 103 | (Node::Single(_), Some(last)) => *last += 1, 104 | (Node::Single(v), None) if idx == 0 => return Some(v), 105 | (Node::Single(_), last @ None) => *last = Some(0), 106 | (Node::Multiple(nodes), last) 107 | if nodes.shallow_len() + last.unwrap_or_default() >= idx => 108 | { 109 | return nodes.index_internal(idx, *last) 110 | } 111 | (Node::Multiple(nodes), last) => { 112 | *last = 113 | Some(last.unwrap_or_default() + nodes.count.map_or(0, NonZeroUsize::get)) 114 | } 115 | } 116 | } 117 | 118 | None 119 | } 120 | 121 | fn index_mut_internal(&mut self, idx: usize, mut last: Option) -> Option<&mut T> { 122 | for node in self.nodes.iter_mut() { 123 | match node { 124 | Node::Empty => continue, 125 | Node::Single(v) => { 126 | let curr = last.map(|l| l + 1).unwrap_or_default(); 127 | match curr == idx { 128 | true => return Some(v), 129 | false => last = Some(curr), 130 | } 131 | } 132 | Node::Multiple(nodes) => { 133 | // relying here on the fact that a Multiple MUST have at least one element 134 | let endex = match last { 135 | Some(last) => last + nodes.shallow_len(), 136 | None => nodes.shallow_len() - 1, 137 | }; 138 | match endex >= idx { 139 | true => return nodes.index_mut_internal(idx, last), 140 | false => last = Some(endex), 141 | } 142 | } 143 | } 144 | } 145 | 146 | None 147 | } 148 | 149 | /// returns the parent of the Node::Single which holds the item at the relevant index, the 150 | /// index within the parent of that Node::Single, and whether cleanups are needed due to empty Nodes::Multiple. 
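    /// Called by `insert_internal` (with an adjustment of `+1`) and by `swap_internal`
    /// (with no adjustment).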
151 | /// 152 | /// Apply an optional `adjustment` to the node count at each layer 153 | fn slot_mut_internal( 154 | &mut self, 155 | idx: usize, 156 | mut last: Option, 157 | adjustment: Option, 158 | ) -> Option<(&mut NodesWithCount, usize)> { 159 | let mut return_idx = None; 160 | for (inner_idx, node) in self.nodes.iter_mut().enumerate() { 161 | match node { 162 | Node::Empty => continue, 163 | Node::Single(_) => { 164 | let curr = last.map(|l| l + 1).unwrap_or_default(); 165 | match curr == idx { 166 | true => { 167 | return_idx = Some(RetType::This(inner_idx)); 168 | break; 169 | } 170 | false => last = Some(curr), 171 | } 172 | } 173 | Node::Multiple(nodes) => { 174 | // relying here on the fact that a Multiple MUST have at least one element 175 | let endex = match last { 176 | Some(last) => last + nodes.shallow_len(), 177 | None => nodes.shallow_len() - 1, 178 | }; 179 | match endex >= idx { 180 | true => { 181 | return_idx = Some(RetType::Further(inner_idx)); 182 | break; 183 | } 184 | false => last = Some(endex), 185 | } 186 | } 187 | } 188 | } 189 | 190 | return_idx.map(|inner_idx| { 191 | match adjustment { 192 | Some(pos @ 1..) => { 193 | self.count = Some(self.count.unwrap().checked_add(pos as usize).unwrap()) 194 | } 195 | Some(neg @ ..-1) => { 196 | self.count = NonZeroUsize::new(self.count.unwrap().get() - neg.neg() as usize); 197 | } 198 | _ => (), 199 | } 200 | match inner_idx { 201 | RetType::Further(i) => { 202 | let Some(Node::Multiple(mult)) = self.nodes.get_mut(i) else { 203 | panic!() 204 | }; 205 | mult.slot_mut_internal(idx, last, adjustment).unwrap() 206 | } 207 | RetType::This(i) => (self, i), 208 | } 209 | }) 210 | } 211 | 212 | fn insert_internal(&mut self, idx: usize, element: T) { 213 | match (self.count, idx) { 214 | (None, 0) => { 215 | return { 216 | let len = self.nodes.len(); 217 | match len == 0 { 218 | true => self.nodes.push_back(Node::Single(element)), 219 | false => self.nodes[len.saturating_div(2)] = Node::Single(element), 220 | } 221 | self.count = Some(NonZeroUsize::MIN); 222 | } 223 | } 224 | (Some(len), idx) if len.get() == idx => { 225 | return { 226 | // check if there's a Node::Empty that we can replace with a Node::Single(element), otherwise append to top level 227 | let last_occupied = self 228 | .nodes 229 | .iter() 230 | .enumerate() 231 | .rev() 232 | .find_map(|(idx, v)| v.is_occupied().then_some(idx)) 233 | .unwrap(); 234 | self.count = Some( 235 | self.count 236 | .map_or(NonZeroUsize::MIN, |v| v.checked_add(1).unwrap()), 237 | ); 238 | match self.nodes.get_mut(last_occupied + 1) { 239 | None => self.nodes.push_back(Node::Single(element)), 240 | Some(empty) => *empty = Node::Single(element), 241 | }; 242 | }; 243 | } 244 | _ => (), 245 | } 246 | 247 | // it's neither a front nor a back, find the internal slot that holds the item at the relevent index and either push it if 248 | // it's at front/back of its vec, or replace the Node::Single with a Node::Multiple 249 | let (nodes, slot, ..) 
= self.slot_mut_internal(idx, None, Some(1)).unwrap(); 250 | 251 | // check for special cases 252 | if slot == 0 { 253 | return nodes.nodes.push_front(Node::Single(element)); 254 | } 255 | if let Some(Node::Multiple(before)) = nodes.nodes.get_mut(slot - 1) { 256 | before.count = before.count.map(|c| c.checked_add(1)).unwrap(); 257 | before.nodes.push_back(Node::Single(element)); 258 | return; 259 | } 260 | 261 | let node_slot = &mut nodes.nodes[slot]; 262 | let prev = std::mem::take(node_slot); 263 | debug_assert!(matches!(prev, Node::Single(..))); 264 | 265 | *node_slot = Node::Multiple(NodesWithCount { 266 | count: Some(NonZeroUsize::new(2).unwrap()), 267 | nodes: vec![Node::Single(element), prev].into(), 268 | }); 269 | } 270 | 271 | /// Get the item at index `idx`, patching up Node type at each level if they become empty 272 | fn remove_internal(&mut self, idx: usize, mut last: Option) -> Option { 273 | let mut return_idx = None; 274 | for (inner_idx, node) in self.nodes.iter_mut().enumerate() { 275 | match node { 276 | Node::Empty => continue, 277 | Node::Single(_) => { 278 | let curr = last.map(|l| l + 1).unwrap_or_default(); 279 | match curr == idx { 280 | true => { 281 | return_idx = Some(RetType::This(inner_idx)); 282 | break; 283 | } 284 | false => last = Some(curr), 285 | } 286 | } 287 | Node::Multiple(nodes) => { 288 | // relying here on the fact that a Multiple MUST have at least one element 289 | let endex = match last { 290 | Some(last) => last + nodes.count.unwrap().get(), 291 | None => nodes.count.unwrap().get() - 1, 292 | }; 293 | match endex >= idx { 294 | true => { 295 | return_idx = Some(RetType::Further(inner_idx)); 296 | break; 297 | } 298 | false => last = Some(endex), 299 | } 300 | } 301 | } 302 | } 303 | 304 | let inner_idx = return_idx?; 305 | 306 | self.count = NonZeroUsize::new(self.count.unwrap().get() - 1_usize); 307 | 308 | match inner_idx { 309 | RetType::Further(i) => { 310 | let Some(Node::Multiple(mult)) = self.nodes.get_mut(i) else { 311 | panic!() 312 | }; 313 | let ret = mult.remove_internal(idx, last); 314 | 315 | if mult.count.is_none() { 316 | self.nodes[i] = Node::Empty; 317 | } 318 | 319 | ret 320 | } 321 | RetType::This(i) => { 322 | let Some(Node::Single(single)) = self.nodes.get_mut(i).map(std::mem::take) else { 323 | panic!() 324 | }; 325 | Some(single) 326 | } 327 | } 328 | } 329 | 330 | fn swap_internal(&mut self, [low, high]: [usize; 2], _arg: i32) { 331 | let high_elem = self.remove_internal(high, None).unwrap(); 332 | let (nodes, slot) = self.slot_mut_internal(low, None, None).unwrap(); 333 | 334 | let Node::Single(low) = std::mem::replace(&mut nodes.nodes[slot], Node::Single(high_elem)) 335 | else { 336 | panic!(); 337 | }; 338 | self.insert_internal(high, low); 339 | } 340 | 341 | fn drain_internal(&mut self, l_idx: usize, r_idx: usize, mut last: Option) { 342 | let mut removed = 0; 343 | for node in self.nodes.iter_mut() { 344 | match node { 345 | Node::Empty => continue, 346 | single @ Node::Single(_) => { 347 | let new_last = last.map(|i| i + 1).unwrap_or_default(); 348 | if (l_idx..=r_idx).contains(&new_last) { 349 | *single = Node::Empty; 350 | removed += 1; 351 | } 352 | last = Some(new_last); 353 | } 354 | Node::Multiple(nodes_with_count) => { 355 | let nodes_with_count_len = nodes_with_count.count.unwrap().get(); 356 | // get the endex of the last element of nodes_with_count 357 | let last_after_sub = match last { 358 | Some(last) => last + nodes_with_count_len, 359 | None => nodes_with_count_len - 1, 360 | }; 361 | 362 | // 
check if the endex is still lower than the lowest value we are to remove 363 | if last_after_sub < l_idx { 364 | last = Some(last_after_sub); 365 | continue; 366 | } 367 | 368 | // we need to remove some things; figure out if we can remove the entire subtree or need to recurse 369 | match r_idx.cmp(&last_after_sub) { 370 | // we can yeet the whole thing 371 | Equal | Greater 372 | if last.map_or_else( 373 | || l_idx == last.unwrap_or_default(), 374 | |l| l_idx <= l + 1, 375 | ) => 376 | { 377 | let before = nodes_with_count_len; 378 | *node = Node::Empty; 379 | removed += before; 380 | } 381 | // we need to remove only part of this node 382 | _ => { 383 | let before = nodes_with_count_len; 384 | nodes_with_count.drain_internal(l_idx, r_idx, last); 385 | let after = nodes_with_count.count.unwrap().get(); 386 | removed += before.abs_diff(after); 387 | } 388 | } 389 | last = Some(last_after_sub); 390 | } 391 | } 392 | 393 | if last.is_some_and(|l| l >= r_idx) { 394 | break; 395 | } 396 | } 397 | 398 | self.count = self 399 | .count 400 | .map(|c| c.get() - removed) 401 | .and_then(NonZeroUsize::new); 402 | } 403 | } 404 | 405 | impl FromIterator for Rope { 406 | fn from_iter>(iter: C) -> Self { 407 | let nodes = iter 408 | .into_iter() 409 | .map(|t| Node::Single(t)) 410 | .collect::>(); 411 | Self(NodesWithCount { 412 | count: NonZeroUsize::try_from(nodes.len()).ok(), 413 | nodes, 414 | }) 415 | } 416 | } 417 | 418 | pub struct IntoIter { 419 | owned: NodesWithCount, 420 | } 421 | 422 | impl IntoIterator for Rope { 423 | type Item = T; 424 | 425 | type IntoIter = IntoIter; 426 | 427 | fn into_iter(self) -> Self::IntoIter { 428 | IntoIter { owned: self.0 } 429 | } 430 | } 431 | 432 | impl Iterator for IntoIter { 433 | type Item = T; 434 | 435 | fn next(&mut self) -> Option { 436 | self.owned.pop_front() 437 | } 438 | } 439 | 440 | impl Index for Rope { 441 | type Output = T; 442 | 443 | fn index(&self, index: usize) -> &Self::Output { 444 | self.0 445 | .index_internal(index, None) 446 | .expect("Failed to find element") 447 | } 448 | } 449 | 450 | impl IndexMut for Rope { 451 | fn index_mut(&mut self, index: usize) -> &mut Self::Output { 452 | self.0 453 | .index_mut_internal(index, None) 454 | .expect("Failed to find element") 455 | } 456 | } 457 | 458 | impl std::fmt::Debug for Node { 459 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 460 | match self { 461 | Self::Empty => write!(f, "Node::Empty"), 462 | Self::Single(arg0) => f.debug_tuple("Node::Single").field(arg0).finish(), 463 | Self::Multiple(arg0) => f.debug_tuple("Node::Multiple").field(arg0).finish(), 464 | } 465 | } 466 | } 467 | 468 | impl std::fmt::Debug for NodesWithCount { 469 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 470 | f.debug_struct("NodesWithCount") 471 | .field("count", &self.count) 472 | .field("nodes", &self.nodes) 473 | .finish() 474 | } 475 | } 476 | 477 | impl std::fmt::Debug for Rope { 478 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 479 | f.debug_tuple("Rope").field(&self.0).finish() 480 | } 481 | } 482 | 483 | impl Rope { 484 | pub fn len(&self) -> usize { 485 | self.0.count.map_or(0, usize::from) 486 | } 487 | 488 | pub fn insert(&mut self, index: usize, element: T) { 489 | self.0.insert_internal(index, element); 490 | } 491 | 492 | pub fn remove(&mut self, index: usize) { 493 | self.0 494 | .remove_internal(index, None) 495 | .expect("No item at index"); 496 | } 497 | 498 | pub fn drain>(&mut self, range: R) { 499 | use 
std::ops::Bound; 500 | 501 | let (l_idx, r_idx) = match (range.start_bound(), range.end_bound()) { 502 | (Bound::Included(l_i), Bound::Excluded(r_e)) if l_i == r_e => return, 503 | (Bound::Included(l_i), Bound::Included(r_i)) => (*l_i, *r_i), 504 | (Bound::Included(l_i), Bound::Excluded(r_e)) => (*l_i, r_e - 1), 505 | (Bound::Included(l_i), Bound::Unbounded) => (*l_i, self.len() - 1), 506 | (Bound::Excluded(l_e), Bound::Included(r_i)) => (l_e + 1, *r_i), 507 | (Bound::Excluded(l_e), Bound::Excluded(r_e)) => (l_e + 1, r_e - 1), 508 | (Bound::Excluded(l_e), Bound::Unbounded) => (l_e + 1, self.len() - 1), 509 | (Bound::Unbounded, Bound::Included(r_i)) => (0, *r_i), 510 | (Bound::Unbounded, Bound::Excluded(r_e)) => (0, r_e - 1), 511 | (Bound::Unbounded, Bound::Unbounded) => (0, self.len() - 1), 512 | }; 513 | 514 | if r_idx == 0 && l_idx == 0 && self.0.shallow_len() == 0 { 515 | return; 516 | } 517 | 518 | self.0.drain_internal(l_idx, r_idx, None); 519 | } 520 | 521 | pub fn swap(&mut self, a: usize, b: usize) { 522 | if a != b { 523 | self.0.swap_internal([a.min(b), a.max(b)], 0); 524 | } 525 | } 526 | } 527 | -------------------------------------------------------------------------------- /benchmarks/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | 3 | use assert_unordered::assert_eq_unordered_sort; 4 | use nanorand::{Rng, WyRand}; 5 | use structdiff::{Difference, StructDiff}; 6 | 7 | pub mod basic; 8 | pub mod large; 9 | 10 | #[derive(Debug, Difference, PartialEq, Clone, serde::Serialize, serde::Deserialize)] 11 | #[cfg_attr(feature = "compare", derive(diff::Diff))] 12 | #[cfg_attr(feature = "compare", diff(attr( 13 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 14 | )))] 15 | #[cfg_attr(feature = "compare", derive(serde_diff::SerdeDiff))] 16 | pub struct TestBench { 17 | pub a: String, 18 | pub b: i32, 19 | #[difference(collection_strategy = "unordered_array_like")] 20 | #[cfg_attr(feature = "compare", serde_diff(opaque))] 21 | pub c: HashSet, 22 | #[difference(collection_strategy = "ordered_array_like")] 23 | pub d: Vec, 24 | #[difference(collection_strategy = "unordered_map_like", map_equality = "key_only")] 25 | pub e: HashMap, 26 | #[difference( 27 | collection_strategy = "unordered_map_like", 28 | map_equality = "key_and_value" 29 | )] 30 | pub f: HashMap, 31 | } 32 | 33 | fn rand_string(rng: &mut WyRand) -> String { 34 | let base = vec![(); rng.generate_range::(5..15) as usize]; 35 | base.into_iter() 36 | .map(|_| rng.generate::() as u32) 37 | .filter_map(char::from_u32) 38 | .collect::() 39 | } 40 | 41 | fn rand_string_large(rng: &mut WyRand) -> String { 42 | let base = vec![(); rng.generate::() as usize]; 43 | base.into_iter() 44 | .map(|_| rng.generate::()) 45 | .filter_map(char::from_u32) 46 | .collect::() 47 | } 48 | 49 | impl TestBench { 50 | pub fn generate_random(rng: &mut WyRand) -> TestBench { 51 | TestBench { 52 | a: rand_string(rng), 53 | b: rng.generate::(), 54 | c: (0..rng.generate_range::(5..15)) 55 | .map(|_| rand_string(rng)) 56 | .into_iter() 57 | .collect(), 58 | d: (0..rng.generate_range::(5..15)) 59 | .map(|_| rand_string(rng)) 60 | .into_iter() 61 | .collect(), 62 | e: (0..rng.generate_range::(5..15)) 63 | .map(|_| (rng.generate::(), rand_string(rng))) 64 | .into_iter() 65 | .collect(), 66 | f: (0..rng.generate_range::(5..15)) 67 | .map(|_| (rng.generate::(), rand_string(rng))) 68 | .into_iter() 69 | .collect(), 70 | } 71 | } 72 | 73 | pub fn 
generate_random_large(rng: &mut WyRand) -> TestBench { 74 | TestBench { 75 | a: rand_string_large(rng), 76 | b: rng.generate::(), 77 | c: (0..rng.generate_range::(0..(u16::MAX / 5))) 78 | .map(|_| rand_string(rng)) 79 | .into_iter() 80 | .collect(), 81 | d: (0..rng.generate_range::(0..(u16::MAX / 5))) 82 | .map(|_| rand_string(rng)) 83 | .into_iter() 84 | .collect(), 85 | e: (0..rng.generate_range::(0..(u16::MAX / 5))) 86 | .map(|_| (rng.generate::(), rand_string(rng))) 87 | .into_iter() 88 | .collect(), 89 | f: (0..rng.generate_range::(0..(u16::MAX / 5))) 90 | .map(|_| (rng.generate::(), rand_string(rng))) 91 | .into_iter() 92 | .collect(), 93 | } 94 | } 95 | 96 | pub fn random_mutate(self, rng: &mut WyRand) -> Self { 97 | match rng.generate_range(0..6) { 98 | 0 => Self { 99 | a: rand_string(rng), 100 | ..self 101 | }, 102 | 1 => Self { 103 | b: rng.generate::(), 104 | ..self 105 | }, 106 | 2 => Self { 107 | c: self 108 | .c 109 | .into_iter() 110 | .filter(|_| rng.generate_range(0..100) < 30_u8) 111 | .collect::>() 112 | .into_iter() 113 | .map(|v| { 114 | if rng.generate_range(0..100) < 25_u8 { 115 | rand_string(rng) 116 | } else { 117 | v 118 | } 119 | }) 120 | .collect::>() 121 | .into_iter() 122 | .chain( 123 | (0..rng.generate_range::(0..(u8::MAX / 4))) 124 | .map(|_| rand_string(rng)), 125 | ) 126 | .collect(), 127 | ..self 128 | }, 129 | 3 => Self { 130 | d: self 131 | .d 132 | .into_iter() 133 | .filter(|_| rng.generate_range(0..100) < 30_u8) 134 | .collect::>() 135 | .into_iter() 136 | .map(|v| { 137 | if rng.generate_range(0..100) < 25_u8 { 138 | rand_string(rng) 139 | } else { 140 | v 141 | } 142 | }) 143 | .collect::>() 144 | .into_iter() 145 | .chain( 146 | (0..rng.generate_range::(0..(u8::MAX / 4))) 147 | .map(|_| rand_string(rng)), 148 | ) 149 | .collect(), 150 | ..self 151 | }, 152 | 4 => Self { 153 | e: self 154 | .e 155 | .into_iter() 156 | .filter(|_| rng.generate_range(0..100) < 25_u8) 157 | .collect::>() 158 | .into_iter() 159 | .map(|v| { 160 | if rng.generate_range(0..100) < 25_u8 { 161 | (rng.generate::(), rand_string(rng)) 162 | } else { 163 | v 164 | } 165 | }) 166 | .collect::>() 167 | .into_iter() 168 | .chain( 169 | (0..rng.generate_range::(0..(u8::MAX / 4))) 170 | .map(|_| (rng.generate::(), rand_string(rng))), 171 | ) 172 | .collect(), 173 | ..self 174 | }, 175 | 5 => Self { 176 | f: self 177 | .f 178 | .into_iter() 179 | .filter(|_| rng.generate_range(0..100) < 25_u8) 180 | .collect::>() 181 | .into_iter() 182 | .map(|v| { 183 | if rng.generate_range(0..100) < 25_u8 { 184 | (rng.generate::(), rand_string(rng)) 185 | } else { 186 | v 187 | } 188 | }) 189 | .collect::>() 190 | .into_iter() 191 | .chain( 192 | (0..rng.generate_range::(0..(u8::MAX / 4))) 193 | .map(|_| (rng.generate::(), rand_string(rng))), 194 | ) 195 | .collect(), 196 | ..self 197 | }, 198 | _ => self, 199 | } 200 | } 201 | 202 | pub fn random_mutate_large(self, rng: &mut WyRand) -> Self { 203 | match rng.generate_range(0..6) { 204 | 0 => Self { 205 | a: rand_string_large(rng), 206 | ..self 207 | }, 208 | 1 => Self { 209 | b: rng.generate::(), 210 | ..self 211 | }, 212 | 2 => Self { 213 | c: self 214 | .c 215 | .into_iter() 216 | .filter(|_| rng.generate_range(0..100) < 30_u8) 217 | .collect::>() 218 | .into_iter() 219 | .map(|v| { 220 | if rng.generate_range(0..100) < 25_u8 { 221 | rand_string(rng) 222 | } else { 223 | v 224 | } 225 | }) 226 | .collect::>() 227 | .into_iter() 228 | .chain( 229 | (0..rng.generate_range::(0..(u16::MAX / 5))) 230 | .map(|_| rand_string(rng)), 231 | ) 232 | 
.collect(), 233 | ..self 234 | }, 235 | 3 => Self { 236 | d: self 237 | .d 238 | .into_iter() 239 | .filter(|_| rng.generate_range(0..100) < 30_u8) 240 | .collect::>() 241 | .into_iter() 242 | .map(|v| { 243 | if rng.generate_range(0..100) < 25_u8 { 244 | rand_string(rng) 245 | } else { 246 | v 247 | } 248 | }) 249 | .collect::>() 250 | .into_iter() 251 | .chain( 252 | (0..rng.generate_range::(0..(u16::MAX / 5))) 253 | .map(|_| rand_string(rng)), 254 | ) 255 | .collect(), 256 | ..self 257 | }, 258 | 4 => Self { 259 | e: self 260 | .e 261 | .into_iter() 262 | .filter(|_| rng.generate_range(0..100) < 25_u8) 263 | .collect::>() 264 | .into_iter() 265 | .map(|v| { 266 | if rng.generate_range(0..100) < 25_u8 { 267 | (rng.generate::(), rand_string(rng)) 268 | } else { 269 | v 270 | } 271 | }) 272 | .collect::>() 273 | .into_iter() 274 | .chain( 275 | (0..rng.generate_range::(0..(u16::MAX / 5))) 276 | .map(|_| (rng.generate::(), rand_string(rng))), 277 | ) 278 | .collect(), 279 | ..self 280 | }, 281 | 5 => Self { 282 | f: self 283 | .f 284 | .into_iter() 285 | .filter(|_| rng.generate_range(0..100) < 25_u8) 286 | .collect::>() 287 | .into_iter() 288 | .map(|v| { 289 | if rng.generate_range(0..100) < 25_u8 { 290 | (rng.generate::(), rand_string(rng)) 291 | } else { 292 | v 293 | } 294 | }) 295 | .collect::>() 296 | .into_iter() 297 | .chain( 298 | (0..rng.generate_range::(0..(u16::MAX / 5))) 299 | .map(|_| (rng.generate::(), rand_string(rng))), 300 | ) 301 | .collect(), 302 | ..self 303 | }, 304 | _ => self, 305 | } 306 | } 307 | 308 | #[track_caller] 309 | pub fn assert_eq(self, right: TestBench, diff: &Vec<::Diff>) { 310 | assert_eq!(self.a, right.a, "{:?}", diff); 311 | assert_eq!(self.b, right.b, "{:?}", diff); 312 | assert_eq_unordered_sort!(self.c, right.c, "{:?}", diff); 313 | assert_eq_unordered_sort!(self.d, right.d, "{:?}", diff); 314 | assert_eq_unordered_sort!( 315 | self.e.iter().map(|x| x.0).collect::>(), 316 | right.e.iter().map(|x| x.0).collect::>(), 317 | "{:?}", 318 | diff 319 | ); 320 | assert_eq_unordered_sort!(self.f, right.f, "{:?}", diff); 321 | } 322 | } 323 | 324 | #[cfg(test)] 325 | mod size_tests { 326 | use bincode::Options; 327 | 328 | use super::*; 329 | 330 | #[test] 331 | fn test_sizes_basic() { 332 | structdiff_size::size_basic(); 333 | #[cfg(feature = "compare")] 334 | { 335 | serde_diff_size::size_basic(); 336 | diff_struct_size::size_basic(); 337 | } 338 | } 339 | 340 | #[ignore] 341 | #[test] 342 | fn test_sizes_large() { 343 | structdiff_size::size_large(); 344 | #[cfg(feature = "compare")] 345 | { 346 | serde_diff_size::size_large(); 347 | diff_struct_size::size_large(); 348 | } 349 | } 350 | 351 | #[test] 352 | fn test_sizes_basic_mut() { 353 | structdiff_size_mut::size_basic_mut(); 354 | #[cfg(feature = "compare")] 355 | { 356 | serde_diff_size_mut::size_basic_mut(); 357 | diff_struct_size_mut::size_basic_mut(); 358 | } 359 | } 360 | 361 | #[ignore] 362 | #[test] 363 | fn test_sizes_large_mut() { 364 | structdiff_size_mut::size_large_mut(); 365 | #[cfg(feature = "compare")] 366 | { 367 | serde_diff_size_mut::size_large_mut(); 368 | diff_struct_size_mut::size_large_mut(); 369 | } 370 | } 371 | 372 | mod structdiff_size { 373 | use super::*; 374 | 375 | pub fn size_basic() { 376 | let mut bytes = 0_u64; 377 | let mut rng = WyRand::new(); 378 | for _i in 0..100 { 379 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 380 | let second = std::hint::black_box(TestBench::generate_random(&mut rng)); 381 | let diff = StructDiff::diff(&first, 
&second); 382 | bytes += bincode::serialized_size(&diff).unwrap(); 383 | } 384 | println!("StructDiff - small: {} bytes", bytes as f64 / 100.0) 385 | } 386 | 387 | pub fn size_large() { 388 | let mut bytes = 0_u64; 389 | let mut rng = WyRand::new(); 390 | for _i in 0..100 { 391 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 392 | let second = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 393 | bytes += bincode::serialized_size(&StructDiff::diff(&first, &second)).unwrap(); 394 | } 395 | println!("StructDiff - large: {} bytes", bytes as f64 / 100.0) 396 | } 397 | } 398 | 399 | #[cfg(feature = "compare")] 400 | mod diff_struct_size { 401 | use diff::Diff; 402 | 403 | use super::*; 404 | 405 | pub fn size_basic() { 406 | let mut bytes = 0_u64; 407 | let mut rng = WyRand::new(); 408 | for _i in 0..100 { 409 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 410 | let second = std::hint::black_box(TestBench::generate_random(&mut rng)); 411 | let diff = Diff::diff(&first, &second); 412 | bytes += bincode::serialized_size(&diff).unwrap(); 413 | } 414 | 415 | println!("Diff-Struct - small: {} bytes", bytes as f64 / 100.0) 416 | } 417 | 418 | pub fn size_large() { 419 | let mut bytes = 0_u64; 420 | let mut rng = WyRand::new(); 421 | for _ in 0..100 { 422 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 423 | let second = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 424 | bytes += bincode::serialized_size(&Diff::diff(&first, &second)).unwrap(); 425 | } 426 | println!("Diff-Struct - large: {} bytes", bytes as f64 / 100.0) 427 | } 428 | } 429 | 430 | #[cfg(feature = "compare")] 431 | mod serde_diff_size { 432 | use super::*; 433 | 434 | pub fn size_basic() { 435 | let mut bytes = 0_u64; 436 | let mut rng = WyRand::new(); 437 | let options = bincode::DefaultOptions::new() 438 | .with_fixint_encoding() 439 | .allow_trailing_bytes(); 440 | for _ in 0..100 { 441 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 442 | let second = std::hint::black_box(TestBench::generate_random(&mut rng)); 443 | let diff = std::hint::black_box( 444 | options 445 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 446 | .unwrap(), 447 | ); 448 | bytes += bincode::serialized_size(&diff).unwrap(); 449 | } 450 | println!("Serde-Diff - small: {} bytes", bytes as f64 / 100.0) 451 | } 452 | 453 | pub fn size_large() { 454 | let mut bytes = 0_u64; 455 | let mut rng = WyRand::new(); 456 | let options = bincode::DefaultOptions::new() 457 | .with_fixint_encoding() 458 | .allow_trailing_bytes(); 459 | for _ in 0..100 { 460 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 461 | let second = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 462 | let diff = std::hint::black_box( 463 | options 464 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 465 | .unwrap(), 466 | ); 467 | bytes += bincode::serialized_size(&diff).unwrap(); 468 | } 469 | println!("Serde-Diff - large: {} bytes", bytes as f64 / 100.0) 470 | } 471 | } 472 | 473 | mod structdiff_size_mut { 474 | use super::*; 475 | 476 | pub fn size_basic_mut() { 477 | let mut bytes = 0_u64; 478 | let mut rng = WyRand::new(); 479 | for _ in 0..100 { 480 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 481 | let second = std::hint::black_box(first.clone().random_mutate(&mut rng)); 482 | let diff = StructDiff::diff(&first, &second); 483 | 484 | 
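// `bincode::serialized_size` reports how many bytes the encoded diff would occupy
// (it counts rather than writes the bytes); summing it over the 100 samples and
// dividing by 100.0 gives the mean payload size that the `println!` below prints.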
bytes += bincode::serialized_size(&diff).unwrap(); 485 | } 486 | println!("StructDiff - mut small: {} bytes", bytes as f64 / 100.0) 487 | } 488 | 489 | pub fn size_large_mut() { 490 | let mut bytes = 0_u64; 491 | let mut rng = WyRand::new(); 492 | for _ in 0..100 { 493 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 494 | let second = std::hint::black_box(first.clone().random_mutate_large(&mut rng)); 495 | let diff = StructDiff::diff(&first, &second); 496 | bytes += bincode::serialized_size(&diff).unwrap(); 497 | } 498 | println!("StructDiff - mut large: {} bytes", bytes as f64 / 100.0) 499 | } 500 | } 501 | 502 | #[cfg(feature = "compare")] 503 | mod diff_struct_size_mut { 504 | use diff::Diff; 505 | 506 | use super::*; 507 | 508 | pub fn size_basic_mut() { 509 | let mut bytes = 0_u64; 510 | let mut rng = WyRand::new(); 511 | let options = bincode::DefaultOptions::new() 512 | .with_fixint_encoding() 513 | .allow_trailing_bytes(); 514 | for _ in 0..100 { 515 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 516 | let second = std::hint::black_box(first.clone().random_mutate(&mut rng)); 517 | let diff = 518 | std::hint::black_box(options.serialize(&Diff::diff(&first, &second)).unwrap()); 519 | 520 | bytes += bincode::serialized_size(&diff).unwrap(); 521 | } 522 | println!("Diff-Struct - mut small: {} bytes", bytes as f64 / 100.0) 523 | } 524 | 525 | pub fn size_large_mut() { 526 | let mut bytes = 0_u64; 527 | let mut rng = WyRand::new(); 528 | let options = bincode::DefaultOptions::new() 529 | .with_fixint_encoding() 530 | .allow_trailing_bytes(); 531 | for _ in 0..100 { 532 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 533 | let second = std::hint::black_box(first.clone().random_mutate_large(&mut rng)); 534 | let diff = 535 | std::hint::black_box(options.serialize(&Diff::diff(&first, &second)).unwrap()); 536 | bytes += bincode::serialized_size(&diff).unwrap(); 537 | } 538 | println!("Diff-Struct - mut large: {} bytes", bytes as f64 / 100.0) 539 | } 540 | } 541 | 542 | #[cfg(feature = "compare")] 543 | mod serde_diff_size_mut { 544 | use bincode::Options; 545 | 546 | use super::*; 547 | 548 | pub fn size_basic_mut() { 549 | let mut bytes = 0_u64; 550 | let mut rng = WyRand::new(); 551 | let options = bincode::DefaultOptions::new() 552 | .with_fixint_encoding() 553 | .allow_trailing_bytes(); 554 | for _ in 0..100 { 555 | let first = std::hint::black_box(TestBench::generate_random(&mut rng)); 556 | let second = std::hint::black_box(first.clone().random_mutate(&mut rng)); 557 | let diff = std::hint::black_box( 558 | options 559 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 560 | .unwrap(), 561 | ); 562 | 563 | bytes += bincode::serialized_size(&diff).unwrap(); 564 | } 565 | println!("Serde-Diff - mut small: {} bytes", bytes as f64 / 100.0) 566 | } 567 | 568 | pub fn size_large_mut() { 569 | let mut bytes = 0_u64; 570 | let mut rng = WyRand::new(); 571 | let options = bincode::DefaultOptions::new() 572 | .with_fixint_encoding() 573 | .allow_trailing_bytes(); 574 | for _ in 0..100 { 575 | let first = std::hint::black_box(TestBench::generate_random_large(&mut rng)); 576 | let second = std::hint::black_box(first.clone().random_mutate_large(&mut rng)); 577 | let diff = std::hint::black_box( 578 | options 579 | .serialize(&serde_diff::Diff::serializable(&first, &second)) 580 | .unwrap(), 581 | ); 582 | bytes += bincode::serialized_size(&diff).unwrap(); 583 | } 584 | println!("Serde-Diff - mut 
large: {} bytes", bytes as f64 / 100.0) 585 | } 586 | } 587 | } 588 | -------------------------------------------------------------------------------- /src/collections/unordered_map_like.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "nanoserde")] 2 | use nanoserde::{DeBin, SerBin}; 3 | #[cfg(feature = "serde")] 4 | use serde::{Deserialize, Serialize}; 5 | #[cfg(not(feature = "rustc_hash"))] 6 | type HashMap = std::collections::HashMap; 7 | #[cfg(feature = "rustc_hash")] 8 | type HashMap = 9 | std::collections::HashMap>; 10 | 11 | use std::hash::Hash; 12 | 13 | #[derive(Debug, Clone)] 14 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 15 | pub(crate) enum UnorderedMapLikeChange { 16 | InsertMany(K, V, usize), 17 | RemoveMany(K, usize), 18 | InsertSingle(K, V), 19 | RemoveSingle(K), 20 | } 21 | 22 | impl<'a, K: Clone, V: Clone> From> 23 | for UnorderedMapLikeChange 24 | { 25 | fn from(value: UnorderedMapLikeChange<&'a K, &'a V>) -> Self { 26 | match value { 27 | UnorderedMapLikeChange::InsertMany(key, value, count) => { 28 | UnorderedMapLikeChange::InsertMany(key.clone(), value.clone(), count) 29 | } 30 | UnorderedMapLikeChange::RemoveMany(key, count) => { 31 | UnorderedMapLikeChange::RemoveMany(key.clone(), count) 32 | } 33 | UnorderedMapLikeChange::InsertSingle(key, value) => { 34 | UnorderedMapLikeChange::InsertSingle(key.clone(), value.clone()) 35 | } 36 | UnorderedMapLikeChange::RemoveSingle(key) => { 37 | UnorderedMapLikeChange::RemoveSingle(key.clone()) 38 | } 39 | } 40 | } 41 | } 42 | 43 | #[derive(Debug, Clone)] 44 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 45 | pub(crate) enum UnorderedMapLikeDiffInternal { 46 | Replace(Vec<(K, V)>), 47 | Modify(Vec>), 48 | } 49 | 50 | #[repr(transparent)] 51 | #[derive(Debug, Clone)] 52 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 53 | pub struct UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal); 54 | 55 | impl<'a, K: Clone, V: Clone> From> 56 | for UnorderedMapLikeDiff 57 | { 58 | fn from(value: UnorderedMapLikeDiff<&'a K, &'a V>) -> Self { 59 | let new_inner = match value.0 { 60 | UnorderedMapLikeDiffInternal::Replace(replace) => { 61 | UnorderedMapLikeDiffInternal::Replace( 62 | replace 63 | .into_iter() 64 | .map(|(k, v)| (k.clone(), v.clone())) 65 | .collect(), 66 | ) 67 | } 68 | UnorderedMapLikeDiffInternal::Modify(modify) => { 69 | UnorderedMapLikeDiffInternal::Modify(modify.into_iter().map(Into::into).collect()) 70 | } 71 | }; 72 | Self(new_inner) 73 | } 74 | } 75 | 76 | fn collect_into_key_eq_map< 77 | 'a, 78 | K: Hash + PartialEq + Eq + 'a, 79 | V: 'a, 80 | B: Iterator, 81 | >( 82 | list: B, 83 | ) -> HashMap<&'a K, (&'a V, usize)> { 84 | let mut map: HashMap<&K, (&V, usize)> = HashMap::default(); 85 | map.reserve(list.size_hint().1.unwrap_or_default()); 86 | for (key, value) in list { 87 | match map.get_mut(&key) { 88 | Some((_, count)) => *count += 1, 89 | None => { 90 | map.insert(key, (value, 1_usize)); 91 | } 92 | } 93 | } 94 | map 95 | } 96 | 97 | fn collect_into_key_value_eq_map< 98 | 'a, 99 | K: Hash + PartialEq + Eq + 'a, 100 | V: PartialEq + 'a, 101 | B: Iterator, 102 | >( 103 | list: B, 104 | ) -> HashMap<&'a K, (&'a V, usize)> { 105 | let mut map: HashMap<&K, (&V, usize)> = HashMap::default(); 106 | map.reserve(list.size_hint().1.unwrap_or_default()); 107 | 108 | for (key, value) in list { 109 | match map.get_mut(&key) { 110 | Some((ref current_val, count)) => match current_val == &value { 111 | true => 
*count += 1, 112 | false => { 113 | map.insert(key, (value, 1_usize)); 114 | } 115 | }, 116 | None => { 117 | map.insert(key, (value, 1_usize)); 118 | } 119 | } 120 | } 121 | map 122 | } 123 | 124 | enum Operation { 125 | Insert, 126 | Remove, 127 | } 128 | 129 | impl UnorderedMapLikeChange { 130 | fn new(item: (K, V), count: usize, insert_or_remove: Operation) -> Self { 131 | #[cfg(feature = "debug_asserts")] 132 | debug_assert_ne!(count, 0); 133 | match (insert_or_remove, count) { 134 | (Operation::Insert, 1) => UnorderedMapLikeChange::InsertSingle(item.0, item.1), 135 | (Operation::Insert, val) => UnorderedMapLikeChange::InsertMany(item.0, item.1, val), 136 | (Operation::Remove, 1) => UnorderedMapLikeChange::RemoveSingle(item.0), 137 | 138 | (Operation::Remove, val) => UnorderedMapLikeChange::RemoveMany(item.0, val), 139 | } 140 | } 141 | } 142 | 143 | pub fn unordered_hashcmp< 144 | 'a, 145 | #[cfg(feature = "nanoserde")] K: Hash + Clone + PartialEq + Eq + SerBin + DeBin + std::fmt::Debug + 'a, 146 | #[cfg(not(feature = "nanoserde"))] K: Hash + Clone + PartialEq + Eq + 'a, 147 | V: Clone + PartialEq + std::fmt::Debug + 'a, 148 | B: Iterator, 149 | >( 150 | previous: B, 151 | current: B, 152 | key_only: bool, 153 | ) -> Option> { 154 | let (mut previous, current) = if key_only { 155 | ( 156 | collect_into_key_eq_map(previous), 157 | collect_into_key_eq_map(current), 158 | ) 159 | } else { 160 | ( 161 | collect_into_key_value_eq_map(previous), 162 | collect_into_key_value_eq_map(current), 163 | ) 164 | }; 165 | 166 | if (current.len() as isize) < ((previous.len() as isize) - (current.len() as isize)) { 167 | return Some(UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Replace( 168 | current 169 | .into_iter() 170 | .flat_map(|(k, (v, count))| std::iter::repeat_n((k, v), count)) 171 | .collect(), 172 | ))); 173 | } 174 | 175 | let mut ret: Vec> = 176 | Vec::with_capacity((previous.len() + current.len()) >> 1); 177 | 178 | for (&k, &(v, current_count)) in current.iter() { 179 | match previous.remove(&k) { 180 | Some((prev_val, prev_count)) if prev_val == v => { 181 | match (current_count as i128) - (prev_count as i128) { 182 | add if add > 1 => ret.push(UnorderedMapLikeChange::new( 183 | (k, v), 184 | add as usize, 185 | Operation::Insert, 186 | )), 187 | add if add == 1 => ret.push(UnorderedMapLikeChange::new( 188 | (k, v), 189 | add as usize, 190 | Operation::Insert, 191 | )), 192 | sub if sub < 0 => ret.push(UnorderedMapLikeChange::new( 193 | (k, v), 194 | -sub as usize, 195 | Operation::Remove, 196 | )), 197 | sub if sub == -1 => ret.push(UnorderedMapLikeChange::new( 198 | (k, v), 199 | -sub as usize, 200 | Operation::Remove, 201 | )), 202 | _ => (), 203 | } 204 | } 205 | Some((prev_val, prev_count)) if prev_val != v => { 206 | ret.push(UnorderedMapLikeChange::new( 207 | (k, prev_val), 208 | prev_count, 209 | Operation::Remove, 210 | )); 211 | ret.push(UnorderedMapLikeChange::new( 212 | (k, v), 213 | current_count, 214 | Operation::Insert, 215 | )); 216 | } 217 | Some(_) => unreachable!(), 218 | None => ret.push(UnorderedMapLikeChange::new( 219 | (k, v), 220 | current_count, 221 | Operation::Insert, 222 | )), 223 | } 224 | } 225 | 226 | for (k, (v, count)) in previous.into_iter() { 227 | ret.push(UnorderedMapLikeChange::new( 228 | (k, v), 229 | count, 230 | Operation::Remove, 231 | )) 232 | } 233 | 234 | ret.shrink_to_fit(); 235 | 236 | match ret.is_empty() { 237 | true => None, 238 | false => Some(UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Modify( 239 | ret, 240 | ))), 241 | 
} 242 | } 243 | 244 | pub fn apply_unordered_hashdiffs< 245 | #[cfg(feature = "nanoserde")] K: Hash + Clone + PartialEq + Eq + SerBin + DeBin + 'static, 246 | #[cfg(not(feature = "nanoserde"))] K: Hash + Clone + PartialEq + Eq + 'static, 247 | V: Clone + 'static, 248 | B: IntoIterator, 249 | >( 250 | list: B, 251 | diffs: UnorderedMapLikeDiff, 252 | ) -> Box> { 253 | let diffs = match diffs { 254 | UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Replace(replacement)) => { 255 | return Box::new(replacement.into_iter()); 256 | } 257 | UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Modify(diffs)) => diffs, 258 | }; 259 | 260 | let (insertions, removals): (Vec<_>, Vec<_>) = diffs.into_iter().partition(|x| match &x { 261 | UnorderedMapLikeChange::InsertMany(..) | UnorderedMapLikeChange::InsertSingle(..) => true, 262 | UnorderedMapLikeChange::RemoveMany(..) | UnorderedMapLikeChange::RemoveSingle(..) => false, 263 | }); 264 | let holder: Vec<_> = list.into_iter().collect(); 265 | // let ref_holder: Vec<_> = holder.iter().map(|(k, v)| (k, v)).collect(); 266 | let mut list_hash = collect_into_key_eq_map(holder.iter().map(|t| (&t.0, &t.1))); 267 | 268 | for remove in removals { 269 | match remove { 270 | UnorderedMapLikeChange::RemoveMany(key, count) => match list_hash.get_mut(&key) { 271 | Some(val) if val.1 > count => { 272 | val.1 -= count; 273 | } 274 | Some(val) if val.1 <= count => { 275 | list_hash.remove(&key); 276 | } 277 | _ => (), 278 | }, 279 | UnorderedMapLikeChange::RemoveSingle(key) => match list_hash.get_mut(&key) { 280 | Some(val) if val.1 > 1 => { 281 | val.1 -= 1; 282 | } 283 | Some(val) if val.1 <= 1 => { 284 | list_hash.remove(&key); 285 | } 286 | _ => (), 287 | }, 288 | _ => unreachable!("Sorting failure"), 289 | } 290 | } 291 | 292 | for insertion in insertions.iter() { 293 | match insertion { 294 | UnorderedMapLikeChange::InsertMany(key, value, count) => { 295 | match list_hash.get_mut(&key) { 296 | Some(val) => { 297 | val.1 += count; 298 | } 299 | None => { 300 | list_hash.insert(key, (value, *count)); 301 | } 302 | } 303 | } 304 | UnorderedMapLikeChange::InsertSingle(key, value) => match list_hash.get_mut(&key) { 305 | Some(val) => { 306 | val.1 += 1; 307 | } 308 | None => { 309 | list_hash.insert(key, (value, 1)); 310 | } 311 | }, 312 | _ => { 313 | #[cfg(all(debug_assertions, feature = "debug_asserts"))] 314 | panic!("Sorting failure") 315 | } 316 | } 317 | } 318 | 319 | Box::new( 320 | list_hash 321 | .into_iter() 322 | .flat_map(|(k, (v, count))| std::iter::repeat_n((k.clone(), v.clone()), count)) 323 | .collect::>() 324 | .into_iter(), 325 | ) 326 | } 327 | 328 | #[cfg(feature = "nanoserde")] 329 | mod nanoserde_impls { 330 | use super::{ 331 | DeBin, SerBin, UnorderedMapLikeChange, UnorderedMapLikeDiff, UnorderedMapLikeDiffInternal, 332 | }; 333 | 334 | impl SerBin for UnorderedMapLikeChange 335 | where 336 | K: SerBin + PartialEq + Clone + DeBin, 337 | V: SerBin + PartialEq + Clone + DeBin, 338 | { 339 | fn ser_bin(&self, output: &mut Vec) { 340 | match self { 341 | Self::InsertMany(k, v, c) => { 342 | 0_u8.ser_bin(output); 343 | k.ser_bin(output); 344 | v.ser_bin(output); 345 | c.ser_bin(output); 346 | } 347 | Self::RemoveMany(k, c) => { 348 | 1_u8.ser_bin(output); 349 | k.ser_bin(output); 350 | c.ser_bin(output); 351 | } 352 | Self::InsertSingle(k, v) => { 353 | 2_u8.ser_bin(output); 354 | k.ser_bin(output); 355 | v.ser_bin(output); 356 | } 357 | Self::RemoveSingle(k) => { 358 | 3_u8.ser_bin(output); 359 | k.ser_bin(output); 360 | } 361 | } 362 | } 363 | } 
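// Wire format for these hand-rolled impls: a leading tag byte (0 InsertMany,
// 1 RemoveMany, 2 InsertSingle, 3 RemoveSingle) followed by the variant's fields.
// The borrowed `&UnorderedMapLikeChange<&K, &V>` impl below emits the identical
// layout, so a diff serialized from borrowed data can later be deserialized into
// the owned form by the `DeBin` impls further down.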
364 | 365 | impl SerBin for &UnorderedMapLikeChange<&K, &V> 366 | where 367 | K: SerBin + PartialEq + Clone + DeBin, 368 | V: SerBin + PartialEq + Clone + DeBin, 369 | { 370 | fn ser_bin(&self, output: &mut Vec) { 371 | match *self { 372 | UnorderedMapLikeChange::InsertMany(k, v, c) => { 373 | 0_u8.ser_bin(output); 374 | k.ser_bin(output); 375 | v.ser_bin(output); 376 | c.ser_bin(output); 377 | } 378 | UnorderedMapLikeChange::RemoveMany(k, c) => { 379 | 1_u8.ser_bin(output); 380 | k.ser_bin(output); 381 | c.ser_bin(output); 382 | } 383 | UnorderedMapLikeChange::InsertSingle(k, v) => { 384 | 2_u8.ser_bin(output); 385 | k.ser_bin(output); 386 | v.ser_bin(output); 387 | } 388 | UnorderedMapLikeChange::RemoveSingle(k) => { 389 | 3_u8.ser_bin(output); 390 | k.ser_bin(output); 391 | } 392 | } 393 | } 394 | } 395 | 396 | impl SerBin for UnorderedMapLikeDiff 397 | where 398 | K: SerBin + PartialEq + Clone + DeBin, 399 | V: SerBin + PartialEq + Clone + DeBin, 400 | { 401 | fn ser_bin(&self, output: &mut Vec) { 402 | match &self.0 { 403 | UnorderedMapLikeDiffInternal::Replace(val) => { 404 | 0_u8.ser_bin(output); 405 | val.len().ser_bin(output); 406 | for (key, value) in val { 407 | key.ser_bin(output); 408 | value.ser_bin(output); 409 | } 410 | } 411 | UnorderedMapLikeDiffInternal::Modify(val) => { 412 | 1_u8.ser_bin(output); 413 | val.len().ser_bin(output); 414 | for change_spec in val { 415 | change_spec.ser_bin(output); 416 | } 417 | } 418 | } 419 | } 420 | } 421 | 422 | impl SerBin for &UnorderedMapLikeDiff<&K, &V> 423 | where 424 | K: SerBin + PartialEq + Clone + DeBin, 425 | V: SerBin + PartialEq + Clone + DeBin, 426 | { 427 | fn ser_bin(&self, output: &mut Vec) { 428 | match &self.0 { 429 | UnorderedMapLikeDiffInternal::Replace(val) => { 430 | 0_u8.ser_bin(output); 431 | val.len().ser_bin(output); 432 | for (key, value) in val { 433 | key.ser_bin(output); 434 | value.ser_bin(output); 435 | } 436 | } 437 | UnorderedMapLikeDiffInternal::Modify(val) => { 438 | 1_u8.ser_bin(output); 439 | val.len().ser_bin(output); 440 | for change_spec in val { 441 | change_spec.ser_bin(output); 442 | } 443 | } 444 | } 445 | } 446 | } 447 | 448 | impl DeBin for UnorderedMapLikeChange 449 | where 450 | K: SerBin + PartialEq + Clone + DeBin, 451 | V: SerBin + PartialEq + Clone + DeBin, 452 | { 453 | fn de_bin( 454 | offset: &mut usize, 455 | bytes: &[u8], 456 | ) -> Result, nanoserde::DeBinErr> { 457 | let id: u8 = DeBin::de_bin(offset, bytes)?; 458 | core::result::Result::Ok(match id { 459 | 0_u8 => UnorderedMapLikeChange::InsertMany( 460 | DeBin::de_bin(offset, bytes)?, 461 | DeBin::de_bin(offset, bytes)?, 462 | DeBin::de_bin(offset, bytes)?, 463 | ), 464 | 1_u8 => UnorderedMapLikeChange::RemoveMany( 465 | DeBin::de_bin(offset, bytes)?, 466 | DeBin::de_bin(offset, bytes)?, 467 | ), 468 | 2_u8 => UnorderedMapLikeChange::InsertSingle( 469 | DeBin::de_bin(offset, bytes)?, 470 | DeBin::de_bin(offset, bytes)?, 471 | ), 472 | 3_u8 => UnorderedMapLikeChange::RemoveSingle(DeBin::de_bin(offset, bytes)?), 473 | _ => { 474 | return core::result::Result::Err(nanoserde::DeBinErr { 475 | o: *offset, 476 | l: 0, 477 | s: bytes.len(), 478 | }) 479 | } 480 | }) 481 | } 482 | } 483 | 484 | impl DeBin for UnorderedMapLikeDiff 485 | where 486 | K: SerBin + PartialEq + Clone + DeBin, 487 | V: SerBin + PartialEq + Clone + DeBin, 488 | { 489 | fn de_bin( 490 | offset: &mut usize, 491 | bytes: &[u8], 492 | ) -> Result, nanoserde::DeBinErr> { 493 | let id: u8 = DeBin::de_bin(offset, bytes)?; 494 | core::result::Result::Ok(match id { 
495 | 0_u8 => { 496 | let len: usize = DeBin::de_bin(offset, bytes)?; 497 | let mut contents: Vec<(K, V)> = Vec::new(); 498 | for _ in 0..len { 499 | let content = DeBin::de_bin(offset, bytes)?; 500 | contents.push(content); 501 | } 502 | UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Replace(contents)) 503 | } 504 | 1_u8 => { 505 | let len: usize = DeBin::de_bin(offset, bytes)?; 506 | let mut contents: Vec> = Vec::new(); 507 | for _ in 0..len { 508 | let content = DeBin::de_bin(offset, bytes)?; 509 | contents.push(content); 510 | } 511 | UnorderedMapLikeDiff(UnorderedMapLikeDiffInternal::Modify(contents)) 512 | } 513 | _ => { 514 | return core::result::Result::Err(nanoserde::DeBinErr { 515 | o: *offset, 516 | l: 0, 517 | s: bytes.len(), 518 | }) 519 | } 520 | }) 521 | } 522 | } 523 | } 524 | 525 | #[cfg(test)] 526 | mod test { 527 | use std::collections::{BTreeMap, HashMap}; 528 | 529 | use super::{UnorderedMapLikeDiff, UnorderedMapLikeDiffInternal}; 530 | use crate::{Difference, StructDiff}; 531 | 532 | use crate as structdiff; 533 | 534 | #[test] 535 | fn test_key_only() { 536 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 537 | #[difference(setters)] 538 | struct TestCollection { 539 | #[difference(collection_strategy = "unordered_map_like")] 540 | test1: HashMap, 541 | #[difference(collection_strategy = "unordered_map_like")] 542 | test2: BTreeMap, 543 | #[difference(collection_strategy = "unordered_map_like")] 544 | test3: HashMap, 545 | #[difference(collection_strategy = "unordered_map_like")] 546 | test4: BTreeMap, 547 | } 548 | 549 | let first = TestCollection { 550 | test1: vec![(10, 0), (15, 2), (20, 0), (25, 0), (30, 15)] 551 | .into_iter() 552 | .collect(), 553 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 0)] 554 | .into_iter() 555 | .collect(), 556 | test3: vec![(10, 0), (15, 2), (20, 0), (25, 0), (30, 15)] 557 | .into_iter() 558 | .collect(), 559 | test4: vec![(10, 0), (15, 2), (20, 0), (25, 0)] 560 | .into_iter() 561 | .collect(), 562 | }; 563 | 564 | let second = TestCollection { 565 | test1: Default::default(), 566 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 0), (10, 0)] 567 | .into_iter() 568 | .collect(), 569 | test3: vec![(10, 0), (15, 2), (20, 0), (25, 0)] 570 | .into_iter() 571 | .collect(), 572 | test4: vec![(10, 0), (15, 2), (20, 0), (25, 0), (15, 2)] 573 | .into_iter() 574 | .collect(), // add duplicated field 575 | }; 576 | 577 | let diffs = first.diff(&second); 578 | 579 | type TestCollectionFields = ::Diff; 580 | 581 | if let TestCollectionFields::test1(UnorderedMapLikeDiff( 582 | UnorderedMapLikeDiffInternal::Replace(val), 583 | )) = &diffs[0] 584 | { 585 | assert_eq!(val.len(), 0); 586 | } else { 587 | panic!("Collection strategy failure"); 588 | } 589 | 590 | let diffed = first.apply(diffs); 591 | 592 | use assert_unordered::assert_eq_unordered; 593 | assert_eq_unordered!(diffed.test1, second.test1); 594 | assert_eq_unordered!(diffed.test2, second.test2); 595 | assert_eq_unordered!(diffed.test3, second.test3); 596 | assert_eq_unordered!(diffed.test4, second.test4); 597 | } 598 | 599 | #[test] 600 | fn test_key_value() { 601 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 602 | #[difference(setters)] 603 | struct TestCollection { 604 | #[difference( 605 | collection_strategy = "unordered_map_like", 606 | map_equality = "key_and_value" 607 | )] 608 | test1: HashMap, 609 | } 610 | 611 | let first = TestCollection { 612 | test1: vec![(10, 0), (15, 2), (20, 0), (25, 0), (30, 15)] 613 | .into_iter() 614 | .collect(), 615 | }; 616 | 617 
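// `second` keeps every key from `first` but stores a different value under key 10,
// so with `map_equality = "key_and_value"` the diff should carry a remove for the
// stale (10, 0) entry plus an insert of (10, 21).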
| let second = TestCollection { 618 | test1: vec![(10, 21), (15, 2), (20, 0), (25, 0), (30, 15)] 619 | .into_iter() 620 | .collect(), 621 | }; 622 | 623 | let diffs = first.diff(&second); 624 | 625 | let diffed = first.clone().apply(diffs); 626 | 627 | use assert_unordered::assert_eq_unordered; 628 | assert_eq_unordered!(&diffed.test1, &second.test1); 629 | 630 | let diffs = first.diff_ref(&second); 631 | 632 | let diffed = first 633 | .clone() 634 | .apply(diffs.into_iter().map(Into::into).collect()); 635 | 636 | assert_eq_unordered!(diffed.test1, second.test1); 637 | } 638 | } 639 | -------------------------------------------------------------------------------- /src/collections/unordered_map_like_recursive.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "nanoserde")] 2 | use nanoserde::{DeBin, SerBin}; 3 | #[cfg(feature = "serde")] 4 | use serde::{Deserialize, Serialize}; 5 | #[cfg(feature = "debug_diffs")] 6 | use std::fmt::Debug; 7 | 8 | #[cfg(not(feature = "rustc_hash"))] 9 | type HashMap = std::collections::HashMap; 10 | #[cfg(feature = "rustc_hash")] 11 | type HashMap = 12 | std::collections::HashMap>; 13 | 14 | use std::{hash::Hash, marker::PhantomData}; 15 | 16 | use crate::StructDiff; 17 | 18 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 19 | #[derive(Clone)] 20 | #[cfg_attr(feature = "serde", derive(Serialize))] 21 | pub(crate) enum UnorderedMapLikeRecursiveChangeRef<'a, K: Clone, V: StructDiff + Clone> { 22 | Insert((&'a K, &'a V)), 23 | Remove(&'a K), 24 | Change((&'a K, Vec>)), 25 | } 26 | 27 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 28 | #[derive(Clone)] 29 | #[cfg_attr(feature = "serde", derive(Serialize))] 30 | pub(crate) enum UnorderedMapLikeRecursiveDiffInternalRef<'a, K: Clone, V: StructDiff + Clone> { 31 | Replace(Vec<(&'a K, &'a V)>), 32 | Modify(Vec>), 33 | } 34 | 35 | /// Used internally by StructDiff to track recursive changes to a map-like collection 36 | #[repr(transparent)] 37 | #[derive(Clone)] 38 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 39 | #[cfg_attr(feature = "serde", derive(Serialize))] 40 | pub struct UnorderedMapLikeRecursiveDiffRef<'a, K: Clone, V: StructDiff + Clone>( 41 | UnorderedMapLikeRecursiveDiffInternalRef<'a, K, V>, 42 | ); 43 | 44 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 45 | #[derive(Clone)] 46 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 47 | pub(crate) enum UnorderedMapLikeRecursiveChangeOwned { 48 | Insert((K, V)), 49 | Remove(K), 50 | Change((K, Vec)), 51 | } 52 | 53 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 54 | #[derive(Clone)] 55 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 56 | pub(crate) enum UnorderedMapLikeRecursiveDiffInternalOwned { 57 | Replace(Vec<(K, V)>), 58 | Modify(Vec>), 59 | } 60 | 61 | /// Used internally by StructDiff to track recursive changes to a map-like collection 62 | #[repr(transparent)] 63 | #[derive(Clone)] 64 | #[cfg_attr(feature = "debug_diffs", derive(Debug))] 65 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 66 | pub struct UnorderedMapLikeRecursiveDiffOwned( 67 | UnorderedMapLikeRecursiveDiffInternalOwned, 68 | ); 69 | 70 | impl<'a, K: Clone, V: StructDiff + Clone> From> 71 | for UnorderedMapLikeRecursiveDiffOwned 72 | { 73 | fn from(value: UnorderedMapLikeRecursiveDiffRef<'a, K, V>) -> Self { 74 | let new_inner: UnorderedMapLikeRecursiveDiffInternalOwned = match value.0 { 75 | UnorderedMapLikeRecursiveDiffInternalRef::Replace(vals) => 
{ 76 | UnorderedMapLikeRecursiveDiffInternalOwned::Replace( 77 | vals.into_iter() 78 | .map(|(k, v)| (k.clone(), v.clone())) 79 | .collect(), 80 | ) 81 | } 82 | UnorderedMapLikeRecursiveDiffInternalRef::Modify(vals) => { 83 | let vals = vals 84 | .into_iter() 85 | .map(|change| match change { 86 | UnorderedMapLikeRecursiveChangeRef::Insert((k, v)) => { 87 | UnorderedMapLikeRecursiveChangeOwned::Insert((k.clone(), v.clone())) 88 | } 89 | UnorderedMapLikeRecursiveChangeRef::Remove(k) => { 90 | UnorderedMapLikeRecursiveChangeOwned::Remove(k.clone()) 91 | } 92 | UnorderedMapLikeRecursiveChangeRef::Change((k, diffs)) => { 93 | let diffs = diffs 94 | .into_iter() 95 | .map(|x| { 96 | let ret: V::Diff = x.into(); 97 | ret 98 | }) 99 | .collect(); 100 | UnorderedMapLikeRecursiveChangeOwned::Change((k.clone(), diffs)) 101 | } 102 | }) 103 | .collect::>>(); 104 | UnorderedMapLikeRecursiveDiffInternalOwned::Modify(vals) 105 | } 106 | }; 107 | UnorderedMapLikeRecursiveDiffOwned(new_inner) 108 | } 109 | } 110 | 111 | fn collect_into_key_eq_map< 112 | 'a, 113 | K: Hash + PartialEq + Eq + 'a, 114 | V: 'a, 115 | B: Iterator, 116 | >( 117 | list: B, 118 | ) -> HashMap<&'a K, &'a V> { 119 | let mut map: HashMap<&K, &V> = HashMap::default(); 120 | map.reserve(list.size_hint().1.unwrap_or_default()); 121 | 122 | for (key, value) in list { 123 | map.insert(key, value); 124 | } 125 | map 126 | } 127 | 128 | enum Operation 129 | where 130 | V: StructDiff, 131 | VDIFF: Into + Clone, 132 | { 133 | Insert, 134 | Remove, 135 | Change(Vec, PhantomData), 136 | } 137 | 138 | impl<'a, K: Clone, V: StructDiff + Clone> UnorderedMapLikeRecursiveChangeRef<'a, K, V> { 139 | fn new(item: (&'a K, &'a V), insert_or_remove: Operation>) -> Self 140 | where 141 | Self: 'a, 142 | { 143 | match insert_or_remove { 144 | Operation::Insert => UnorderedMapLikeRecursiveChangeRef::Insert((item.0, item.1)), 145 | Operation::Remove => UnorderedMapLikeRecursiveChangeRef::Remove(item.0), 146 | Operation::Change(diff, ..) => { 147 | UnorderedMapLikeRecursiveChangeRef::Change((item.0, diff)) 148 | } 149 | } 150 | } 151 | } 152 | 153 | pub fn unordered_hashcmp< 154 | 'a, 155 | #[cfg(feature = "nanoserde")] K: Hash + Clone + PartialEq + Eq + SerBin + DeBin + 'a, 156 | #[cfg(not(feature = "nanoserde"))] K: Hash + Clone + PartialEq + Eq + 'a, 157 | V: Clone + PartialEq + StructDiff + 'a, 158 | B: Iterator, 159 | >( 160 | previous: B, 161 | current: B, 162 | key_only: bool, 163 | ) -> Option> { 164 | let (previous, mut current) = ( 165 | collect_into_key_eq_map(previous), 166 | collect_into_key_eq_map(current), 167 | ); 168 | 169 | // TODO look at replacing remove/insert pairs with a new type of change (K1, K2, V::Diff) 170 | // for space optimization. This method is fast but may send extra data over the wire. 
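// Sketch only (not implemented): the `(K1, K2, V::Diff)` idea above would become a
// fourth change variant pairing the removed key, the inserted key and a recursive
// value diff, so a re-keyed entry costs one record instead of a Remove + Insert.
// Below, the `key_only` branch compares entries by key alone; the other branch also
// diffs values for keys present on both sides via `diff_ref`.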
171 | 172 | if key_only { 173 | if (current.len() as isize) < ((previous.len() as isize) - (current.len() as isize)) { 174 | return Some(UnorderedMapLikeRecursiveDiffRef( 175 | UnorderedMapLikeRecursiveDiffInternalRef::Replace( 176 | current.into_iter().collect::>(), 177 | ), 178 | )); 179 | } 180 | 181 | let mut ret: Vec> = 182 | Vec::with_capacity((previous.len() + current.len()) >> 1); 183 | 184 | for prev_entry in previous.into_iter() { 185 | if current.remove_entry(prev_entry.0).is_none() { 186 | ret.push(UnorderedMapLikeRecursiveChangeRef::new( 187 | prev_entry, 188 | Operation::Remove, 189 | )); 190 | } 191 | } 192 | 193 | for add_entry in current.into_iter() { 194 | ret.push(UnorderedMapLikeRecursiveChangeRef::new( 195 | add_entry, 196 | Operation::Insert, 197 | )) 198 | } 199 | 200 | ret.shrink_to_fit(); 201 | 202 | match ret.is_empty() { 203 | true => None, 204 | false => Some(UnorderedMapLikeRecursiveDiffRef( 205 | UnorderedMapLikeRecursiveDiffInternalRef::Modify(ret), 206 | )), 207 | } 208 | } else { 209 | if (current.len() as isize) < ((previous.len() as isize) - (current.len() as isize)) { 210 | return Some(UnorderedMapLikeRecursiveDiffRef( 211 | UnorderedMapLikeRecursiveDiffInternalRef::Replace( 212 | current.into_iter().collect::>(), 213 | ), 214 | )); 215 | } 216 | 217 | let mut ret: Vec> = 218 | Vec::with_capacity((previous.len() + current.len()) >> 1); 219 | 220 | for prev_entry in previous.into_iter() { 221 | match current.remove_entry(prev_entry.0) { 222 | None => ret.push(UnorderedMapLikeRecursiveChangeRef::new( 223 | prev_entry, 224 | Operation::Remove, 225 | )), 226 | Some(current_entry) if prev_entry.1 != current_entry.1 => { 227 | ret.push(UnorderedMapLikeRecursiveChangeRef::new( 228 | current_entry, 229 | Operation::Change(prev_entry.1.diff_ref(current_entry.1), PhantomData), 230 | )) 231 | } 232 | _ => (), // no change 233 | } 234 | } 235 | 236 | for add_entry in current.into_iter() { 237 | ret.push(UnorderedMapLikeRecursiveChangeRef::new( 238 | add_entry, 239 | Operation::Insert, 240 | )) 241 | } 242 | 243 | ret.shrink_to_fit(); 244 | 245 | match ret.is_empty() { 246 | true => None, 247 | false => Some(UnorderedMapLikeRecursiveDiffRef( 248 | UnorderedMapLikeRecursiveDiffInternalRef::Modify(ret), 249 | )), 250 | } 251 | } 252 | } 253 | 254 | pub fn apply_unordered_hashdiffs< 255 | #[cfg(feature = "nanoserde")] K: Hash + Clone + PartialEq + Eq + SerBin + DeBin + 'static, 256 | #[cfg(not(feature = "nanoserde"))] K: Hash + Clone + PartialEq + Eq + 'static, 257 | V: Clone + StructDiff + 'static, 258 | B: IntoIterator, 259 | >( 260 | list: B, 261 | diffs: UnorderedMapLikeRecursiveDiffOwned, 262 | ) -> Box> { 263 | let diffs = match diffs { 264 | UnorderedMapLikeRecursiveDiffOwned( 265 | UnorderedMapLikeRecursiveDiffInternalOwned::Replace(replacement), 266 | ) => { 267 | return Box::new(replacement.into_iter()); 268 | } 269 | UnorderedMapLikeRecursiveDiffOwned(UnorderedMapLikeRecursiveDiffInternalOwned::Modify( 270 | diffs, 271 | )) => diffs, 272 | }; 273 | 274 | let (insertions, rem): (Vec<_>, Vec<_>) = diffs 275 | .into_iter() 276 | .partition(|x| matches!(&x, UnorderedMapLikeRecursiveChangeOwned::Insert(_))); 277 | let (removals, changes): (Vec<_>, Vec<_>) = rem 278 | .into_iter() 279 | .partition(|x| matches!(&x, UnorderedMapLikeRecursiveChangeOwned::Remove(_))); 280 | 281 | let mut list_hash = HashMap::::from_iter(list); 282 | 283 | for remove in removals { 284 | let UnorderedMapLikeRecursiveChangeOwned::Remove(key) = remove else { 285 | continue; 286 | }; 287 | 
list_hash.remove(&key); 288 | } 289 | 290 | for change in changes { 291 | let UnorderedMapLikeRecursiveChangeOwned::Change((key, diff)) = change else { 292 | continue; 293 | }; 294 | let Some(to_change) = list_hash.get_mut(&key) else { 295 | continue; 296 | }; 297 | to_change.apply_mut(diff); 298 | } 299 | 300 | for insert in insertions { 301 | let UnorderedMapLikeRecursiveChangeOwned::Insert((key, value)) = insert else { 302 | continue; 303 | }; 304 | list_hash.insert(key, value); 305 | } 306 | 307 | Box::new(list_hash.into_iter()) 308 | } 309 | 310 | #[cfg(feature = "nanoserde")] 311 | mod nanoserde_impls { 312 | use crate::StructDiff; 313 | 314 | use super::{ 315 | DeBin, SerBin, UnorderedMapLikeRecursiveChangeOwned, UnorderedMapLikeRecursiveChangeRef, 316 | UnorderedMapLikeRecursiveDiffInternalOwned, UnorderedMapLikeRecursiveDiffInternalRef, 317 | UnorderedMapLikeRecursiveDiffOwned, UnorderedMapLikeRecursiveDiffRef, 318 | }; 319 | 320 | impl SerBin for UnorderedMapLikeRecursiveChangeOwned 321 | where 322 | K: SerBin + PartialEq + Clone + DeBin, 323 | V: SerBin + PartialEq + Clone + DeBin + StructDiff, 324 | { 325 | fn ser_bin(&self, output: &mut Vec) { 326 | match self { 327 | Self::Insert(val) => { 328 | 0_u8.ser_bin(output); 329 | val.ser_bin(output); 330 | } 331 | Self::Remove(val) => { 332 | 1_u8.ser_bin(output); 333 | val.ser_bin(output); 334 | } 335 | Self::Change(val) => { 336 | 2_u8.ser_bin(output); 337 | val.ser_bin(output); 338 | } 339 | } 340 | } 341 | } 342 | 343 | impl SerBin for UnorderedMapLikeRecursiveChangeRef<'_, K, V> 344 | where 345 | K: SerBin + PartialEq + Clone, 346 | V: SerBin + PartialEq + Clone + StructDiff, 347 | { 348 | fn ser_bin(&self, output: &mut Vec) { 349 | match self { 350 | Self::Insert(val) => { 351 | 0_u8.ser_bin(output); 352 | val.0.ser_bin(output); 353 | val.1.ser_bin(output); 354 | } 355 | Self::Remove(val) => { 356 | 1_u8.ser_bin(output); 357 | val.ser_bin(output); 358 | } 359 | Self::Change(val) => { 360 | 2_u8.ser_bin(output); 361 | val.0.ser_bin(output); 362 | val.1.ser_bin(output); 363 | } 364 | } 365 | } 366 | } 367 | 368 | impl SerBin for UnorderedMapLikeRecursiveDiffOwned 369 | where 370 | K: SerBin + PartialEq + Clone + DeBin, 371 | V: SerBin + PartialEq + Clone + DeBin + StructDiff, 372 | { 373 | fn ser_bin(&self, output: &mut Vec) { 374 | match &self.0 { 375 | UnorderedMapLikeRecursiveDiffInternalOwned::Replace(val) => { 376 | 0_u8.ser_bin(output); 377 | val.ser_bin(output); 378 | } 379 | UnorderedMapLikeRecursiveDiffInternalOwned::Modify(val) => { 380 | 1_u8.ser_bin(output); 381 | val.ser_bin(output); 382 | } 383 | } 384 | } 385 | } 386 | 387 | impl SerBin for UnorderedMapLikeRecursiveDiffRef<'_, K, V> 388 | where 389 | K: SerBin + PartialEq + Clone, 390 | V: SerBin + PartialEq + Clone + StructDiff, 391 | { 392 | fn ser_bin(&self, output: &mut Vec) { 393 | match &self.0 { 394 | UnorderedMapLikeRecursiveDiffInternalRef::Replace(val) => { 395 | 0_u8.ser_bin(output); 396 | val.len().ser_bin(output); 397 | for (key, value) in val { 398 | key.ser_bin(output); 399 | value.ser_bin(output) 400 | } 401 | } 402 | UnorderedMapLikeRecursiveDiffInternalRef::Modify(val) => { 403 | 1_u8.ser_bin(output); 404 | val.ser_bin(output); 405 | } 406 | } 407 | } 408 | } 409 | 410 | impl SerBin for &UnorderedMapLikeRecursiveDiffRef<'_, K, V> 411 | where 412 | K: SerBin + PartialEq + Clone, 413 | V: SerBin + PartialEq + Clone + StructDiff, 414 | { 415 | #[inline(always)] 416 | fn ser_bin(&self, output: &mut Vec) { 417 | (*self).ser_bin(output) 418 | } 419 | } 
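// Decoding mirrors the encoders above: a tag byte picks the variant (0 Insert /
// 1 Remove / 2 Change for changes, 0 Replace / 1 Modify for the diff wrapper) and
// any other tag is surfaced as a `DeBinErr`. Only the owned types implement
// `DeBin`; the `*Ref` forms are serialize-only.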
420 | 421 | impl DeBin for UnorderedMapLikeRecursiveChangeOwned 422 | where 423 | K: SerBin + PartialEq + Clone + DeBin, 424 | V: SerBin + PartialEq + Clone + DeBin + StructDiff, 425 | { 426 | fn de_bin( 427 | offset: &mut usize, 428 | bytes: &[u8], 429 | ) -> Result, nanoserde::DeBinErr> { 430 | let id: u8 = DeBin::de_bin(offset, bytes)?; 431 | core::result::Result::Ok(match id { 432 | 0_u8 => UnorderedMapLikeRecursiveChangeOwned::Insert(DeBin::de_bin(offset, bytes)?), 433 | 1_u8 => UnorderedMapLikeRecursiveChangeOwned::Remove(DeBin::de_bin(offset, bytes)?), 434 | 2_u8 => UnorderedMapLikeRecursiveChangeOwned::Change(DeBin::de_bin(offset, bytes)?), 435 | _ => { 436 | return core::result::Result::Err(nanoserde::DeBinErr { 437 | o: *offset, 438 | l: 0, 439 | s: bytes.len(), 440 | }) 441 | } 442 | }) 443 | } 444 | } 445 | 446 | impl DeBin for UnorderedMapLikeRecursiveDiffOwned 447 | where 448 | K: SerBin + PartialEq + Clone + DeBin, 449 | V: SerBin + PartialEq + Clone + DeBin + StructDiff, 450 | { 451 | fn de_bin( 452 | offset: &mut usize, 453 | bytes: &[u8], 454 | ) -> Result, nanoserde::DeBinErr> { 455 | let id: u8 = DeBin::de_bin(offset, bytes)?; 456 | core::result::Result::Ok(match id { 457 | 0_u8 => UnorderedMapLikeRecursiveDiffOwned( 458 | UnorderedMapLikeRecursiveDiffInternalOwned::Replace(DeBin::de_bin( 459 | offset, bytes, 460 | )?), 461 | ), 462 | 1_u8 => UnorderedMapLikeRecursiveDiffOwned( 463 | UnorderedMapLikeRecursiveDiffInternalOwned::Modify(DeBin::de_bin( 464 | offset, bytes, 465 | )?), 466 | ), 467 | _ => { 468 | return core::result::Result::Err(nanoserde::DeBinErr { 469 | o: *offset, 470 | l: 0, 471 | s: bytes.len(), 472 | }) 473 | } 474 | }) 475 | } 476 | } 477 | } 478 | 479 | #[cfg(test)] 480 | mod test { 481 | #[cfg(feature = "nanoserde")] 482 | use nanoserde::{DeBin, SerBin}; 483 | #[cfg(feature = "serde")] 484 | use serde::{Deserialize, Serialize}; 485 | 486 | use crate::{Difference, StructDiff}; 487 | use std::collections::{BTreeMap, HashMap}; 488 | 489 | use crate as structdiff; 490 | 491 | #[test] 492 | fn test_key_only() { 493 | #[cfg_attr(feature = "nanoserde", derive(DeBin, SerBin))] 494 | #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] 495 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 496 | pub struct TestRecurse { 497 | recurse1: i32, 498 | recurse2: Option, 499 | } 500 | 501 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 502 | struct TestCollection { 503 | #[difference( 504 | collection_strategy = "unordered_map_like", 505 | recurse, 506 | map_equality = "key_only" 507 | )] 508 | test1: HashMap, 509 | #[difference(collection_strategy = "unordered_map_like", map_equality = "key_only")] 510 | test2: BTreeMap, 511 | } 512 | 513 | let first = TestCollection { 514 | test1: vec![ 515 | ( 516 | 10, 517 | TestRecurse { 518 | recurse1: 0, 519 | recurse2: None, 520 | }, 521 | ), 522 | ( 523 | 15, 524 | TestRecurse { 525 | recurse1: 2, 526 | recurse2: Some("Hello".to_string()), 527 | }, 528 | ), 529 | ] 530 | .into_iter() 531 | .collect(), 532 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 2)] 533 | .into_iter() 534 | .collect(), 535 | }; 536 | 537 | let second = TestCollection { 538 | test1: vec![ 539 | ( 540 | 11, 541 | TestRecurse { 542 | recurse1: 0, 543 | recurse2: Some("Hello World".to_string()), 544 | }, 545 | ), 546 | ( 547 | 15, 548 | TestRecurse { 549 | recurse1: 2, 550 | recurse2: Some("Hello World".to_string()), 551 | }, 552 | ), 553 | ] 554 | .into_iter() 555 | .collect(), 556 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 
0)] 557 | .into_iter() 558 | .collect(), 559 | }; 560 | 561 | let diffs = first.diff(&second); 562 | assert_eq!(diffs.len(), 2); 563 | let diffed = first.apply(diffs); 564 | 565 | use assert_unordered::assert_eq_unordered; 566 | assert_eq_unordered!( 567 | diffed.test1.keys().collect::>(), 568 | second.test1.keys().collect::>() 569 | ); 570 | assert_eq!(diffed.test1[&11], second.test1[&11]); 571 | assert_ne!(diffed.test1[&15], second.test1[&15]); 572 | assert_eq_unordered!(diffed.test2, second.test2); 573 | } 574 | 575 | #[test] 576 | fn test_key_value() { 577 | #[cfg_attr(feature = "nanoserde", derive(DeBin, SerBin))] 578 | #[cfg_attr(feature = "serde", derive(Deserialize, Serialize))] 579 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 580 | #[difference(setters)] 581 | pub struct TestRecurse { 582 | recurse1: i32, 583 | recurse2: Option, 584 | } 585 | 586 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 587 | #[difference(setters)] 588 | struct TestCollection { 589 | #[difference( 590 | collection_strategy = "unordered_map_like", 591 | map_equality = "key_and_value", 592 | recurse 593 | )] 594 | test1: HashMap, 595 | #[difference( 596 | collection_strategy = "unordered_map_like", 597 | map_equality = "key_and_value" 598 | )] 599 | test2: BTreeMap, 600 | } 601 | 602 | let first = TestCollection { 603 | test1: vec![ 604 | ( 605 | 10, 606 | TestRecurse { 607 | recurse1: 0, 608 | recurse2: None, 609 | }, 610 | ), 611 | ( 612 | 15, 613 | TestRecurse { 614 | recurse1: 2, 615 | recurse2: Some("Hello".to_string()), 616 | }, 617 | ), 618 | ] 619 | .into_iter() 620 | .collect(), 621 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 2)] 622 | .into_iter() 623 | .collect(), 624 | }; 625 | 626 | let second = TestCollection { 627 | test1: vec![ 628 | ( 629 | 11, 630 | TestRecurse { 631 | recurse1: 0, 632 | recurse2: Some("Hello World".to_string()), 633 | }, 634 | ), 635 | ( 636 | 15, 637 | TestRecurse { 638 | recurse1: 2, 639 | recurse2: Some("Hello World".to_string()), 640 | }, 641 | ), 642 | ] 643 | .into_iter() 644 | .collect(), 645 | test2: vec![(10, 0), (15, 2), (20, 0), (25, 0), (10, 0)] 646 | .into_iter() 647 | .collect(), 648 | }; 649 | 650 | let diffs = first.diff(&second); 651 | let diffed = first.apply(diffs); 652 | 653 | use assert_unordered::assert_eq_unordered; 654 | assert_eq_unordered!(diffed.test1, second.test1); 655 | assert_eq_unordered!(diffed.test2, second.test2); 656 | } 657 | } 658 | -------------------------------------------------------------------------------- /src/collections/unordered_array_like.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "nanoserde")] 2 | use nanoserde::{DeBin, SerBin}; 3 | #[cfg(feature = "serde")] 4 | use serde::{Deserialize, Serialize}; 5 | #[cfg(not(feature = "rustc_hash"))] 6 | type HashMap = std::collections::HashMap; 7 | #[cfg(feature = "rustc_hash")] 8 | type HashMap = 9 | std::collections::HashMap>; 10 | 11 | use std::hash::Hash; 12 | 13 | #[derive(Debug, Clone)] 14 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 15 | pub(crate) struct UnorderedArrayLikeChangeSpec { 16 | item: T, 17 | count: S, 18 | } 19 | 20 | #[derive(Debug, Clone)] 21 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 22 | pub(crate) enum UnorderedArrayLikeChange { 23 | InsertMany(UnorderedArrayLikeChangeSpec), 24 | RemoveMany(UnorderedArrayLikeChangeSpec), 25 | InsertFew(UnorderedArrayLikeChangeSpec), 26 | RemoveFew(UnorderedArrayLikeChangeSpec), 27 | InsertSingle(T), 28 
| RemoveSingle(T), 29 | } 30 | 31 | impl<'a, T: Clone + 'a> From> for UnorderedArrayLikeChange { 32 | fn from(value: UnorderedArrayLikeChange<&'a T>) -> Self { 33 | match value { 34 | UnorderedArrayLikeChange::InsertMany(UnorderedArrayLikeChangeSpec { item, count }) => { 35 | UnorderedArrayLikeChange::InsertMany(UnorderedArrayLikeChangeSpec { 36 | item: item.clone(), 37 | count, 38 | }) 39 | } 40 | UnorderedArrayLikeChange::RemoveMany(UnorderedArrayLikeChangeSpec { item, count }) => { 41 | UnorderedArrayLikeChange::RemoveMany(UnorderedArrayLikeChangeSpec { 42 | item: item.clone(), 43 | count, 44 | }) 45 | } 46 | UnorderedArrayLikeChange::InsertFew(UnorderedArrayLikeChangeSpec { item, count }) => { 47 | UnorderedArrayLikeChange::InsertFew(UnorderedArrayLikeChangeSpec { 48 | item: item.clone(), 49 | count, 50 | }) 51 | } 52 | UnorderedArrayLikeChange::RemoveFew(UnorderedArrayLikeChangeSpec { item, count }) => { 53 | UnorderedArrayLikeChange::RemoveFew(UnorderedArrayLikeChangeSpec { 54 | item: item.clone(), 55 | count, 56 | }) 57 | } 58 | UnorderedArrayLikeChange::InsertSingle(v) => { 59 | UnorderedArrayLikeChange::InsertSingle(v.clone()) 60 | } 61 | UnorderedArrayLikeChange::RemoveSingle(v) => { 62 | UnorderedArrayLikeChange::RemoveSingle(v.clone()) 63 | } 64 | } 65 | } 66 | } 67 | 68 | #[derive(Debug, Clone)] 69 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 70 | pub(crate) enum UnorderedArrayLikeDiffInternal { 71 | Replace(Vec), 72 | Modify(Vec>), 73 | } 74 | 75 | #[repr(transparent)] 76 | #[derive(Debug, Clone)] 77 | #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] 78 | pub struct UnorderedArrayLikeDiff(UnorderedArrayLikeDiffInternal); 79 | 80 | impl<'a, T: Clone + 'a> From> for UnorderedArrayLikeDiff { 81 | fn from(value: UnorderedArrayLikeDiff<&'a T>) -> Self { 82 | let new_inner = match value.0 { 83 | UnorderedArrayLikeDiffInternal::Replace(replace) => { 84 | UnorderedArrayLikeDiffInternal::Replace(replace.into_iter().cloned().collect()) 85 | } 86 | UnorderedArrayLikeDiffInternal::Modify(modify) => { 87 | UnorderedArrayLikeDiffInternal::Modify(modify.into_iter().map(Into::into).collect()) 88 | } 89 | }; 90 | Self(new_inner) 91 | } 92 | } 93 | 94 | fn collect_into_map<'a, T: Hash + PartialEq + Eq + 'a, B: Iterator>( 95 | list: B, 96 | ) -> HashMap { 97 | let mut map: HashMap = HashMap::default(); 98 | map.reserve(list.size_hint().1.unwrap_or_default()); 99 | 100 | for item in list { 101 | match map.get_mut(&item) { 102 | Some(count) => *count += 1, 103 | None => { 104 | map.insert(item, 1_usize); 105 | } 106 | } 107 | } 108 | map 109 | } 110 | 111 | enum InsertOrRemove { 112 | Insert, 113 | Remove, 114 | } 115 | 116 | impl UnorderedArrayLikeChange { 117 | fn new(item: T, count: usize, insert_or_remove: InsertOrRemove) -> Self { 118 | #[cfg(feature = "debug_asserts")] 119 | debug_assert_ne!(count, 0); 120 | match (insert_or_remove, count) { 121 | (InsertOrRemove::Insert, 1) => UnorderedArrayLikeChange::InsertSingle(item), 122 | (InsertOrRemove::Insert, val) if val <= u8::MAX as usize => { 123 | UnorderedArrayLikeChange::InsertFew(UnorderedArrayLikeChangeSpec { 124 | item, 125 | count: val as u8, 126 | }) 127 | } 128 | (InsertOrRemove::Insert, val) if val > u8::MAX as usize => { 129 | UnorderedArrayLikeChange::InsertMany(UnorderedArrayLikeChangeSpec { 130 | item, 131 | count: val, 132 | }) 133 | } 134 | (InsertOrRemove::Remove, 1) => UnorderedArrayLikeChange::RemoveSingle(item), 135 | (InsertOrRemove::Remove, val) if val <= u8::MAX as usize => { 136 | 
UnorderedArrayLikeChange::RemoveFew(UnorderedArrayLikeChangeSpec { 137 | item, 138 | count: val as u8, 139 | }) 140 | } 141 | (InsertOrRemove::Remove, val) if val > u8::MAX as usize => { 142 | UnorderedArrayLikeChange::RemoveMany(UnorderedArrayLikeChangeSpec { 143 | item, 144 | count: val, 145 | }) 146 | } 147 | (_, _) => unreachable!(), 148 | } 149 | } 150 | } 151 | 152 | pub fn unordered_hashcmp< 153 | 'a, 154 | #[cfg(feature = "nanoserde")] T: Hash + Clone + PartialEq + Eq + SerBin + DeBin + 'a, 155 | #[cfg(not(feature = "nanoserde"))] T: Hash + Clone + PartialEq + Eq + 'a, 156 | B: Iterator, 157 | >( 158 | previous: B, 159 | current: B, 160 | ) -> Option> { 161 | let mut previous = collect_into_map(previous); 162 | let current = collect_into_map(current); 163 | 164 | if (current.len() as isize) < ((previous.len() as isize) - (current.len() as isize)) { 165 | return Some(UnorderedArrayLikeDiff( 166 | UnorderedArrayLikeDiffInternal::Replace( 167 | current 168 | .into_iter() 169 | .flat_map(|(k, v)| std::iter::repeat_n(k, v)) 170 | .collect(), 171 | ), 172 | )); 173 | } 174 | 175 | let mut ret: Vec> = 176 | Vec::with_capacity((previous.len() + current.len()) >> 1); 177 | 178 | for (k, current_count) in current.iter() { 179 | match previous.remove(k) { 180 | Some(prev_count) => match (*current_count as i128) - (prev_count as i128) { 181 | add if add > 1 => ret.push(UnorderedArrayLikeChange::new( 182 | k, 183 | add as usize, 184 | InsertOrRemove::Insert, 185 | )), 186 | add if add == 1 => ret.push(UnorderedArrayLikeChange::new( 187 | k, 188 | add as usize, 189 | InsertOrRemove::Insert, 190 | )), 191 | sub if sub < 0 => ret.push(UnorderedArrayLikeChange::new( 192 | k, 193 | -sub as usize, 194 | InsertOrRemove::Remove, 195 | )), 196 | sub if sub == -1 => ret.push(UnorderedArrayLikeChange::new( 197 | k, 198 | -sub as usize, 199 | InsertOrRemove::Remove, 200 | )), 201 | _ => (), 202 | }, 203 | None => ret.push(UnorderedArrayLikeChange::new( 204 | k, 205 | *current_count, 206 | InsertOrRemove::Insert, 207 | )), 208 | } 209 | } 210 | 211 | for (k, v) in previous.into_iter() { 212 | ret.push(UnorderedArrayLikeChange::new(k, v, InsertOrRemove::Remove)) 213 | } 214 | 215 | ret.shrink_to_fit(); 216 | 217 | match ret.is_empty() { 218 | true => None, 219 | false => Some(UnorderedArrayLikeDiff( 220 | UnorderedArrayLikeDiffInternal::Modify(ret), 221 | )), 222 | } 223 | } 224 | 225 | pub fn apply_unordered_hashdiffs< 226 | #[cfg(feature = "nanoserde")] T: Hash + Clone + PartialEq + Eq + SerBin + DeBin + 'static, 227 | #[cfg(not(feature = "nanoserde"))] T: Hash + Clone + PartialEq + Eq + 'static, 228 | B: IntoIterator, 229 | >( 230 | list: B, 231 | diffs: UnorderedArrayLikeDiff, 232 | ) -> Box> 233 | where 234 | ::IntoIter: ExactSizeIterator, 235 | { 236 | let diffs = match diffs { 237 | UnorderedArrayLikeDiff(UnorderedArrayLikeDiffInternal::Replace(replacement)) => { 238 | return Box::new(replacement.into_iter()); 239 | } 240 | UnorderedArrayLikeDiff(UnorderedArrayLikeDiffInternal::Modify(diffs)) => diffs, 241 | }; 242 | 243 | let (insertions, removals): ( 244 | Vec>, 245 | Vec>, 246 | ) = diffs.into_iter().partition(|x| match &x { 247 | UnorderedArrayLikeChange::InsertMany(_) 248 | | UnorderedArrayLikeChange::InsertFew(_) 249 | | UnorderedArrayLikeChange::InsertSingle(_) => true, 250 | UnorderedArrayLikeChange::RemoveMany(_) 251 | | UnorderedArrayLikeChange::RemoveFew(_) 252 | | UnorderedArrayLikeChange::RemoveSingle(_) => false, 253 | }); 254 | let mut list_hash = collect_into_map(list.into_iter()); 255 
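// `list_hash` maps each item to its occurrence count. Removals are applied first —
// an entry is dropped outright once its count would fall to zero — then insertions
// bump counts or add brand-new items; the final iterator repeats each surviving
// item `count` times.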
| 256 | for remove in removals { 257 | match remove { 258 | UnorderedArrayLikeChange::RemoveMany(UnorderedArrayLikeChangeSpec { item, count }) => { 259 | match list_hash.get_mut(&item) { 260 | Some(val) if *val > count => { 261 | *val -= count; 262 | } 263 | Some(val) if *val <= count => { 264 | list_hash.remove(&item); 265 | } 266 | _ => (), 267 | } 268 | } 269 | UnorderedArrayLikeChange::RemoveFew(UnorderedArrayLikeChangeSpec { item, count }) => { 270 | match list_hash.get_mut(&item) { 271 | Some(val) if *val > count as usize => { 272 | *val -= count as usize; 273 | } 274 | Some(val) if *val <= count as usize => { 275 | list_hash.remove(&item); 276 | } 277 | _ => (), 278 | } 279 | } 280 | UnorderedArrayLikeChange::RemoveSingle(item) => match list_hash.get_mut(&item) { 281 | Some(val) if *val > 1 => { 282 | *val -= 1; 283 | } 284 | Some(val) if *val <= 1 => { 285 | list_hash.remove(&item); 286 | } 287 | _ => (), 288 | }, 289 | _ => { 290 | #[cfg(all(debug_assertions, feature = "debug_asserts"))] 291 | panic!("Sorting failure") 292 | } 293 | } 294 | } 295 | 296 | for insertion in insertions.into_iter() { 297 | match insertion { 298 | UnorderedArrayLikeChange::InsertMany(UnorderedArrayLikeChangeSpec { item, count }) => { 299 | match list_hash.get_mut(&item) { 300 | Some(val) => { 301 | *val += count; 302 | } 303 | None => { 304 | list_hash.insert(item, count); 305 | } 306 | } 307 | } 308 | UnorderedArrayLikeChange::InsertFew(UnorderedArrayLikeChangeSpec { item, count }) => { 309 | match list_hash.get_mut(&item) { 310 | Some(val) => { 311 | *val += count as usize; 312 | } 313 | None => { 314 | list_hash.insert(item, count as usize); 315 | } 316 | } 317 | } 318 | UnorderedArrayLikeChange::InsertSingle(item) => match list_hash.get_mut(&item) { 319 | Some(val) => { 320 | *val += 1; 321 | } 322 | None => { 323 | list_hash.insert(item, 1); 324 | } 325 | }, 326 | _ => { 327 | #[cfg(all(debug_assertions, feature = "debug_asserts"))] 328 | panic!("Sorting failure") 329 | } 330 | } 331 | } 332 | 333 | Box::new( 334 | list_hash 335 | .into_iter() 336 | .flat_map(|(k, v)| std::iter::repeat_n(k, v)), 337 | ) 338 | } 339 | 340 | #[cfg(feature = "nanoserde")] 341 | mod nanoserde_impls { 342 | use super::{ 343 | DeBin, SerBin, UnorderedArrayLikeChange, UnorderedArrayLikeChangeSpec, 344 | UnorderedArrayLikeDiff, UnorderedArrayLikeDiffInternal, 345 | }; 346 | 347 | impl SerBin for UnorderedArrayLikeChangeSpec { 348 | fn ser_bin(&self, output: &mut Vec) { 349 | self.item.ser_bin(output); 350 | self.count.ser_bin(output) 351 | } 352 | } 353 | 354 | impl SerBin for &UnorderedArrayLikeChangeSpec<&T, usize> { 355 | fn ser_bin(&self, output: &mut Vec) { 356 | self.item.ser_bin(output); 357 | self.count.ser_bin(output) 358 | } 359 | } 360 | 361 | impl SerBin for UnorderedArrayLikeChangeSpec { 362 | fn ser_bin(&self, output: &mut Vec) { 363 | self.item.ser_bin(output); 364 | self.count.ser_bin(output) 365 | } 366 | } 367 | 368 | impl SerBin for &UnorderedArrayLikeChangeSpec<&T, u8> { 369 | fn ser_bin(&self, output: &mut Vec) { 370 | self.item.ser_bin(output); 371 | self.count.ser_bin(output) 372 | } 373 | } 374 | 375 | impl SerBin for UnorderedArrayLikeChange { 376 | fn ser_bin(&self, output: &mut Vec) { 377 | match self { 378 | Self::InsertMany(val) => { 379 | 0_u8.ser_bin(output); 380 | val.ser_bin(output); 381 | } 382 | Self::RemoveMany(val) => { 383 | 1_u8.ser_bin(output); 384 | val.ser_bin(output); 385 | } 386 | Self::InsertFew(val) => { 387 | 2_u8.ser_bin(output); 388 | val.ser_bin(output); 389 | } 390 | 
Self::RemoveFew(val) => { 391 | 3_u8.ser_bin(output); 392 | val.ser_bin(output); 393 | } 394 | Self::InsertSingle(val) => { 395 | 4_u8.ser_bin(output); 396 | val.ser_bin(output); 397 | } 398 | Self::RemoveSingle(val) => { 399 | 5_u8.ser_bin(output); 400 | val.ser_bin(output); 401 | } 402 | } 403 | } 404 | } 405 | 406 | impl SerBin for &UnorderedArrayLikeChange<&T> { 407 | fn ser_bin(&self, output: &mut Vec) { 408 | match self { 409 | UnorderedArrayLikeChange::InsertMany(val) => { 410 | 0_u8.ser_bin(output); 411 | val.ser_bin(output); 412 | } 413 | UnorderedArrayLikeChange::RemoveMany(val) => { 414 | 1_u8.ser_bin(output); 415 | val.ser_bin(output); 416 | } 417 | UnorderedArrayLikeChange::InsertFew(val) => { 418 | 2_u8.ser_bin(output); 419 | val.ser_bin(output); 420 | } 421 | UnorderedArrayLikeChange::RemoveFew(val) => { 422 | 3_u8.ser_bin(output); 423 | val.ser_bin(output); 424 | } 425 | UnorderedArrayLikeChange::InsertSingle(val) => { 426 | 4_u8.ser_bin(output); 427 | val.ser_bin(output); 428 | } 429 | UnorderedArrayLikeChange::RemoveSingle(val) => { 430 | 5_u8.ser_bin(output); 431 | val.ser_bin(output); 432 | } 433 | } 434 | } 435 | } 436 | 437 | impl SerBin for UnorderedArrayLikeDiff { 438 | fn ser_bin(&self, output: &mut Vec) { 439 | match &self.0 { 440 | UnorderedArrayLikeDiffInternal::Replace(val) => { 441 | 0_u8.ser_bin(output); 442 | val.ser_bin(output); 443 | } 444 | UnorderedArrayLikeDiffInternal::Modify(val) => { 445 | 1_u8.ser_bin(output); 446 | val.ser_bin(output); 447 | } 448 | } 449 | } 450 | } 451 | 452 | impl SerBin for &UnorderedArrayLikeDiff<&T> { 453 | fn ser_bin(&self, output: &mut Vec) { 454 | match &self.0 { 455 | UnorderedArrayLikeDiffInternal::Replace(val) => { 456 | 0_u8.ser_bin(output); 457 | val.len().ser_bin(output); 458 | for entry in val { 459 | entry.ser_bin(output); 460 | } 461 | } 462 | UnorderedArrayLikeDiffInternal::Modify(val) => { 463 | 1_u8.ser_bin(output); 464 | val.len().ser_bin(output); 465 | for entry in val { 466 | entry.ser_bin(output); 467 | } 468 | } 469 | } 470 | } 471 | } 472 | 473 | impl DeBin for UnorderedArrayLikeChangeSpec { 474 | fn de_bin(offset: &mut usize, bytes: &[u8]) -> Result { 475 | core::result::Result::Ok(Self { 476 | item: DeBin::de_bin(offset, bytes)?, 477 | count: DeBin::de_bin(offset, bytes)?, 478 | }) 479 | } 480 | } 481 | 482 | impl DeBin for UnorderedArrayLikeChangeSpec { 483 | fn de_bin(offset: &mut usize, bytes: &[u8]) -> Result { 484 | core::result::Result::Ok(Self { 485 | item: DeBin::de_bin(offset, bytes)?, 486 | count: DeBin::de_bin(offset, bytes)?, 487 | }) 488 | } 489 | } 490 | 491 | impl DeBin for UnorderedArrayLikeChange { 492 | fn de_bin( 493 | offset: &mut usize, 494 | bytes: &[u8], 495 | ) -> Result, nanoserde::DeBinErr> { 496 | let id: u8 = DeBin::de_bin(offset, bytes)?; 497 | core::result::Result::Ok(match id { 498 | 0_u8 => UnorderedArrayLikeChange::InsertMany(DeBin::de_bin(offset, bytes)?), 499 | 1_u8 => UnorderedArrayLikeChange::RemoveMany(DeBin::de_bin(offset, bytes)?), 500 | 2_u8 => UnorderedArrayLikeChange::InsertFew(DeBin::de_bin(offset, bytes)?), 501 | 3_u8 => UnorderedArrayLikeChange::RemoveFew(DeBin::de_bin(offset, bytes)?), 502 | 4_u8 => UnorderedArrayLikeChange::InsertSingle(DeBin::de_bin(offset, bytes)?), 503 | 5_u8 => UnorderedArrayLikeChange::RemoveSingle(DeBin::de_bin(offset, bytes)?), 504 | _ => { 505 | return core::result::Result::Err(nanoserde::DeBinErr { 506 | o: *offset, 507 | l: 0, 508 | s: bytes.len(), 509 | }) 510 | } 511 | }) 512 | } 513 | } 514 | 515 | impl DeBin for 
UnorderedArrayLikeDiff { 516 | fn de_bin( 517 | offset: &mut usize, 518 | bytes: &[u8], 519 | ) -> Result, nanoserde::DeBinErr> { 520 | let id: u8 = DeBin::de_bin(offset, bytes)?; 521 | core::result::Result::Ok(match id { 522 | 0_u8 => { 523 | let len: usize = DeBin::de_bin(offset, bytes)?; 524 | let mut contents: Vec = Vec::new(); 525 | for _ in 0..len { 526 | let content = DeBin::de_bin(offset, bytes)?; 527 | contents.push(content); 528 | } 529 | UnorderedArrayLikeDiff(UnorderedArrayLikeDiffInternal::Replace(contents)) 530 | } 531 | 1_u8 => { 532 | let len: usize = DeBin::de_bin(offset, bytes)?; 533 | let mut contents: Vec> = Vec::new(); 534 | for _ in 0..len { 535 | let content = DeBin::de_bin(offset, bytes)?; 536 | contents.push(content); 537 | } 538 | UnorderedArrayLikeDiff(UnorderedArrayLikeDiffInternal::Modify(contents)) 539 | } 540 | _ => { 541 | return core::result::Result::Err(nanoserde::DeBinErr { 542 | o: *offset, 543 | l: 0, 544 | s: bytes.len(), 545 | }) 546 | } 547 | }) 548 | } 549 | } 550 | } 551 | 552 | #[cfg(test)] 553 | mod test { 554 | use std::collections::{HashSet, LinkedList}; 555 | 556 | use super::{UnorderedArrayLikeDiff, UnorderedArrayLikeDiffInternal}; 557 | use crate::{Difference, StructDiff}; 558 | 559 | use crate as structdiff; 560 | 561 | #[test] 562 | fn test_collection_strategies() { 563 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 564 | // #[derive(Debug, PartialEq, Clone, Default)] 565 | // #[difference(setters)] 566 | struct TestCollection { 567 | #[difference(collection_strategy = "unordered_array_like")] 568 | test1: Vec, 569 | #[difference(collection_strategy = "unordered_array_like")] 570 | test2: HashSet, 571 | #[difference(collection_strategy = "unordered_array_like")] 572 | test3: LinkedList, 573 | } 574 | 575 | let first = TestCollection { 576 | test1: vec![10, 15, 20, 25, 30], 577 | test3: vec![10, 15, 17].into_iter().collect(), 578 | ..Default::default() 579 | }; 580 | 581 | let second = TestCollection { 582 | test1: Vec::default(), 583 | test2: vec![10].into_iter().collect(), 584 | test3: vec![10, 15, 17, 19].into_iter().collect(), 585 | }; 586 | 587 | let diffs = first.diff(&second).to_owned(); 588 | 589 | type TestCollectionFields = ::Diff; 590 | 591 | if let TestCollectionFields::test1(UnorderedArrayLikeDiff( 592 | UnorderedArrayLikeDiffInternal::Replace(val), 593 | )) = &diffs[0] 594 | { 595 | assert_eq!(val.len(), 0); 596 | } else { 597 | panic!("Collection strategy failure"); 598 | } 599 | 600 | if let TestCollectionFields::test3(UnorderedArrayLikeDiff( 601 | UnorderedArrayLikeDiffInternal::Modify(val), 602 | )) = &diffs[2] 603 | { 604 | assert_eq!(val.len(), 1); 605 | } else { 606 | panic!("Collection strategy failure"); 607 | } 608 | 609 | let diffed = first.apply(diffs); 610 | 611 | use assert_unordered::assert_eq_unordered; 612 | assert_eq_unordered!(diffed.test1, second.test1); 613 | assert_eq_unordered!(diffed.test2, second.test2); 614 | assert_eq_unordered!(diffed.test3, second.test3); 615 | } 616 | 617 | #[test] 618 | fn test_collection_strategies_ref() { 619 | #[derive(Debug, PartialEq, Clone, Difference, Default)] 620 | // #[derive(Debug, PartialEq, Clone, Default)] 621 | #[difference(setters)] 622 | struct TestCollection { 623 | #[difference(collection_strategy = "unordered_array_like")] 624 | test1: Vec, 625 | #[difference(collection_strategy = "unordered_array_like")] 626 | test2: HashSet, 627 | #[difference(collection_strategy = "unordered_array_like")] 628 | test3: LinkedList, 629 | } 630 | 631 | let first = 
TestCollection { 632 | test1: vec![10, 15, 20, 25, 30], 633 | test3: vec![10, 15, 17].into_iter().collect(), 634 | ..Default::default() 635 | }; 636 | 637 | let second = TestCollection { 638 | test1: Vec::default(), 639 | test2: vec![10].into_iter().collect(), 640 | test3: vec![10, 15, 17, 19].into_iter().collect(), 641 | }; 642 | 643 | let diffs = first.diff_ref(&second).to_owned(); 644 | 645 | type TestCollectionFields<'target> = <TestCollection as StructDiff>::DiffRef<'target>; 646 | 647 | if let TestCollectionFields::test1(UnorderedArrayLikeDiff( 648 | UnorderedArrayLikeDiffInternal::Replace(val), 649 | )) = &diffs[0] 650 | { 651 | assert_eq!(val.len(), 0); 652 | } else { 653 | panic!("Collection strategy failure"); 654 | } 655 | 656 | if let TestCollectionFields::test3(UnorderedArrayLikeDiff( 657 | UnorderedArrayLikeDiffInternal::Modify(val), 658 | )) = &diffs[2] 659 | { 660 | assert_eq!(val.len(), 1); 661 | } else { 662 | panic!("Collection strategy failure"); 663 | } 664 | 665 | let owned = diffs.into_iter().map(Into::into).collect(); 666 | let diffed = first.apply(owned); 667 | 668 | use assert_unordered::assert_eq_unordered; 669 | assert_eq_unordered!(diffed.test1, second.test1); 670 | assert_eq_unordered!(diffed.test2, second.test2); 671 | assert_eq_unordered!(diffed.test3, second.test3); 672 | } 673 | } 674 | --------------------------------------------------------------------------------
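// A minimal usage sketch of the strategy exercised by the tests in
// unordered_array_like.rs above. It assumes only the API those tests already
// use (the `Difference` derive, `StructDiff::diff`/`apply`, and the
// `collection_strategy = "unordered_array_like"` field attribute); the
// `Inventory` type, its fields, and the values are illustrative only.
use std::collections::HashSet;

use structdiff::{Difference, StructDiff};

#[derive(Debug, Clone, PartialEq, Difference)]
struct Inventory {
    #[difference(collection_strategy = "unordered_array_like")]
    tags: HashSet<String>,
    #[difference(collection_strategy = "unordered_array_like")]
    counts: Vec<u32>,
}

fn round_trip_example() {
    let before = Inventory {
        tags: ["a", "b"].into_iter().map(String::from).collect(),
        counts: vec![1, 1, 2],
    };
    let after = Inventory {
        tags: ["b", "c"].into_iter().map(String::from).collect(),
        counts: vec![1, 2, 2],
    };

    // The generated diff records per-item count deltas (here: remove one "a",
    // insert one "c", remove one 1, insert one 2) rather than the full
    // collections, unless a wholesale replacement would be smaller.
    let delta = before.diff(&after);
    let patched = before.apply(delta);

    assert_eq!(patched.tags, after.tags);

    // The unordered strategy does not preserve element order, so compare the
    // Vec contents as a multiset.
    let (mut got, mut want) = (patched.counts.clone(), after.counts.clone());
    got.sort_unstable();
    want.sort_unstable();
    assert_eq!(got, want);
}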