├── CONTRIBUTING.md ├── .gitignore ├── src ├── stabilisation_num.rs ├── incrsan │ ├── stable.rs │ └── nightly.rs ├── node │ └── id.rs ├── kind │ ├── array_fold.rs │ ├── bind.rs │ ├── map.rs │ └── expert.rs ├── boxes.rs ├── cutoff.rs ├── state │ └── expert.rs ├── scope.rs ├── incrsan.rs ├── lib.rs ├── syntax.rs ├── node_update.rs ├── kind.rs ├── adjust_heights_heap.rs ├── recompute_heap.rs ├── internal_observer.rs └── var.rs ├── incremental-macros ├── Cargo.toml ├── examples │ └── calc.rs └── src │ ├── debug.rs │ └── lib.rs ├── incremental-map ├── Cargo.toml ├── tests │ └── integration.rs └── src │ ├── btree_map.rs │ └── symmetric_fold.rs ├── LICENSE.md ├── Cargo.toml ├── examples └── stabilise_100.rs ├── README.md ├── benches ├── linear.rs └── shares_per_symbol.rs └── tests ├── doc_preliminaries.rs ├── expert.rs └── fixed_point.rs /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Note to self: use [`cargo-workspaces`](https://github.com/pksunkara/cargo-workspaces). 
2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /incremental-macros/target 3 | Cargo.lock 4 | *.png 5 | *.dot 6 | *.svg 7 | *.pdf 8 | .DS_Store 9 | -------------------------------------------------------------------------------- /src/stabilisation_num.rs: -------------------------------------------------------------------------------- 1 | #[repr(transparent)] 2 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] 3 | pub struct StabilisationNum(pub i32); 4 | 5 | impl StabilisationNum { 6 | pub fn init() -> Self { 7 | Self(-1) 8 | } 9 | pub fn add1(self) -> Self { 10 | Self(self.0 + 1) 11 | } 12 | pub fn is_never(&self) -> bool { 13 | self.0 == -1 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/incrsan/stable.rs: -------------------------------------------------------------------------------- 1 | /// Inactive auto trait, as `nightly-incrsan` feature is not enabled. 2 | /// 3 | /// Designed to prevent having an incremental node own an Observer, which is 4 | /// invalid use of the API and will panic when dropped. 5 | /// 6 | /// This type is implemented for every T, which means it does not restrict anything. 7 | /// You should only have to deal with this trait if you enable the `nightly-incrsan` feature. 
8 | pub trait NotObserver {} 9 | impl NotObserver for T {} 10 | -------------------------------------------------------------------------------- /incremental-macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "incremental-macros" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [features] 10 | default = ["string-interner"] 11 | 12 | [dependencies] 13 | incremental = { path = "..", features = ["slotmap"] } 14 | slotmap = "1.0.6" 15 | string-interner = { version = "0.14.0", optional = true } 16 | paste = { version = "1.0.9" } 17 | 18 | [dev-dependencies] 19 | tracing-subscriber = { workspace = true } 20 | tracing = { workspace = true } 21 | -------------------------------------------------------------------------------- /src/node/id.rs: -------------------------------------------------------------------------------- 1 | use std::cell::Cell; 2 | use std::fmt; 3 | 4 | #[derive(Copy, Clone, PartialEq, Eq, Hash)] 5 | pub struct NodeId(pub(crate) usize); 6 | impl NodeId { 7 | pub(super) fn next() -> Self { 8 | thread_local! 
{ 9 | static NODE_ID: Cell = Cell::new(0); 10 | } 11 | 12 | NODE_ID.with(|x| { 13 | let next = x.get() + 1; 14 | x.set(next); 15 | NodeId(next) 16 | }) 17 | } 18 | } 19 | impl fmt::Debug for NodeId { 20 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 21 | write!(f, "{}", self.0) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /incremental-map/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "incremental-map" 3 | version = "0.2.8" 4 | edition = "2021" 5 | description = "combinators for incremental immutable maps (see crate `incremental`)" 6 | license = "MIT" 7 | 8 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 9 | 10 | # version separately from incremental 11 | [package.metadata.workspaces] 12 | independent = true 13 | 14 | [features] 15 | default = [] 16 | im = ["dep:im-rc"] 17 | nightly-incrsan = ["incremental/nightly-incrsan"] 18 | 19 | [dependencies] 20 | im-rc = { workspace = true, optional = true } 21 | incremental = { path = "..", version = "0.2.0" } 22 | 23 | [dev-dependencies] 24 | tracing-subscriber = { workspace = true } 25 | tracing = { workspace = true } 26 | criterion = { workspace = true } 27 | test-log = { workspace = true } 28 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2008--2023 Jane Street Group, LLC opensource-contacts@janestreet.com 4 | Copyright (c) 2021--2023 Cormac Relf web@cormacrelf.net 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 7 | 8 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 9 | 10 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 11 | 12 | -------------------------------------------------------------------------------- /src/incrsan/nightly.rs: -------------------------------------------------------------------------------- 1 | /// 2 | /// Nightly-only auto trait to prevent having an incremental node own an Observer, which is 3 | /// invalid use of the API and will panic when dropped. 4 | /// 5 | /// ```compile_fail 6 | /// use incremental::*; 7 | /// 8 | /// let state = IncrState::new(); 9 | /// let constant = state.constant(1); 10 | /// let observer = constant.observe(); 11 | /// let map = constant.map(move |_| { 12 | /// let _ = observer.value(); // invalid! will panic, so don't try! 13 | /// 1234 14 | /// }); 15 | /// ``` 16 | /// 17 | #[diagnostic::on_unimplemented( 18 | message = "Type contains an incremental::Observer", 19 | // {Self} expands to the thing that doesn't implement NotObserver, which is e.g. 20 | // `Observer`. 
21 | label = "Contains {Self}, which is either an Observer or cannot be proven not to contain one.", 22 | note = "Observers are the roots for garbage-collecting incremental nodes.\nThey hold incrementals, not the other way round.\nIf you need access to another incremental value in a map node, try map2.\nIf you know this type has no observers, you can use incremental::incrsan::AssertNotObserver.\n" 23 | )] 24 | pub auto trait NotObserver {} 25 | 26 | impl !NotObserver for crate::public::Observer {} 27 | 28 | impl NotObserver for super::AssertNotObserver {} 29 | -------------------------------------------------------------------------------- /src/kind/array_fold.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::fmt::{self, Debug}; 3 | 4 | use super::{Incr, Value}; 5 | use crate::boxes::{new_unsized, SmallBox}; 6 | use crate::incrsan::NotObserver; 7 | use crate::kind::KindTrait; 8 | use crate::{NodeRef, ValueInternal}; 9 | 10 | pub(crate) struct ArrayFold { 11 | pub(crate) init: R, 12 | pub(crate) fold: RefCell, 13 | pub(crate) children: Vec>, 14 | } 15 | 16 | impl KindTrait for ArrayFold 17 | where 18 | F: FnMut(R, &I) -> R + 'static + NotObserver, 19 | I: Value, 20 | R: Value, 21 | { 22 | fn compute(&self) -> SmallBox { 23 | let mut acc = self.init.clone(); 24 | let mut f = self.fold.borrow_mut(); 25 | acc = self.children.iter().fold(acc, |acc, x| { 26 | let v = x.node.value_as_ref().unwrap(); 27 | f(acc, &v) 28 | }); 29 | new_unsized!(acc) 30 | } 31 | fn children_len(&self) -> usize { 32 | self.children.len() 33 | } 34 | fn iter_children_packed(&self) -> Box + '_> { 35 | Box::new(self.children.iter().map(|x| x.node.packed())) 36 | } 37 | fn slow_get_child(&self, index: usize) -> NodeRef { 38 | self.children[index].node.packed() 39 | } 40 | fn debug_ty(&self, f: &mut fmt::Formatter) -> fmt::Result { 41 | write!( 42 | f, 43 | "ArrayFold<[{}] -> {}>", 44 | std::any::type_name::(), 45 | 
std::any::type_name::() 46 | ) 47 | } 48 | } 49 | 50 | impl Debug for ArrayFold 51 | where 52 | R: Debug, 53 | { 54 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 55 | f.debug_struct("ArrayFold") 56 | .field("len", &self.children.len()) 57 | .finish() 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "incremental" 3 | version = "0.2.8" 4 | authors = ["Cormac Relf "] 5 | edition = "2021" 6 | categories = ["algorithms", "data-structures", "caching"] 7 | description = "incremental computations, based on Jane Street's incremental" 8 | keywords = ["incremental", "computation"] 9 | license = "MIT" 10 | repository = "https://github.com/cormacrelf/incremental-rs" 11 | 12 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 13 | 14 | [workspace] 15 | members = ["incremental-macros", "incremental-map"] 16 | 17 | [workspace.dependencies] 18 | test-log = { version = "0.2.11", default-features = false, features = [ 19 | "trace", 20 | ] } 21 | tracing-subscriber = { version = "0.3.16", default-features = false, features = [ 22 | "fmt", 23 | "ansi", 24 | "env-filter", 25 | "smallvec", 26 | "tracing-log", 27 | ] } 28 | criterion = { version = "0.5.1", features = ["html_reports"] } 29 | tracing = { version = "0.1.37", features = ["release_max_level_off"] } 30 | im-rc = { version = "15.1.0" } 31 | 32 | [features] 33 | rust-analyzer = [] 34 | nightly-incrsan = [] 35 | nightly-miny = ["dep:miny"] 36 | 37 | [dependencies] 38 | smallvec = "1.10.0" 39 | tracing = { version = "0.1.37", features = [] } 40 | slotmap = { version = "1.0.6", optional = true } 41 | im-rc = { workspace = true, optional = true } 42 | miny = { version = "2.0.2", optional = true } 43 | 44 | [dev-dependencies] 45 | tracing-subscriber = { workspace = true } 46 | tracing = { workspace = true } 47 
| criterion = { workspace = true } 48 | test-log = { workspace = true } 49 | incremental-map = { path = "incremental-map", features = ["im"] } 50 | rand = "0.8.5" 51 | im-rc = { workspace = true } 52 | 53 | [profile.release] 54 | lto = "thin" 55 | 56 | [[bench]] 57 | name = "linear" 58 | harness = false 59 | 60 | [[bench]] 61 | name = "shares_per_symbol" 62 | harness = false 63 | -------------------------------------------------------------------------------- /examples/stabilise_100.rs: -------------------------------------------------------------------------------- 1 | use incremental::{Incr, IncrState, Observer, Var}; 2 | use std::time::Instant; 3 | 4 | const NODE_COUNT: u64 = 50; 5 | const ITER_COUNT: u64 = 500000; 6 | 7 | #[allow(dead_code)] 8 | fn using_map(mut node: Incr) -> Incr { 9 | for _ in 0..NODE_COUNT { 10 | node = node.map(|val| val + 1); 11 | } 12 | node 13 | } 14 | 15 | #[allow(dead_code)] 16 | fn using_bind(mut node: Incr) -> Incr { 17 | for _ in 0..NODE_COUNT { 18 | node = node.binds(move |incr, &val| incr.constant(val + 1)); 19 | } 20 | node 21 | } 22 | 23 | fn main() { 24 | let incr = IncrState::new_with_height(1200); 25 | let first_num = incr.var(0u64); 26 | let o = first_num.pipe(using_map).observe(); 27 | incr.stabilise(); 28 | // o.save_dot_to_file("stabilise_100.dot"); 29 | assert_eq!(o.try_get_value(), Ok(NODE_COUNT)); 30 | 31 | let prev_stats = incr.stats(); 32 | let start = Instant::now(); 33 | iter(o, &incr, first_num); 34 | let dur = Instant::now().duration_since(start); 35 | let recomputed = (incr.stats() - prev_stats).recomputed; 36 | 37 | println!(); 38 | let expect_total = (NODE_COUNT * ITER_COUNT) as u32; 39 | println!( 40 | "recompute count {recomputed} ({:.2}x NODE_COUNT * ITER_COUNT)", 41 | recomputed as f32 / expect_total as f32 42 | ); 43 | println!("{:?} per node", dur / recomputed as u32); 44 | } 45 | 46 | #[inline(never)] 47 | fn iter(node: Observer, incr: &IncrState, set_first_num: Var) { 48 | let mut update_number = 0; 
49 | for i in 0..ITER_COUNT { 50 | if i % (ITER_COUNT / 10) == 0 { 51 | println!("{}%", (i * 100) / (ITER_COUNT)); 52 | } 53 | update_number += 1; 54 | set_first_num.set(update_number); 55 | incr.stabilise(); 56 | assert_eq!(node.try_get_value(), Ok(update_number + NODE_COUNT)); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/boxes.rs: -------------------------------------------------------------------------------- 1 | #[cfg(not(feature = "nightly-miny"))] 2 | #[macro_use] 3 | mod impls { 4 | macro_rules! new_unsized { 5 | ($expr:expr $(,)?) => { 6 | ::std::boxed::Box::new($expr) 7 | }; 8 | } 9 | /// Type alias for [Box], because feature nightly-miny is disabled. 10 | pub(crate) type SmallBox = Box; 11 | pub(crate) use new_unsized; 12 | 13 | use crate::ValueInternal; 14 | pub fn downcast_inner(boxed: SmallBox) -> Option { 15 | let boxed = downcast_box(boxed)?; 16 | Some(*boxed) 17 | } 18 | fn downcast_box(boxed: SmallBox) -> Option> { 19 | if !boxed.as_any().is::() { 20 | return None; 21 | } 22 | // Safety: we checked that boxed holds V 23 | Some(unsafe { downcast_unchecked(boxed) }) 24 | } 25 | unsafe fn downcast_unchecked(boxed: SmallBox) -> SmallBox { 26 | debug_assert!(boxed.as_any().is::()); 27 | // Safety: passed to caller 28 | unsafe { 29 | let raw: *mut (dyn ValueInternal) = Box::into_raw(boxed); 30 | Box::from_raw(raw as *mut V) 31 | } 32 | } 33 | } 34 | 35 | #[cfg(feature = "nightly-miny")] 36 | #[macro_use] 37 | mod impls { 38 | use crate::ValueInternal; 39 | 40 | macro_rules! new_unsized { 41 | ($expr:expr $(,)?) => { 42 | ::miny::Miny::new_unsized($expr) 43 | }; 44 | } 45 | /// Type alias for [miny::Miny], because feature nightly-miny is enabled. 
46 | pub type SmallBox = miny::Miny; 47 | pub(crate) use new_unsized; 48 | 49 | pub fn downcast_inner(boxed: SmallBox) -> Option { 50 | // I'm not convinced that Miny::downcast is not too general -- it may allow 51 | miny::Miny::downcast(boxed).ok() 52 | } 53 | } 54 | 55 | pub(crate) use impls::*; 56 | -------------------------------------------------------------------------------- /src/kind/bind.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::{cell::Cell, fmt}; 3 | 4 | use crate::boxes::SmallBox; 5 | use crate::incrsan::NotObserver; 6 | use crate::node::ErasedNode; 7 | use crate::node::NodeId; 8 | use crate::scope::{BindScope, Scope}; 9 | use crate::ValueInternal; 10 | use crate::{NodeRef, WeakNode}; 11 | 12 | pub(crate) struct BindNode { 13 | pub id_lhs_change: Cell, 14 | pub lhs_change: RefCell, 15 | pub main: RefCell, 16 | pub lhs: NodeRef, 17 | pub mapper: RefCell>, 18 | pub rhs: RefCell>, 19 | pub rhs_scope: RefCell, 20 | pub all_nodes_created_on_rhs: RefCell>, 21 | } 22 | 23 | impl BindScope for BindNode { 24 | fn id(&self) -> NodeId { 25 | self.id_lhs_change.get() 26 | } 27 | fn is_valid(&self) -> bool { 28 | let main_ = self.main.borrow(); 29 | let Some(main) = main_.upgrade() else { 30 | return false; 31 | }; 32 | main.is_valid() 33 | } 34 | fn is_necessary(&self) -> bool { 35 | let main_ = self.main.borrow(); 36 | let Some(main) = main_.upgrade() else { 37 | return false; 38 | }; 39 | main.is_necessary() 40 | } 41 | fn height(&self) -> i32 { 42 | let lhs_change_ = self.lhs_change.borrow(); 43 | let lhs_change = lhs_change_.upgrade().unwrap(); 44 | lhs_change.height() 45 | } 46 | fn add_node(&self, node: WeakNode) { 47 | tracing::info!( 48 | "added node to BindScope({:?}): {:?}", 49 | self.id(), 50 | node.upgrade() 51 | ); 52 | let mut all = self.all_nodes_created_on_rhs.borrow_mut(); 53 | all.push(node); 54 | } 55 | } 56 | 57 | pub(crate) trait LhsChangeFn: 58 | FnMut(&dyn 
ValueInternal) -> NodeRef + 'static + NotObserver 59 | { 60 | } 61 | impl LhsChangeFn for F where F: FnMut(&dyn ValueInternal) -> NodeRef + 'static + NotObserver {} 62 | 63 | impl fmt::Debug for BindNode { 64 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 65 | f.debug_struct("BindNode") 66 | // .field("output", &self.rhs.borrow().as_ref().map(|x| &x.node)) 67 | .finish() 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/cutoff.rs: -------------------------------------------------------------------------------- 1 | use crate::{incrsan::NotObserver, ValueInternal}; 2 | 3 | pub(crate) struct ErasedCutoff { 4 | should_cutoff: Box>, 5 | } 6 | 7 | impl ErasedCutoff { 8 | pub(crate) fn new(mut cutoff: Cutoff) -> Self { 9 | Self { 10 | // codegen: one copy of this closure is generated for each T 11 | should_cutoff: Box::new( 12 | move |a: &dyn ValueInternal, b: &dyn ValueInternal| -> bool { 13 | let Some(a) = a.as_any().downcast_ref::() else { 14 | return false; 15 | }; 16 | let Some(b) = b.as_any().downcast_ref::() else { 17 | return false; 18 | }; 19 | cutoff.should_cutoff(a, b) 20 | }, 21 | ), 22 | } 23 | } 24 | pub(crate) fn should_cutoff(&mut self, a: &dyn ValueInternal, b: &dyn ValueInternal) -> bool { 25 | (&mut *self.should_cutoff)(a, b) 26 | } 27 | } 28 | 29 | #[derive(Clone)] 30 | #[non_exhaustive] 31 | pub enum Cutoff { 32 | Always, 33 | Never, 34 | PartialEq, 35 | Fn(fn(&T, &T) -> bool), 36 | FnBoxed(Box>), 37 | } 38 | 39 | pub trait CutoffClosure: FnMut(&T, &T) -> bool + NotObserver { 40 | fn clone_box(&self) -> Box>; 41 | } 42 | 43 | impl CutoffClosure for F 44 | where 45 | F: FnMut(&T, &T) -> bool + Clone + 'static + NotObserver, 46 | { 47 | fn clone_box(&self) -> Box> { 48 | Box::new(self.clone()) 49 | } 50 | } 51 | 52 | impl Clone for Box> { 53 | fn clone(&self) -> Self { 54 | (**self).clone_box() 55 | } 56 | } 57 | 58 | impl Cutoff 59 | where 60 | T: PartialEq, 61 | { 62 | pub fn 
should_cutoff(&mut self, a: &T, b: &T) -> bool { 63 | match self { 64 | Self::Always => true, 65 | Self::Never => false, 66 | Self::PartialEq => a.eq(b), 67 | Self::Fn(comparator) => comparator(a, b), 68 | Self::FnBoxed(comparator) => comparator(a, b), 69 | } 70 | } 71 | 72 | pub(crate) fn erased(self) -> ErasedCutoff 73 | where 74 | T: Clone + 'static, 75 | { 76 | ErasedCutoff::new(self) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/state/expert.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use crate::kind::{self, expert::ExpertEdge}; 3 | use crate::node::ErasedNode; 4 | use crate::node::Incremental; 5 | use crate::WeakIncr; 6 | use kind::expert::PackedEdge; 7 | 8 | pub(crate) fn create(state: &State, recompute: F, on_observability_change: O) -> Incr 9 | where 10 | T: Value, 11 | F: FnMut() -> T + 'static + NotObserver, 12 | O: FnMut(bool) + 'static + NotObserver, 13 | { 14 | let node = Node::create_rc::( 15 | state.weak(), 16 | state.current_scope(), 17 | Kind::Expert(kind::ExpertNode::new_obs( 18 | recompute, 19 | on_observability_change, 20 | )), 21 | ); 22 | 23 | // if debug 24 | // then 25 | // if Option.is_some state.only_in_debug.currently_running_node 26 | // then 27 | // state.only_in_debug.expert_nodes_created_by_current_node 28 | // <- T node :: state.only_in_debug.expert_nodes_created_by_current_node; 29 | 30 | Incr { node } 31 | } 32 | 33 | pub(crate) fn create_cyclic( 34 | state: &State, 35 | cyclic: Cyclic, 36 | on_observability_change: O, 37 | ) -> Incr 38 | where 39 | T: Value, 40 | Cyclic: FnOnce(WeakIncr) -> F, 41 | F: FnMut() -> T + 'static + NotObserver, 42 | O: FnMut(bool) + 'static + NotObserver, 43 | { 44 | let node = Rc::::new_cyclic(|weak| { 45 | let weak_incr = WeakIncr(weak.clone()); 46 | let recompute = cyclic(weak_incr); 47 | let mut node = Node::create::( 48 | state.weak(), 49 | state.current_scope(), 50 | 
Kind::Expert(kind::ExpertNode::new_obs( 51 | recompute, 52 | on_observability_change, 53 | )), 54 | ); 55 | node.weak_self = weak.clone(); 56 | node 57 | }); 58 | node.created_in.add_node(node.clone()); 59 | Incr { node } 60 | } 61 | 62 | pub(crate) fn make_stale(node: &NodeRef) { 63 | node.expert_make_stale(); 64 | } 65 | 66 | pub(crate) fn invalidate(node: &NodeRef) { 67 | let state = node.state(); 68 | #[cfg(debug_assertions)] 69 | node.assert_currently_running_node_is_child("invalidate"); 70 | node.invalidate_node(&state); 71 | state.propagate_invalidity(); 72 | } 73 | 74 | pub(crate) fn add_dependency(node: &NodeRef, edge: PackedEdge) { 75 | node.expert_add_dependency(edge); 76 | } 77 | 78 | pub(crate) fn remove_dependency(node: &dyn Incremental, dyn_edge: &dyn ExpertEdge) { 79 | node.expert_remove_dependency(dyn_edge); 80 | } 81 | -------------------------------------------------------------------------------- /src/scope.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | use std::rc::Weak; 3 | 4 | use super::{node::NodeId, NodeRef, WeakNode}; 5 | use crate::incrsan::NotObserver; 6 | use crate::node::ErasedNode; 7 | 8 | pub(crate) trait BindScope: fmt::Debug + NotObserver { 9 | fn id(&self) -> NodeId; 10 | fn is_valid(&self) -> bool; 11 | fn is_necessary(&self) -> bool; 12 | fn height(&self) -> i32; 13 | fn add_node(&self, node: WeakNode); 14 | } 15 | 16 | #[derive(Clone)] 17 | pub(crate) enum Scope { 18 | Top, 19 | Bind(Weak), 20 | } 21 | 22 | impl fmt::Debug for Scope { 23 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 24 | match self { 25 | Scope::Top => write!(f, "Scope::Top"), 26 | Scope::Bind(bind) => { 27 | let Some(bind) = bind.upgrade() else { 28 | return write!(f, "Scope::Bind(weak, deallocated)"); 29 | }; 30 | write!(f, "Scope::Bind({:?})", bind.id()) 31 | } 32 | } 33 | } 34 | } 35 | 36 | impl Scope { 37 | fn equals(&self, other: &Self) -> bool { 38 | match self { 39 | Scope::Top 
=> matches!(other, Scope::Top), 40 | Scope::Bind(w1) => match other { 41 | Scope::Bind(w2) => crate::weak_thin_ptr_eq(w1, w2), 42 | _ => false, 43 | }, 44 | } 45 | } 46 | pub(crate) fn height(&self) -> i32 { 47 | match self { 48 | Self::Top => 0, 49 | Self::Bind(weak) => { 50 | let strong = weak.upgrade().unwrap(); 51 | strong.height() 52 | } 53 | } 54 | } 55 | pub(crate) fn is_valid(&self) -> bool { 56 | match self { 57 | Self::Top => true, 58 | Self::Bind(weak) => { 59 | let strong = weak.upgrade().unwrap(); 60 | strong.is_valid() 61 | } 62 | } 63 | } 64 | pub(crate) fn is_necessary(&self) -> bool { 65 | match self { 66 | Self::Top => true, 67 | Self::Bind(weak) => { 68 | let strong = weak.upgrade().unwrap(); 69 | strong.is_necessary() 70 | } 71 | } 72 | } 73 | pub(crate) fn add_node(&self, node: NodeRef) { 74 | assert!(node.created_in().equals(self)); 75 | match self { 76 | Self::Top => {} 77 | Self::Bind(bind_weak) => { 78 | let bind = bind_weak.upgrade().unwrap(); 79 | bind.add_node(node.weak()); 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # incremental-rs 2 | 3 | A port of Jane Street's [Incremental][jane] library. 4 | 5 | - Pretty rigorous implementation of the basic Incr features, moderately well 6 | tested and benchmarked, performs very similarly to the original. 
7 | - Partial implementation of incremental-map 8 | 9 | [jane]: https://github.com/janestreet/incremental 10 | 11 | ### Install 12 | 13 | ```sh 14 | cargo add incremental 15 | ``` 16 | 17 | ### Examples 18 | 19 | Basic usage: 20 | 21 | ```rust 22 | use incremental::IncrState; 23 | 24 | let state = IncrState::new(); 25 | let variable = state.var(5); 26 | let times_10 = variable.map(|num| num * 10); 27 | let observer = times_10.observe(); 28 | 29 | // stabilise will propagate any changes 30 | state.stabilise(); 31 | let value = observer.value(); 32 | assert_eq!(value, 50); 33 | 34 | // now mutate 35 | variable.set(10); 36 | state.stabilise(); 37 | 38 | // watch as var was propagated through the tree, and reached our observer 39 | assert_eq!(observer.value(), 100); 40 | ``` 41 | 42 | Subscriptions, and an illustration of how propagation stops when nodes produce 43 | the same value as last time: 44 | 45 | ```rust 46 | use incremental::{IncrState, Update}; 47 | 48 | // A little system to compute the absolute value of an input 49 | // Note that the input could change (e.g. 5 to -5), but the 50 | // output may stay the same (5 both times). 51 | let state = IncrState::new(); 52 | let variable = state.var(5i32); 53 | let absolute = variable.map(|num| num.abs()); 54 | let observer = absolute.observe(); 55 | 56 | // set up a subscription. 
57 | use std::{cell::RefCell, rc::Rc}; 58 | let latest = Rc::new(RefCell::new(None)); 59 | let latest_clone = latest.clone(); 60 | let subscription_token = observer.subscribe(move |update| { 61 | *latest_clone.borrow_mut() = Some(update.cloned()); 62 | }); 63 | 64 | // initial stabilisation 65 | state.stabilise(); 66 | assert_eq!(observer.value(), 5); 67 | assert_eq!(latest.borrow().clone(), Some(Update::Initialised(5))); 68 | 69 | // now mutate, but such that the output of abs() won't change 70 | variable.set(-5); 71 | state.stabilise(); 72 | // The subscription function was not called, because the `absolute` node 73 | // produced the same value as last time we stabilised. 74 | assert_eq!(latest.borrow().clone(), Some(Update::Initialised(5))); 75 | assert_eq!(observer.value(), 5); 76 | 77 | // now mutate such that the output changes too 78 | variable.set(-10); 79 | state.stabilise(); 80 | // The observer did get a new value, and did call the subscription function 81 | assert_eq!(latest.borrow().clone(), Some(Update::Changed(10))); 82 | assert_eq!(observer.value(), 10); 83 | 84 | // now unsubscribe. this also implicitly happens if you drop the observer, 85 | // but you can individually unsubscribe particular subscriptions if you wish. 86 | observer.unsubscribe(subscription_token); 87 | // dropping the observer also unloads any part of the computation graph 88 | // that was only running for the purposes of this particular observer 89 | drop(observer); 90 | 91 | // now that the observer is dead, we can mutate the variable and nothing will 92 | // happen, like, at all. The absolute value will not be computed. 93 | variable.set(100000000); 94 | let recomputed = state.stats().recomputed; 95 | state.stabilise(); 96 | assert_eq!(recomputed, state.stats().recomputed); 97 | ``` 98 | -------------------------------------------------------------------------------- /src/incrsan.rs: -------------------------------------------------------------------------------- 1 | //! 
Sanitisers for using incremental correctly. 2 | //! 3 | //! These are enabled by the `nightly-incrsan` feature flag, which requires a nightly compiler. 4 | //! They primarily work through auto-traits, which can be a bit of a pain since incremental code 5 | //! often interfaces with other code exposing `Box`-like APIs. So it is not enabled by 6 | //! default. 7 | //! 8 | //! You can opt-in by: 9 | //! 10 | //! - enabling the `nightly-incrsan` feature; 11 | //! - peppering around [`+ NotObserver`][NotObserver] bounds on things like `impl FnMut() -> ...`; 12 | //! - wrapping foreign types in [AssertNotObserver] when you are sure they do not contain observers 13 | 14 | // Rustc will parse things inside the cfg attribute even if the feature is not enabled. 15 | // But #[path = "..."] will help the compiler only parse one version of this code. 16 | // 17 | #[cfg_attr(feature = "nightly-incrsan", path = "incrsan/nightly.rs")] 18 | #[cfg_attr(not(feature = "nightly-incrsan"), path = "incrsan/stable.rs")] 19 | mod implementation; 20 | 21 | pub use implementation::*; 22 | 23 | /// A wrapper struct to assert that its contents are not observers, in the vein of 24 | /// [`std::panic::AssertUnwindSafe`]. 25 | /// 26 | /// Only does anything with the `nightly-incrsan` feature enabled. 27 | /// 28 | /// This is good if you have a `Box` you want to use somewhere, where the trait is some 29 | /// foreign trait and you can't prove it to the compiler, but it doesn't have any observers in it. 
30 | /// 31 | /// For example: 32 | /// 33 | /// ``` 34 | /// use incremental::*; 35 | /// use incremental::incrsan::*; 36 | /// 37 | /// let state = IncrState::new(); 38 | /// let constant = state.constant(1); 39 | /// let not_observer: Box = Box::new(|| println!("hello")); 40 | /// 41 | /// // wrap in this to assert to the compiler it's ok, since you know what you put in the box 42 | /// let not_observer = AssertNotObserver(not_observer); 43 | /// 44 | /// // now you can use it freely inside map nodes, observer.subscribe() callbacks, etc 45 | /// let map = constant.map(move |_| { 46 | /// not_observer(); // No longer a compiler error 47 | /// 1234 48 | /// }); 49 | /// ``` 50 | #[repr(transparent)] 51 | #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 52 | pub struct AssertNotObserver(pub T); 53 | 54 | impl std::ops::Deref for AssertNotObserver { 55 | type Target = T; 56 | fn deref(&self) -> &Self::Target { 57 | &self.0 58 | } 59 | } 60 | 61 | impl std::ops::DerefMut for AssertNotObserver { 62 | fn deref_mut(&mut self) -> &mut Self::Target { 63 | &mut self.0 64 | } 65 | } 66 | 67 | /// Let the compiler check for you that T does not contain an observer. 68 | /// 69 | /// Useful if you're about to wrap in an AssertNotObserver, and then 70 | /// wrap in some kind of container that has no observers, that you know 71 | /// of, but you still want to check that the type you're putting in the 72 | /// container is `NotObserver`. 73 | pub fn check_not_observer(value: T) -> T { 74 | value 75 | } 76 | 77 | /// Adds `+ NotObserver` to the trait object if incrsan is enabled, and not otherwise. 78 | /// 79 | /// Why? Because NotObserver can only be used that way if it is an auto-trait. Without incrsan, it 80 | /// is a regular trait, which cannot be used in that position. Outside of trait objects, you can 81 | /// just use it as a trait bound `` and it is fine. 82 | /// 83 | /// Got to wrap the traits listed after dyn with (). So Box. 
Just for tt-munching 84 | /// purposes, because > is ambiguous as a terminator. 85 | macro_rules! not_observer_boxed_trait { 86 | ( 87 | $vis:vis type $ident:ident $(:: $path:ident)* $(<$($g:ident),+>)? = $box_type:ident $(:: $path2:ident)* < dyn ($($tt:tt)+) >; 88 | ) => { 89 | #[cfg(not(feature = "nightly-incrsan"))] 90 | $vis type $ident $(:: $path)* $(< $($g),+ >)? = $box_type $(:: $path2)* < dyn $($tt)+ >; 91 | #[cfg(feature = "nightly-incrsan")] 92 | $vis type $ident $(:: $path)* $(< $($g),+ >)? = $box_type $(:: $path2)* < dyn $($tt)+ + $crate::incrsan::NotObserver >; 93 | }; 94 | } 95 | pub(crate) use not_observer_boxed_trait; 96 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // #![feature(type_alias_impl_trait)] 2 | #![doc = include_str!("../README.md")] 3 | // We have some really complicated types. Most of them can't be typedef'd to be any shorter. 
4 | #![allow(clippy::type_complexity)] 5 | // #![allow(clippy::single_match)] 6 | #![cfg_attr(feature = "nightly-incrsan", feature(negative_impls))] 7 | #![cfg_attr(feature = "nightly-incrsan", feature(auto_traits))] 8 | 9 | mod boxes; 10 | 11 | mod adjust_heights_heap; 12 | mod cutoff; 13 | mod incr; 14 | pub mod incrsan; 15 | mod internal_observer; 16 | mod kind; 17 | mod node; 18 | mod node_update; 19 | mod recompute_heap; 20 | mod scope; 21 | mod stabilisation_num; 22 | mod state; 23 | mod syntax; 24 | mod var; 25 | 26 | mod public; 27 | use boxes::SmallBox; 28 | pub use public::*; 29 | 30 | use fmt::Debug; 31 | use std::any::Any; 32 | use std::cell::Cell; 33 | use std::fmt; 34 | use std::rc::{Rc, Weak}; 35 | 36 | use self::incrsan::NotObserver; 37 | 38 | /// Trait alias for `Debug + Clone + 'static` 39 | pub trait Value: Debug + Clone + PartialEq + NotObserver + 'static + Any { 40 | fn as_any(&self) -> &dyn Any; 41 | } 42 | impl Value for T 43 | where 44 | T: Debug + Clone + PartialEq + NotObserver + 'static + Any, 45 | { 46 | fn as_any(&self) -> &dyn Any { 47 | self 48 | } 49 | } 50 | 51 | /// Trait alias for `Debug + 'static + Any`. Used in trait objects, instead of 52 | /// `dyn ValueInternal`, because [Value] cannot be made into a trait object 53 | /// (Clone and PartialEq include Self types). 
54 | pub(crate) trait ValueInternal: Debug + NotObserver + 'static + Any { 55 | fn as_any(&self) -> &dyn Any; 56 | fn clone_any(&self) -> SmallBox; 57 | } 58 | impl ValueInternal for T 59 | where 60 | T: Debug + Clone + PartialEq + NotObserver + 'static + Any, 61 | { 62 | fn as_any(&self) -> &dyn Any { 63 | self 64 | } 65 | fn clone_any(&self) -> SmallBox { 66 | boxes::new_unsized!(self.clone()) 67 | } 68 | } 69 | 70 | pub(crate) type NodeRef = Rc; 71 | pub(crate) type WeakNode = Weak; 72 | 73 | pub trait Invariant { 74 | fn invariant(&self); 75 | } 76 | 77 | /// Solves the problem of `Rc::::ptr_eq` producing bad results, since 78 | /// it compares fat pointers and their vtables which may differ between crates 79 | /// for the same underlying type, or be the same for two different underlying types 80 | /// when rustc uses the same vtable for each. 81 | /// 82 | /// Probably don't use this for traits implemented by ZSTs... but there is no 83 | /// good way to do pointer equality in that case anyway, without any allocations to 84 | /// compare. 85 | pub(crate) fn rc_thin_ptr_eq(one: &Rc, two: &Rc) -> bool { 86 | let one_: *const () = Rc::as_ptr(one).cast(); 87 | let two_: *const () = Rc::as_ptr(two).cast(); 88 | one_ == two_ 89 | } 90 | pub(crate) fn rc_thin_ptr_eq_t2(one: &Rc, two: &Rc) -> bool { 91 | let one_: *const () = Rc::as_ptr(one).cast(); 92 | let two_: *const () = Rc::as_ptr(two).cast(); 93 | one_ == two_ 94 | } 95 | pub(crate) fn weak_thin_ptr_eq(one: &Weak, two: &Weak) -> bool { 96 | let one_: *const () = Weak::as_ptr(one).cast(); 97 | let two_: *const () = Weak::as_ptr(two).cast(); 98 | one_ == two_ 99 | } 100 | pub(crate) fn dyn_thin_ptr_eq(one: &T, two: &T) -> bool { 101 | let one_: *const () = one as *const T as *const (); 102 | let two_: *const () = two as *const T as *const (); 103 | one_ == two_ 104 | } 105 | 106 | /// Little helper trait for bumping a statistic. 
107 | pub(crate) trait CellIncrement { 108 | type Num; 109 | fn increment(&self); 110 | fn decrement(&self); 111 | // std is going to add Cell:update... someday... 112 | fn update_val(&self, f: impl FnOnce(Self::Num) -> Self::Num); 113 | } 114 | 115 | macro_rules! impl_cell_increment { 116 | ($num_ty:ty) => { 117 | impl CellIncrement for Cell<$num_ty> { 118 | type Num = $num_ty; 119 | #[inline] 120 | fn update_val(&self, f: impl FnOnce(Self::Num) -> Self::Num) { 121 | self.set(f(self.get())); 122 | } 123 | #[inline(always)] 124 | fn increment(&self) { 125 | self.update_val(|x| x + 1) 126 | } 127 | #[inline(always)] 128 | fn decrement(&self) { 129 | self.update_val(|x| x - 1) 130 | } 131 | } 132 | }; 133 | } 134 | impl_cell_increment!(i32); 135 | impl_cell_increment!(usize); 136 | -------------------------------------------------------------------------------- /src/syntax.rs: -------------------------------------------------------------------------------- 1 | //! This module has some syntax helpers. 2 | use std::ops::Rem; 3 | 4 | use super::Incr; 5 | use super::Value; 6 | 7 | macro_rules! map_builder { 8 | (@def $(#[$attr:meta])* $n:ident<$vfirst:ident : $ifirst:ident, $($v:ident : $upto_i:ident),+ >::$map:ident) => { 9 | $(#[$attr])* 10 | pub struct $n<$ifirst, $($upto_i,)+>(Incr<$ifirst>, $(Incr<$upto_i>,)+); 11 | impl<$ifirst: Value, $($upto_i: Value,)+> $n<$ifirst, $($upto_i),+> { 12 | /// Maps the incrementals in the (i1 % i2 % ...) syntax all at once. 13 | pub fn map(&self, f: impl FnMut(&$ifirst, $(&$upto_i),+) -> R + 'static + crate::incrsan::NotObserver) -> Incr { 14 | let Self($vfirst, $($v),+) = self; 15 | $vfirst.$map($($v),+, f) 16 | } 17 | /// Zips the incrementals in the (i1 % i2 % ...) syntax into an `Incr<(I1, I2, ...)>`. 
18 | pub fn zip(&self) -> Incr<($ifirst, $($upto_i),+)> { 19 | let Self($vfirst, $($v),+) = self; 20 | $vfirst.$map($($v),+, |$vfirst, $($v),+| ($vfirst.clone(), $($v.clone()),+)) 21 | } 22 | } 23 | }; 24 | 25 | 26 | (@def_rem $n:ident<$($v:ident : $upto_i:ident),+ > 27 | => $(#[$attr:meta])* $n_plus_1:ident<.., $v_plus_1:ident: $i_plus_1:ident>::$map_n_plus_1:ident) => { 28 | map_builder!(@def $(#[$attr])* $n_plus_1<$($v: $upto_i,)+ $v_plus_1: $i_plus_1>::$map_n_plus_1); 29 | impl<$($upto_i,)+ $i_plus_1> Rem> for $n<$($upto_i,)+> { 30 | type Output = $n_plus_1<$($upto_i,)+ $i_plus_1>; 31 | fn rem(self, rhs: Incr<$i_plus_1>) -> Self::Output { 32 | let Self($($v),+) = self; 33 | $n_plus_1($($v,)+ rhs) 34 | } 35 | } 36 | impl<$($upto_i,)+ $i_plus_1> Rem<&Incr<$i_plus_1>> for $n<$($upto_i,)+> { 37 | type Output = $n_plus_1<$($upto_i,)+ $i_plus_1>; 38 | fn rem(self, rhs: &Incr<$i_plus_1>) -> Self::Output { 39 | let Self($($v),+) = self; 40 | $n_plus_1($($v,)+ rhs.clone()) 41 | } 42 | } 43 | }; 44 | 45 | { 46 | [ 47 | @rest 48 | $n:ident<$($v:ident: $upto_i:ident),+>::$map_n:ident, 49 | ] 50 | } => { 51 | }; 52 | { 53 | [ 54 | @rest 55 | $n:ident<$($v:ident: $upto_i:ident),+>::$map_n:ident, 56 | $(#[$attr:meta])* $n_plus_1:ident<.., $v_plus_1:ident: $i_plus_1:ident>::$map_n_plus_1:ident, 57 | $($rest:tt)* 58 | ] 59 | } => { 60 | map_builder!(@def_rem $n<$($v: $upto_i),+> => $(#[$attr])* $n_plus_1<.., $v_plus_1: $i_plus_1>::$map_n_plus_1); 61 | map_builder!([ 62 | @rest 63 | $n_plus_1<$($v: $upto_i,)+ $v_plus_1: $i_plus_1>::$map_n_plus_1, 64 | $($rest)* 65 | ]); 66 | }; 67 | { 68 | [ 69 | $(#[$attr:meta])* 70 | $n:ident<$($v:ident: $upto_i:ident),+>::$map:ident, 71 | $($rest:tt)* 72 | ] 73 | } => { 74 | map_builder!(@def $(#[$attr])* $n<$($v: $upto_i),+>::$map); 75 | map_builder!([@rest $n<$($v: $upto_i),+>::$map, $($rest)*]); 76 | }; 77 | } 78 | 79 | map_builder!([ 80 | /// Produced by the syntax `i1 % i2` for two `Incr`s. 
81 | MapBuilder2::map2, 82 | /// Produced by the syntax `i1 % i2 % i3` for 3 `Incr`s. 83 | MapBuilder3<.., i3: I3>::map3, 84 | /// Produced by the syntax `i1 % i2 % i3 % i4` for 4 `Incr`s. 85 | MapBuilder4<.., i4: I4>::map4, 86 | /// Produced by the syntax `i1 % i2 % i3 % i4 % i5` for 5 `Incr`s. 87 | MapBuilder5<.., i5: I5>::map5, 88 | /// Produced by the syntax `i1 % i2 % i3 % i4 % i5 % i6` for 6 `Incr`s. 89 | MapBuilder6<.., i6: I6>::map6, 90 | ]); 91 | 92 | // Base case 93 | 94 | impl Rem> for Incr { 95 | type Output = MapBuilder2; 96 | fn rem(self, rhs: Incr) -> Self::Output { 97 | MapBuilder2(self, rhs) 98 | } 99 | } 100 | 101 | impl Rem<&Incr> for &Incr { 102 | type Output = MapBuilder2; 103 | fn rem(self, rhs: &Incr) -> Self::Output { 104 | MapBuilder2(self.clone(), rhs.clone()) 105 | } 106 | } 107 | 108 | #[test] 109 | fn test_syntax() { 110 | let incr = crate::IncrState::new(); 111 | let i1 = incr.constant(5); 112 | let i2 = incr.constant(10); 113 | let i3 = incr.constant(9); 114 | let out = (&i1 % &i2 % &i3).map(|&a, &b, &c| a * b * c); 115 | let obs = out.observe(); 116 | incr.stabilise(); 117 | assert_eq!(obs.try_get_value(), Ok(450)); 118 | 119 | let i4 = incr.constant(1); 120 | let i5 = incr.constant(1); 121 | let i6 = incr.constant(1); 122 | let _4 = (&i1 % &i2 % &i3 % &i4).map(|_, _, _, _| 0); 123 | let _5 = (&i1 % &i2 % &i3 % &i4 % &i5).map(|_, _, _, _, _| 0); 124 | let _6 = (&i1 % &i2 % &i3 % &i4 % &i5 % &i6).map(|_, _, _, _, _, _| 0); 125 | let _6_owned = (i1 % i2 % i3 % i4 % i5 % i6).map(|_, _, _, _, _, _| 0); 126 | } 127 | -------------------------------------------------------------------------------- /src/node_update.rs: -------------------------------------------------------------------------------- 1 | use std::cell::Cell; 2 | 3 | use super::stabilisation_num::StabilisationNum; 4 | use crate::boxes::SmallBox; 5 | use crate::incrsan::not_observer_boxed_trait; 6 | use crate::node::{ErasedNode, Node}; 7 | 8 | not_observer_boxed_trait! 
{ 9 | pub(crate) type BoxedUpdateFn = SmallBox))>; 10 | } 11 | 12 | #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] 13 | enum Previously { 14 | NeverBeenUpdated, 15 | Necessary, 16 | Changed, 17 | Invalidated, 18 | Unnecessary, 19 | } 20 | 21 | #[derive(Copy, Clone, Debug)] 22 | pub(crate) enum NodeUpdateDelayed { 23 | Necessary, 24 | Changed, 25 | Invalidated, 26 | Unnecessary, 27 | } 28 | 29 | #[derive(Debug)] 30 | pub enum NodeUpdate { 31 | Necessary(T), 32 | Changed(T), 33 | Invalidated, 34 | Unnecessary, 35 | } 36 | 37 | not_observer_boxed_trait! { 38 | pub(crate) type ErasedOnUpdateHandler = Box; 39 | } 40 | 41 | pub(crate) trait HandleUpdate { 42 | fn run(&mut self, node: &Node, node_update: NodeUpdateDelayed, now: StabilisationNum); 43 | } 44 | 45 | pub(crate) struct OnUpdateHandler { 46 | handler_fn: BoxedUpdateFn, 47 | previous_update_kind: Cell, 48 | created_at: StabilisationNum, 49 | } 50 | 51 | impl OnUpdateHandler { 52 | pub(crate) fn new(created_at: StabilisationNum, handler_fn: BoxedUpdateFn) -> Self { 53 | OnUpdateHandler { 54 | handler_fn, 55 | created_at, 56 | previous_update_kind: Previously::NeverBeenUpdated.into(), 57 | } 58 | } 59 | fn really_run_downcast(&mut self, node: &Node, node_update: NodeUpdateDelayed) { 60 | self.previous_update_kind.set(match &node_update { 61 | NodeUpdateDelayed::Changed => Previously::Changed, 62 | NodeUpdateDelayed::Necessary => Previously::Necessary, 63 | NodeUpdateDelayed::Invalidated => Previously::Invalidated, 64 | NodeUpdateDelayed::Unnecessary => Previously::Unnecessary, 65 | }); 66 | let value_any = node.value_as_any(); 67 | let concrete_update = match node_update { 68 | NodeUpdateDelayed::Changed => { 69 | let value_any = value_any.unwrap(); 70 | let v = value_any 71 | .as_any() 72 | .downcast_ref::() 73 | .expect("downcast_ref failed"); 74 | return (self.handler_fn)(NodeUpdate::Changed(&*v)); 75 | } 76 | NodeUpdateDelayed::Necessary => { 77 | let value_any = value_any.unwrap(); 78 | let v = value_any 
79 | .as_any() 80 | .downcast_ref::() 81 | .expect("downcast_ref failed"); 82 | return (self.handler_fn)(NodeUpdate::Necessary(&*v)); 83 | } 84 | NodeUpdateDelayed::Invalidated => NodeUpdate::Invalidated, 85 | NodeUpdateDelayed::Unnecessary => NodeUpdate::Unnecessary, 86 | }; 87 | (self.handler_fn)(concrete_update); 88 | } 89 | } 90 | 91 | impl HandleUpdate for OnUpdateHandler { 92 | fn run(&mut self, node: &Node, node_update: NodeUpdateDelayed, now: StabilisationNum) { 93 | /* We only run the handler if was created in an earlier stabilization cycle. If the 94 | handler was created by another on-update handler during the running of on-update 95 | handlers in the current stabilization, we treat the added handler as if it were added 96 | after this stabilization finished. We will run it at the next stabilization, because 97 | the node with the handler was pushed on [state.handle_after_stabilization]. */ 98 | if self.created_at < now { 99 | match (self.previous_update_kind.get(), node_update) { 100 | /* Once a node is invalidated, there will never be further information to provide, 101 | since incremental does not allow an invalid node to become valid. */ 102 | (Previously::Invalidated, _) => (), 103 | /* These cases can happen if a node is handled after stabilization due to another 104 | handler. But for the current handler, there is nothing to do because there is no 105 | new information to provide. */ 106 | (Previously::Changed, NodeUpdateDelayed::Necessary) 107 | | (Previously::Necessary, NodeUpdateDelayed::Necessary) 108 | | (Previously::Unnecessary, NodeUpdateDelayed::Unnecessary) => (), 109 | /* If this handler hasn't seen a node that is changing, we treat the update as an 110 | initialization. */ 111 | ( 112 | Previously::NeverBeenUpdated | Previously::Unnecessary, 113 | NodeUpdateDelayed::Changed, 114 | ) => self.really_run_downcast(node, NodeUpdateDelayed::Necessary), 115 | /* All other updates are run as is. 
*/ 116 | (_, node_update) => self.really_run_downcast(node, node_update), 117 | } 118 | } 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /benches/linear.rs: -------------------------------------------------------------------------------- 1 | use std::hint::black_box; 2 | use std::time::Instant; 3 | 4 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 5 | 6 | use incremental::{Incr, IncrState, Observer, Var}; 7 | 8 | #[derive(Clone, Copy)] 9 | enum SequenceKind { 10 | Trivial, 11 | TrivialBind, 12 | Spokes, 13 | Recombine, 14 | Wide, 15 | } 16 | 17 | fn sequence(node: Incr, kind: SequenceKind, size: u64) -> Incr { 18 | match kind { 19 | SequenceKind::Trivial => (0..size) 20 | .into_iter() 21 | .fold(node, |node, _| node.map(|val| val + 1)), 22 | SequenceKind::TrivialBind => (0..size).into_iter().fold(node, |node, _| { 23 | node.binds(|incr, &val| incr.constant(val + 1)) 24 | }), 25 | SequenceKind::Spokes => { 26 | let all = (0..size) 27 | .into_iter() 28 | .map(|_| node.map(|val| val + 1)) 29 | .collect::>(); 30 | node.state().fold(all, 0, |a, b| a + b) 31 | } 32 | SequenceKind::Recombine => (0..size).into_iter().fold(node, |node, _| { 33 | let a = node.map(|x| x + 1); 34 | let b = node.map(|x| x + 1); 35 | (a % b).map(|a, b| a + b) 36 | }), 37 | SequenceKind::Wide => { 38 | let double = |list: Vec>| -> Vec> { 39 | list.into_iter() 40 | .flat_map(|i| { 41 | let a = i.map(|x| x + 1); 42 | let b = i.map(|x| x + 1); 43 | [a, b] 44 | }) 45 | .collect() 46 | }; 47 | let spread = (0..size).into_iter().fold(vec![node], |acc, _| double(acc)); 48 | reduce_balanced(&spread, |a, b| a.map2(b, |a, b| a + b)).unwrap() 49 | } 50 | } 51 | } 52 | 53 | fn reduce_balanced(slice: &[T], mut f: impl FnMut(&T, &T) -> T) -> Option { 54 | fn reduce_balanced_inner(slice: &[T], f: &mut impl FnMut(&T, &T) -> T) -> Option { 55 | let size = slice.len(); 56 | if size == 0 { 57 | return None; 58 | } 59 | if size == 1 
{ 60 | return slice.first().cloned(); 61 | } 62 | let mid = size / 2; 63 | let left = reduce_balanced_inner(&slice[..mid], f).unwrap(); 64 | let right = reduce_balanced_inner(&slice[mid..], f).unwrap(); 65 | Some(f(&left, &right)) 66 | } 67 | reduce_balanced_inner(slice, &mut f) 68 | } 69 | 70 | #[test] 71 | fn test_reduce_balanced() { 72 | let slice: &[i32] = &[]; 73 | assert_eq!(reduce_balanced(slice, |a, b| a + b), None); 74 | let slice = &[1]; 75 | assert_eq!(reduce_balanced(slice, |a, b| a + b), Some(1)); 76 | let slice = &[1, 2]; 77 | assert_eq!(reduce_balanced(slice, |a, b| a + b), Some(3)); 78 | let slice = &[1, 2, 3, 4]; 79 | assert_eq!(reduce_balanced(slice, |a, b| a + b), Some(10)); 80 | let slice = &[1, 2, 3, 4, 5]; 81 | assert_eq!(reduce_balanced(slice, |a, b| a + b), Some(15)); 82 | } 83 | 84 | fn setup(kind: SequenceKind, size: u64) -> (Var, IncrState, Observer, u32) { 85 | let incr = IncrState::new_with_height(1200); 86 | let first_num = incr.var(1u64); 87 | let o = first_num.pipe2(sequence, kind, size).observe(); 88 | incr.stabilise(); 89 | first_num.set(0); 90 | let stats = incr.stats(); 91 | incr.stabilise(); 92 | let recomputed = incr.stats().diff(stats).recomputed as u32; 93 | (first_num, incr, o, recomputed) 94 | } 95 | 96 | impl From for String { 97 | fn from(value: SequenceKind) -> Self { 98 | match value { 99 | SequenceKind::Wide => "wide", 100 | SequenceKind::Trivial => "trivial", 101 | SequenceKind::TrivialBind => "trivial-bind", 102 | SequenceKind::Spokes => "spokes", 103 | SequenceKind::Recombine => "recombine", 104 | } 105 | .into() 106 | } 107 | } 108 | 109 | fn bench_node(c: &mut Criterion, kind: SequenceKind, size: u64) { 110 | c.bench_with_input(BenchmarkId::new(kind, size), &size, |b, &size| { 111 | let (var, incr, obs, _recomputed) = setup(kind, size); 112 | b.iter_custom(|iters| { 113 | let start = Instant::now(); 114 | let recomputed = incr.stats().recomputed; 115 | for _ in 0..iters { 116 | var.update(|x| x + 1); 117 | 
incr.stabilise(); 118 | black_box(obs.value()); 119 | } 120 | let diff = incr.stats().recomputed - recomputed; 121 | iters as u32 * start.elapsed() / diff as u32 122 | }) 123 | }); 124 | } 125 | 126 | fn bench_stabilise(c: &mut Criterion, kind: SequenceKind, size: u64) { 127 | c.bench_with_input(BenchmarkId::new("stabilise", size), &size, |b, &size| { 128 | let (_var, incr, obs, _recomputed) = setup(kind, size); 129 | b.iter(|| { 130 | incr.stabilise(); 131 | black_box(obs.value()); 132 | }) 133 | }); 134 | } 135 | 136 | #[tracing::instrument(skip_all)] 137 | fn all(c: &mut Criterion) { 138 | tracing_subscriber::fmt().init(); 139 | bench_node(c, SequenceKind::Recombine, 50); 140 | bench_node(c, SequenceKind::Trivial, 1000); 141 | bench_node(c, SequenceKind::TrivialBind, 50); 142 | bench_node(c, SequenceKind::Spokes, 1000); 143 | bench_node(c, SequenceKind::Wide, 5); 144 | bench_node(c, SequenceKind::Wide, 10); 145 | bench_stabilise(c, SequenceKind::Recombine, 50); 146 | } 147 | 148 | criterion_group!(benches, all); 149 | criterion_main!(benches); 150 | -------------------------------------------------------------------------------- /tests/doc_preliminaries.rs: -------------------------------------------------------------------------------- 1 | fn stabilise_diff(incr: &incremental::IncrState, msg: &str) -> incremental::StatsDiff { 2 | let before = incr.stats(); 3 | incr.stabilise(); 4 | let delta = incr.stats() - before; 5 | println!("{msg} : {delta:#?}"); 6 | delta 7 | } 8 | 9 | mod projections_and_cutoffs { 10 | use super::*; 11 | use std::rc::Rc; 12 | 13 | use incremental::{Incr, IncrState, StatsDiff}; 14 | use test_log::test; 15 | 16 | #[derive(Debug, Clone, PartialEq)] 17 | struct Z { 18 | // Rc for cheap clones like they have in OCaml with GC. 19 | a: Rc>, 20 | b: (i32, i32), 21 | } 22 | 23 | /// ``` 24 | /// +---+ +--------+ 25 | /// .->| a |-->| a_prod |-. 
26 | /// / +---+ +--------+ \ 27 | /// +---+/ '->+--------+ 28 | /// | z | | result | 29 | /// +---+\ .->+--------+ 30 | /// \ +---+ +--------+ / 31 | /// '->| b |-->| b_prod |-' 32 | /// +---+ +--------+ 33 | /// ``` 34 | /// 35 | fn sumproduct_can_cutoff(z: Incr) -> Incr { 36 | let a_prod = { 37 | // clone the rc 38 | let a = z.map(|z| z.a.clone()); 39 | a.map(|a| { 40 | println!("a_prod ran (expensive)"); 41 | a.iter().product() 42 | }) 43 | }; 44 | let b_prod = { 45 | let b = z.map(|z| z.b); 46 | b.map(|&(b1, b2)| { 47 | println!("b_prod ran (cheap)"); 48 | b1 * b2 49 | }) 50 | }; 51 | a_prod.map2(&b_prod, |a, b| a + b) 52 | } 53 | 54 | #[test] 55 | fn one() { 56 | let incr = IncrState::new(); 57 | let z = incr.var(Z { 58 | a: Rc::new(vec![3, 2]), 59 | b: (1, 4), 60 | }); 61 | let result = z.pipe(sumproduct_can_cutoff).observe(); 62 | 63 | // This creates six nodes total. 64 | assert_eq!(incr.stats().created, 6); 65 | 66 | let diff = stabilise_diff( 67 | &incr, 68 | "initial stabilise, after observing sumproduct_can_cutoff", 69 | ); 70 | assert!(matches!( 71 | diff, 72 | StatsDiff { 73 | changed: 6, 74 | recomputed: 6, 75 | necessary: 6, // and all 6 are participating. 76 | .. 77 | } 78 | )); 79 | assert_eq!(result.value(), 10); 80 | 81 | // We don't have the problem with "newly allocated tuples" having 82 | // different pointer addresses. The default cutoff comparator in 83 | // incremental-rs is PartialEq. 84 | z.modify(|z| z.b = (1, 4)); 85 | 86 | let diff = stabilise_diff(&incr, "after updating z.b but with no actual change"); 87 | assert!(matches!( 88 | diff, 89 | StatsDiff { 90 | changed: 0, 91 | // One for the z watch node. That's it. z.b gets cut off. 92 | recomputed: 1, 93 | .. 
94 | } 95 | )); 96 | 97 | assert_eq!(result.value(), 10); 98 | z.modify(|z| z.b = (5, 6)); 99 | 100 | let diff = stabilise_diff(&incr, "after updating z.b"); 101 | assert!(matches!( 102 | diff, 103 | StatsDiff { 104 | changed: 4, // all except a_prod 105 | recomputed: 5, // but a_prod didn't need to update at all. 106 | .. 107 | } 108 | )); 109 | assert_eq!(result.value(), 36); 110 | } 111 | 112 | /// ```ignore 113 | /// +--------+ 114 | /// .->| a_prod |-. 115 | /// / +--------+ \ 116 | /// +---+/ '->+--------+ 117 | /// | z | | result | 118 | /// +---+\ .->+--------+ 119 | /// \ +--------+ / 120 | /// '->| b_prod |-' 121 | /// +--------+ 122 | /// ``` 123 | /// 124 | fn sumproduct_smaller_graph(z: Incr) -> Incr { 125 | let a_prod = z.map(|z| { 126 | println!("a_prod ran (expensive)"); 127 | z.a.iter().product() 128 | }); 129 | let b_prod = z.map(|&Z { b: (b1, b2), .. }| { 130 | println!("b_prod ran (cheap)"); 131 | b1 * b2 132 | }); 133 | a_prod.map2(&b_prod, |a, b| a + b) 134 | } 135 | 136 | #[test] 137 | fn two() { 138 | let incr = IncrState::new(); 139 | let z = incr.var(Z { 140 | a: Rc::new(vec![3, 2]), 141 | b: (1, 4), 142 | }); 143 | let result = z.pipe(sumproduct_smaller_graph).observe(); 144 | 145 | let diff = stabilise_diff(&incr, "after observing sumproduct_smaller_graph"); 146 | assert_eq!(incr.stats().created, 4); 147 | assert!(matches!( 148 | diff, 149 | StatsDiff { 150 | changed: 4, 151 | recomputed: 4, 152 | .. 153 | } 154 | )); 155 | 156 | z.modify(|z| z.b = (5, 6)); 157 | 158 | let diff = stabilise_diff(&incr, "after updating z.b"); 159 | assert!(matches!( 160 | diff, 161 | StatsDiff { 162 | changed: 3, // all except a_prod changed. 163 | recomputed: 4, // but all of them recomputed. 164 | .. 
165 | } 166 | )); 167 | 168 | assert_eq!(result.value(), 36); 169 | } 170 | } 171 | -------------------------------------------------------------------------------- /tests/expert.rs: -------------------------------------------------------------------------------- 1 | use std::{cell::RefCell, rc::Rc}; 2 | // RUST_LOG_SPAN_EVENTS=enter,exit 3 | use test_log::test; 4 | 5 | use incremental::incrsan::NotObserver; 6 | use incremental::{expert::*, Incr, IncrState, Value}; 7 | 8 | fn join(incr: &Incr>) -> Incr { 9 | let prev_rhs: Rc>>> = Rc::new(None.into()); 10 | let state = incr.state(); 11 | let join = Node::::new(&state, { 12 | let prev_rhs_ = prev_rhs.clone(); 13 | move || prev_rhs_.borrow().clone().unwrap().value_cloned() 14 | }); 15 | let join_ = join.weak(); 16 | // In order to schedule the dependency additions/deletions before the expert Node executes, 17 | // we put this in a Map node (akin to BindLhsChange). 18 | let lhs_change = incr.map(move |rhs| { 19 | let dep = join_.add_dependency(rhs); 20 | let mut prev_rhs_ = prev_rhs.borrow_mut(); 21 | if let Some(prev) = prev_rhs_.take() { 22 | join_.remove_dependency(prev); 23 | } 24 | prev_rhs_.replace(dep); 25 | }); 26 | join.add_dependency(&lhs_change); 27 | join.watch() 28 | } 29 | 30 | #[test] 31 | fn test_join() { 32 | let incr = IncrState::new(); 33 | let inner = incr.var(10i32); 34 | let outer = incr.var(inner.watch()); 35 | let joined = join(&outer); 36 | let o = joined.observe(); 37 | incr.stabilise(); 38 | assert_eq!(o.try_get_value(), Ok(10)); 39 | inner.set(20); 40 | incr.stabilise(); 41 | assert_eq!(o.try_get_value(), Ok(20)); 42 | } 43 | 44 | #[allow(dead_code)] 45 | fn bind( 46 | incr: Incr, 47 | mut f: impl FnMut(&T) -> Incr + 'static + NotObserver, 48 | ) -> Incr { 49 | let prev_rhs: Rc>>> = Rc::new(None.into()); 50 | let state = incr.state(); 51 | let join = Node::::new(&state, { 52 | let prev_rhs_ = prev_rhs.clone(); 53 | move || prev_rhs_.borrow().clone().unwrap().value_cloned() 54 | }); 55 | let 
join_ = join.weak(); 56 | let lhs_change = incr.map(move |input| { 57 | let rhs = f(input); 58 | let mut prev_rhs_ = prev_rhs.borrow_mut(); 59 | if prev_rhs_.as_ref().map_or(false, |prev| prev.node() == rhs) { 60 | return; 61 | } 62 | let dep = join_.add_dependency(&rhs); 63 | if let Some(prev) = prev_rhs_.take() { 64 | join_.remove_dependency(prev); 65 | } 66 | prev_rhs_.replace(dep); 67 | }); 68 | join.add_dependency(&lhs_change); 69 | join.watch() 70 | } 71 | 72 | #[test] 73 | fn map345_expert() { 74 | let incr = IncrState::new(); 75 | let i1 = incr.var(3); 76 | let i2 = incr.var(5); 77 | let i3 = incr.var(7); 78 | let triple = i1.map3(&i2, &i3, |a, b, c| (a * b, *c)); 79 | 80 | // now have an expert node add a dependency on triple 81 | let outer = incr.var(triple.clone()); 82 | let joined = join(&outer); 83 | 84 | let j = joined.observe(); 85 | incr.stabilise(); 86 | assert_eq!(j.value(), (15, 7)); 87 | } 88 | 89 | fn manual_zip2(one: &Incr, two: &Incr) -> Incr<(T1, T2)> { 90 | let state = one.state(); 91 | enum Storage { 92 | None, 93 | OneOnly(A), 94 | TwoOnly(B), 95 | Both(A, B), 96 | } 97 | impl Storage { 98 | fn take(&mut self) -> Self { 99 | std::mem::replace(self, Storage::None) 100 | } 101 | fn both_cloned(&self) -> (A, B) 102 | where 103 | A: Clone, 104 | B: Clone, 105 | { 106 | match self { 107 | Self::Both(a, b) => (a.clone(), b.clone()), 108 | _ => panic!("zip2 node has not yet read both inputs"), 109 | } 110 | } 111 | } 112 | let current = Rc::new(RefCell::new(Storage::None)); 113 | let zip2 = Node::<(T1, T2)>::new(&state, { 114 | let current_ = current.clone(); 115 | move || current_.borrow().both_cloned() 116 | }); 117 | let current_1 = current.clone(); 118 | zip2.add_dependency_with(one, move |new_one| { 119 | let mut tuple = current_1.borrow_mut(); 120 | let storage = tuple.take(); 121 | let new_one = new_one.clone(); 122 | let new = match storage { 123 | Storage::Both(_, b) | Storage::TwoOnly(b) => Storage::Both(new_one, b), 124 | 
Storage::None | Storage::OneOnly(_) => Storage::OneOnly(new_one), 125 | }; 126 | *tuple = new; 127 | }); 128 | let current_2 = current; 129 | zip2.add_dependency_with(two, move |new_two| { 130 | let mut tuple = current_2.borrow_mut(); 131 | let storage = tuple.take(); 132 | let new_b = new_two.clone(); 133 | let new = match storage { 134 | Storage::Both(a, _) | Storage::OneOnly(a) => Storage::Both(a, new_b), 135 | Storage::None | Storage::TwoOnly(_) => Storage::TwoOnly(new_b), 136 | }; 137 | *tuple = new; 138 | }); 139 | zip2.watch() 140 | } 141 | 142 | #[test] 143 | fn test_zip2() { 144 | let incr = IncrState::new(); 145 | let i1 = incr.var(3); 146 | let i2 = incr.var(5); 147 | let z = manual_zip2(&i1, &i2); 148 | let o = z.observe(); 149 | incr.stabilise(); 150 | assert_eq!(o.value(), (3, 5)); 151 | } 152 | 153 | #[test] 154 | fn expert_duplicate_inputs() { 155 | let incr = IncrState::new(); 156 | let constant = incr.constant(3); 157 | let z = manual_zip2(&constant, &constant); 158 | let o = z.observe(); 159 | incr.stabilise(); 160 | assert_eq!(o.value(), (3, 3)); 161 | drop(o); 162 | incr.stabilise(); 163 | } 164 | -------------------------------------------------------------------------------- /src/kind.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{self, Debug}; 2 | use std::rc::Rc; 3 | 4 | use crate::boxes::{new_unsized, SmallBox}; 5 | use crate::incrsan::NotObserver; 6 | use crate::var::ErasedVariable; 7 | use crate::{Incr, NodeRef, Value, ValueInternal}; 8 | 9 | pub(crate) trait KindTrait: 'static + NotObserver + Debug { 10 | fn compute(&self) -> SmallBox; 11 | fn children_len(&self) -> usize; 12 | fn iter_children_packed(&self) -> Box + '_>; 13 | fn slow_get_child(&self, index: usize) -> NodeRef; 14 | fn debug_ty(&self, f: &mut fmt::Formatter) -> fmt::Result; 15 | } 16 | 17 | mod array_fold; 18 | mod bind; 19 | pub(crate) mod expert; 20 | mod map; 21 | 22 | pub(crate) use array_fold::*; 23 | 
pub(crate) use bind::*; 24 | pub(crate) use expert::ExpertNode; 25 | pub(crate) use map::*; 26 | 27 | pub(crate) enum Kind { 28 | Constant(SmallBox), 29 | ArrayFold(SmallBox), 30 | // We have a strong reference to the Var, because (e.g.) the user's public::Var 31 | // may have been set and then dropped before the next stabilise(). 32 | Var(Rc), 33 | Map(map::MapNode), 34 | MapWithOld(map::MapWithOld), 35 | MapRef(map::MapRefNode), 36 | Map2(map::Map2Node), 37 | Map3(map::Map3Node), 38 | Map4(map::Map4Node), 39 | Map5(map::Map5Node), 40 | Map6(map::Map6Node), 41 | BindLhsChange { 42 | bind: Rc, 43 | }, 44 | BindMain { 45 | bind: Rc, 46 | // Ownership goes 47 | // a Kind::BindMain holds a BindNode & the BindLhsChange 48 | // a Kind::BindLhsChange holds a BindNode 49 | // BindNode holds weak refs to both 50 | lhs_change: NodeRef, 51 | }, 52 | Expert(expert::ExpertNode), 53 | } 54 | 55 | impl Debug for Kind { 56 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 57 | match self { 58 | Kind::Constant(_) => write!(f, "Constant"), 59 | Kind::ArrayFold(af) => write!(f, "ArrayFold({af:?})"), 60 | Kind::Var(var) => write!(f, "Var({:?})", var), 61 | Kind::Map(map) => write!(f, "Map({:?})", map), 62 | Kind::MapWithOld(map) => write!(f, "MapWithOld({:?})", map), 63 | Kind::Map2(map) => write!(f, "Map2({:?})", map), 64 | Kind::Map3(map) => write!(f, "Map3({:?})", map), 65 | Kind::Map4(map) => write!(f, "Map4({:?})", map), 66 | Kind::Map5(map) => write!(f, "Map5({:?})", map), 67 | Kind::Map6(map) => write!(f, "Map6({:?})", map), 68 | Kind::BindLhsChange { bind, .. } => write!(f, "BindLhsChange({:?})", bind), 69 | Kind::BindMain { bind, .. 
} => write!(f, "BindMain({:?})", bind), 70 | Kind::MapRef(mapref) => write!(f, "MapRef({:?})", mapref), 71 | Kind::Expert(expert) => write!(f, "Expert({:?})", expert), 72 | } 73 | } 74 | } 75 | 76 | impl Kind { 77 | pub(crate) fn debug_ty(&self) -> impl Debug + '_ { 78 | return KindDebugTy(self); 79 | struct KindDebugTy<'a>(&'a Kind); 80 | impl<'a> Debug for KindDebugTy<'a> { 81 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 82 | match self.0 { 83 | Kind::Constant(_) => write!(f, "Constant"), 84 | Kind::ArrayFold(af) => af.debug_ty(f), 85 | Kind::Var(var) => var.debug_ty(f), 86 | Kind::Map(..) => { 87 | write!(f, "Map<(...) -> ...>") 88 | } 89 | Kind::MapWithOld(..) => { 90 | write!(f, "MapWithOld<(...) -> ...>") 91 | } 92 | Kind::MapRef(..) => { 93 | write!(f, "MapRef<(...) -> ...>") 94 | } 95 | Kind::Map2(..) => { 96 | write!(f, "Map2<(...) -> ...>") 97 | } 98 | Kind::Map3(..) => { 99 | write!(f, "Map3<(...) -> ...>") 100 | } 101 | Kind::Map4(..) => { 102 | write!(f, "Map4<(...) -> ...>") 103 | } 104 | Kind::Map5(..) => { 105 | write!(f, "Map5<(...) -> ...>") 106 | } 107 | Kind::Map6(..) => { 108 | write!(f, "Map6<(...) -> ...>") 109 | } 110 | Kind::BindLhsChange { .. } => { 111 | write!(f, "BindLhsChange",) 112 | } 113 | Kind::BindMain { .. 
} => { 114 | write!(f, "BindMain<(lhs: dynamic) -> (dynamic)>",) 115 | } 116 | Kind::Expert(_) => write!(f, "Expert"), 117 | } 118 | } 119 | } 120 | } 121 | 122 | pub(crate) fn constant(value: T) -> Kind { 123 | Kind::Constant(new_unsized!(value)) 124 | } 125 | } 126 | 127 | impl Kind { 128 | pub(crate) const BIND_RHS_CHILD_INDEX: i32 = 1; 129 | pub(crate) fn initial_num_children(&self) -> usize { 130 | match self { 131 | Self::Constant(_) => 0, 132 | Self::ArrayFold(af) => af.children_len(), 133 | Self::Var(_) => 0, 134 | Self::Map(_) | Self::MapWithOld(_) | Self::MapRef(_) => 1, 135 | Self::Map2(_) => 2, 136 | Self::Map3(_) => 3, 137 | Self::Map4(_) => 4, 138 | Self::Map5(_) => 5, 139 | Self::Map6(_) => 6, 140 | Self::BindLhsChange { .. } => 1, 141 | Self::BindMain { .. } => 2, 142 | Self::Expert(_) => 0, 143 | } 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/adjust_heights_heap.rs: -------------------------------------------------------------------------------- 1 | use crate::Invariant; 2 | 3 | use super::recompute_heap::RecomputeHeap; 4 | use super::NodeRef; 5 | use crate::node::ErasedNode; 6 | use std::collections::VecDeque; 7 | 8 | type Queue = VecDeque; 9 | 10 | #[derive(Debug)] 11 | pub(crate) struct AdjustHeightsHeap { 12 | length: usize, 13 | height_lower_bound: i32, 14 | max_height_seen: i32, 15 | queues: Vec, 16 | } 17 | 18 | impl AdjustHeightsHeap { 19 | pub(crate) fn is_empty(&self) -> bool { 20 | self.length == 0 21 | } 22 | pub(crate) fn max_height_allowed(&self) -> i32 { 23 | self.queues.len() as i32 - 1 24 | } 25 | pub(crate) fn new(max_height_allowed: usize) -> Self { 26 | Self { 27 | length: 0, 28 | height_lower_bound: max_height_allowed as i32 + 1, 29 | max_height_seen: 0, 30 | queues: vec![Default::default(); max_height_allowed + 1], 31 | } 32 | } 33 | 34 | pub fn clear(&mut self) { 35 | for q in self.queues.iter_mut() { 36 | q.clear(); 37 | } 38 | self.length = 0; 39 | 
self.height_lower_bound = self.max_height_allowed() + 1; 40 | } 41 | 42 |
// Raise the ceiling on node heights this heap can accommodate.
// NOTE(review): `new(max_height_allowed)` builds `max_height_allowed + 1`
// queues, making `max_height_allowed()` == `queues.len() - 1`; resizing to
// `new_mha` (not `new_mha + 1`) therefore leaves `max_height_allowed()` ==
// `new_mha - 1` — looks off by one relative to `new`. TODO confirm intended
// semantics against `new` and `RecomputeHeap::set_max_height_allowed`.
pub(crate) fn set_max_height_allowed(&mut self, new_mha: usize) { 43 | if (new_mha as i32) < self.max_height_seen { 44 | panic!("cannot set max_height_allowed less than max height already seen"); 45 | } 46 | debug_assert!(self.is_empty()); 47 | debug_assert_eq!(calculate_len(&self.queues), 0); 48 | self.queues.resize(new_mha, VecDeque::new()); 49 | } 50 |
// Enqueue `node` at its current (pre-adjustment) height unless it is already
// in this heap; membership is tracked via `height_in_adjust_heights_heap`,
// where -1 means "not present".
pub(crate) fn add_unless_mem(&mut self, node: NodeRef) { 51 | if node.height_in_adjust_heights_heap().get() == -1 { 52 | let height = node.height(); 53 | /* We process nodes in increasing order of pre-adjusted height, so it is a bug if we 54 | ever try to add a node that would violate that. */ 55 | debug_assert!(height >= self.height_lower_bound); 56 | /* Whenever we set a node's height, we use [set_height], which enforces this. */ 57 | debug_assert!(height <= self.max_height_allowed()); 58 | node.height_in_adjust_heights_heap().set(height); 59 | self.length += 1; 60 | let q: &mut Queue = self.queues.get_mut(height as usize).unwrap(); 61 | q.push_back(node); 62 | } 63 | } 64 |
// Pop a node of minimum height. Scans queues upward from the cached
// `height_lower_bound`, advancing the bound past empty queues as a side
// effect; unmarks the popped node (-1 sentinel) before returning it.
pub(crate) fn remove_min(&'_ mut self) -> Option { 65 | if self.is_empty() { 66 | return None; 67 | } 68 | let mut height = self.height_lower_bound; 69 | let mut q: &mut Queue; 70 | while { 71 | q = self.queues.get_mut(height as usize)?; 72 | q.is_empty() 73 | } { 74 | height += 1; 75 | } 76 | self.height_lower_bound = height; 77 | let node = q.pop_front()?; 78 | node.height_in_adjust_heights_heap().set(-1); 79 | self.length -= 1; 80 | Some(node) 81 | } 82 |
// Record a new height on `node`, tracking the maximum ever seen and
// panicking if it exceeds the allowed ceiling. All height writes during
// adjustment go through here, which upholds the `add_unless_mem` invariant.
pub(crate) fn set_height(&mut self, node: &NodeRef, height: i32) { 83 | if height > self.max_height_seen { 84 | self.max_height_seen = height; 85 | if height > self.max_height_allowed() { 86 | panic!( 87 | "node with too large height: {height} > max allowed {}", 88 | self.max_height_allowed() 89 | ); 90 | } 91 | } 92 | node.set_height(height); 93 | } 94 |
// Enforce `parent.height > child.height` for one edge, scheduling `parent`
// for further adjustment when its height must be raised (continues on the
// following lines).
pub(crate) fn ensure_height_requirement( 95 | &mut self, 96
| original_child: &NodeRef, 97 | original_parent: &NodeRef, 98 | child: &NodeRef, 99 | parent: &NodeRef, 100 | ) {
// A cycle exists iff the height-raising wave reaches the original child
// again: detected by pointer identity, and reported with both the original
// and the currently-processed edge.
101 | debug_assert!(child.is_necessary()); 102 | debug_assert!(parent.is_necessary()); 103 | if crate::rc_thin_ptr_eq(parent, original_child) { 104 | panic!( 105 | "adding edge made graph cyclic:\n\ 106 | original_child: {original_child:?}\n\ 107 | original_parent: {original_parent:?}\n\ 108 | current child: {child:?}\n\ 109 | current parent: {parent:?}" 110 | ); 111 | } 112 | if child.height() >= parent.height() { 113 | self.add_unless_mem(parent.clone()); 114 | /* We set [parent.height] after adding [parent] to the heap, so that [parent] goes 115 | in the heap with its pre-adjusted height. */ 116 | self.set_height(parent, child.height() + 1); 117 | } 118 | }
// Restore the height invariant for the whole graph after adding the edge
// original_child -> original_parent: seed the heap with that edge, then
// process affected nodes in increasing height order, keeping the recompute
// heap's per-node heights in sync along the way.
119 | pub(crate) fn adjust_heights( 120 | &mut self, 121 | rch: &RecomputeHeap, 122 | original_child: NodeRef, 123 | original_parent: NodeRef, 124 | ) { 125 | tracing::debug!( 126 | "adjust_heights from child(id={:?},h={:?}) to parent(id={:?},h={:?})", 127 | original_child.id(), 128 | original_child.height(), 129 | original_parent.id(), 130 | original_parent.height() 131 | ); 132 | debug_assert!(self.is_empty()); 133 | debug_assert!(original_child.height() >= original_parent.height()); 134 | self.height_lower_bound = original_parent.height(); 135 | self.ensure_height_requirement( 136 | &original_child, 137 | &original_parent, 138 | &original_child, 139 | &original_parent, 140 | ); 141 | while let Some(child) = self.remove_min() { 142 | tracing::debug!( 143 | "ahh popped(in_rch={:?}): {:?}", 144 | child.is_in_recompute_heap(), 145 | child 146 | ); 147 | if child.is_in_recompute_heap() { 148 | rch.increase_height(&child); 149 | } 150 | // for each of the child's parents, add it to the heap 151 | // if child height >= parent height. 152 | child.ensure_parent_height_requirements(self, &original_child, &original_parent); 153 | child.adjust_heights_bind_lhs_change(self, &original_child, &original_parent); 154 | } 155 | debug_assert!(self.is_empty()); 156 | debug_assert!(original_child.height() < original_parent.height()); 157 | } 158 | } 159 |
// Total node count across all per-height queues; used only for debug
// assertions and the invariant check below.
160 | fn calculate_len(queues: &[Queue]) -> usize { 161 | queues.iter().map(|q| q.len()).sum() 162 | } 163 | 164 | impl Invariant for AdjustHeightsHeap { 165 | fn invariant(&self) {
// `length` must equal the sum of queue lengths, every queue strictly below
// the lower bound must be empty, and the max height seen must fit the cap.
166 | assert_eq!(self.length, calculate_len(&self.queues)); 167 | assert!(self.height_lower_bound >= 0); 168 | assert!(self.height_lower_bound as usize <= self.queues.len()); 169 | for height in 0..self.height_lower_bound { 170 | let q = self.queues.get(height as usize).unwrap(); 171 | assert!(q.is_empty()) 172 | } 173 | assert!(self.max_height_seen >= 0); 174 | assert!(self.max_height_seen <= self.max_height_allowed()); 175 | self.queues.invariant(); 176 | } 177 | } 178 |
// Each queued node must carry its queue's index as its recorded
// height-in-heap.
// NOTE(review): the scrape has stripped generic parameters here
// (`impl Invariant for Vec>`); the original was presumably
// `impl Invariant for Vec<Queue>` — confirm against the repository.
179 | impl Invariant for Vec> { 180 | fn invariant(&self) { 181 | let queues: &[Queue] = self.as_slice(); 182 | for (height, q) in queues.iter().enumerate() { 183 | let q: &Queue = q; 184 | let height = height as i32; 185 | for node in q.iter() { 186 | assert!(node.height_in_adjust_heights_heap().get() == height); 187 | } 188 | } 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /incremental-macros/examples/calc.rs: -------------------------------------------------------------------------------- 1 | //! This example demonstrates how to memoize the execution of scripts which can 2 | //! depend on other scripts---invalidating the result of a script's execution 3 | //! only if a file it depends on changes. 4 | //! 5 | //! It originally appeared in the similar Rust computation caching tool `comemo` 6 | //! at , 7 | //! used under the MIT license.
8 | 9 | use incremental::{Incr, IncrState, Var}; 10 | use incremental_macros::{db, interned, memoized, InternedString}; 11 | use std::collections::HashMap; 12 | 13 | db! { 14 | struct Db { 15 | /// We are using a HashMap for this basic example, but Incremental will take a clone each 16 | /// time you modify + stabilise, in order to help determine whether it changed or not and 17 | /// avoid spurious recomputation. So using an immutable shared-structure `im_rc::HashMap` 18 | /// is generally a better choice. Or `im_rc::OrdMap`, which can also be used with 19 | /// incremental-map. 20 | /// 21 | /// (note, could be good to have a version of Var that does not do this, same way MapRef 22 | /// does not do this.) 23 | files: Var>, 24 | } provides { 25 | EvalFile, 26 | InternedString, 27 | } 28 | } 29 | 30 | interned!( 31 | /// A newtype of [string_interner::DefaultSymbol]. 32 | /// 33 | /// Can be used with a Db providing `incremental_macros::InternedString`. 34 | type Filename = String; 35 | ); 36 | 37 | memoized! { 38 | fn eval_file(db: &Db, filename: Filename) -> Incr { 39 | let files = db.files.watch(); 40 | 41 | // This is our caching layer. We are only interested in changes in the script stored 42 | // against `filename`. 43 | let script = files.map(move |files| { 44 | files.get(&filename).cloned().unwrap_or_default() 45 | }); 46 | 47 | // using bind, each time the script for this filename is changed, we generate a new 48 | // computation graph. The shape of the computation will change depending on what we 49 | // write in the scripts. 50 | let db = db.clone(); 51 | script.bind(move |script| eval_script(&db, script)) 52 | } 53 | } 54 | 55 | fn eval_script(db: &Db, script: &str) -> Incr { 56 | let db = db.clone(); 57 | 58 | // for "2 + 3 + eval file.calc", this will end up holding 5. 59 | let mut literal_sum = 0; 60 | 61 | // The rest of the components of the addition are Incrs, which we will sum together 62 | // using the `fold` method. 
63 | let incr_components = script 64 | .split('+') 65 | .map(str::trim) 66 | .filter_map(|part| match part.strip_prefix("eval ") { 67 | Some(path) => { 68 | let filename = Filename::new(&db, path); 69 | Some(eval_file(&db, filename)) 70 | } 71 | None => { 72 | literal_sum += part.parse::().unwrap(); 73 | None 74 | } 75 | }) 76 | .collect::>(); 77 | 78 | // we initialise the fold with `literal_sum`. 79 | // we could, alternatively, push an `db.incr.constant(literal_sum)` onto the array 80 | // and then initialise with `0`, but we like to avoid creating unnecessary Incrs. 81 | db.incr.fold(incr_components, literal_sum, |a, b| a + b) 82 | } 83 | 84 | fn main() { 85 | tracing_subscriber::fmt() 86 | .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) 87 | .init(); 88 | 89 | let incr = IncrState::new(); 90 | let db = &Db::new(&incr, incr.var(HashMap::new())); 91 | 92 | let alpha_calc = Filename::new(db, "alpha.calc"); 93 | let beta_calc = Filename::new(db, "beta.calc"); 94 | let gamma_calc = Filename::new(db, "gamma.calc"); 95 | 96 | db.files.modify(|fs| { 97 | fs.insert(alpha_calc, "2 + eval beta.calc".into()); 98 | fs.insert(beta_calc, "2 + 3".into()); 99 | fs.insert(gamma_calc, "8 + 3".into()); 100 | }); 101 | 102 | let alpha_incr = eval_file(db, alpha_calc); 103 | let alpha_obs = alpha_incr.observe(); 104 | 105 | // The Incr is cached using the filename as a key. Since eval_file 106 | // is recursively defined, each dependency ("eval beta.calc") will use 107 | // the cached version if possible. Multiple files referencing the same 108 | // beta.calc will reuse the results of evaluating it. 109 | let alpha_2 = eval_file(db, alpha_calc); 110 | assert_eq!(alpha_2, alpha_incr); 111 | 112 | // 113 | // Dependencies 114 | // 115 | 116 | // This is our first stabilise call. The variable `files` is new, and 117 | // has never propagated its changes. Both alpha.calc and beta.calc 118 | // are new and will be computed for the first time.. 
119 | incr.stabilise(); 120 | assert_eq!(alpha_obs.value(), 7); 121 | 122 | // A cache hit. 123 | // 124 | // beta.calc was a dependency of alpha.calc, so if you grab its node 125 | // and slap an observer on it, you get the results for free. 126 | // 127 | // Nevertheless we must call incr.stabilise(), as observers can't give 128 | // you their value until they have been stabilised. If you remove the call, 129 | // you'll notice `beta.try_get_value()` returns `Err(ObserverError::NeverStabilised)`. 130 | let beta = eval_file(db, beta_calc).observe(); 131 | incr.stabilise(); 132 | assert_eq!(beta.try_get_value(), Ok(5)); 133 | 134 | // Modify the gamma file. Nothing depends on gamma.calc. This will simply 135 | // propagate the new value of `files` to the caching nodes for alpha & beta, 136 | // and stop there. 137 | db.files.modify(|fs| { 138 | fs.insert(gamma_calc, "42".into()); 139 | }); 140 | incr.stabilise(); 141 | assert_eq!(alpha_obs.value(), 7); 142 | 143 | // Now beta.calc will depend on gamma.calc 144 | db.files.modify(|fs| { 145 | fs.insert(beta_calc, "4 + eval gamma.calc".into()); 146 | }); 147 | incr.stabilise(); 148 | 149 | // Alpha and beta both have to recompute, because beta.calc was referenced by alpha.calc. 150 | // There is now a computation for gamma.calc in the mix, because it is referenced now. 151 | assert_eq!(alpha_obs.value(), 48); 152 | 153 | // Observers and the computation graph 154 | 155 | // observe gamma.calc. 156 | let gamma = eval_file(db, gamma_calc) 157 | .with_graphviz_user_data("gamma observes this node") 158 | .observe(); 159 | incr.save_dot_to_file("gamma-in-use.dot"); 160 | 161 | // Drop the gamma.calc reference from beta.calc. Alpha does not depend on gamma any more. 
162 | db.files.modify(|fs| { 163 | fs.insert(beta_calc, "43".into()); 164 | }); 165 | 166 | incr.stabilise(); 167 | assert_eq!(alpha_obs.value(), 45); 168 | incr.save_dot_to_file("gamma-hanging-around.dot"); 169 | 170 | // The gamma observer is now the only thing hanging onto the evaluated gamma.calc computation. 171 | // Run with `RUST_LOG=trace cargo run --example calc` to see the computation being torn down. 172 | tracing::trace_span!("drop(gamma)").in_scope(|| { 173 | drop(gamma); 174 | incr.stabilise(); 175 | }); 176 | incr.save_dot_to_file("gamma-gone.dot"); 177 | 178 | // If we add a cycle, stabilise will panic. There should be a better way to recover from such a 179 | // cycle. 180 | 181 | // db.files.modify(|fs| { 182 | // fs.insert(alpha_calc, "10 + eval alpha.calc".into()); 183 | // }); 184 | // incr.stabilise(); 185 | } 186 | -------------------------------------------------------------------------------- /src/recompute_heap.rs: -------------------------------------------------------------------------------- 1 | use crate::node::ErasedNode; 2 | use crate::CellIncrement; 3 | use crate::NodeRef; 4 | use std::cell::{Cell, Ref, RefCell}; 5 | use std::collections::VecDeque; 6 | use std::fmt; 7 | 8 | type Queue = RefCell>; 9 | 10 | pub(crate) struct RecomputeHeap { 11 | queues: RefCell>, 12 | height_lower_bound: Cell, 13 | length: Cell, 14 | swap: Queue, 15 | } 16 | 17 | impl fmt::Debug for RecomputeHeap { 18 | #[rustfmt::skip::macros] 19 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 20 | writeln!(f, "RecomputeHeap {{")?; 21 | writeln!(f, " length: {},", self.len())?; 22 | writeln!( 23 | f, 24 | " height_lower_bound: {},", 25 | self.height_lower_bound.get() 26 | )?; 27 | writeln!(f, " ---")?; 28 | let mut skip = 0; 29 | for (ix, q) in self.queues.borrow().iter().map(RefCell::borrow).enumerate() { 30 | if q.is_empty() { 31 | skip += 1; 32 | continue; 33 | } 34 | if skip != 0 { 35 | writeln!(f, " [ skipped {:3} ]", skip)?; 36 | } 37 | skip = 0; 
38 | writeln!( 39 | f, 40 | " [ height: {:3}, len: {:5}, cap: {:5} ],", 41 | ix, 42 | q.len(), 43 | q.capacity() 44 | )?; 45 | } 46 | writeln!(f, "}}")?; 47 | Ok(()) 48 | } 49 | } 50 | 51 | impl RecomputeHeap {
// Allocate one queue per height in [0, max_height_allowed], so
// `max_height_allowed()` == `queues.len() - 1`. The lower bound starts one
// past the maximum, i.e. "heap empty".
52 | pub fn new(max_height_allowed: usize) -> Self { 53 | let mut queues = Vec::with_capacity(max_height_allowed + 1); 54 | for _ in 0..max_height_allowed + 1 { 55 | queues.push(Default::default()); 56 | } 57 | Self { 58 | queues: queues.into(), 59 | height_lower_bound: (max_height_allowed as i32 + 1).into(), 60 | length: 0.into(), 61 | swap: Default::default(), 62 | } 63 | } 64 | 65 | pub fn len(&self) -> usize { 66 | self.length.get() 67 | } 68 | 69 | pub fn is_empty(&self) -> bool { 70 | self.len() == 0 71 | } 72 |
// Empty every queue (capacity retained) and reset the bookkeeping.
// NOTE(review): here the emptied lower bound is `i32::MAX`, while `new`
// uses `max + 1` and `raise_min_height` uses `queues.len()` — `remove_min`'s
// `queues.get(..)?` tolerates any out-of-range bound, but the sentinels are
// inconsistent; confirm this is intentional.
73 | pub fn clear(&self) { 74 | for q in self.queues.borrow().iter() { 75 | q.borrow_mut().clear(); 76 | } 77 | self.swap.borrow_mut().clear(); 78 | self.length.set(0); 79 | self.height_lower_bound.set(i32::MAX); 80 | } 81 |
// Append `node` to the queue for its current height and record that height
// in `height_in_recompute_heap`. Does not touch `length` — callers
// (`insert`, `increase_height`) manage the count.
82 | pub fn link(&self, node: NodeRef) { 83 | // make these locals so the debugger can see them 84 | let _node_id = node.id().0; 85 | let node_height = node.height(); 86 | assert!(node_height >= 0); 87 | assert!(node_height <= self.max_height_allowed()); 88 | node.height_in_recompute_heap().set(node_height); 89 | let q = self.queue_for(node_height as usize); 90 | q.borrow_mut().push_back(node); 91 | } 92 |
// Remove `node` from the queue it was linked into (found by its recorded
// height). Panics if the node is not present; linear scan by pointer
// identity.
93 | pub fn unlink(&self, node: &NodeRef) { 94 | let height_in_rch = node.height_in_recompute_heap().get(); 95 | let queue = self.queue_for(height_in_rch as usize); 96 | // Unfortunately we must scan for the node 97 | // if this is slow, we should use a hash set or something instead with a fast "remove_any" 98 | // method 99 | let mut q = queue.borrow_mut(); 100 | let Some(indexof) = q.iter().position(|x| crate::rc_thin_ptr_eq(x, node)) else { 101 | panic!("node was not in recompute heap: {node:?}"); 102 | }; 103 | // order within a particular queue does not matter at all. 104 | // they're all the same height so they cannot have any dependencies 105 | // so we can use swap_remove 106 | q.swap_remove_back(indexof); 107 | } 108 |
// Add a node that needs recomputation, lowering the cached minimum height
// if necessary.
109 | pub fn insert(&self, node: NodeRef) { 110 | tracing::trace!("inserting into RCH @ h={} {:?}", node.height(), node); 111 | debug_assert!( 112 | !node.is_in_recompute_heap() && node.needs_to_be_computed(), 113 | "incorrect attempt to insert node in recompute heap" 114 | ); 115 | debug_assert!(node.height() <= self.max_height_allowed()); 116 | if node.height() < self.height_lower_bound.get() { 117 | self.height_lower_bound.set(node.height()); 118 | } 119 | self.link(node); 120 | self.length.increment(); 121 | } 122 |
// Remove a node that no longer needs recomputation and clear its membership
// marker (-1 sentinel in `height_in_recompute_heap`).
123 | pub fn remove(&self, node: NodeRef) { 124 | let _node_id = node.id(); 125 | debug_assert!( 126 | node.is_in_recompute_heap() && !node.needs_to_be_computed(), 127 | "incorrect attempt to remove node from recompute heap" 128 | ); 129 | self.unlink(&node); 130 | node.height_in_recompute_heap().set(-1); 131 | self.length.decrement(); 132 | } 133 |
// Current minimum occupied height (after tightening the cached bound);
// returns `queues.len()` when the heap is empty.
134 | pub fn min_height(&self) -> i32 { 135 | self.raise_min_height(); 136 | self.height_lower_bound.get() 137 | } 138 |
// Advance `height_lower_bound` past empty queues so it points at the lowest
// non-empty queue (or `queues.len()` when the heap is empty).
139 | fn raise_min_height(&self) { 140 | let queues = self.queues.borrow(); 141 | let max = queues.len() as i32; 142 | if self.length.get() == 0 { 143 | self.height_lower_bound.set(max); 144 | } else { 145 | while queues 146 | .get(self.height_lower_bound.get() as usize) 147 | .map_or(false, |q| q.borrow().is_empty()) 148 | { 149 | self.height_lower_bound.increment(); 150 | } 151 | } 152 | } 153 |
// Move an already-queued node to the queue for its (raised) height; its
// `length` is unchanged because unlink/link do not adjust the count.
154 | pub(crate) fn increase_height(&self, node: &NodeRef) { 155 | debug_assert!(node.height() > node.height_in_recompute_heap().get()); 156 | debug_assert!(node.is_in_recompute_heap()); 157 | debug_assert!(node.height() <= self.max_height_allowed()); 158 | self.unlink(node); 159 | self.link(node.clone()); // sets height_in_recompute_heap <- height 160 | } 161 |
// Borrow the queue at `height` as a mapped `Ref` into the outer `RefCell`
// (continues on the following lines).
162 | fn queue_for(&self, height: usize) -> Ref { 163 |
Ref::map(self.queues.borrow(), |queue| &queue[height]) 164 | } 165 |
// Pop a node of minimum height, scanning upward from the cached lower bound
// (interior mutability: `&self`, counters in `Cell`s). The `?` on
// `queues.get(..)` bails with `None` if the bound runs off the end.
166 | pub(crate) fn remove_min(&self) -> Option { 167 | if self.is_empty() { 168 | return None; 169 | } 170 | let queues = self.queues.borrow(); 171 | debug_assert!(self.height_lower_bound.get() >= 0); 172 | let len = queues.len(); 173 | let mut queue; 174 | while { 175 | queue = queues.get(self.height_lower_bound.get() as usize)?; 176 | queue.borrow().is_empty() 177 | } { 178 | self.height_lower_bound.increment(); 179 | debug_assert!( 180 | (self.height_lower_bound.get() as usize) < len, 181 | "RecomputeHeap::remove_min unexpectedly reached end of heap" 182 | ); 183 | } 184 | let mut q = queue.borrow_mut(); 185 | let node = q.pop_front()?; 186 | node.height_in_recompute_heap().set(-1); 187 | self.length.decrement(); 188 | Some(node) 189 | } 190 | 191 | pub(crate) fn max_height_allowed(&self) -> i32 { 192 | self.queues.borrow().len() as i32 - 1 193 | }
// Resize the per-height queue vector to match a new height ceiling.
194 | pub(crate) fn set_max_height_allowed(&self, new_max_height: usize) { 195 | let mut queues = self.queues.borrow_mut(); 196 | #[cfg(debug_assertions)] 197 | { 198 | // this should be ensured by adjust-heights-heap's tracking of highest node seen.
// NOTE(review): `queues.get(i)` is `Some` for every in-bounds index, so
// this assertion fails unconditionally whenever
// `new_max_height + 1 < queues.len()` (i.e. on any shrink). It presumably
// meant to assert the tail queues are *empty*
// (`queues.get(i).map_or(true, |q| q.borrow().is_empty())`) — confirm.
199 | for i in new_max_height + 1..queues.len() { 200 | assert!(queues.get(i).is_none()) 201 | } 202 |
// NOTE(review): `new` allocates `max_height_allowed + 1` queues
// (`max_height_allowed()` == len - 1), so resizing to `new_max_height`
// (not `new_max_height + 1`) leaves `max_height_allowed()` ==
// `new_max_height - 1` — looks off by one; the same pattern appears in
// `AdjustHeightsHeap::set_max_height_allowed`. Confirm against `new`.
} 203 | queues.resize(new_max_height, Queue::default()); 204 | self.height_lower_bound.set(std::cmp::min( 205 | self.height_lower_bound.get(), 206 | queues.len() as i32 + 1, 207 | )); 208 | } 209 | } 210 | -------------------------------------------------------------------------------- /benches/shares_per_symbol.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 2 | 3 | use im_rc::{ordmap::Entry, OrdMap}; 4 | use incremental::incrsan::NotObserver; 5 | use incremental::{Incr, IncrState, Value}; 6 | use incremental_map::im_rc::IncrOrdMap; 7 | use incremental_map::IncrMap; 8 | use tracing::Level; 9 | 10 | #[derive(Copy, Clone, PartialEq, Debug)] 11 | enum Dir { 12 | Buy, 13 | Sell, 14 | } 15 | 16 | #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] 17 | struct Symbol(u32); 18 | #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] 19 | struct Oid(u32); 20 | 21 | #[derive(Clone, PartialEq)] 22 | struct Order { 23 | // OCaml strings are GC'd.
24 | id: Oid, 25 | price: f32, 26 | size: u32, 27 | sym: Symbol, 28 | dir: Dir, 29 | } 30 | impl std::fmt::Debug for Order { 31 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 32 | f.debug_tuple("Order") 33 | .field(&self.id) 34 | .field(&self.dir) 35 | .field(&self.price) 36 | .field(&self.size) 37 | .field(&self.sym) 38 | .finish() 39 | } 40 | } 41 | 42 | fn index_by( 43 | map: Incr>, 44 | get_outer_index: impl Fn(&V) -> KOuter + Clone + 'static + NotObserver, 45 | ) -> Incr>> { 46 | let get_outer_index_ = get_outer_index.clone(); 47 | let indexed = map.incr_unordered_fold( 48 | OrdMap::new(), 49 | move |mut acc, key_inner, value| { 50 | let index = get_outer_index_(value); 51 | acc.entry(index) 52 | .or_insert_with(|| OrdMap::new()) 53 | .insert(key_inner.clone(), value.clone()); 54 | acc 55 | }, 56 | move |mut acc, key_inner, value| { 57 | let index = get_outer_index(value); 58 | match acc.entry(index) { 59 | Entry::Vacant(_) => panic!(), 60 | Entry::Occupied(mut o) => { 61 | let map = o.get_mut(); 62 | map.remove(key_inner); 63 | if map.is_empty() { 64 | o.remove(); 65 | } 66 | } 67 | } 68 | acc 69 | }, 70 | false, 71 | ); 72 | #[cfg(debug_assertions)] 73 | indexed.set_graphviz_user_data("index_by"); 74 | indexed 75 | } 76 | 77 | #[test] 78 | fn test_index_by() { 79 | let incr = IncrState::new(); 80 | let var = incr.var(OrdMap::::default()); 81 | let o = index_by(var.watch(), |x| x.to_uppercase()).observe(); 82 | let insert = |k: i32, val: &str| { 83 | var.modify(|map| { 84 | map.insert(k, val.to_string()); 85 | }); 86 | incr.stabilise(); 87 | o.value() 88 | }; 89 | use im_rc::ordmap; 90 | assert_eq!( 91 | insert(1, "bar"), 92 | ordmap! { 93 | "BAR".to_string() => ordmap! { 94 | 1i32 => "bar".to_string() 95 | } 96 | } 97 | ); 98 | assert_eq!( 99 | insert(1, "foo"), 100 | ordmap! { 101 | "FOO".to_string() => ordmap! { 102 | 1i32 => "foo".to_string() 103 | } 104 | } 105 | ); 106 | assert_eq!( 107 | insert(2, "foo"), 108 | ordmap! 
{ 109 | "FOO".to_string() => ordmap! { 110 | 1i32 => "foo".to_string(), 111 | 2i32 => "foo".to_string() 112 | } 113 | } 114 | ); 115 | assert_eq!( 116 | insert(3, "bar"), 117 | ordmap! { 118 | "BAR".to_string() => ordmap! { 119 | 3i32 => "bar".to_string() 120 | }, 121 | "FOO".to_string() => ordmap! { 122 | 1i32 => "foo".to_string(), 123 | 2i32 => "foo".to_string() 124 | } 125 | } 126 | ); 127 | assert_eq!( 128 | insert(2, "bar"), 129 | ordmap! { 130 | "BAR".to_string() => ordmap! { 131 | 2i32 => "bar".to_string(), 132 | 3i32 => "bar".to_string() 133 | }, 134 | "FOO".to_string() => ordmap! { 135 | 1i32 => "foo".to_string() 136 | } 137 | } 138 | ); 139 | } 140 | 141 | fn shares_per_symbol(orders: Incr>) -> Incr> { 142 | fn shares(_k: &Symbol, orders: Incr>) -> Incr { 143 | orders.incr_unordered_fold( 144 | 0, 145 | |acc, _k, x| acc + x.size, 146 | |acc, _k, x| acc - x.size, 147 | false, 148 | ) 149 | } 150 | 151 | let x = index_by(orders, |x| x.sym); 152 | x.incr_mapi_(shares) 153 | } 154 | 155 | fn shares_per_symbol_flat(orders: Incr>) -> Incr> { 156 | fn update_sym_map( 157 | op: fn(u32, u32) -> u32, 158 | ) -> impl FnMut(OrdMap, &Oid, &Order) -> OrdMap + NotObserver { 159 | move |mut acc, _k, o| { 160 | match acc.entry(o.sym) { 161 | Entry::Vacant(e) => { 162 | e.insert(o.size); 163 | } 164 | Entry::Occupied(mut e) => { 165 | e.insert(op(*e.get(), o.size)); 166 | } 167 | } 168 | acc 169 | } 170 | } 171 | orders.incr_unordered_fold( 172 | OrdMap::new(), 173 | update_sym_map(|a, b| a + b), 174 | update_sym_map(|a, b| a - b), 175 | false, 176 | ) 177 | } 178 | 179 | fn random_order(rng: &mut impl rand::Rng) -> Order { 180 | let num_symbols = 100; 181 | let sym = rng.gen_range(0..num_symbols); 182 | let size = rng.gen_range(0..10_000); 183 | let price = rng.gen_range(0..10_000) as f32 / 100.; 184 | let dir = if rng.gen_ratio(1, 2) { 185 | Dir::Buy 186 | } else { 187 | Dir::Sell 188 | }; 189 | let id = rng.gen_range(0..u32::MAX); 190 | Order { 191 | id: Oid(id), 192 | 
dir, 193 | price, 194 | size, 195 | sym: Symbol(sym), 196 | } 197 | } 198 | 199 | fn random_orders(rng: &mut impl rand::Rng, n: u32) -> OrdMap { 200 | (0..n).into_iter().fold(OrdMap::new(), |mut acc, _| { 201 | let o = random_order(rng); 202 | acc.insert(o.id, o); 203 | acc 204 | }) 205 | } 206 | 207 | fn setup( 208 | n: u32, 209 | sps_fn: fn(Incr>) -> Incr>, 210 | ) -> impl FnMut() { 211 | tracing::info!("setup called"); 212 | let mut rng = rand::thread_rng(); 213 | let init_orders = random_orders(&mut rng, n); 214 | let incr = IncrState::new(); 215 | let var = incr.var(init_orders.clone()); 216 | let shares = var.pipe(sps_fn).observe(); 217 | incr.stabilise(); 218 | if n < 100 { 219 | shares.save_dot_to_file(&format!("shares-{}.dot", n)); 220 | } 221 | move || { 222 | let random = random_order(&mut rng); 223 | var.set(init_orders.update(random.id, random)); 224 | incr.stabilise(); 225 | drop(shares.value()); 226 | } 227 | } 228 | 229 | #[tracing::instrument(skip_all)] 230 | fn bench_update(c: &mut Criterion) { 231 | // use tracing_subscriber::fmt::format::FmtSpan; 232 | tracing_subscriber::fmt() 233 | .with_max_level(Level::WARN) 234 | // .with_span_events(FmtSpan::ENTER) 235 | .init(); 236 | 237 | let size = 1_000_000; 238 | c.bench_with_input(BenchmarkId::new("nested", 20), &20, |b, &size| { 239 | let iter_fn = setup(size, shares_per_symbol); 240 | b.iter(iter_fn) 241 | }); 242 | c.bench_with_input(BenchmarkId::new("nested", size), &size, |b, &size| { 243 | let iter_fn = setup(size, shares_per_symbol); 244 | b.iter(iter_fn) 245 | }); 246 | c.bench_with_input(BenchmarkId::new("flat", size), &size, |b, &size| { 247 | let iter_fn = setup(size, shares_per_symbol_flat); 248 | b.iter(iter_fn) 249 | }); 250 | } 251 | 252 | criterion_group!(benches, bench_update); 253 | criterion_main!(benches); 254 | -------------------------------------------------------------------------------- /incremental-macros/src/debug.rs: 
-------------------------------------------------------------------------------- 1 | // Lifted directly from salsa-2022, under the MIT license. 2 | // 3 | 4 | use std::{ 5 | collections::{HashMap, HashSet}, 6 | fmt, 7 | rc::Rc, 8 | sync::Arc, 9 | }; 10 | 11 | pub trait DebugWithDb { 12 | fn debug<'me, 'db>(&'me self, db: &'me Db) -> DebugWith<'me, Db> 13 | where 14 | Self: Sized + 'me, 15 | { 16 | DebugWith { 17 | value: BoxRef::Ref(self), 18 | db, 19 | include_all_fields: false, 20 | } 21 | } 22 | 23 | fn debug_with<'me, 'db>(&'me self, db: &'me Db, include_all_fields: bool) -> DebugWith<'me, Db> 24 | where 25 | Self: Sized + 'me, 26 | { 27 | DebugWith { 28 | value: BoxRef::Ref(self), 29 | db, 30 | include_all_fields, 31 | } 32 | } 33 | 34 | /// Be careful when using this method inside a tracked function, 35 | /// because the default macro generated implementation will read all fields, 36 | /// maybe introducing undesired dependencies. 37 | fn debug_all<'me, 'db>(&'me self, db: &'me Db) -> DebugWith<'me, Db> 38 | where 39 | Self: Sized + 'me, 40 | { 41 | DebugWith { 42 | value: BoxRef::Ref(self), 43 | db, 44 | include_all_fields: true, 45 | } 46 | } 47 | 48 | fn into_debug<'me, 'db>(self, db: &'me Db) -> DebugWith<'me, Db> 49 | where 50 | Self: Sized + 'me, 51 | { 52 | DebugWith { 53 | value: BoxRef::Box(Box::new(self)), 54 | db, 55 | include_all_fields: false, 56 | } 57 | } 58 | 59 | /// Be careful when using this method inside a tracked function, 60 | /// because the default macro generated implementation will read all fields, 61 | /// maybe introducing undesired dependencies. 
62 | fn into_debug_all<'me, 'db>(self, db: &'me Db) -> DebugWith<'me, Db> 63 | where 64 | Self: Sized + 'me, 65 | { 66 | DebugWith { 67 | value: BoxRef::Box(Box::new(self)), 68 | db, 69 | include_all_fields: true, 70 | } 71 | } 72 | 73 | /// if `include_all_fields` is `false` only identity fields should be read, which means: 74 | /// - for `#\[salsa::input\]` no fields 75 | /// - for `#\[salsa::tracked\]` only fields with `#[id]` attribute 76 | /// - for `#\[salsa::interned\]` any field 77 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result; 78 | } 79 | 80 | pub struct DebugWith<'me, Db: ?Sized> { 81 | value: BoxRef<'me, dyn DebugWithDb + 'me>, 82 | db: &'me Db, 83 | include_all_fields: bool, 84 | } 85 | 86 | enum BoxRef<'me, T: ?Sized> { 87 | Box(Box), 88 | Ref(&'me T), 89 | } 90 | 91 | impl std::ops::Deref for BoxRef<'_, T> { 92 | type Target = T; 93 | 94 | fn deref(&self) -> &Self::Target { 95 | match self { 96 | BoxRef::Box(b) => b, 97 | BoxRef::Ref(r) => r, 98 | } 99 | } 100 | } 101 | 102 | impl fmt::Debug for DebugWith<'_, D> { 103 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 104 | DebugWithDb::fmt(&*self.value, f, self.db, self.include_all_fields) 105 | } 106 | } 107 | 108 | impl DebugWithDb for &T 109 | where 110 | T: DebugWithDb, 111 | { 112 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 113 | T::fmt(self, f, db, include_all_fields) 114 | } 115 | } 116 | 117 | impl DebugWithDb for Box 118 | where 119 | T: DebugWithDb, 120 | { 121 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 122 | T::fmt(self, f, db, include_all_fields) 123 | } 124 | } 125 | 126 | impl DebugWithDb for Rc 127 | where 128 | T: DebugWithDb, 129 | { 130 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 131 | T::fmt(self, f, db, include_all_fields) 132 | } 133 | } 134 | 135 | impl DebugWithDb 
for Arc 136 | where 137 | T: DebugWithDb, 138 | { 139 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 140 | T::fmt(self, f, db, include_all_fields) 141 | } 142 | } 143 | 144 | impl DebugWithDb for Vec 145 | where 146 | T: DebugWithDb, 147 | { 148 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 149 | let elements = self.iter().map(|e| e.debug_with(db, include_all_fields)); 150 | f.debug_list().entries(elements).finish() 151 | } 152 | } 153 | 154 | impl DebugWithDb for Option 155 | where 156 | T: DebugWithDb, 157 | { 158 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 159 | let me = self.as_ref().map(|v| v.debug_with(db, include_all_fields)); 160 | fmt::Debug::fmt(&me, f) 161 | } 162 | } 163 | 164 | impl DebugWithDb for HashMap 165 | where 166 | K: DebugWithDb, 167 | V: DebugWithDb, 168 | { 169 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 170 | let elements = self.iter().map(|(k, v)| { 171 | ( 172 | k.debug_with(db, include_all_fields), 173 | v.debug_with(db, include_all_fields), 174 | ) 175 | }); 176 | f.debug_map().entries(elements).finish() 177 | } 178 | } 179 | 180 | impl DebugWithDb for (A, B) 181 | where 182 | A: DebugWithDb, 183 | B: DebugWithDb, 184 | { 185 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 186 | f.debug_tuple("") 187 | .field(&self.0.debug_with(db, include_all_fields)) 188 | .field(&self.1.debug_with(db, include_all_fields)) 189 | .finish() 190 | } 191 | } 192 | 193 | impl DebugWithDb for (A, B, C) 194 | where 195 | A: DebugWithDb, 196 | B: DebugWithDb, 197 | C: DebugWithDb, 198 | { 199 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 200 | f.debug_tuple("") 201 | .field(&self.0.debug_with(db, include_all_fields)) 202 | .field(&self.1.debug_with(db, 
include_all_fields)) 203 | .field(&self.2.debug_with(db, include_all_fields)) 204 | .finish() 205 | } 206 | } 207 | 208 | impl DebugWithDb for HashSet 209 | where 210 | V: DebugWithDb, 211 | { 212 | fn fmt(&self, f: &mut fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> fmt::Result { 213 | let elements = self.iter().map(|e| e.debug_with(db, include_all_fields)); 214 | f.debug_list().entries(elements).finish() 215 | } 216 | } 217 | 218 | /// This is used by the macro generated code. 219 | /// If the field type implements `DebugWithDb`, uses that, otherwise, uses `Debug`. 220 | /// That's the "has impl" trick () 221 | #[doc(hidden)] 222 | pub mod helper { 223 | use super::{DebugWith, DebugWithDb}; 224 | use std::{fmt, marker::PhantomData}; 225 | 226 | pub trait Fallback { 227 | fn salsa_debug<'a>(a: &'a T, _db: &Db, _include_all_fields: bool) -> &'a dyn fmt::Debug { 228 | a 229 | } 230 | } 231 | 232 | pub struct SalsaDebug(PhantomData, PhantomData); 233 | 234 | impl, Db: ?Sized> SalsaDebug { 235 | #[allow(dead_code)] 236 | pub fn salsa_debug<'a, 'b: 'a>( 237 | a: &'a T, 238 | db: &'b Db, 239 | include_all_fields: bool, 240 | ) -> DebugWith<'a, Db> { 241 | a.debug_with(db, include_all_fields) 242 | } 243 | } 244 | 245 | impl Fallback for Everything {} 246 | } 247 | -------------------------------------------------------------------------------- /src/kind/map.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::{cell::Cell, fmt}; 3 | 4 | use crate::boxes::SmallBox; 5 | use crate::incrsan::NotObserver; 6 | use crate::{Incr, NodeRef}; 7 | use crate::{Value, ValueInternal}; 8 | 9 | pub(crate) trait FRef: 10 | (for<'a> Fn(&'a dyn ValueInternal) -> &'a dyn ValueInternal) + 'static + NotObserver 11 | { 12 | } 13 | impl FRef for F where 14 | F: (for<'a> Fn(&'a dyn ValueInternal) -> &'a dyn ValueInternal) + 'static + NotObserver 15 | { 16 | } 17 | 18 | pub(crate) struct MapRefNode { 19 | 
pub(crate) input: NodeRef, 20 | // Can't make this one Miny because of some weird issues with lifetimes? 21 | pub(crate) mapper: Box, 22 | pub(crate) did_change: Cell, 23 | } 24 | 25 | impl fmt::Debug for MapRefNode { 26 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 27 | f.debug_struct("MapRefNode") 28 | .field("did_change", &self.did_change.get()) 29 | .finish() 30 | } 31 | } 32 | 33 | macro_rules! map_node { 34 | (@rest) => { 35 | node_generics_default! { B1, BindLhs, BindRhs } 36 | node_generics_default! { Fold, Update, WithOld, FRef, Recompute, ObsChange } 37 | }; 38 | (@FnMut $($param:ident,)*) => { 39 | FnMut($($param,)*) -> Self::R 40 | }; 41 | (@head $t1:ident, $($t2:ident,)*) => { 42 | $t1 43 | }; 44 | (@tail_args $tfield1:ident: $t1:ident, $($tfield2:ident: $t2:ident,)*) => { 45 | $($tfield2: &Incr<$t2>,)* 46 | }; 47 | (@tail_mapper $mapnode:ident { $f:expr, $self:ident, $tfield1:ident, $($tfield2:ident,)* }) => { 48 | $mapnode { 49 | $tfield1: $self.node.packed(), 50 | $($tfield2: $tfield2.node.packed(),)* 51 | mapper: RefCell::new($crate::boxes::new_unsized!($f)), 52 | } 53 | }; 54 | (@any $type:ty) => {dyn $crate::ValueInternal}; 55 | ($vis:vis struct $mapnode:ident < 56 | inputs { 57 | $tfield1:ident: $t1:ident = $i1:ident, 58 | $( 59 | $tfield:ident : $t:ident = $i:ident, 60 | )* 61 | } 62 | fn { 63 | $ffield:ident : $fparam:ident(.. $(, $t2:ident)*) -> $r:ident, 64 | } 65 | > { 66 | default < $($d:ident),* >, 67 | $(#[$method_meta:meta])* 68 | impl Incr::$methodname:ident, Kind::$kind:ident 69 | }) => { 70 | $vis struct $mapnode { 71 | $vis $tfield1: crate::NodeRef, 72 | $($vis $tfield: crate::NodeRef,)* 73 | $vis $ffield: RefCell<$fparam>, 74 | } 75 | 76 | $crate::incrsan::not_observer_boxed_trait! 
{ 77 | pub(crate) type $fparam = crate::boxes::SmallBox $crate::boxes::SmallBox)>; 78 | } 79 | 80 | impl fmt::Debug for $mapnode 81 | { 82 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 83 | f.debug_struct(stringify!($mapnode)).finish() 84 | } 85 | } 86 | impl<$t1: Value> Incr<$t1> { 87 | $(#[$method_meta])* 88 | pub fn $methodname<$fparam, $($t2,)* $r>( 89 | &self, 90 | $($tfield: &Incr<$t>,)* 91 | mut f: $fparam) -> Incr 92 | where 93 | $($t: Value,)* 94 | $r: Value, 95 | $fparam : FnMut(&$t1, $(&$t2,)*) -> $r + 'static + NotObserver, 96 | { 97 | let mapper = map_node! { 98 | @tail_mapper $mapnode { 99 | move | 100 | $tfield1: &dyn $crate::ValueInternal, 101 | $( 102 | $tfield: &dyn $crate::ValueInternal, 103 | )* 104 | | -> $crate::boxes::SmallBox 105 | { 106 | let $tfield1 = $tfield1.as_any().downcast_ref::<$t1>().expect("Type error in map function"); 107 | $( 108 | let $tfield = $tfield.as_any().downcast_ref::<$t>().expect("Type error in map function"); 109 | )* 110 | $crate::boxes::new_unsized!(f( $tfield1, $($tfield,)* )) 111 | }, 112 | self, 113 | $tfield1, 114 | $($tfield,)* 115 | } 116 | }; 117 | let state = self.node.state(); 118 | let node = crate::node::Node::create_rc::<$r>( 119 | state.weak(), 120 | state.current_scope.borrow().clone(), 121 | crate::kind::Kind::$kind(mapper), 122 | ); 123 | Incr { node } 124 | } 125 | } 126 | }; 127 | } 128 | 129 | macro_rules! default_doc { 130 | () => { 131 | r#" 132 | Like [Incr::map] and [Incr::map2], but with more input incrementals. 133 | 134 | If you don't feel like counting, try using the `(i1 % i2 % ...).map(|_, _, ...| ...)` syntax. 135 | "# 136 | }; 137 | } 138 | 139 | map_node! { 140 | pub(crate) struct MapNode< 141 | inputs { 142 | input: T1 = I1, 143 | } 144 | fn { mapper: F1(..) 
-> R, } 145 | > { 146 | default < F2, F3, F4, F5, F6, I1, I2, I3, I4, I5, I6 >, 147 | /// Takes an incremental (self), and produces a new incremental whose value 148 | /// is the result of applying a function `f` to the first value. 149 | /// 150 | /// ## Example 151 | /// 152 | /// ``` 153 | /// # use incremental::*; 154 | /// let state = IncrState::new(); 155 | /// let var = state.var(20); 156 | /// 157 | /// // produce a new incremental that adds ten 158 | /// let plus10 = var.map(|x| *x + 10); 159 | /// 160 | /// let observer = plus10.observe(); 161 | /// state.stabilise(); 162 | /// assert_eq!(observer.value(), 30); 163 | /// var.set(400); 164 | /// state.stabilise(); 165 | /// assert_eq!(observer.value(), 410); 166 | /// ``` 167 | impl Incr::map, Kind::Map 168 | } 169 | } 170 | 171 | // impl MapNode { 172 | // fn thing(&self) { 173 | // self.mapper 174 | // } 175 | // } 176 | 177 | map_node! { 178 | pub(crate) struct Map2Node< 179 | inputs { 180 | one: T1 = I1, 181 | two: T2 = I2, 182 | } 183 | fn { mapper: F2(.., T2) -> R, } 184 | > { 185 | default < F1, F3, F4, F5, F6, I1, I2, I3, I4, I5, I6 >, 186 | /// Like [Incr::map], but with two inputs. 187 | /// 188 | /// ``` 189 | /// # use incremental::*; 190 | /// let state = IncrState::new(); 191 | /// let v1 = state.var(1); 192 | /// let v2 = state.var(1); 193 | /// let add = v1.map2(&v2, |a, b| *a + *b); 194 | /// ``` 195 | impl Incr::map2, Kind::Map2 196 | } 197 | } 198 | 199 | map_node! { 200 | pub(crate) struct Map3Node< 201 | inputs { 202 | one: T1 = I1, 203 | two: T2 = I2, 204 | three: T3 = I3, 205 | } 206 | fn { mapper: F3(.., T2, T3) -> R, } 207 | > { 208 | default < F1, F2, F4, F5, F6, I1, I2, I3, I4, I5, I6 >, 209 | #[doc = default_doc!()] 210 | impl Incr::map3, Kind::Map3 211 | } 212 | } 213 | 214 | map_node! 
{ 215 | pub(crate) struct Map4Node< 216 | inputs { 217 | one: T1 = I1, 218 | two: T2 = I2, 219 | three: T3 = I3, 220 | four: T4 = I4, 221 | } 222 | fn { mapper: F4(.., T2, T3, T4) -> R, } 223 | > { 224 | default < F1, F2, F3, F5, F6, I1, I2, I3, I4, I5, I6 >, 225 | #[doc = default_doc!()] 226 | impl Incr::map4, Kind::Map4 227 | } 228 | } 229 | 230 | map_node! { 231 | pub(crate) struct Map5Node< 232 | inputs { 233 | one: T1 = I1, 234 | two: T2 = I2, 235 | three: T3 = I3, 236 | four: T4 = I4, 237 | five: T5 = I5, 238 | } 239 | fn { mapper: F5(.., T2, T3, T4, T5) -> R, } 240 | > { 241 | default < F1, F2, F3, F4, F6, I1, I2, I3, I4, I5, I6 >, 242 | #[doc = default_doc!()] 243 | impl Incr::map5, Kind::Map5 244 | } 245 | } 246 | 247 | map_node! { 248 | pub(crate) struct Map6Node< 249 | inputs { 250 | one: T1 = I1, 251 | two: T2 = I2, 252 | three: T3 = I3, 253 | four: T4 = I4, 254 | five: T5 = I5, 255 | six: T6 = I6, 256 | } 257 | fn { mapper: F6(.., T2, T3, T4, T5, T6) -> R, } 258 | > { 259 | default < F1, F2, F3, F4, F5, I1, I2, I3, I4, I5, I6 >, 260 | #[doc = default_doc!()] 261 | impl Incr::map6, Kind::Map6 262 | } 263 | } 264 | 265 | pub(crate) trait FWithOld: 266 | FnMut( 267 | Option>, 268 | &dyn ValueInternal, 269 | ) -> (SmallBox, bool) 270 | + 'static 271 | + NotObserver 272 | { 273 | } 274 | impl FWithOld for F where 275 | F: FnMut( 276 | Option>, 277 | &dyn ValueInternal, 278 | ) -> (SmallBox, bool) 279 | + 'static 280 | + NotObserver 281 | { 282 | } 283 | 284 | /// Lets you dismantle the old R for parts. 
285 | pub(crate) struct MapWithOld { 286 | pub input: NodeRef, 287 | pub mapper: RefCell>, 288 | } 289 | 290 | impl fmt::Debug for MapWithOld { 291 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 292 | f.debug_struct("MapWithOld").finish() 293 | } 294 | } 295 | -------------------------------------------------------------------------------- /incremental-map/tests/integration.rs: -------------------------------------------------------------------------------- 1 | use std::{cell::Cell, collections::BTreeMap, rc::Rc}; 2 | 3 | use test_log::test; 4 | 5 | use incremental::Incr; 6 | use incremental::IncrState; 7 | use incremental_map::prelude::*; 8 | 9 | #[derive(Debug)] 10 | struct CallCounter(&'static str, Cell); 11 | 12 | #[allow(dead_code)] 13 | impl CallCounter { 14 | fn new(name: &'static str) -> Rc { 15 | Self(name, Cell::new(0)).into() 16 | } 17 | fn count(&self) -> u32 { 18 | self.1.get() 19 | } 20 | fn increment(&self) { 21 | self.1.set(self.1.get() + 1); 22 | } 23 | } 24 | 25 | impl PartialEq for CallCounter { 26 | fn eq(&self, other: &u32) -> bool { 27 | self.1.get().eq(other) 28 | } 29 | } 30 | 31 | #[test] 32 | fn incr_map_uf() { 33 | let incr = IncrState::new(); 34 | let mut b = BTreeMap::new(); 35 | b.insert("five", 5); 36 | b.insert("eight", 8); 37 | 38 | let setter = incr.var(b.clone()); 39 | 40 | // let list = vec![ 1, 2, 3 ]; 41 | // let sum = list.into_iter().fold(0, |acc, x| acc + x); 42 | 43 | let sum = 44 | setter.incr_unordered_fold(0i32, |acc, _, new| acc + new, |acc, _, old| acc - old, true); 45 | 46 | let o = sum.observe(); 47 | 48 | incr.stabilise(); 49 | assert_eq!(o.try_get_value(), Ok(13)); 50 | 51 | b.remove("five"); 52 | setter.set(b.clone()); 53 | incr.stabilise(); 54 | assert_eq!(o.try_get_value(), Ok(8)); 55 | 56 | b.insert("five", 100); 57 | setter.set(b.clone()); 58 | incr.stabilise(); 59 | assert_eq!(o.try_get_value(), Ok(108)); 60 | 61 | b.insert("five", 105); 62 | setter.set(b.clone()); 63 | incr.stabilise(); 64 | 
assert_eq!(o.try_get_value(), Ok(113)); 65 | o.save_dot_to_file("incr_map_uf.dot"); 66 | } 67 | 68 | #[test] 69 | fn incr_map_filter_mapi() { 70 | let incr = IncrState::new(); 71 | let mut b = BTreeMap::new(); 72 | b.insert("five", 5); 73 | b.insert("ten", 10); 74 | let v = incr.var(b); 75 | let filtered = v 76 | .incr_filter_mapi(|&k, &v| Some(v).filter(|_| k.len() > 3)) 77 | .observe(); 78 | incr.stabilise(); 79 | let x = IntoIterator::into_iter([("five", 5i32)]); 80 | assert_eq!(filtered.value(), x.collect()); 81 | } 82 | 83 | #[test] 84 | fn incr_map_primes() { 85 | let primes = primes_lt(1_000_000); 86 | let incr = IncrState::new(); 87 | let mut b = BTreeMap::new(); 88 | b.insert("five", 5); 89 | b.insert("seven", 7); 90 | b.insert("ten", 10); 91 | let v = incr.var(b.clone()); 92 | let filtered = v 93 | .incr_filter_map(move |&v| Some(v).filter(|x| dbg!(is_prime(*dbg!(x), &primes)))) 94 | .observe(); 95 | 96 | incr.stabilise(); 97 | let x = BTreeMap::from([("five", 5), ("seven", 7)]); 98 | assert_eq!(filtered.value(), x); 99 | 100 | b.remove("seven"); 101 | b.insert("971", 971); 102 | v.set(b.clone()); 103 | incr.stabilise(); 104 | let x = BTreeMap::from([("971", 971), ("five", 5)]); 105 | assert_eq!(filtered.value(), x); 106 | } 107 | 108 | // https://gist.github.com/glebm/440bbe2fc95e7abee40eb260ec82f85c 109 | fn is_prime(n: usize, primes: &Vec) -> bool { 110 | for &p in primes { 111 | let q = n / p; 112 | if q < p { 113 | return true; 114 | }; 115 | let r = n - q * p; 116 | if r == 0 { 117 | return false; 118 | }; 119 | } 120 | panic!("too few primes") 121 | } 122 | fn primes_lt(bound: usize) -> Vec { 123 | let mut primes: Vec = (0..bound + 1).map(|num| num == 2 || num & 1 != 0).collect(); 124 | let mut num = 3usize; 125 | while num * num <= bound { 126 | let mut j = num * num; 127 | while j <= bound { 128 | primes[j] = false; 129 | j += num; 130 | } 131 | num += 2; 132 | } 133 | primes 134 | .into_iter() 135 | .enumerate() 136 | .skip(2) 137 | 
.filter_map(|(i, p)| if p { Some(i) } else { None }) 138 | .collect::>() 139 | } 140 | 141 | #[test] 142 | fn incr_map_rc() { 143 | let counter = CallCounter::new("mapper"); 144 | let counter_ = counter.clone(); 145 | let incr = IncrState::new(); 146 | let rc = Rc::new(BTreeMap::from([(5, "hello"), (10, "goodbye")])); 147 | let var = incr.var(rc); 148 | let observer = var 149 | .incr_mapi(move |&_k, &v| { 150 | counter_.increment(); 151 | v.to_string() + ", world" 152 | }) 153 | .observe(); 154 | incr.stabilise(); 155 | assert_eq!(*counter, 2); 156 | let greetings = observer.value(); 157 | assert_eq!(greetings.get(&5), Some(&String::from("hello, world"))); 158 | incr.stabilise(); 159 | assert_eq!(*counter, 2); 160 | let rc = Rc::new(BTreeMap::from([(10, "changed")])); 161 | // we've saved ourselves some clones already 162 | // (from the Var to its watch node, for example) 163 | var.set(rc); 164 | incr.stabilise(); 165 | assert_eq!(*counter, 3); 166 | } 167 | 168 | #[test] 169 | fn incr_filter_mapi() { 170 | let counter = CallCounter::new("mapper"); 171 | let incr = IncrState::new(); 172 | let rc = Rc::new(BTreeMap::from([(5, "hello"), (10, "goodbye")])); 173 | let var = incr.var(rc); 174 | let counter_ = counter.clone(); 175 | let observer = var 176 | .incr_filter_mapi(move |&k, &v| { 177 | counter_.increment(); 178 | if k < 10 { 179 | return None; 180 | } 181 | Some(v.to_string() + ", world") 182 | }) 183 | .observe(); 184 | incr.stabilise(); 185 | assert_eq!(*counter, 2); 186 | let greetings = observer.value(); 187 | tracing::debug!("greetings were: {greetings:?}"); 188 | assert_eq!(greetings.get(&5), None); 189 | assert_eq!(greetings.get(&10), Some(&"goodbye, world".to_string())); 190 | incr.stabilise(); 191 | assert_eq!(*counter, 2); 192 | let rc = Rc::new(BTreeMap::from([(10, "changed")])); 193 | // we've saved ourselves some clones already 194 | // (from the Var to its watch node, for example) 195 | var.set(rc); 196 | incr.stabilise(); 197 | 
assert_eq!(*counter, 3); 198 | } 199 | 200 | #[cfg(feature = "im")] 201 | #[test] 202 | fn incr_partition_mapi() { 203 | use im_rc::ordmap; 204 | 205 | let incr = IncrState::new(); 206 | let var = incr.var(ordmap! { 2i32 => "Hello", 3 => "three", 4 => "four", 5 => "five" }); 207 | let partitioned = var.watch().incr_partition(|key, _value| *key < 4); 208 | 209 | let left_ = partitioned.map_ref(|(a, _)| a).observe(); 210 | let right = partitioned.map_ref(|(_, b)| b).observe(); 211 | 212 | incr.stabilise(); 213 | assert_eq!(left_.value(), ordmap! { 2i32 => "Hello", 3 => "three" }); 214 | assert_eq!(right.value(), ordmap! { 4i32 => "four", 5 => "five" }); 215 | left_.save_dot_to_file("/tmp/incr_partition_mapi.dot"); 216 | } 217 | 218 | #[cfg(feature = "im")] 219 | #[test] 220 | fn incr_unordered_fold_struct() { 221 | use incremental::Value; 222 | 223 | struct SumFold; 224 | 225 | impl UnorderedFold, K, i32, i32> for SumFold { 226 | fn add(&mut self, acc: i32, _key: &K, value: &i32) -> i32 { 227 | acc + value 228 | } 229 | 230 | fn remove(&mut self, acc: i32, _key: &K, value: &i32) -> i32 { 231 | acc - value 232 | } 233 | fn revert_to_init_when_empty(&self) -> bool { 234 | true 235 | } 236 | fn initial_fold(&mut self, init: i32, input: &im_rc::OrdMap) -> i32 { 237 | input.iter().fold(init, |acc, (_k, v)| acc + v) 238 | } 239 | } 240 | 241 | use im_rc::ordmap; 242 | let state = IncrState::new(); 243 | let var = state.var(ordmap! 
{ 1 => 1, 2 => 3 }); 244 | 245 | let folded: Incr = var.watch().incr_unordered_fold_with(0, SumFold); 246 | let obs = folded.observe(); 247 | state.stabilise(); 248 | assert_eq!(obs.value(), 4); 249 | 250 | var.modify(|omap| { 251 | omap.insert(100, 7); 252 | }); 253 | state.stabilise(); 254 | assert_eq!(obs.value(), 11); 255 | var.modify(|omap| { 256 | omap.insert(100, 7); 257 | }); 258 | state.stabilise(); 259 | assert_eq!(obs.value(), 11); 260 | } 261 | 262 | #[cfg(feature = "im")] 263 | #[test] 264 | fn test_types() { 265 | use ::im_rc::ordmap; 266 | 267 | let state = IncrState::new(); 268 | let var = state.var(ordmap! { 1 => 2, 2 => 4, 3 => 6 }); 269 | let fold = ClosureFold::new() 270 | .add(|acc, _k, v| acc + v) 271 | .remove(|acc, _k, v| acc - v) 272 | .update(|acc, _k, old, new| acc - old + new) 273 | .revert_to_init_when_empty(true); 274 | let folded = var.watch().incr_unordered_fold_with(0, fold); 275 | let obs = folded.observe(); 276 | state.stabilise(); 277 | assert_eq!(obs.value(), 12); 278 | } 279 | 280 | #[test] 281 | #[cfg(feature = "im")] 282 | fn test_merge() { 283 | use ::im_rc::ordmap; 284 | let state = IncrState::new(); 285 | let left = state.var(ordmap! { 1i32 => "a", 2 => "a" }); 286 | let right = state.var(ordmap! { 2 => "b", 3 => "b" }); 287 | 288 | let l = left.incr_merge(&right, |_key, merge| merge.into_left().cloned()); 289 | let r = left.incr_merge(&right, |_key, merge| merge.into_right().cloned()); 290 | 291 | let m = left.incr_merge(&right, |_key, merge| match merge { 292 | MergeElement::Left(left) => Some(format!("left: {left}")), 293 | MergeElement::Right(right) => Some(format!("right: {right}")), 294 | MergeElement::Both(left, right) => Some(format!("both: {left} + {right}")), 295 | }); 296 | 297 | let l_obs = l.observe(); 298 | state.stabilise(); 299 | assert_eq!( 300 | l_obs.value(), 301 | ordmap! 
{ 302 | 1 => "a", 303 | 2 => "a" 304 | } 305 | ); 306 | 307 | let r_obs = r.observe(); 308 | state.stabilise(); 309 | assert_eq!( 310 | r_obs.value(), 311 | ordmap! { 312 | 2 => "b", 313 | 3 => "b" 314 | } 315 | ); 316 | 317 | let m_obs = m.observe(); 318 | state.stabilise(); 319 | assert_eq!( 320 | m_obs.value(), 321 | ordmap! { 322 | 1 => "left: a".to_owned(), 323 | 2 => "both: a + b".to_owned(), 324 | 3 => "right: b".to_owned() 325 | } 326 | ); 327 | 328 | // Now update. We should merge again with the updated key 329 | right.modify(|map| _ = map.insert(2, "b (updated)")); 330 | state.stabilise(); 331 | 332 | assert_eq!( 333 | m_obs.value(), 334 | ordmap! { 335 | 1 => "left: a".to_owned(), 336 | 2 => "both: a + b (updated)".to_owned(), 337 | 3 => "right: b".to_owned() 338 | } 339 | ); 340 | 341 | // concurrent modify delete 342 | left.modify(|map| _ = map.remove(&2)); 343 | right.modify(|map| { 344 | map.insert(2, "b (updated 2)"); 345 | map.remove(&3); 346 | }); 347 | state.stabilise(); 348 | 349 | assert_eq!( 350 | m_obs.value(), 351 | ordmap! 
{ 352 | 1 => "left: a".to_owned(), 353 | 2 => "right: b (updated 2)".to_owned() 354 | } 355 | ); 356 | } 357 | -------------------------------------------------------------------------------- /incremental-map/src/btree_map.rs: -------------------------------------------------------------------------------- 1 | use std::marker::PhantomData; 2 | use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; 3 | 4 | use super::symmetric_fold::{DiffElement, MergeElement, MergeOnceWith, SymmetricDiffMap}; 5 | use super::{FilterMapOperator, MapOperator, Operator}; 6 | use crate::symmetric_fold::SymmetricFoldMap; 7 | use incremental::expert::{Dependency, Node, WeakNode}; 8 | use incremental::incrsan::NotObserver; 9 | use incremental::{Cutoff, Incr, Value}; 10 | 11 | use crate::WithOldIO; 12 | 13 | pub trait IncrBTreeMap { 14 | fn incr_mapi_(&self, f: F) -> Incr> 15 | where 16 | V2: Value, 17 | F: FnMut(&K, Incr) -> Incr + 'static + NotObserver; 18 | 19 | fn incr_mapi_cutoff(&self, f: F, cutoff: Cutoff) -> Incr> 20 | where 21 | V2: Value, 22 | F: FnMut(&K, Incr) -> Incr + 'static + NotObserver; 23 | 24 | fn incr_filter_mapi_(&self, f: F) -> Incr> 25 | where 26 | V2: Value, 27 | F: FnMut(&K, Incr) -> Incr> + 'static + NotObserver; 28 | 29 | fn incr_filter_mapi_cutoff(&self, f: F, cutoff: Cutoff) -> Incr> 30 | where 31 | V2: Value, 32 | F: FnMut(&K, Incr) -> Incr> + 'static + NotObserver; 33 | 34 | fn incr_merge(&self, other: &Incr>, f: F) -> Incr> 35 | where 36 | V2: Value, 37 | R: Value, 38 | F: FnMut(&K, MergeElement<&V, &V2>) -> Option + 'static + NotObserver; 39 | } 40 | 41 | impl IncrBTreeMap for Incr> { 42 | fn incr_mapi_( 43 | &self, 44 | f: F, 45 | // see im_rc 46 | // cutoff: Option> 47 | ) -> Incr> 48 | where 49 | V2: Value, 50 | F: FnMut(&K, Incr) -> Incr + 'static + NotObserver, 51 | { 52 | incr_filter_mapi_generic_btree_map(self, MapOperator(f, PhantomData), None) 53 | } 54 | 55 | fn incr_mapi_cutoff(&self, f: F, cutoff: Cutoff) -> Incr> 56 | where 57 | V2: Value, 58 
| F: FnMut(&K, Incr) -> Incr + 'static + NotObserver, 59 | { 60 | incr_filter_mapi_generic_btree_map(self, MapOperator(f, PhantomData), Some(cutoff)) 61 | } 62 | 63 | fn incr_filter_mapi_( 64 | &self, 65 | f: F, 66 | // see im_rc 67 | // cutoff: Option> 68 | ) -> Incr> 69 | where 70 | V2: Value, 71 | F: FnMut(&K, Incr) -> Incr> + 'static + NotObserver, 72 | { 73 | incr_filter_mapi_generic_btree_map(self, FilterMapOperator(f, PhantomData), None) 74 | } 75 | 76 | fn incr_filter_mapi_cutoff(&self, f: F, cutoff: Cutoff) -> Incr> 77 | where 78 | V2: Value, 79 | F: FnMut(&K, Incr) -> Incr> + 'static + NotObserver, 80 | { 81 | incr_filter_mapi_generic_btree_map(self, FilterMapOperator(f, PhantomData), Some(cutoff)) 82 | } 83 | 84 | /// Merge two maps incrementally, where 85 | /// 86 | /// - if a key appears only in self, the predicate runs with [`MergeElement::Left`] 87 | /// - if a key appears only in other, the predicate runs with [`MergeElement::Right`] 88 | /// - if a key appears in both, the predicate runs with [`MergeElement::Both`] 89 | /// 90 | /// The predicate is only re-run for added/removed/modified keys in each map, using the 91 | /// symmetric diff property. 
92 | fn incr_merge(&self, other: &Incr>, mut f: F) -> Incr> 93 | where 94 | V2: Value, 95 | R: Value, 96 | F: FnMut(&K, MergeElement<&V, &V2>) -> Option + 'static + NotObserver, 97 | { 98 | let i = self.with_old_input_output2(other, move |old, new_left_map, new_right_map| { 99 | let mut did_change = false; 100 | let output = merge_shared_impl( 101 | old, 102 | new_left_map, 103 | new_right_map, 104 | |mut acc_output, key, merge_elem| { 105 | use MergeElement::*; 106 | did_change = true; 107 | let data = match merge_elem { 108 | Both((_, left_diff), (_, right_diff)) => { 109 | (left_diff.new_data(), right_diff.new_data()) 110 | } 111 | Left((_, left_diff)) => (left_diff.new_data(), new_right_map.get(key)), 112 | Right((_, right_diff)) => (new_left_map.get(key), right_diff.new_data()), 113 | }; 114 | let output_data_opt = match data { 115 | (None, None) => None, 116 | (Some(x), None) => f(key, MergeElement::Left(x)), 117 | (None, Some(x)) => f(key, MergeElement::Right(x)), 118 | (Some(a), Some(b)) => f(key, MergeElement::Both(a, b)), 119 | }; 120 | match output_data_opt { 121 | None => acc_output.remove(key), 122 | Some(r) => acc_output.insert(key.clone(), r), 123 | }; 124 | acc_output 125 | }, 126 | ); 127 | (output, did_change) 128 | }); 129 | #[cfg(debug_assertions)] 130 | i.set_graphviz_user_data(Box::new(format!( 131 | "incr_merge -> {}", 132 | std::any::type_name::>() 133 | ))); 134 | i 135 | } 136 | } 137 | 138 | fn incr_filter_mapi_generic_btree_map( 139 | lhs: &Incr>, 140 | mut f: O, 141 | cutoff: Option>, 142 | ) -> Incr> 143 | where 144 | K: Value + Ord, 145 | V: Value, 146 | O: Operator + 'static + NotObserver, 147 | O::Output: Value, 148 | V2: Value, 149 | { 150 | let state = lhs.state(); 151 | let prev_map: Rc>> = Rc::new(RefCell::new(BTreeMap::new())); 152 | let acc = Rc::new(RefCell::new(BTreeMap::::new())); 153 | let result = Node::>::new(&state, { 154 | let acc_ = acc.clone(); 155 | move || acc_.borrow().clone() 156 | }); 157 | let on_inner_change = 
{ 158 | let acc_ = acc.clone(); 159 | move |key: &K, opt: &O::Output| { 160 | let mut acc = acc_.borrow_mut(); 161 | let opt = O::as_opt(opt); 162 | match opt { 163 | None => { 164 | acc.remove(key); 165 | } 166 | Some(x) => { 167 | acc.insert(key.clone(), x.clone()); 168 | } 169 | } 170 | drop(acc); 171 | } 172 | }; 173 | 174 | let mut prev_nodes = BTreeMap::, Dependency)>::new(); 175 | let result_weak = result.weak(); 176 | 177 | let lhs_change = lhs.map_cyclic({ 178 | move |lhs_change, map| { 179 | let mut prev_map_mut = prev_map.borrow_mut(); 180 | prev_map_mut.symmetric_fold(map, &mut prev_nodes, |nodes, (key, diff)| { 181 | match diff { 182 | DiffElement::Unequal(_, _) => { 183 | let (node, _dep) = nodes.get(key).unwrap(); 184 | node.make_stale(); 185 | nodes 186 | } 187 | DiffElement::Left(_) => { 188 | let (node, dep) = nodes.remove(key).unwrap(); 189 | // running remove_dependency will cause node's weak ref to die. 190 | // so we upgrade it first. 191 | let node = node.upgrade().unwrap(); 192 | result_weak.remove_dependency(dep); 193 | let mut acc = acc.borrow_mut(); 194 | acc.remove(key); 195 | // Invalidate does have to happen after remove_dependency. 
196 | node.invalidate(); 197 | nodes 198 | } 199 | DiffElement::Right(_) => { 200 | let key = key.clone(); 201 | let node = Node::::new(&state, { 202 | let key_ = key.clone(); 203 | let prev_map_ = prev_map.clone(); 204 | move || { 205 | let prev_map = prev_map_.borrow(); 206 | prev_map.get(&key_).unwrap().clone() 207 | } 208 | }); 209 | if let Some(cutoff) = cutoff.clone() { 210 | node.watch().set_cutoff(cutoff); 211 | } 212 | let lhs_change = lhs_change.upgrade().unwrap(); 213 | node.add_dependency(&lhs_change); 214 | let mapped = f.call_fn(&key, node.watch()); 215 | let user_function_dep = result_weak.add_dependency_with(&mapped, { 216 | let key = key.clone(); 217 | let on_inner_change = on_inner_change.clone(); 218 | move |v| on_inner_change(&key, v) 219 | }); 220 | nodes.insert(key, (node.weak(), user_function_dep)); 221 | nodes 222 | } 223 | } 224 | }); 225 | *prev_map_mut = map.clone(); 226 | } 227 | }); 228 | result.add_dependency(&lhs_change); 229 | result.watch() 230 | } 231 | 232 | pub(crate) fn merge_shared_impl< 233 | K: Clone + Ord, 234 | V1: Clone + PartialEq, 235 | V2: Clone + PartialEq, 236 | R: Clone, 237 | >( 238 | old: Option<(BTreeMap, BTreeMap, BTreeMap)>, 239 | new_left_map: &BTreeMap, 240 | new_right_map: &BTreeMap, 241 | mut f: impl FnMut( 242 | BTreeMap, 243 | &K, 244 | MergeElement<(&K, DiffElement<&V1>), (&K, DiffElement<&V2>)>, 245 | ) -> BTreeMap, 246 | ) -> BTreeMap { 247 | let (old_left_map, old_right_map, old_output) = match old { 248 | None => (BTreeMap::new(), BTreeMap::new(), BTreeMap::new()), 249 | Some(x) => x, 250 | }; 251 | let left_diff = old_left_map.symmetric_diff(new_left_map); 252 | let right_diff = old_right_map.symmetric_diff(new_right_map); 253 | // relies on the key iteration being sorted, as in BTreeMap. 
254 | let merge = MergeOnceWith::new(left_diff, right_diff, |(k, _), (k2, _)| k.cmp(k2)); 255 | merge.fold(old_output, |output, merge_elem| { 256 | let key = match merge_elem { 257 | MergeElement::Left((key, _)) | MergeElement::Right((key, _)) => key, 258 | MergeElement::Both((left_key, _), (_right_key, _)) => { 259 | // comparisons can be expensive 260 | // assert_eq!(left_key, right_key); 261 | left_key 262 | } 263 | }; 264 | f(output, key, merge_elem) 265 | }) 266 | } 267 | -------------------------------------------------------------------------------- /src/internal_observer.rs: -------------------------------------------------------------------------------- 1 | use super::node::ErasedNode; 2 | use super::node_update::{NodeUpdateDelayed, OnUpdateHandler}; 3 | use super::stabilisation_num::StabilisationNum; 4 | use super::state::{IncrStatus, State}; 5 | use crate::node::Node; 6 | use crate::node_update::HandleUpdate; 7 | use std::cell::RefCell; 8 | use std::collections::HashMap; 9 | use std::fmt::{Debug, Display}; 10 | use std::hash::Hash; 11 | use std::rc::Rc; 12 | use std::{cell::Cell, rc::Weak}; 13 | 14 | use super::{CellIncrement, Incr}; 15 | use super::{NodeRef, Value}; 16 | use crate::incrsan::NotObserver; 17 | 18 | use self::ObserverState::*; 19 | 20 | #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] 21 | pub struct ObserverId(usize); 22 | impl ObserverId { 23 | fn next() -> Self { 24 | thread_local! 
{ 25 | static OBSERVER_ID: Cell = Cell::new(0); 26 | } 27 | 28 | OBSERVER_ID.with(|x| { 29 | let next = x.get() + 1; 30 | x.set(next); 31 | ObserverId(next) 32 | }) 33 | } 34 | } 35 | 36 | pub(crate) struct InternalObserver { 37 | id: ObserverId, 38 | pub(crate) state: Cell, 39 | observing: Incr, 40 | weak_self: Weak, 41 | on_update_handlers: RefCell>>, 42 | next_subscriber: Cell, 43 | } 44 | 45 | pub(crate) type WeakObserver = Weak; 46 | pub(crate) type StrongObserver = Rc; 47 | 48 | pub(crate) trait ErasedObserver: Debug + NotObserver { 49 | fn id(&self) -> ObserverId; 50 | fn state(&self) -> &Cell; 51 | fn observing_packed(&self) -> NodeRef; 52 | fn observing_erased(&self) -> &Node; 53 | fn disallow_future_use(&self, state: &State); 54 | fn num_handlers(&self) -> i32; 55 | fn add_to_observed_node(&self); 56 | fn remove_from_observed_node(&self); 57 | fn unsubscribe(&self, token: SubscriptionToken) -> Result<(), ObserverError>; 58 | fn run_all(&self, input: &Node, node_update: NodeUpdateDelayed, now: StabilisationNum); 59 | } 60 | 61 | impl ErasedObserver for InternalObserver { 62 | fn id(&self) -> ObserverId { 63 | self.id 64 | } 65 | fn state(&self) -> &Cell { 66 | &self.state 67 | } 68 | fn observing_packed(&self) -> NodeRef { 69 | self.observing.node.clone().packed() 70 | } 71 | fn observing_erased(&self) -> &Node { 72 | self.observing.node.erased() 73 | } 74 | fn disallow_future_use(&self, state: &State) { 75 | match self.state.get() { 76 | Disallowed | Unlinked => {} 77 | Created => { 78 | state 79 | .num_active_observers 80 | .set(state.num_active_observers.get() - 1); 81 | self.state.set(Unlinked); 82 | let mut ouh = self.on_update_handlers.borrow_mut(); 83 | ouh.clear(); 84 | } 85 | InUse => { 86 | state 87 | .num_active_observers 88 | .set(state.num_active_observers.get() - 1); 89 | self.state.set(Disallowed); 90 | let mut dobs = state.disallowed_observers.borrow_mut(); 91 | dobs.push(self.weak_self.clone()); 92 | } 93 | } 94 | } 95 | fn 
num_handlers(&self) -> i32 { 96 | self.on_update_handlers.borrow().len() as i32 97 | } 98 | fn add_to_observed_node(&self) { 99 | let node = &self.observing.node; 100 | node.add_observer(self.id(), self.weak_self.clone()); 101 | let num = node.num_on_update_handlers(); 102 | num.set(num.get() + self.num_handlers()); 103 | } 104 | fn remove_from_observed_node(&self) { 105 | let node = &self.observing.node; 106 | node.remove_observer(self.id()); 107 | let num = node.num_on_update_handlers(); 108 | num.set(num.get() - self.num_handlers()); 109 | } 110 | 111 | // This is not available in OCaml Incremental, it seems! 112 | fn unsubscribe(&self, token: SubscriptionToken) -> Result<(), ObserverError> { 113 | if token.0 != self.id { 114 | return Err(ObserverError::Mismatch); 115 | } 116 | match self.state.get() { 117 | // In these cases, on_update_handlers is already cleared. 118 | // it's fine to try to unsubscribe from a dead/dying subscriber. 119 | // That will generally happen through State::unsubscribe 120 | // (which routes it to here through all_observers.get(...)). 121 | Disallowed | Unlinked => Ok(()), 122 | Created | InUse => { 123 | // delete from the list in either case 124 | self.on_update_handlers.borrow_mut().remove(&token); 125 | 126 | match self.state.get() { 127 | Created => { 128 | // No need to do a big cleanup. We haven't done the batch add yet in state.rs. 
129 | Ok(()) 130 | } 131 | InUse => { 132 | let observing = self.observing_erased(); 133 | let num = observing.num_on_update_handlers(); 134 | num.increment(); 135 | Ok(()) 136 | } 137 | _ => unreachable!(), 138 | } 139 | } 140 | } 141 | } 142 | fn run_all(&self, input: &Node, node_update: NodeUpdateDelayed, now: StabilisationNum) { 143 | let mut handlers = self.on_update_handlers.borrow_mut(); 144 | for (id, handler) in handlers.iter_mut() { 145 | tracing::trace!("running update handler with id {id:?}"); 146 | /* We have to test [state] before each on-update handler, because an on-update 147 | handler might disable its own observer, which should prevent other on-update 148 | handlers in the same observer from running. */ 149 | match self.state.get() { 150 | Created | Unlinked => panic!(), 151 | Disallowed => (), 152 | InUse => handler.run(input, node_update, now), 153 | } 154 | } 155 | } 156 | } 157 | 158 | impl Debug for InternalObserver { 159 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 160 | f.debug_struct("InternalObserver") 161 | .field("state", &self.state.get()) 162 | .field("value", &self.try_get_value()) 163 | .finish() 164 | } 165 | } 166 | 167 | impl InternalObserver { 168 | pub(crate) fn incr_state(&self) -> Option> { 169 | self.observing.node.state_opt() 170 | } 171 | pub(crate) fn new(observing: Incr) -> Rc { 172 | let id = ObserverId::next(); 173 | Rc::new_cyclic(|weak_self| Self { 174 | id, 175 | state: Cell::new(Created), 176 | observing, 177 | on_update_handlers: Default::default(), 178 | weak_self: weak_self.clone(), 179 | next_subscriber: SubscriptionToken(id, 1).into(), 180 | }) 181 | } 182 | pub(crate) fn try_get_value(&self) -> Result { 183 | let t = self.incr_state(); 184 | match t { 185 | Some(t) => match t.status.get() { 186 | IncrStatus::NotStabilising | IncrStatus::RunningOnUpdateHandlers => { 187 | self.value_inner() 188 | } 189 | IncrStatus::Stabilising => Err(ObserverError::CurrentlyStabilising), 190 | }, 
191 | // the whole state is dead... so is the node, methinks. 192 | None => Err(ObserverError::ObservingInvalid), 193 | } 194 | } 195 | pub(crate) fn value_inner(&self) -> Result { 196 | match self.state.get() { 197 | Created => Err(ObserverError::NeverStabilised), 198 | InUse => self 199 | .observing 200 | .node 201 | .value_opt() 202 | .ok_or(ObserverError::ObservingInvalid), 203 | Disallowed | Unlinked => Err(ObserverError::Disallowed), 204 | } 205 | } 206 | pub(crate) fn subscribe( 207 | &self, 208 | handler: OnUpdateHandler, 209 | ) -> Result { 210 | match self.state.get() { 211 | Disallowed | Unlinked => Err(ObserverError::Disallowed), 212 | Created | InUse => { 213 | let token = self.next_subscriber.get(); 214 | self.next_subscriber.set(token.succ()); 215 | self.on_update_handlers.borrow_mut().insert(token, handler); 216 | match self.state.get() { 217 | Created => { 218 | /* We'll bump [observing.num_on_update_handlers] when [t] is actually added to 219 | [observing.observers] at the start of the next stabilization. 
*/ 220 | } 221 | InUse => { 222 | let observing = self.observing_erased(); 223 | let num = observing.num_on_update_handlers(); 224 | num.set(num.get() + 1); 225 | } 226 | _ => unreachable!(), 227 | } 228 | Ok(token) 229 | } 230 | } 231 | } 232 | } 233 | 234 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] 235 | pub struct SubscriptionToken(ObserverId, i32); 236 | 237 | impl SubscriptionToken { 238 | fn succ(&self) -> Self { 239 | Self(self.0, self.1 + 1) 240 | } 241 | pub(crate) fn observer_id(&self) -> ObserverId { 242 | self.0 243 | } 244 | } 245 | 246 | /// State transitions: 247 | /// 248 | /// ```ignore 249 | /// Created --> In_use --> Disallowed --> Unlinked 250 | /// | ^ 251 | /// \-------------------------------------/ 252 | /// ``` 253 | /// 254 | #[derive(Copy, Clone, Debug, PartialEq)] 255 | pub(crate) enum ObserverState { 256 | Created, 257 | InUse, 258 | Disallowed, 259 | Unlinked, 260 | } 261 | 262 | #[derive(Debug, PartialEq, Eq, Clone)] 263 | #[non_exhaustive] 264 | pub enum ObserverError { 265 | CurrentlyStabilising, 266 | NeverStabilised, 267 | Disallowed, 268 | ObservingInvalid, 269 | Mismatch, 270 | } 271 | 272 | impl Display for ObserverError { 273 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 274 | match self { 275 | Self::CurrentlyStabilising => write!(f, "Incremental is currently stabilising. You cannot call Observer::value inside e.g. a map or bind function."), 276 | Self::NeverStabilised => write!(f, "Incremental has never stabilised. 
Observer does not yet have a value."), 277 | Self::Disallowed => write!(f, "Observer has been disallowed"), 278 | Self::ObservingInvalid => write!(f, "observing an invalid Incr"), 279 | Self::Mismatch => write!(f, "called unsubscribe with the wrong observer"), 280 | } 281 | } 282 | } 283 | impl std::error::Error for ObserverError {} 284 | 285 | #[cfg(debug_assertions)] 286 | impl Drop for InternalObserver { 287 | fn drop(&mut self) { 288 | let count = Rc::strong_count(&self.observing.node); 289 | tracing::info!( 290 | "dropping InternalObserver with id {:?}, observing node with strong_count {count}", 291 | self.id 292 | ); 293 | debug_assert!(matches!(self.state.get(), Disallowed | Unlinked)); 294 | } 295 | } 296 | -------------------------------------------------------------------------------- /src/var.rs: -------------------------------------------------------------------------------- 1 | use core::fmt::Debug; 2 | use std::cell::{Cell, RefCell}; 3 | use std::rc::{Rc, Weak}; 4 | 5 | #[cfg(test)] 6 | use test_log::test; 7 | 8 | use super::node::{ErasedNode, Incremental, Node, NodeId}; 9 | use super::stabilisation_num::StabilisationNum; 10 | use super::state::IncrStatus; 11 | use super::state::State; 12 | use super::CellIncrement; 13 | use super::Incr; 14 | use crate::boxes::{new_unsized, SmallBox}; 15 | use crate::incrsan::NotObserver; 16 | use crate::kind::KindTrait; 17 | use crate::Value; 18 | use crate::ValueInternal; 19 | 20 | // For the delayed variable set list (set_during_stabilisation). 21 | // We use Weak to ensure we don't interfere with the manual 22 | // Rc-cycle-breaking on public::Var. 
23 | pub(crate) type WeakVar = Weak; 24 | 25 | pub(crate) trait ErasedVariable: Debug + NotObserver + KindTrait { 26 | fn set_var_stabilise_end(&self); 27 | fn id(&self) -> NodeId; 28 | fn break_rc_cycle(&self); 29 | fn set_at(&self) -> StabilisationNum; 30 | } 31 | 32 | impl ErasedVariable for Var { 33 | fn set_var_stabilise_end(&self) { 34 | let v_opt = self.value_set_during_stabilisation.borrow_mut().take(); 35 | // if it's None, then we were simply pushed onto the 36 | // value_set_during_stabilisation stack twice. So ignore. 37 | if let Some(v) = v_opt { 38 | self.set_var_while_not_stabilising(v); 39 | } 40 | } 41 | fn id(&self) -> NodeId { 42 | self.node_id.get() 43 | } 44 | fn break_rc_cycle(&self) { 45 | self.node.take(); 46 | } 47 | fn set_at(&self) -> StabilisationNum { 48 | self.set_at.get() 49 | } 50 | } 51 | 52 | impl KindTrait for Var { 53 | fn compute(&self) -> SmallBox { 54 | new_unsized!((*self.value.borrow()).clone()) 55 | } 56 | 57 | fn children_len(&self) -> usize { 58 | 0 59 | } 60 | 61 | // not used 62 | fn iter_children_packed(&self) -> Box + '_> { 63 | Box::new(std::iter::empty()) 64 | } 65 | 66 | fn slow_get_child(&self, _index: usize) -> crate::NodeRef { 67 | panic!() 68 | } 69 | 70 | fn debug_ty(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { 71 | write!(f, "Var<{}>", std::any::type_name::()) 72 | } 73 | } 74 | 75 | pub struct Var { 76 | pub(crate) state: Weak, 77 | pub(crate) value: RefCell, 78 | pub(crate) value_set_during_stabilisation: RefCell>, 79 | pub(crate) set_at: Cell, 80 | // mutable for initialisation 81 | pub(crate) node: RefCell>>, 82 | // mutable for initialisation 83 | pub(crate) node_id: Cell, 84 | } 85 | 86 | impl Debug for Var { 87 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 88 | f.debug_struct("Var") 89 | .field("set_at", &self.set_at.get()) 90 | .field("value", &self.value.borrow()) 91 | .finish() 92 | } 93 | } 94 | 95 | impl Var { 96 | pub(crate) fn erased(self: &Rc) -> WeakVar { 
97 | Rc::downgrade(self) as WeakVar 98 | } 99 | 100 | pub(crate) fn get(&self) -> T { 101 | self.value.borrow().clone() 102 | } 103 | 104 | pub(crate) fn was_changed_during_stabilisation(&self) -> bool { 105 | self.value_set_during_stabilisation 106 | .borrow() 107 | .as_ref() 108 | .map_or(false, |during| self.value.borrow().ne(during)) 109 | } 110 | 111 | pub(crate) fn update(self: &Rc, f: impl FnOnce(T) -> T) 112 | where 113 | T: Default, 114 | { 115 | let t = self.state.upgrade().unwrap(); 116 | match t.status.get() { 117 | IncrStatus::NotStabilising | IncrStatus::RunningOnUpdateHandlers => { 118 | { 119 | let mut value = self.value.borrow_mut(); 120 | // T: Default. So we can save a clone by writing e.g. an empty vec or map in there. 121 | let taken = std::mem::take(&mut *value); 122 | *value = f(taken); 123 | } 124 | self.did_set_var_while_not_stabilising(); 125 | } 126 | IncrStatus::Stabilising => { 127 | let mut delayed_slot = self.value_set_during_stabilisation.borrow_mut(); 128 | if let Some(delayed) = &mut *delayed_slot { 129 | // T: Default. So we can save a clone by writing e.g. an empty vec or map in there. 130 | let taken = std::mem::take(delayed); 131 | *delayed = f(taken); 132 | } else { 133 | let mut stack = t.set_during_stabilisation.borrow_mut(); 134 | stack.push(self.erased()); 135 | // we have to clone, because we don't want to mem::take the value 136 | // that some nodes might still need to read during this stabilisation. 
137 | let cloned = (*self.value.borrow()).clone(); 138 | delayed_slot.replace(f(cloned)); 139 | } 140 | } 141 | }; 142 | } 143 | 144 | pub(crate) fn replace_with(self: &Rc, f: impl FnOnce(&mut T) -> T) -> T { 145 | let t = self.state.upgrade().unwrap(); 146 | match t.status.get() { 147 | IncrStatus::NotStabilising | IncrStatus::RunningOnUpdateHandlers => { 148 | let old = { 149 | let v = &mut *self.value.borrow_mut(); 150 | let new = f(v); 151 | std::mem::replace(v, new) 152 | }; 153 | self.did_set_var_while_not_stabilising(); 154 | old 155 | } 156 | IncrStatus::Stabilising => { 157 | let mut delayed_slot = self.value_set_during_stabilisation.borrow_mut(); 158 | if let Some(delayed) = &mut *delayed_slot { 159 | let new = f(delayed); 160 | std::mem::replace(delayed, new) 161 | } else { 162 | let mut stack = t.set_during_stabilisation.borrow_mut(); 163 | stack.push(self.erased()); 164 | let mut cloned = (*self.value.borrow()).clone(); 165 | let new = f(&mut cloned); 166 | let old = std::mem::replace(&mut cloned, new); 167 | delayed_slot.replace(cloned); 168 | old 169 | } 170 | } 171 | } 172 | } 173 | 174 | pub(crate) fn modify(self: &Rc, f: impl FnOnce(&mut T)) { 175 | let t = self.state.upgrade().unwrap(); 176 | match t.status.get() { 177 | IncrStatus::NotStabilising | IncrStatus::RunningOnUpdateHandlers => { 178 | { 179 | let mut v = self.value.borrow_mut(); 180 | f(&mut v); 181 | } 182 | self.did_set_var_while_not_stabilising(); 183 | } 184 | IncrStatus::Stabilising => { 185 | let mut v = self.value_set_during_stabilisation.borrow_mut(); 186 | if let Some(v) = &mut *v { 187 | f(v); 188 | } else { 189 | let mut stack = t.set_during_stabilisation.borrow_mut(); 190 | stack.push(self.erased()); 191 | let mut cloned = (*self.value.borrow()).clone(); 192 | f(&mut cloned); 193 | v.replace(cloned); 194 | } 195 | } 196 | }; 197 | } 198 | 199 | pub(crate) fn set(self: &Rc, value: T) { 200 | let t = self.state.upgrade().unwrap(); 201 | match t.status.get() { 202 | 
IncrStatus::RunningOnUpdateHandlers | IncrStatus::NotStabilising => { 203 | self.set_var_while_not_stabilising(value); 204 | } 205 | IncrStatus::Stabilising => { 206 | let mut v = self.value_set_during_stabilisation.borrow_mut(); 207 | if v.is_none() { 208 | let mut stack = t.set_during_stabilisation.borrow_mut(); 209 | stack.push(self.erased()); 210 | } 211 | *v = Some(value); 212 | } 213 | } 214 | } 215 | 216 | fn set_var_while_not_stabilising(&self, value: T) { 217 | { 218 | let mut value_slot = self.value.borrow_mut(); 219 | *value_slot = value; 220 | } 221 | self.did_set_var_while_not_stabilising(); 222 | } 223 | 224 | fn did_set_var_while_not_stabilising(&self) { 225 | let Some(watch) = self.node.borrow().clone() else { 226 | panic!( 227 | "uninitialised var or abandoned watch node (had {:?})", 228 | self.node_id 229 | ) 230 | }; 231 | let t = self.state.upgrade().unwrap(); 232 | t.num_var_sets.increment(); 233 | if self.set_at.get() < t.stabilisation_num.get() { 234 | tracing::info!( 235 | "variable set at t={:?}, current revision is t={:?}", 236 | self.set_at.get().0, 237 | t.stabilisation_num.get().0 238 | ); 239 | self.set_at.set(t.stabilisation_num.get()); 240 | debug_assert!(watch.is_stale()); 241 | if watch.is_necessary() && !watch.is_in_recompute_heap() { 242 | tracing::info!( 243 | "inserting var watch into recompute heap at height {:?}", 244 | watch.height() 245 | ); 246 | t.recompute_heap.insert(watch.packed()); 247 | } 248 | } 249 | } 250 | 251 | pub(crate) fn watch(&self) -> Incr { 252 | Incr { 253 | node: self 254 | .node 255 | .borrow() 256 | .clone() 257 | .expect("var was not initialised") 258 | .as_input(), 259 | } 260 | } 261 | } 262 | 263 | #[cfg(test)] 264 | thread_local! 
{ 265 | static DID_DROP: Cell = Cell::new(0); 266 | } 267 | 268 | #[cfg(test)] 269 | impl Drop for Var { 270 | fn drop(&mut self) { 271 | tracing::trace!("Dropping var with id {:?}", self.node_id); 272 | DID_DROP.with(|cell| cell.set(cell.get() + 1)); 273 | } 274 | } 275 | 276 | #[test] 277 | fn var_drop() { 278 | DID_DROP.with(|cell| cell.set(0)); 279 | { 280 | let incr = crate::IncrState::new(); 281 | println!("before var created"); 282 | let v = incr.var(10); 283 | println!("before watch created"); 284 | let w = v.watch(); 285 | drop(v); 286 | println!("watch created, public::Var dropped"); 287 | let o = w.observe(); 288 | incr.stabilise(); 289 | assert_eq!(o.try_get_value(), Ok(10)); 290 | } 291 | assert_eq!(DID_DROP.with(|cell| cell.get()), 1); 292 | } 293 | 294 | #[test] 295 | fn var_drop_delayed() { 296 | DID_DROP.with(|cell| cell.set(0)); 297 | { 298 | let incr = crate::IncrState::new(); 299 | let v = incr.var(10); 300 | let w = v.watch(); 301 | let c = incr.constant(9).bind(move |_| { 302 | v.set(99); 303 | w.clone() 304 | }); 305 | let o = c.observe(); 306 | incr.stabilise(); 307 | assert_eq!(o.try_get_value(), Ok(10)); 308 | incr.stabilise(); 309 | assert_eq!(o.try_get_value(), Ok(99)); 310 | } 311 | assert_eq!(DID_DROP.with(|cell| cell.get()), 1); 312 | } 313 | -------------------------------------------------------------------------------- /tests/fixed_point.rs: -------------------------------------------------------------------------------- 1 | use std::{cell::Cell, rc::Rc}; 2 | 3 | use incremental::incrsan::NotObserver; 4 | use incremental::{Incr, IncrState, Observer, SubscriptionToken, Update, Value, WeakState}; 5 | use test_log::test; 6 | 7 | fn fixed_point( 8 | state: &WeakState, 9 | init: T, 10 | mut f: impl FnMut(&mut T) -> T + 'static + NotObserver, 11 | ) -> Incr { 12 | let var = state.var(init); 13 | let v = var.clone(); 14 | 15 | // now, mapping var. if var is set during stabilisation, then 16 | // it queues var to be recomputed next round. 
17 | // however this does not enqueue this map node. that only happens 18 | // during stabilisation IF var changes. 19 | var.map(move |_input| { 20 | // returns the old value 21 | // this means the first output of this incremental 22 | // is just init. 23 | v.replace_with(|x| f(x)) 24 | }) 25 | } 26 | 27 | #[test] 28 | fn one_node() { 29 | let incr = IncrState::new(); 30 | let observer = fixed_point(&incr.weak(), 10_u32, |x| x.saturating_sub(1)).observe(); 31 | observer.subscribe(|t| { 32 | println!("observed {:?}", t); 33 | }); 34 | while !incr.is_stable() { 35 | incr.stabilise(); 36 | } 37 | assert_eq!(observer.value(), 0); 38 | } 39 | 40 | struct FixedPointIter<'a, T: Value> { 41 | cycle_count: Rc>, 42 | token: SubscriptionToken, 43 | observer: Observer, 44 | state: &'a IncrState, 45 | } 46 | 47 | impl<'a, T: Value> FixedPointIter<'a, T> { 48 | fn new(state: &'a IncrState, observer: Observer) -> Self { 49 | let cycle_count = Rc::new(Cell::new(0i32)); 50 | let weak = Rc::downgrade(&cycle_count); 51 | let token = observer.subscribe(move |_val| { 52 | let count = weak.upgrade().unwrap(); 53 | count.set(count.get() + 1); 54 | }); 55 | Self { 56 | cycle_count, 57 | token, 58 | observer, 59 | state, 60 | } 61 | } 62 | 63 | fn iterate(&self) -> T { 64 | let mut last_cycle_count = -1; 65 | while self.cycle_count.get() != last_cycle_count && !self.state.is_stable() { 66 | last_cycle_count = self.cycle_count.get(); 67 | // this will possibly increment cycle_count 68 | // if it doesn't, it's because the observed node did not emit a change event. 
69 | self.state.stabilise(); 70 | } 71 | self.observer.value() 72 | } 73 | } 74 | 75 | impl<'a, T: Value> Drop for FixedPointIter<'a, T> { 76 | fn drop(&mut self) { 77 | self.state.unsubscribe(self.token); 78 | } 79 | } 80 | 81 | #[test] 82 | fn iterated() { 83 | // let cell = Rc::>::new(); 84 | let incr = IncrState::new(); 85 | let tillzero = fixed_point(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 86 | let observer = tillzero.observe(); 87 | let fixed_point = FixedPointIter::new(&incr, observer); 88 | let value = fixed_point.iterate(); 89 | assert_eq!(value, 0); 90 | } 91 | 92 | #[test] 93 | fn dependencies() { 94 | let incr = IncrState::new(); 95 | let tillzero = fixed_point(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 96 | let mapped = tillzero.map(|x| x + 1); 97 | let observer = mapped.observe(); 98 | observer.subscribe(|t| { 99 | println!("observed {:?}", t); 100 | }); 101 | while !incr.is_stable() { 102 | incr.stabilise(); 103 | } 104 | assert_eq!(observer.value(), 1); 105 | } 106 | 107 | fn using_cutoff( 108 | state: &WeakState, 109 | init: T, 110 | mut f: impl FnMut(&mut T) -> T + 'static + NotObserver, 111 | ) -> (Incr, UntilStableValue) { 112 | let var = state.var(init); 113 | // TODO: Cloning var and using it in a node may make a ref cycle. 114 | // prefer var.weak() (but we need to override this for Var so it 115 | // is not a bare Incr). 116 | let v_mapped = var.clone(); 117 | 118 | // now, mapping var. if var is set during stabilisation, then 119 | // it queues var to be recomputed next round. 120 | // however this does not enqueue this map node. that only happens 121 | // during stabilisation IF var changes. 122 | let output = var.map(move |_input| { 123 | // returns the old value 124 | // this means the first output of this incremental 125 | // is just init. 126 | v_mapped.replace_with(|x| f(x)) 127 | }); 128 | 129 | // Do not trigger a recompute until our value is stable. 
130 | let v_cutoff = var.clone(); 131 | output.set_cutoff_fn_boxed(move |_, _| { 132 | let was_changed = v_cutoff.was_changed_during_stabilisation(); 133 | println!("cutoff function ran; was it changed during stab? {was_changed:?}"); 134 | was_changed 135 | }); 136 | let until = UntilStableValue::new(output.observe()); 137 | (output, until) 138 | } 139 | 140 | #[test] 141 | fn dependencies_using_cutoff() { 142 | let incr = IncrState::new(); 143 | let cell = Rc::new(Cell::new(0i32)); 144 | let (tillzero, _) = using_cutoff(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 145 | // add a dependency and observe that 146 | let map_observer = tillzero.map(|x| x + 1).observe(); 147 | let o_cell = cell.clone(); 148 | map_observer.subscribe(move |t| { 149 | println!("observed {:?}", t); 150 | if let Update::Changed(_) = t { 151 | o_cell.set(o_cell.get() + 1); 152 | } 153 | }); 154 | while !incr.is_stable() { 155 | incr.stabilise(); 156 | } 157 | assert_eq!(map_observer.value(), 1); 158 | // this time we didn't fire until tillzero had settled. 159 | assert_eq!(cell.get(), 1); 160 | } 161 | 162 | struct UntilStableValue { 163 | change_count: Rc>, 164 | token: SubscriptionToken, 165 | observer: Observer, 166 | } 167 | 168 | impl UntilStableValue { 169 | fn new(observer: Observer) -> Self { 170 | let change_count = Rc::new(Cell::new(0i32)); 171 | let count = change_count.clone(); 172 | let token = observer.subscribe(move |_val| { 173 | if let Update::Changed(_) = _val { 174 | count.set(count.get() + 1); 175 | } 176 | }); 177 | Self { 178 | change_count, 179 | token, 180 | observer, 181 | } 182 | } 183 | 184 | fn iterate(&self, state: &IncrState) -> T { 185 | let next_change_count = self.change_count.get() + 1; 186 | while self.change_count.get() < next_change_count && !state.is_stable() { 187 | // this will possibly increment change_count 188 | // if it doesn't, it's because the observed node did not emit a change event. 
189 | state.stabilise(); 190 | } 191 | self.observer.value() 192 | } 193 | } 194 | 195 | impl Drop for UntilStableValue { 196 | fn drop(&mut self) { 197 | self.observer.unsubscribe(self.token).unwrap() 198 | } 199 | } 200 | 201 | #[test] 202 | fn dependencies_using_cutoff_iterated() { 203 | let incr = IncrState::new(); 204 | let cell = Rc::new(Cell::new(0i32)); 205 | let (tillzero, until_stable) = using_cutoff(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 206 | // add a dependency and observe that 207 | let map_observer = tillzero.map(|x| x + 1).observe(); 208 | let o_cell = cell.clone(); 209 | map_observer.subscribe(move |t| { 210 | println!("observed {:?}", t); 211 | if let Update::Changed(_) = t { 212 | o_cell.set(o_cell.get() + 1); 213 | } 214 | }); 215 | until_stable.iterate(&incr); 216 | assert_eq!(map_observer.value(), 1); 217 | // this time we didn't fire until tillzero had settled. 218 | assert_eq!(cell.get(), 1); 219 | } 220 | 221 | #[test] 222 | fn two_fixedpoints_iterated() { 223 | let incr = IncrState::new(); 224 | 225 | let (_from_10, until_stable_10) = using_cutoff(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 226 | let (from_20, ________________) = using_cutoff(&incr.weak(), 20_u32, |x| x.saturating_sub(1)); 227 | let still_20 = from_20.map(|&x| x); 228 | 229 | let o_from_20 = from_20.observe(); 230 | let o_still_20 = still_20.observe(); 231 | assert_eq!(until_stable_10.iterate(&incr), 0); 232 | assert_eq!(o_from_20.value(), 10); 233 | 234 | // until_stable_10 only did 10 stabilise()s. 235 | // so from_20 hasn't gotten to a fixed point yet, and so any 236 | // downstream nodes have still not been queued for a recompute. 
237 | assert_eq!(o_still_20.value(), 20); 238 | } 239 | 240 | #[test] 241 | fn two_fixedpoints_combined() { 242 | let incr = IncrState::new(); 243 | let (from_10, until_stable_10) = using_cutoff(&incr.weak(), 10_u32, |x| x.saturating_sub(1)); 244 | let (from_20, ____________) = using_cutoff(&incr.weak(), 20_u32, |x| x.saturating_sub(1)); 245 | let o_from_20 = from_20.observe(); 246 | 247 | let counter = Rc::new(Cell::new(0)); 248 | let count = counter.clone(); 249 | let combined = (from_10 % from_20) 250 | .map(move |&ten, &twenty| { 251 | println!("combined is recomputing"); 252 | count.set(count.get() + 1); 253 | ten + twenty 254 | }) 255 | .observe(); 256 | 257 | assert_eq!(until_stable_10.iterate(&incr), 0); 258 | assert_eq!(o_from_20.value(), 10); 259 | 260 | // combined was only recomputed once 261 | assert_eq!(counter.get(), 2); 262 | assert_eq!(combined.value(), 10); 263 | 264 | // blast the rest of the way. 265 | while !incr.is_stable() { 266 | incr.stabilise(); 267 | } 268 | assert_eq!(counter.get(), 3); 269 | assert_eq!(combined.value(), 0); 270 | } 271 | 272 | #[test] 273 | fn transitive_closure() { 274 | use im_rc::{hashmap, hashset, HashMap, HashSet}; 275 | let incr = IncrState::new(); 276 | 277 | // (1, 2), (2, 3), (3, 4) gets (1, 3), (2, 4) added 278 | // (1, 2), (2, 3), (3, 4), (1, 3) gets (1, 4) added 279 | // 280 | let map: HashMap> = hashmap! 
{ 281 | 1 => hashset!{2}, 282 | 2 => hashset!{3}, 283 | 3 => hashset!{4}, 284 | }; 285 | 286 | let (_node, until_stable) = using_cutoff(&incr.weak(), map, |map| { 287 | let mut new = map.clone(); 288 | for (&a, a_trans) in map.iter() { 289 | for &b in a_trans.iter() { 290 | let b_trans = map.get(&b); 291 | for &c in b_trans.into_iter().flatten() { 292 | let new_a_trans = new.entry(a).or_default(); 293 | new_a_trans.insert(c); 294 | } 295 | } 296 | } 297 | println!("transitive closure round produced: {new:?}"); 298 | new 299 | }); 300 | 301 | let output = until_stable.iterate(&incr); 302 | assert_eq!( 303 | output, 304 | hashmap! { 305 | 1 => hashset!{2, 3, 4}, 306 | 2 => hashset!{3, 4}, 307 | 3 => hashset!{4}, 308 | } 309 | ) 310 | } 311 | 312 | #[allow(dead_code)] 313 | fn using_cutoff_bind(init: Incr, f: F) -> (Incr, UntilStableValue) 314 | where 315 | T: Default, 316 | F: FnMut(&mut T) -> T + 'static + Clone + NotObserver, 317 | { 318 | let state = init.state(); 319 | let var = state.var(T::default()); 320 | let v_mapped = var.clone(); 321 | 322 | // now, mapping var. if var is set during stabilisation, then 323 | // it queues var to be recomputed next round. 324 | // however this does not enqueue this map node. that only happens 325 | // during stabilisation IF var changes. 326 | let output = init.bind(move |init_val| { 327 | println!("setting to new init val: {:?}", init_val); 328 | v_mapped.set(init_val.clone()); 329 | // returns the old value 330 | // this means the first output of this incremental 331 | // is just init. 332 | let v = v_mapped.clone(); 333 | let mut f_ = f.clone(); 334 | let output = v_mapped.map(move |_x| v.replace_with(|x| f_(x))); 335 | // Do not trigger a recompute until our value is stable. 336 | output 337 | }); 338 | let v_cutoff = var.clone(); 339 | output.set_cutoff_fn_boxed(move |_, _| { 340 | let was_changed = v_cutoff.was_changed_during_stabilisation(); 341 | println!("cutoff function ran; was it changed during stab? 
{was_changed:?}"); 342 | was_changed 343 | }); 344 | let until = UntilStableValue::new(output.observe()); 345 | (output, until) 346 | } 347 | 348 | mod transitive_closure { 349 | use super::*; 350 | use im_rc::{hashmap, hashset, HashMap, HashSet}; 351 | use incremental::IntoIncr; 352 | use test_log::test; 353 | 354 | type EfficientSet = HashMap>; 355 | 356 | fn transitive_closure( 357 | input_set: impl IntoIncr, 358 | ) -> (Incr, UntilStableValue) { 359 | using_cutoff_bind(input_set.into_incr(), |map| { 360 | let mut new = map.clone(); 361 | for (&a, a_trans) in map.iter() { 362 | for &b in a_trans.iter() { 363 | let b_trans = map.get(&b); 364 | for &c in b_trans.into_iter().flatten() { 365 | let new_a_trans = new.entry(a).or_default(); 366 | new_a_trans.insert(c); 367 | } 368 | } 369 | } 370 | println!("transitive closure round produced: {new:?}"); 371 | new 372 | }) 373 | } 374 | 375 | #[test] 376 | fn with_query() { 377 | let incr = IncrState::new(); 378 | let query = incr.var((1, 4)); 379 | let map = incr.var(hashmap! { 380 | 1 => hashset!{2}, 381 | 2 => hashset!{3}, 382 | 3 => hashset!{4}, 383 | }); 384 | 385 | let (closure, until_stable) = transitive_closure(map.watch()); 386 | 387 | let is_in_set = (query.watch() % closure) 388 | .map(|(from, to), map| map.get(from).map_or(false, |trans| trans.contains(to))) 389 | .observe(); 390 | 391 | until_stable.iterate(&incr); 392 | 393 | query.set((2, 1)); 394 | incr.stabilise(); 395 | assert_eq!(is_in_set.value(), false); 396 | query.set((1, 3)); 397 | incr.stabilise(); 398 | assert_eq!(is_in_set.value(), true); 399 | map.modify(|m| { 400 | m.entry(3).or_default().insert(1); 401 | }); 402 | until_stable.iterate(&incr); 403 | assert_eq!(is_in_set.value(), true); 404 | } 405 | 406 | #[test] 407 | fn bound() { 408 | let incr = IncrState::new(); 409 | 410 | // (1, 2), (2, 3), (3, 4) gets (1, 3), (2, 4) added 411 | // (1, 2), (2, 3), (3, 4), (1, 3) gets (1, 4) added 412 | // 413 | let initial: HashMap> = hashmap! 
{ 414 | 1 => hashset!{2}, 415 | 2 => hashset!{3}, 416 | 3 => hashset!{4}, 417 | }; 418 | 419 | let map_var = incr.var(initial); 420 | 421 | let (_node, until_stable) = transitive_closure(map_var.watch()); 422 | 423 | let output = until_stable.iterate(&incr); 424 | assert_eq!( 425 | output, 426 | hashmap! { 427 | 1 => hashset!{2, 3, 4}, 428 | 2 => hashset!{3, 4}, 429 | 3 => hashset!{4}, 430 | } 431 | ); 432 | map_var.update(|mut v| { 433 | v.entry(3).or_default().insert(1); 434 | v 435 | }); 436 | let output = until_stable.iterate(&incr); 437 | assert_eq!( 438 | output, 439 | hashmap! { 440 | 1 => hashset!{1, 2, 3, 4}, 441 | 2 => hashset!{1, 2, 3, 4}, 442 | 3 => hashset!{1, 2, 3, 4}, 443 | } 444 | ); 445 | } 446 | } 447 | -------------------------------------------------------------------------------- /src/kind/expert.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | any::Any, 3 | cell::{Cell, RefCell}, 4 | rc::Rc, 5 | }; 6 | 7 | use crate::node::Node; 8 | use crate::{ 9 | boxes::{new_unsized, SmallBox}, 10 | incrsan::not_observer_boxed_trait, 11 | }; 12 | use crate::{incrsan::NotObserver, ValueInternal}; 13 | use crate::{CellIncrement, Incr, NodeRef, Value}; 14 | 15 | pub(crate) trait ExpertEdge: Any + NotObserver { 16 | /// Called from run_edge_callback 17 | fn on_change(&self); 18 | fn packed(&self) -> NodeRef; 19 | fn index_cell(&self) -> &Cell>; 20 | fn erased_input(&self) -> &Node; 21 | } 22 | 23 | pub(crate) type PackedEdge = Rc; 24 | 25 | not_observer_boxed_trait! { 26 | type BoxedOnChange = Box; 27 | } 28 | 29 | pub(crate) struct Edge { 30 | pub child: Incr, 31 | pub on_change: RefCell>>, 32 | /* [index] is defined whenever the [edge] is in the [children] of some [t]. Then it is 33 | the index of this [edge] in that [children] array. It might seem redundant with all 34 | the other indexes we have, but it is necessary to remove children. The index may 35 | change as sibling children are removed. 
*/ 36 | pub index: Cell>, 37 | } 38 | 39 | impl Edge { 40 | fn new(child: Incr, on_change: Option>) -> Self { 41 | Self { 42 | child, 43 | on_change: RefCell::new(on_change), 44 | index: None.into(), 45 | } 46 | } 47 | } 48 | 49 | impl ExpertEdge for Edge { 50 | fn on_change(&self) { 51 | let mut handler = self.on_change.borrow_mut(); 52 | if let Some(h) = &mut *handler { 53 | let v = self.child.node.value_as_ref(); 54 | h(v.as_ref().unwrap()); 55 | } 56 | } 57 | fn packed(&self) -> NodeRef { 58 | self.child.node.packed() 59 | } 60 | 61 | fn index_cell(&self) -> &Cell> { 62 | &self.index 63 | } 64 | 65 | fn erased_input(&self) -> &Node { 66 | self.child.node.erased() 67 | } 68 | } 69 | 70 | pub(crate) trait ObservabilityChange: FnMut(bool) + 'static + NotObserver {} 71 | impl ObservabilityChange for T where T: FnMut(bool) + 'static + NotObserver {} 72 | 73 | pub(crate) trait Recompute: 74 | FnMut() -> SmallBox + 'static + NotObserver 75 | { 76 | } 77 | impl Recompute for T where T: FnMut() -> SmallBox + 'static + NotObserver {} 78 | 79 | pub(crate) struct ExpertNode { 80 | pub recompute: RefCell>>, 81 | pub on_observability_change: RefCell>>, 82 | pub children: RefCell>, 83 | pub force_stale: Cell, 84 | pub num_invalid_children: Cell, 85 | pub will_fire_all_callbacks: Cell, 86 | } 87 | 88 | impl Drop for ExpertNode { 89 | fn drop(&mut self) { 90 | self.children.take(); 91 | self.recompute.take(); 92 | self.on_observability_change.take(); 93 | } 94 | } 95 | 96 | pub enum MakeStale { 97 | AlreadyStale, 98 | Ok, 99 | } 100 | 101 | impl ExpertNode { 102 | pub(crate) fn new_obs( 103 | mut recompute: impl FnMut() -> T + 'static + NotObserver, 104 | on_observability_change: impl ObservabilityChange, 105 | ) -> Self { 106 | Self { 107 | recompute: RefCell::new(Some(Box::new(move || new_unsized!(recompute())))), 108 | on_observability_change: RefCell::new(Some(Box::new(on_observability_change))), 109 | children: vec![].into(), 110 | force_stale: false.into(), 111 | 
num_invalid_children: 0.into(), 112 | will_fire_all_callbacks: true.into(), 113 | } 114 | } 115 | pub(crate) fn incr_invalid_children(&self) { 116 | self.num_invalid_children.increment(); 117 | } 118 | pub(crate) fn decr_invalid_children(&self) { 119 | self.num_invalid_children.increment(); 120 | } 121 | 122 | pub(crate) fn make_stale(&self) -> MakeStale { 123 | if self.force_stale.get() { 124 | MakeStale::AlreadyStale 125 | } else { 126 | self.force_stale.set(true); 127 | MakeStale::Ok 128 | } 129 | } 130 | pub(crate) fn add_child_edge(&self, edge: PackedEdge) -> i32 { 131 | assert!(edge.index_cell().get().is_none()); 132 | let borrow_span = 133 | tracing::debug_span!("expert.children.borrow_mut() in ExpertNode::add_child_edge"); 134 | borrow_span.in_scope(|| { 135 | let mut children = self.children.borrow_mut(); 136 | let new_child_index = children.len() as i32; 137 | edge.index_cell().set(Some(new_child_index)); 138 | children.push(edge); 139 | self.force_stale.set(true); 140 | tracing::debug!("expert added child, ix {new_child_index}"); 141 | new_child_index 142 | }) 143 | } 144 | pub(crate) fn swap_children(&self, one: usize, two: usize) { 145 | let borrow_span = 146 | tracing::debug_span!("expert.children.borrow_mut() in ExpertNode::swap_children"); 147 | borrow_span.in_scope(|| { 148 | let mut children = self.children.borrow_mut(); 149 | let c1 = children[one].index_cell(); 150 | let c2 = children[two].index_cell(); 151 | c1.swap(c2); 152 | children.swap(one, two); 153 | }); 154 | } 155 | pub(crate) fn last_child_edge(&self) -> Option { 156 | let children = self.children.borrow(); 157 | children.last().cloned() 158 | } 159 | pub(crate) fn pop_child_edge(&self) -> Option { 160 | let mut children = self.children.borrow_mut(); 161 | let packed_edge = children.pop()?; 162 | self.force_stale.set(true); 163 | packed_edge.index_cell().set(None); 164 | Some(packed_edge) 165 | } 166 | pub(crate) fn before_main_computation(&self) -> Result<(), Invalid> { 167 | if 
self.num_invalid_children.get() > 0 { 168 | Err(Invalid) 169 | } else { 170 | self.force_stale.set(false); 171 | if self.will_fire_all_callbacks.replace(false) { 172 | let borrow_span = tracing::debug_span!( 173 | "expert.children.borrow_mut() in ExpertNode::before_main_computation" 174 | ); 175 | 176 | let cloned = borrow_span.in_scope(|| self.children.borrow().clone()); 177 | tracing::debug!("running on_change for {} children", cloned.len()); 178 | for child in cloned { 179 | child.on_change() 180 | } 181 | } 182 | Ok(()) 183 | } 184 | } 185 | pub(crate) fn observability_change(&self, is_now_observable: bool) { 186 | if let Some(handler) = self.on_observability_change.borrow_mut().as_mut() { 187 | handler(is_now_observable); 188 | } 189 | if !is_now_observable { 190 | // for next time. this is a reset. 191 | self.will_fire_all_callbacks.set(true); 192 | /* If we don't reset num_invalid_children, we would double count them: just imagine 193 | what happens we if reconnect/disconnect/reconnect/disconnect with an invalid 194 | child. */ 195 | self.num_invalid_children.set(0); 196 | } 197 | } 198 | pub(crate) fn run_edge_callback(&self, child_index: i32) { 199 | if !self.will_fire_all_callbacks.get() { 200 | let child = { 201 | let borrow_span = tracing::debug_span!( 202 | "expert.children.borrow_mut() in ExpertNode::run_edge_callback" 203 | ); 204 | borrow_span.in_scope(|| { 205 | let children = self.children.borrow(); 206 | let Some(child) = children.get(child_index as usize) else { 207 | return None; 208 | }; 209 | // clone the child, so we can drop the borrow of the children vector. 210 | // the child on_change callback may add or remove children. It needs borrow_mut access! 
211 | Some(child.clone()) 212 | }) 213 | }; 214 | let Some(child) = child else { 215 | return; 216 | }; 217 | child.on_change() 218 | } 219 | } 220 | } 221 | 222 | pub(crate) struct Invalid; 223 | 224 | use core::fmt::Debug; 225 | impl Debug for ExpertNode { 226 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 227 | f.debug_struct("ExpertNode").finish() 228 | } 229 | } 230 | 231 | pub mod public { 232 | use std::rc::{Rc, Weak}; 233 | 234 | use crate::incrsan::NotObserver; 235 | use crate::{Incr, Value, WeakIncr, WeakState}; 236 | 237 | use super::Edge; 238 | 239 | #[derive(Clone)] 240 | pub struct Dependency { 241 | edge: Weak>, 242 | } 243 | 244 | impl core::fmt::Debug for Dependency { 245 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 246 | f.write_str("Dependency") 247 | } 248 | } 249 | impl PartialEq for Dependency { 250 | fn eq(&self, other: &Self) -> bool { 251 | crate::weak_thin_ptr_eq(&self.edge, &other.edge) 252 | } 253 | } 254 | 255 | impl Dependency { 256 | pub fn node(&self) -> Incr { 257 | self.edge.upgrade().unwrap().child.clone() 258 | } 259 | } 260 | 261 | impl Dependency { 262 | pub fn value_cloned(&self) -> T { 263 | self.edge.upgrade().unwrap().child.node.latest() 264 | } 265 | } 266 | 267 | pub struct Node { 268 | incr: Incr, 269 | } 270 | 271 | // impl Clone for Node { 272 | // fn clone(&self) -> Self { 273 | // } 274 | // } 275 | 276 | use crate::state::expert; 277 | impl Node { 278 | pub fn weak(&self) -> WeakNode { 279 | WeakNode { 280 | incr: self.incr.weak(), 281 | } 282 | } 283 | 284 | pub fn new(state: &WeakState, f: impl FnMut() -> T + 'static + NotObserver) -> Node { 285 | fn ignore(_: bool) {} 286 | Node::new_(state, f, ignore) 287 | } 288 | pub fn new_( 289 | state: &WeakState, 290 | f: impl FnMut() -> T + 'static + NotObserver, 291 | obs_change: impl FnMut(bool) + 'static + NotObserver, 292 | ) -> Self { 293 | let incr = expert::create::(&state.upgrade_inner().unwrap(), f, obs_change); 
294 | Self { incr } 295 | } 296 | pub fn new_cyclic( 297 | state: &WeakState, 298 | f: impl FnOnce(WeakIncr) -> F + NotObserver, 299 | ) -> Node 300 | where 301 | F: FnMut() -> T + 'static + NotObserver, 302 | { 303 | fn ignore(_: bool) {} 304 | Node::new_cyclic_(state, f, ignore) 305 | } 306 | pub fn new_cyclic_( 307 | state: &WeakState, 308 | f: impl FnOnce(WeakIncr) -> F + NotObserver, 309 | obs_change: impl FnMut(bool) + 'static + NotObserver, 310 | ) -> Node 311 | where 312 | F: FnMut() -> T + 'static + NotObserver, 313 | { 314 | let incr = 315 | expert::create_cyclic::(&state.upgrade_inner().unwrap(), f, obs_change); 316 | Self { incr } 317 | } 318 | pub fn watch(&self) -> Incr { 319 | self.incr.clone() 320 | } 321 | pub fn make_stale(&self) { 322 | expert::make_stale(&self.incr.node.packed()) 323 | } 324 | pub fn invalidate(&self) { 325 | expert::invalidate(&self.incr.node.packed()) 326 | } 327 | pub fn add_dependency(&self, on: &Incr) -> Dependency { 328 | let edge = Rc::new(Edge::new(on.clone(), None)); 329 | let dep = Dependency { 330 | edge: Rc::downgrade(&edge), 331 | }; 332 | expert::add_dependency(&self.incr.node.packed(), edge); 333 | dep 334 | } 335 | /// Add dependency with a change callback. 336 | /// 337 | /// Note that you should not use the change callback to 338 | /// add or remove dependencies. The scheduler isn't smart enough to initialize such a 339 | /// system and cut off any recursion that results, so it doesn't try. You can implement 340 | /// Bind-like behaviour by introducing a Map node, adding/removing dynamic dependencies in 341 | /// the Map function, and then adding a dependency on the Map node. The static dependency 342 | /// on the Map node ensures all the dynamic dependencies are resolved before the expert 343 | /// Node runs. This way you also get cycle detection done by the system.
344 | pub fn add_dependency_with( 345 | &self, 346 | on: &Incr, 347 | on_change: impl FnMut(&D) + 'static + NotObserver, 348 | ) -> Dependency { 349 | let edge = Rc::new(Edge::new(on.clone(), Some(Box::new(on_change)))); 350 | let dep = Dependency { 351 | edge: Rc::downgrade(&edge), 352 | }; 353 | expert::add_dependency(&self.incr.node.packed(), edge); 354 | dep 355 | } 356 | /// Caution: if the Dependency is on an expert::Node, then running this may cause 357 | /// a related WeakNode to be deallocated. If you wish to use the related node after 358 | /// (i.e. to invalidate it) then upgrade the WeakNode first. 359 | pub fn remove_dependency(&self, dep: Dependency) { 360 | let edge = dep.edge.upgrade().unwrap(); 361 | expert::remove_dependency(&*self.incr.node, &*edge); 362 | } 363 | } 364 | 365 | #[derive(Clone)] 366 | pub struct WeakNode { 367 | incr: WeakIncr, 368 | } 369 | 370 | impl WeakNode { 371 | #[inline] 372 | pub fn watch(&self) -> WeakIncr { 373 | self.incr.clone() 374 | } 375 | #[inline] 376 | pub fn upgrade(&self) -> Option> { 377 | self.incr.upgrade().map(|incr| Node { incr }) 378 | } 379 | #[inline] 380 | pub fn make_stale(&self) { 381 | self.upgrade().unwrap().make_stale(); 382 | } 383 | #[inline] 384 | pub fn invalidate(&self) { 385 | self.upgrade().unwrap().invalidate(); 386 | } 387 | #[inline] 388 | pub fn add_dependency(&self, on: &Incr) -> Dependency { 389 | self.upgrade().unwrap().add_dependency(on) 390 | } 391 | /// See [Node::add_dependency_with], noting especially that you should not use the on_change 392 | /// callback to add dynamic dependencies to this expert node. 393 | pub fn add_dependency_with( 394 | &self, 395 | on: &Incr, 396 | on_change: impl FnMut(&D) + 'static + NotObserver, 397 | ) -> Dependency { 398 | self.upgrade().unwrap().add_dependency_with(on, on_change) 399 | } 400 | /// Caution: if the Dependency is on an expert::Node, then running this may cause 401 | /// a related WeakNode to be deallocated. 
If you wish to use the related node after 402 | /// (i.e. to invalidate it) then upgrade the WeakNode first. 403 | pub fn remove_dependency(&self, dep: Dependency) { 404 | self.upgrade().unwrap().remove_dependency(dep) 405 | } 406 | } 407 | 408 | impl AsRef> for Node { 409 | #[inline] 410 | fn as_ref(&self) -> &Incr { 411 | &self.incr 412 | } 413 | } 414 | } 415 | -------------------------------------------------------------------------------- /incremental-macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod debug; 2 | pub use debug::DebugWithDb; 3 | 4 | #[doc(hidden)] 5 | pub mod re_export { 6 | pub use incremental; 7 | pub use slotmap; 8 | #[cfg(feature = "string-interner")] 9 | pub use string_interner; 10 | } 11 | 12 | pub trait Indexed { 13 | type Storage: Default; 14 | fn register( 15 | _incr: &::incremental::IncrState, 16 | _storage: &::std::rc::Rc<::std::cell::RefCell>, 17 | ) { 18 | // default is not to register 19 | } 20 | } 21 | 22 | pub trait ProviderFor { 23 | fn __storage__(&self) -> &::std::cell::RefCell; 24 | } 25 | 26 | #[cfg(feature = "string-interner")] 27 | pub use string::InternedString; 28 | 29 | #[cfg(feature = "string-interner")] 30 | mod string { 31 | use super::ProviderFor; 32 | use crate::DebugWithDb; 33 | use string_interner::{DefaultSymbol, StringInterner}; 34 | 35 | pub struct InternedString(DefaultSymbol); 36 | 37 | impl super::Indexed for InternedString { 38 | type Storage = StringInterner; 39 | } 40 | 41 | impl InternedString { 42 | pub fn new(db: &impl ProviderFor, string: impl AsRef) -> Self { 43 | let mut interner = db.__storage__().borrow_mut(); 44 | Self(interner.get_or_intern(string.as_ref())) 45 | } 46 | } 47 | impl> DebugWithDb for InternedString { 48 | fn fmt( 49 | &self, 50 | f: &mut std::fmt::Formatter<'_>, 51 | db: &Db, 52 | _include_all_fields: bool, 53 | ) -> std::fmt::Result { 54 | let storage = db.__storage__().borrow(); 55 | let string = 
storage.resolve(self.0); 56 | write!(f, "{:?}", string) 57 | } 58 | } 59 | } 60 | 61 | #[macro_export] 62 | macro_rules! interned { 63 | ( 64 | $(#[$attr:meta])* 65 | $vis:vis type $id:ident = String; 66 | ) => { 67 | $(#[$attr])* 68 | #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 69 | $vis struct $id($crate::re_export::string_interner::DefaultSymbol); 70 | impl $id { 71 | $vis fn new(db: &impl $crate::ProviderFor<$crate::InternedString>, s: impl AsRef) -> Self { 72 | let mut storage = db.__storage__().borrow_mut(); 73 | let sym = storage.get_or_intern(s.as_ref()); 74 | $id(sym) 75 | } 76 | } 77 | impl ::std::fmt::Debug for $id { 78 | fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { 79 | let _usize = $crate::re_export::string_interner::Symbol::to_usize(self.0); 80 | write!(f, "{}({})", stringify!($id), _usize) 81 | } 82 | } 83 | impl> $crate::DebugWithDb for $id { 84 | fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>, db: &Db, include_all_fields: bool) -> ::std::fmt::Result { 85 | let storage = db.__storage__().borrow(); 86 | let string = storage.resolve(self.0).unwrap(); 87 | ::std::fmt::Debug::fmt(&string, f) 88 | } 89 | } 90 | }; 91 | 92 | // structs 93 | (@accept_ty String) => { String }; 94 | (@accept_ty $field_ty:ty) => { $field_ty }; 95 | (@clone $field:ident String) => { $field.clone() }; 96 | (@clone $field:ident $field_ty:ty) => { $field.clone() }; 97 | 98 | ($(#[$attr:meta])* $vis:vis struct $id:ident { 99 | $($field_vis:vis $field:ident : $field_ty:ty,)+ 100 | }) => { 101 | $(#[$attr])* 102 | $crate::re_export::slotmap::new_key_type! { $vis struct $id; } 103 | ::paste::paste! 
{ 104 | impl $crate::Indexed for $id { 105 | type Storage = ( 106 | // look up by all but the id fields 107 | ::std::collections::HashMap<($($field_ty,)*), $id>, 108 | // get back a type with all fields on it 109 | $crate::re_export::slotmap::SlotMap<$id, [<__ $id Data >]>, 110 | ); 111 | } 112 | $vis struct [<__ $id Data >] { 113 | $($id_field: $id_field_ty,)? 114 | $($field: $field_ty,)* 115 | } 116 | #[allow(dead_code)] 117 | impl $id { 118 | $vis fn new( 119 | db: &impl $crate::ProviderFor, 120 | $($field: $crate::interned!(@accept_ty $field_ty),)+ 121 | ) -> Self { 122 | let mut storage = db.__storage__().borrow_mut(); 123 | let (__hashmap, __slotmap) = &mut *storage; 124 | let __key = ($($crate::interned!(@clone $field $field_ty),)*); 125 | if let Some(id) = __hashmap.get(&__key) { 126 | return *id 127 | } 128 | let __id = __slotmap.insert_with_key(|_k| [<__ $id Data>] { 129 | $($id_field: _k,)? 130 | $($field: $crate::interned!(@clone $field $field_ty),)* 131 | }); 132 | __hashmap.insert(__key, __id); 133 | __id 134 | } 135 | $( 136 | $field_vis fn $field(&self, db: &impl $crate::ProviderFor) -> $field_ty { 137 | let storage = db.__storage__().borrow(); 138 | let (_, slotmap) = &*storage; 139 | slotmap.get(*self).unwrap().$field.clone() 140 | } 141 | )* 142 | } 143 | impl<_Db: $crate::ProviderFor<$id>> $crate::DebugWithDb<_Db> for $id { 144 | fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>, _db: &_Db, _include_all_fields: bool) -> ::std::fmt::Result { 145 | let storage = _db.__storage__().borrow(); 146 | let (_, slotmap) = &*storage; 147 | use $crate::debug::helper::Fallback; 148 | let s = slotmap.get(*self).unwrap(); 149 | f 150 | .debug_struct(stringify!($id)) 151 | $(.field( 152 | stringify!($field), 153 | &$crate::debug::helper::SalsaDebug::< 154 | $field_ty, 155 | &_Db, 156 | >::salsa_debug( 157 | #[allow(clippy::needless_borrow)] 158 | &s.$field, 159 | _db, 160 | _include_all_fields, 161 | ), 162 | ))* 163 | .finish() 164 | } 165 | } 166 | } 167 | }; 
168 | } 169 | 170 | // #[macro_export] 171 | // macro_rules! slot_indexed { 172 | // // structs 173 | // ($(#[$attr:meta])* $vis:vis struct $id:ident { 174 | // $($field_vis:vis $field:ident : $field_ty:ty,)+ 175 | // }) => { 176 | // $(#[$attr])* 177 | // $crate::re_export::slotmap::new_key_type! { $vis struct $id; } 178 | // ::paste::paste! { 179 | // impl $crate::Indexed for $id { 180 | // type Storage = 181 | // $crate::re_export::slotmap::SlotMap<$id, [<__ $id Data >]> ; 182 | // } 183 | // $vis struct [<__ $id Data >] { 184 | // $($field: $field_ty,)* 185 | // } 186 | // #[allow(dead_code)] 187 | // impl $id { 188 | // $vis fn new(__db: &impl $crate::ProviderFor, $($field: $field_ty,)+) -> Self { 189 | // let slotmap = __db.__storage__(); 190 | // let mut slotmap_ = slotmap.borrow_mut(); 191 | // slotmap_.insert([<__ $id Data>] { 192 | // $($field,)+ 193 | // }) 194 | // } 195 | // $( 196 | // $field_vis fn $field<'a>(&'_ self, __db: &'a impl $crate::ProviderFor) -> ::std::cell::Ref<'a, $field_ty> { 197 | // let slotmap = __db.__storage__(); 198 | // ::std::cell::Ref::map(slotmap.borrow(), |slotmap| { 199 | // &slotmap.get(*self).unwrap().$field 200 | // }) 201 | // } 202 | // )* 203 | // } 204 | // } 205 | // }; 206 | // } 207 | 208 | #[macro_export] 209 | macro_rules! memoized { 210 | (@doc_helper $( #[doc = $doc:expr] $( $thing:tt )* )* ) => ( $( #[doc = $doc] $( $thing )* )* ); 211 | ($(#[$attr:meta])* $vis:vis fn $function:ident($db:ident : &$db_ty:ty, $($arg:ident : $arg_ty:ty),+ $(,)?) -> Incr<$r:ty> { 212 | $($body:tt)* 213 | }) => { 214 | ::paste::paste! 
{ 215 | $(#[$attr])* 216 | $vis fn $function($db: &$db_ty, $($arg: $arg_ty,)*) 217 | -> ::incremental::Incr<$r> { 218 | [<$function:camel>]::get(&$db, $($arg,)*) 219 | } 220 | 221 | /// Helper struct for 222 | $crate::memoized!{@doc_helper 223 | #[doc = concat!("Helper struct implementing [incremental_macros::Indexed] for [", stringify!($function), "]")] 224 | $vis struct [<$function:camel>]; 225 | } 226 | impl $crate::Indexed for [<$function:camel>] { 227 | type Storage = $crate::re_export::incremental::WeakHashMap< ($($arg_ty,)*), $r >; 228 | fn register(incr: &IncrState, storage: &::std::rc::Rc<::std::cell::RefCell>) { 229 | incr.add_weak_map(storage.clone()); 230 | } 231 | } 232 | #[allow(dead_code)] 233 | impl [<$function:camel>] { 234 | fn get($db: &$db_ty, $($arg : $arg_ty,)*) -> $crate::re_export::incremental::Incr<$r> { 235 | fn __fn($db: &$db_ty, $($arg: $arg_ty,)*) -> $crate::re_export::incremental::Incr<$r> { 236 | $($body)* 237 | } 238 | let storage = <$db_ty as $crate::ProviderFor<[<$function:camel>]>>::__storage__($db); 239 | let mut storage_ = storage.borrow_mut(); 240 | let entry = storage_.entry(($($arg.clone(),)*)); 241 | let execute = || { 242 | let top = ::incremental::Scope::top(); 243 | $db.incr.within_scope(top, || __fn($db, $($arg,)*)) 244 | }; 245 | 246 | match entry { 247 | ::std::collections::hash_map::Entry::Occupied(mut occ) => { 248 | let incr = occ.get().upgrade(); 249 | if let Some(i) = incr { 250 | return i 251 | } else { 252 | let val = execute(); 253 | occ.insert(val.weak()); 254 | return val 255 | } 256 | } 257 | ::std::collections::hash_map::Entry::Vacant(vacant) => { 258 | let val = execute(); 259 | vacant.insert(val.weak()); 260 | return val 261 | } 262 | } 263 | } 264 | } 265 | } 266 | } 267 | } 268 | 269 | /// Example 270 | /// 271 | /// ``` 272 | /// # use incremental_macros::db; 273 | /// use incremental_macros::InternedString; 274 | /// db! 
{ 275 | /// pub struct Db provides { 276 | /// InternedString 277 | /// } 278 | /// } 279 | /// ``` 280 | /// 281 | #[macro_export] 282 | macro_rules! db { 283 | (@storage_path_to_ident $Db:ident, $memo_ty:ty, $(::)? $segment:ident $(:: $rest:ident)*) => { 284 | paste::paste! { 285 | impl $crate::ProviderFor<$memo_ty> for $Db { 286 | fn __storage__(&self) -> &::std::cell::RefCell<<$memo_ty as $crate::Indexed>::Storage> { 287 | &self.__storage.[< $segment:snake $(_ $rest:snake )* >] 288 | } 289 | } 290 | } 291 | }; 292 | (@input_arg #[input] $input:ident: $input_ty:ty) => { , $input: $input_ty }; 293 | (@input_arg $memo:ident: $memo_ty:ty) => { }; 294 | 295 | (@input_ident #[input] $input:ident: $input_ty:ty) => { $input }; 296 | (@input_ident $memo:ident: $memo_ty:ty) => {}; 297 | 298 | (@memo_init $state:ident, $memo_ty:ty) => { 299 | { 300 | let memo = ::std::rc::Rc::new(::std::cell::RefCell::new( 301 | <$memo_ty as $crate::Indexed>::Storage::default() 302 | )); 303 | <$memo_ty as $crate::Indexed>::register(&$state, &memo); 304 | memo 305 | } 306 | }; 307 | 308 | (@memo_ident #[input] $input:ident: $input_ty:ty) => { }; 309 | (@memo_ident $memo:ident: $memo_ty:ty) => { $memo }; 310 | 311 | (@s_munch () -> {$vis:vis struct $Db:ident { $($memo:ident : $memo_ty:ty,)* $(,)? } }) => { 312 | $crate::db!(@storage_inner $vis struct $Db { 313 | $($memo: $memo_ty),* 314 | }); 315 | }; 316 | (@s_munch ($(::)? $segment:ident $(::$rest:ident)* => $ty:ty, $($next:tt)*) -> {$vis:vis struct $Db:ident { $($memo:ident : $memo_ty:ty),* $(,)? }}) => { 317 | ::paste::paste! { 318 | $crate::db!(@s_munch ($($next)*) -> { $vis struct $Db { 319 | $($memo: $memo_ty,)* 320 | [< $segment:snake $(_ $rest:snake)* >]: $ty, 321 | } }); 322 | } 323 | }; 324 | 325 | (@storage_inner $vis:vis struct $Db:ident { 326 | $($memo:ident : $memo_ty:ty),* 327 | }) => { 328 | ::paste::paste! 
{ 329 | #[derive(Clone)] 330 | struct [<$Db Storage>] { 331 | $($memo: $memo_ty,)* 332 | } 333 | 334 | impl [<$Db Storage>] { 335 | fn new($($memo: $memo_ty),*) -> Self { 336 | Self { $($memo),* } 337 | } 338 | } 339 | } 340 | }; 341 | 342 | ( 343 | $(#[$attr:meta])* 344 | $vis:vis struct $Db:ident provides $tt:tt 345 | ) => { 346 | $crate::db!($vis struct $Db {} provides $tt) 347 | }; 348 | ( 349 | $(#[$attr:meta])* 350 | $vis:vis struct $Db:ident { 351 | $($(#[$fattr:meta])* $field:ident : $field_ty:ty),* $(,)? 352 | } provides { 353 | $($memo_ty:ident),* $(,)? 354 | } 355 | ) => { 356 | 357 | /// Storage struct for $Db 358 | $crate::db!(@s_munch ($( 359 | $memo_ty => ::std::rc::Rc<::std::cell::RefCell<<$memo_ty as $crate::Indexed>::Storage>>, 360 | )*) -> {$vis struct $Db {}}); 361 | 362 | ::paste::paste! { 363 | $(#[$attr])* 364 | #[derive(Clone)] 365 | $vis struct $Db { 366 | $vis incr: $crate::re_export::incremental::WeakState, 367 | $($(#[$fattr])* $field: $field_ty,)* 368 | __storage: [<$Db Storage>] 369 | } 370 | 371 | impl $Db { 372 | $vis fn new( 373 | __state: &$crate::re_export::incremental::IncrState, 374 | $($field: $field_ty,)* 375 | ) -> Self { 376 | let __storage = [<$Db Storage>]::new( 377 | $($crate::db!(@memo_init __state, $memo_ty)),* 378 | ); 379 | Self { 380 | incr: __state.weak(), 381 | $($field,)* 382 | __storage, 383 | } 384 | } 385 | } 386 | } 387 | 388 | $( 389 | $crate::db!(@storage_path_to_ident $Db, $memo_ty, $memo_ty); 390 | )* 391 | }; 392 | } 393 | -------------------------------------------------------------------------------- /incremental-map/src/symmetric_fold.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | use test_log::test; 3 | 4 | use std::cmp::Ordering; 5 | use std::collections::{ 6 | btree_map::{IntoIter, Keys}, 7 | BTreeMap, 8 | }; 9 | use std::iter::Peekable; 10 | use std::ops::Deref; 11 | use std::rc::Rc; 12 | 13 | // Adapted from itertools. 
14 | // For [1, 2, 3].merge([2, 4]) 15 | // you should get [1, 2, 3, 4]. 16 | struct MergeOnce 17 | where 18 | I: Iterator, 19 | J: Iterator, 20 | { 21 | a: Peekable, 22 | b: Peekable, 23 | fused: Option, 24 | } 25 | 26 | impl MergeOnce 27 | where 28 | I: Iterator, 29 | J: Iterator, 30 | { 31 | fn new(a: I, b: J) -> Self { 32 | Self { 33 | a: a.peekable(), 34 | b: b.peekable(), 35 | fused: None, 36 | } 37 | } 38 | } 39 | 40 | impl Iterator for MergeOnce 41 | where 42 | I: Iterator, 43 | J: Iterator, 44 | I::Item: PartialOrd, 45 | { 46 | type Item = I::Item; 47 | fn next(&mut self) -> Option { 48 | let (less_than, both) = match self.fused { 49 | Some(lt) => (lt, false), 50 | None => match (self.a.peek(), self.b.peek()) { 51 | (Some(a), Some(b)) => (a <= b, a == b), 52 | (Some(_), None) => { 53 | self.fused = Some(true); 54 | (true, false) 55 | } 56 | (None, Some(_)) => { 57 | self.fused = Some(false); 58 | (false, false) 59 | } 60 | (None, None) => return None, 61 | }, 62 | }; 63 | 64 | if less_than { 65 | if both { 66 | drop(self.b.next()); 67 | } 68 | self.a.next() 69 | } else { 70 | if both { 71 | drop(self.a.next()); 72 | } 73 | self.b.next() 74 | } 75 | } 76 | } 77 | 78 | // Same but with a custom comparator 79 | pub(crate) struct MergeOnceWith 80 | where 81 | I: Iterator, 82 | J: Iterator, 83 | F: Fn(&I::Item, &J::Item) -> Ordering, 84 | { 85 | a: Peekable, 86 | b: Peekable, 87 | fcmp: F, 88 | fused: Option, 89 | } 90 | 91 | impl Ordering> MergeOnceWith { 92 | pub(crate) fn new(a: I, b: J, fcmp: FCmp) -> Self { 93 | Self { 94 | a: a.peekable(), 95 | b: b.peekable(), 96 | fcmp, 97 | fused: None, 98 | } 99 | } 100 | } 101 | 102 | impl Iterator for MergeOnceWith 103 | where 104 | I: Iterator, 105 | J: Iterator, 106 | FCmp: Fn(&I::Item, &J::Item) -> Ordering, 107 | { 108 | type Item = MergeElement; 109 | fn next(&mut self) -> Option { 110 | let ordering: Ordering = match self.fused { 111 | Some(true) => Ordering::Less, 112 | Some(false) => Ordering::Greater, 113 | 
None => match (self.a.peek(), self.b.peek()) { 114 | (Some(a), Some(b)) => (self.fcmp)(a, b), 115 | (Some(_), None) => { 116 | self.fused = Some(true); 117 | Ordering::Less 118 | } 119 | (None, Some(_)) => { 120 | self.fused = Some(false); 121 | Ordering::Greater 122 | } 123 | (None, None) => return None, 124 | }, 125 | }; 126 | 127 | match ordering { 128 | Ordering::Equal => self 129 | .a 130 | .next() 131 | .zip(self.b.next()) 132 | .map(|(a, b)| MergeElement::Both(a, b)), 133 | Ordering::Less => self.a.next().map(MergeElement::Left), 134 | Ordering::Greater => self.b.next().map(MergeElement::Right), 135 | } 136 | } 137 | } 138 | 139 | pub(crate) struct SymmetricDiffOwned { 140 | self_: Peekable>, 141 | other: Peekable>, 142 | fused: Option, // keys: MergeOnce, Keys<'a, K, V>>, 143 | } 144 | 145 | impl Iterator for SymmetricDiffOwned 146 | where 147 | K: Ord, 148 | V: PartialEq, 149 | { 150 | type Item = DiffElement<(K, V)>; 151 | fn next(&mut self) -> Option { 152 | let less_than = loop { 153 | match self.fused { 154 | Some(lt) => break lt, 155 | None => match (self.self_.peek(), self.other.peek()) { 156 | (Some((ka, va)), Some((kb, vb))) => match ka.cmp(kb) { 157 | Ordering::Less => break true, 158 | Ordering::Greater => break false, 159 | Ordering::Equal => { 160 | let unequal = va != vb; 161 | let (sk, sv) = self.self_.next()?; 162 | let (ok, ov) = self.other.next()?; 163 | if unequal { 164 | return Some(DiffElement::Unequal((sk, sv), (ok, ov))); 165 | } else { 166 | continue; 167 | } 168 | } 169 | }, 170 | (Some(_), None) => { 171 | self.fused = Some(true); 172 | break true; 173 | } 174 | (None, Some(_)) => { 175 | self.fused = Some(false); 176 | break false; 177 | } 178 | (None, None) => return None, 179 | }, 180 | } 181 | }; 182 | if less_than { 183 | self.self_.next().map(|(k, v)| DiffElement::Left((k, v))) 184 | } else { 185 | self.other.next().map(|(k, v)| DiffElement::Right((k, v))) 186 | } 187 | } 188 | } 189 | 190 | pub(crate) struct 
SymmetricDiff<'a, K: 'a, V: 'a> { 191 | self_: &'a BTreeMap, 192 | other: &'a BTreeMap, 193 | keys: MergeOnce, Keys<'a, K, V>>, 194 | } 195 | impl<'a, K: 'a, V: 'a> Iterator for SymmetricDiff<'a, K, V> 196 | where 197 | K: Ord, 198 | V: PartialEq, 199 | { 200 | type Item = (&'a K, DiffElement<&'a V>); 201 | fn next(&mut self) -> Option { 202 | let mut key; 203 | let elem = loop { 204 | key = self.keys.next()?; 205 | let s = self.self_.get(key); 206 | let o = self.other.get(key); 207 | match (s, o) { 208 | (Some(a), Some(b)) if a != b => break DiffElement::Unequal(a, b), 209 | (Some(_), Some(_)) => continue, 210 | (Some(a), _) => break DiffElement::Left(a), 211 | (_, Some(b)) => break DiffElement::Right(b), 212 | _ => return None, 213 | } 214 | }; 215 | Some((key, elem)) 216 | } 217 | } 218 | 219 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] 220 | pub enum MergeElement { 221 | /// A key in `incr_merge` was present only in the left map 222 | Left(L), 223 | /// A key in `incr_merge` was present only in the right map 224 | Right(R), 225 | /// A key in `incr_merge` was present in both maps 226 | Both(L, R), 227 | } 228 | 229 | impl MergeElement { 230 | pub fn left(&self) -> Option<&L> { 231 | match self { 232 | Self::Left(l) | Self::Both(l, _) => Some(l), 233 | _ => None, 234 | } 235 | } 236 | pub fn into_left(self) -> Option { 237 | match self { 238 | Self::Left(l) | Self::Both(l, _) => Some(l), 239 | _ => None, 240 | } 241 | } 242 | pub fn right(&self) -> Option<&R> { 243 | match self { 244 | Self::Right(r) | Self::Both(_, r) => Some(r), 245 | _ => None, 246 | } 247 | } 248 | pub fn into_right(self) -> Option { 249 | match self { 250 | Self::Right(r) | Self::Both(_, r) => Some(r), 251 | _ => None, 252 | } 253 | } 254 | } 255 | 256 | impl MergeElement<&L, &R> 257 | where 258 | L: Clone, 259 | R: Clone, 260 | { 261 | pub fn cloned(&self) -> MergeElement { 262 | match *self { 263 | MergeElement::Both(a, b) => MergeElement::Both(a.clone(), b.clone()), 264 | 
MergeElement::Left(a) => MergeElement::Left(a.clone()), 265 | MergeElement::Right(b) => MergeElement::Right(b.clone()), 266 | } 267 | } 268 | } 269 | 270 | #[derive(Debug, PartialEq, Eq)] 271 | pub enum DiffElement { 272 | Unequal(V, V), 273 | Left(V), 274 | Right(V), 275 | } 276 | 277 | impl DiffElement { 278 | pub fn new_data(self) -> Option { 279 | match self { 280 | DiffElement::Left(_) => None, 281 | DiffElement::Right(r) | DiffElement::Unequal(_, r) => Some(r), 282 | } 283 | } 284 | } 285 | 286 | #[test] 287 | fn test_merge_once() { 288 | let i = [1i32, 2, 3][..].iter(); 289 | let j = [2i32, 4][..].iter(); 290 | let v: Vec<_> = MergeOnce::new(i, j).cloned().collect(); 291 | assert_eq!(v, vec![1, 2, 3, 4]); 292 | } 293 | 294 | pub(crate) trait SymmetricDiffMap<'a, K: 'a, V: 'a> { 295 | type Iter: Iterator)>; 296 | 297 | fn symmetric_diff(&'a self, other: &'a Self) -> Self::Iter; 298 | 299 | // Could be useful, I guess 300 | #[allow(unused)] 301 | fn symmetric_fold_with_inverse( 302 | &'a self, 303 | other: &'a Self, 304 | init: R, 305 | mut add: FAdd, 306 | mut remove: FRemove, 307 | ) -> R 308 | where 309 | FAdd: FnMut(R, &K, &V) -> R, 310 | FRemove: FnMut(R, &K, &V) -> R, 311 | K: Ord, 312 | V: PartialEq, 313 | { 314 | self.symmetric_diff(other) 315 | .fold(init, |mut acc, (key, elem)| match elem { 316 | DiffElement::Unequal(left, right) => { 317 | acc = add(acc, key, right); 318 | remove(acc, key, left) 319 | } 320 | DiffElement::Left(left) => remove(acc, key, left), 321 | DiffElement::Right(right) => add(acc, key, right), 322 | }) 323 | } 324 | } 325 | 326 | // Could be useful, I guess 327 | #[allow(unused)] 328 | pub(crate) trait SymmetricDiffMapOwned { 329 | type Iter: Iterator>; 330 | 331 | fn symmetric_diff_owned(self, other: Self) -> Self::Iter; 332 | 333 | fn symmetric_fold_owned( 334 | self, 335 | other: Self, 336 | init: R, 337 | mut add: FAdd, 338 | mut remove: FRemove, 339 | ) -> R 340 | where 341 | FAdd: FnMut(R, K, V) -> R, 342 | FRemove: 
FnMut(R, K, V) -> R, 343 | K: Ord, 344 | V: PartialEq, 345 | Self: Sized, 346 | { 347 | self.symmetric_diff_owned(other) 348 | .fold(init, |mut acc, elem| match elem { 349 | DiffElement::Unequal(left, right) => { 350 | acc = add(acc, right.0, right.1); 351 | remove(acc, left.0, left.1) 352 | } 353 | DiffElement::Left(left) => remove(acc, left.0, left.1), 354 | DiffElement::Right(right) => add(acc, right.0, right.1), 355 | }) 356 | } 357 | } 358 | 359 | impl<'a, K: Ord + 'a, V: PartialEq + 'a> SymmetricDiffMap<'a, K, V> for BTreeMap { 360 | type Iter = SymmetricDiff<'a, K, V>; 361 | fn symmetric_diff(&'a self, other: &'a Self) -> Self::Iter { 362 | SymmetricDiff { 363 | self_: self, 364 | other, 365 | keys: MergeOnce::new(self.keys(), other.keys()), 366 | } 367 | } 368 | } 369 | 370 | impl SymmetricDiffMapOwned for BTreeMap { 371 | type Iter = SymmetricDiffOwned; 372 | fn symmetric_diff_owned(self, other: Self) -> SymmetricDiffOwned { 373 | SymmetricDiffOwned { 374 | self_: self.into_iter().peekable(), 375 | other: other.into_iter().peekable(), 376 | fused: None, 377 | } 378 | } 379 | } 380 | 381 | /// A trait implemented by `BTreeMap` and `im_rc::OrdMap`. 382 | pub trait GenericMap { 383 | fn remove(&mut self, key: &K) -> Option; 384 | fn insert(&mut self, key: K, value: V) -> Option; 385 | } 386 | 387 | impl GenericMap for BTreeMap { 388 | #[inline] 389 | fn remove(&mut self, key: &K) -> Option { 390 | BTreeMap::remove(self, key) 391 | } 392 | 393 | #[inline] 394 | fn insert(&mut self, key: K, value: V) -> Option { 395 | BTreeMap::insert(self, key, value) 396 | } 397 | } 398 | 399 | /// A trait implemented by `BTreeMap`, `im_rc::OrdMap` and `Rc`. 400 | /// 401 | /// You frequently want to clone incrementals a lot. Making it so 402 | pub trait MutableMap { 403 | type UnderlyingMap: GenericMap; 404 | /// For Rc, this is BTreeMap. 
405 | fn make_mut(&mut self) -> &mut Self::UnderlyingMap; 406 | } 407 | 408 | pub trait SymmetricMapMap: MutableMap { 409 | type OutputMap: SymmetricMapMap; 410 | 411 | fn filter_map_collect( 412 | &self, 413 | f: &mut impl FnMut(&K, &V) -> Option, 414 | ) -> Self::OutputMap; 415 | } 416 | 417 | pub trait SymmetricFoldMap { 418 | fn symmetric_fold( 419 | &self, 420 | other: &Self, 421 | init: R, 422 | f: impl FnMut(R, (&K, DiffElement<&V>)) -> R, 423 | ) -> R; 424 | fn len(&self) -> usize; 425 | fn is_empty(&self) -> bool { 426 | self.len() == 0 427 | } 428 | /// Basically `self.iter().fold(init, f)`. 429 | fn nonincremental_fold(&self, init: R, f: impl FnMut(R, (&K, &V)) -> R) -> R; 430 | } 431 | 432 | impl MutableMap for Rc> { 433 | type UnderlyingMap = BTreeMap; 434 | #[inline] 435 | fn make_mut(&mut self) -> &mut Self::UnderlyingMap { 436 | Rc::make_mut(self) 437 | } 438 | } 439 | 440 | impl SymmetricMapMap for Rc> { 441 | type OutputMap = Rc>; 442 | #[inline] 443 | fn filter_map_collect( 444 | &self, 445 | f: &mut impl FnMut(&K, &V) -> Option, 446 | ) -> Self::OutputMap { 447 | Rc::new(self.deref().filter_map_collect(f)) 448 | } 449 | } 450 | 451 | impl SymmetricFoldMap for Rc> { 452 | fn symmetric_fold( 453 | &self, 454 | other: &Self, 455 | init: R, 456 | f: impl FnMut(R, (&K, DiffElement<&V>)) -> R, 457 | ) -> R { 458 | let self_target = self.deref(); 459 | let other_target = other.deref(); 460 | self_target.symmetric_diff(other_target).fold(init, f) 461 | } 462 | #[inline] 463 | fn len(&self) -> usize { 464 | self.deref().len() 465 | } 466 | fn nonincremental_fold(&self, init: R, f: impl FnMut(R, (&K, &V)) -> R) -> R { 467 | self.deref().nonincremental_fold(init, f) 468 | } 469 | } 470 | 471 | impl MutableMap for BTreeMap { 472 | type UnderlyingMap = Self; 473 | #[inline] 474 | fn make_mut(&mut self) -> &mut Self::UnderlyingMap { 475 | self 476 | } 477 | } 478 | 479 | impl SymmetricMapMap for BTreeMap { 480 | type OutputMap = BTreeMap; 481 | fn 
filter_map_collect( 482 | &self, 483 | f: &mut impl FnMut(&K, &V) -> Option, 484 | ) -> Self::OutputMap { 485 | self.iter() 486 | .filter_map(|(k, v)| f(k, v).map(|v2| (k.clone(), v2))) 487 | .collect() 488 | } 489 | } 490 | impl SymmetricFoldMap for BTreeMap { 491 | fn symmetric_fold( 492 | &self, 493 | other: &Self, 494 | init: R, 495 | f: impl FnMut(R, (&K, DiffElement<&V>)) -> R, 496 | ) -> R { 497 | self.symmetric_diff(other).fold(init, f) 498 | } 499 | #[inline] 500 | fn len(&self) -> usize { 501 | self.len() 502 | } 503 | #[inline] 504 | fn nonincremental_fold(&self, init: R, f: impl FnMut(R, (&K, &V)) -> R) -> R { 505 | self.iter().fold(init, f) 506 | } 507 | } 508 | --------------------------------------------------------------------------------