├── rustfmt.toml ├── .gitignore ├── .github └── workflows │ ├── deploy.yaml │ └── test.yaml ├── src ├── methods.rs ├── fx_func.rs ├── ctx.rs ├── methods │ ├── tlbo.rs │ ├── pso.rs │ ├── fa.rs │ ├── rga.rs │ └── de.rs ├── solver.rs ├── tests.rs ├── algorithm.rs ├── obj_func.rs ├── fitness.rs ├── random.rs ├── lib.rs ├── pareto.rs └── solver_builder.rs ├── LICENSE ├── Cargo.toml └── README.md /rustfmt.toml: -------------------------------------------------------------------------------- 1 | unstable_features = true 2 | imports_granularity = "Crate" 3 | format_code_in_doc_comments = true 4 | wrap_comments = true 5 | struct_lit_width = 50 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | .idea/ 4 | /target 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | on: 3 | push: 4 | tags: [ v* ] 5 | jobs: 6 | build_and_test: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - uses: actions-rs/toolchain@v1 11 | with: 12 | toolchain: stable 13 | - run: | 14 | cargo login ${{ secrets.CRATE_IO }} 15 | cargo publish 16 | -------------------------------------------------------------------------------- /src/methods.rs: -------------------------------------------------------------------------------- 1 | //! Pre-implemented optimization methods. 2 | //! 3 | //! 
Each method also has some implementation variants;
4 | //! the current methods are designed for general applications.
5 | pub use self::{
6 |     de::{De, Strategy},
7 |     fa::Fa,
8 |     pso::Pso,
9 |     rga::Rga,
10 |     tlbo::Tlbo,
11 | };
12 | 
13 | pub mod de;
14 | pub mod fa;
15 | pub mod pso;
16 | pub mod rga;
17 | pub mod tlbo;
18 | 
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on: [push, pull_request]
3 | jobs:
4 |   build_and_test:
5 |     runs-on: ubuntu-latest
6 |     steps:
7 |       - uses: actions/checkout@v2
8 |       - uses: actions-rs/toolchain@v1
9 |         with:
10 |           toolchain: stable
11 |       - run: |
12 |           cargo clippy --all-features -- -D warnings
13 |           cargo test --all-features
14 |           cargo test --no-default-features
15 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2021 Yuan Chang
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/fx_func.rs: -------------------------------------------------------------------------------- 1 | use crate::prelude::*; 2 | use alloc::boxed::Box; 3 | 4 | /// A quick interface help to create objective function from a callable object. 5 | /// 6 | /// See also [`ObjFunc`] for implementing the full definitions. 7 | /// 8 | /// ``` 9 | /// use metaheuristics_nature::{Fx, Rga, Solver}; 10 | /// 11 | /// let bound = [[-50., 50.]; 4]; 12 | /// let f = Fx::new(&bound, |&[a, b, c, d]| a * a + 8. * b * b + c * c + d * d); 13 | /// let s = Solver::build(Rga::default(), f) 14 | /// .seed(0) 15 | /// .task(|ctx| ctx.gen == 20) 16 | /// .solve(); 17 | /// ``` 18 | pub struct Fx<'b, 'f, Y: Fitness, const DIM: usize> { 19 | bound: &'b [[f64; 2]; DIM], 20 | #[allow(clippy::type_complexity)] 21 | func: Box Y + Sync + Send + 'f>, 22 | } 23 | 24 | impl<'b, 'f, Y: Fitness, const DIM: usize> Fx<'b, 'f, Y, DIM> { 25 | /// Create objective function from a callable object. 
26 | pub fn new(bound: &'b [[f64; 2]; DIM], func: F) -> Self 27 | where 28 | F: Fn(&[f64; DIM]) -> Y + Sync + Send + 'f, 29 | { 30 | Self { func: Box::new(func), bound } 31 | } 32 | } 33 | 34 | impl Bounded for Fx<'_, '_, Y, DIM> { 35 | #[inline] 36 | fn bound(&self) -> &[[f64; 2]] { 37 | self.bound 38 | } 39 | } 40 | 41 | impl ObjFunc for Fx<'_, '_, Y, DIM> { 42 | type Ys = Y; 43 | fn fitness(&self, xs: &[f64]) -> Self::Ys { 44 | (self.func)(xs.try_into().unwrap_or_else(|_| unreachable!())) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "metaheuristics-nature" 3 | description = "A collection of nature-inspired metaheuristic algorithms." 4 | version = "10.1.0" 5 | authors = ["KmolYuan "] 6 | edition = "2021" 7 | license = "MIT" 8 | readme = "README.md" 9 | repository = "https://github.com/KmolYuan/metaheuristics-nature-rs" 10 | keywords = ["metaheuristic", "algorithm", "optimization"] 11 | categories = ["algorithms", "no-std"] 12 | 13 | [features] 14 | default = ["std"] 15 | std = [ 16 | "num-traits/std", 17 | "rand/std", 18 | "rand_distr/std", 19 | "rand_chacha/std", 20 | "serde?/std", 21 | ] 22 | clap = ["dep:clap", "std"] 23 | rayon = ["dep:rayon", "std"] 24 | serde = ["dep:serde"] 25 | 26 | [dependencies] 27 | num-traits = { version = "0.2", default-features = false, features = ["libm"] } 28 | rand_distr = { version = "0.4", default-features = false } 29 | rand_chacha = { version = "0.3", default-features = false } 30 | clap = { version = "4", features = ["derive"], optional = true } 31 | serde = { version = "1", features = ["derive"], optional = true } 32 | rayon = { version = "1", optional = true } 33 | 34 | [dependencies.rand] 35 | version = "0.8" 36 | default-features = false 37 | features = ["getrandom", "alloc"] 38 | 39 | [target.'cfg(target_arch = "wasm32")'.dependencies] 40 | 
getrandom = { version = "0.2", features = ["js"] }
41 | 
42 | [lints.rust]
43 | missing-docs = "warn"
44 | unsafe-code = "deny"
45 | 
46 | [workspace.lints.clippy]
47 | semicolon-if-nothing-returned = "warn"
48 | 
49 | [package.metadata.docs.rs]
50 | all-features = true
51 | rustdoc-args = ["--cfg", "doc_cfg"]
52 | 
--------------------------------------------------------------------------------
/src/ctx.rs:
--------------------------------------------------------------------------------
1 | use crate::prelude::*;
2 | use alloc::vec::Vec;
3 | 
4 | pub(crate) type BestCon = ::Best;
5 | 
6 | /// A basic context type of the algorithms.
7 | ///
8 | /// This type provides a shared dataset if you want to implement a new method.
9 | /// The fields may be expanded in the future, so the type is marked as non-exhaustive.
10 | ///
11 | /// # View the Progress
12 | ///
13 | /// You can view the progress from the [`SolverBuilder::task()`] and
14 | /// [`SolverBuilder::callback()`].
15 | ///
16 | /// + `ctx.gen` - Get generation number.
17 | /// + `ctx.pop_num()` - Get population number.
18 | /// + `ctx.best.get_eval()` - Get the current best evaluation value.
19 | /// + `ctx.best.get_xs()` - Get the current best variables.
20 | ///
21 | /// # Implement an Algorithm
22 | ///
23 | /// Do everything you want to do with the context. Please see [`Algorithm`] for
24 | /// the implementation. 
25 | #[non_exhaustive] 26 | pub struct Ctx { 27 | /// Best container 28 | pub best: BestCon, 29 | /// Current variables of all individuals 30 | pub pool: Vec>, 31 | /// Current fitness values of all individuals 32 | pub pool_y: Vec, 33 | /// Objective function object 34 | pub func: F, 35 | /// Generation (iteration) number 36 | pub gen: u64, 37 | } 38 | 39 | impl Ctx { 40 | pub(crate) fn from_parts( 41 | func: F, 42 | limit: usize, 43 | pool: Vec>, 44 | pool_y: Vec, 45 | ) -> Self { 46 | let mut best = BestCon::::from_limit(limit); 47 | best.update_all(&pool, &pool_y); 48 | Self { best, pool, pool_y, func, gen: 0 } 49 | } 50 | 51 | pub(crate) fn from_pool(func: F, limit: usize, pool: Vec>) -> Self { 52 | #[cfg(not(feature = "rayon"))] 53 | let iter = pool.iter(); 54 | #[cfg(feature = "rayon")] 55 | let iter = pool.par_iter(); 56 | let pool_y = iter.map(|xs| func.fitness(xs)).collect(); 57 | Self::from_parts(func, limit, pool, pool_y) 58 | } 59 | 60 | /// Get population number. 61 | #[inline] 62 | pub fn pop_num(&self) -> usize { 63 | self.pool.len() 64 | } 65 | 66 | /// Assign the index from source. 67 | pub fn set_from(&mut self, i: usize, xs: Vec, ys: F::Ys) { 68 | self.pool[i] = xs; 69 | self.pool_y[i] = ys; 70 | } 71 | 72 | /// Find the best, and set it globally. 73 | pub fn find_best(&mut self) { 74 | self.best.update_all(&self.pool, &self.pool_y); 75 | } 76 | } 77 | 78 | impl core::ops::Deref for Ctx { 79 | type Target = F; 80 | fn deref(&self) -> &Self::Target { 81 | &self.func 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/methods/tlbo.rs: -------------------------------------------------------------------------------- 1 | //! # Teaching Learning Based Optimization 2 | //! 3 | //! 4 | //! 5 | //! This method require round function. 6 | use crate::prelude::*; 7 | use alloc::vec::Vec; 8 | use core::iter::zip; 9 | 10 | /// Algorithm of the Teaching Learning Based Optimization. 
11 | pub type Method = Tlbo; 12 | 13 | /// Teaching Learning Based Optimization settings. 14 | #[derive(Default, Clone, PartialEq, Eq)] 15 | #[cfg_attr(feature = "clap", derive(clap::Args))] 16 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 17 | pub struct Tlbo; 18 | 19 | impl Tlbo { 20 | /// Constant default value. 21 | pub const fn new() -> Self { 22 | Self 23 | } 24 | } 25 | 26 | impl AlgCfg for Tlbo { 27 | type Algorithm = Method; 28 | fn algorithm(self) -> Self::Algorithm { 29 | self 30 | } 31 | } 32 | 33 | fn register(ctx: &mut Ctx, i: usize, student: Vec) { 34 | let f_new = ctx.fitness(&student); 35 | if f_new.is_dominated(&ctx.pool_y[i]) { 36 | ctx.set_from(i, student, f_new); 37 | ctx.best.update(&ctx.pool[i], &ctx.pool_y[i]); 38 | } 39 | } 40 | 41 | fn teaching(ctx: &mut Ctx, rng: &mut Rng, i: usize) { 42 | let tf = rng.range(1f64..2.).round(); 43 | let best = ctx.best.sample_xs(rng); 44 | let student = zip(ctx.bound(), zip(&ctx.pool[i], best)) 45 | .enumerate() 46 | .map(|(s, (&[min, max], (base, best)))| { 47 | let mut mean = 0.; 48 | for other in &ctx.pool { 49 | mean += other[s]; 50 | } 51 | let dim = ctx.dim() as f64; 52 | mean /= dim; 53 | (base + rng.range(1.0..dim) * (best - tf * mean)).clamp(min, max) 54 | }) 55 | .collect(); 56 | register(ctx, i, student); 57 | } 58 | 59 | fn learning(ctx: &mut Ctx, rng: &mut Rng, i: usize) { 60 | let j = { 61 | let j = rng.ub(ctx.pop_num() - 1); 62 | if j >= i { 63 | j + 1 64 | } else { 65 | j 66 | } 67 | }; 68 | let student = zip(ctx.bound(), zip(&ctx.pool[i], &ctx.pool[j])) 69 | .map(|(&[min, max], (a, b))| { 70 | let diff = if ctx.pool_y[j].is_dominated(&ctx.pool_y[i]) { 71 | a - b 72 | } else { 73 | b - a 74 | }; 75 | (a + rng.range(1.0..ctx.dim() as f64) * diff).clamp(min, max) 76 | }) 77 | .collect(); 78 | register(ctx, i, student); 79 | } 80 | 81 | impl Algorithm for Method { 82 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 83 | for i in 0..ctx.pop_num() { 84 
| teaching(ctx, rng, i); 85 | learning(ctx, rng, i); 86 | } 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # metaheuristics-nature 2 | 3 | [![dependency status](https://deps.rs/repo/github/KmolYuan/metaheuristics-nature-rs/status.svg)](https://deps.rs/crate/metaheuristics-nature/) 4 | [![documentation](https://docs.rs/metaheuristics-nature/badge.svg)](https://docs.rs/metaheuristics-nature) 5 | 6 | A collection of nature-inspired metaheuristic algorithms. This crate provides an objective function trait, well-known methods, and tool functions to implement your searching method. 7 | 8 | This crate implemented the following algorithms: 9 | + Real-coded Genetic Algorithm (RGA) 10 | + Differential Evolution (DE) 11 | + Particle Swarm Optimization (PSO) 12 | + Firefly Algorithm (FA) 13 | + Teaching-Learning Based Optimization (TLBO) 14 | 15 | Side functions: 16 | + Parallelable Seeded Random Number Generator (RNG) 17 | + This RNG is reproducible in single-thread and multi-thread programming. 18 | + Pareto front for Multi-Objective Optimization (MOO) 19 | + You can return multiple fitness in the objective function. 20 | + All fitness values will find the history-best solution as a set. 21 | 22 | Each algorithm gives the same API and default parameters to help you test different implementations. For example, you can test another algorithm by replacing `Rga` with `De`. 
23 | 24 | ```rust 25 | use metaheuristics_nature as mh; 26 | 27 | let mut report = Vec::with_capacity(20); 28 | 29 | // Build and run the solver 30 | let s = mh::Solver::build(mh::Rga::default(), mh::tests::TestObj) 31 | .seed(0) 32 | .task(|ctx| ctx.gen == 20) 33 | .callback(|ctx| report.push(ctx.best.get_eval())) 34 | .solve(); 35 | // Get the optimized XY value of your function 36 | let (xs, p) = s.as_best(); 37 | // If `p` is a `WithProduct` type wrapped with the fitness value 38 | let err = p.ys(); 39 | let result = p.as_result(); 40 | // Get the history reports 41 | let y2 = &report[2]; 42 | ``` 43 | 44 | ### What kinds of problems can be solved? 45 | 46 | If your problem can be simulated and evaluated, the optimization method efficiently finds the best design! 🚀 47 | 48 | Assuming that your simulation can be done with a function `f`, by inputting the parameters `X` and the evaluation value `y`, then the optimization method will try to adjust `X={x0, x1, ...}` to obtain the smallest `y`. Their relationship can be written as `f(X) = y`. 49 | 50 | The number of the parameters `X` is called "dimension". Imagine `X` is the coordinate in the multi-dimension, and `y` is the weight of the "point." If the dimension increases, the problem will be more challenging to search. 51 | 52 | The "metaheuristic" algorithms use multiple points to search for the minimum value, which detects the local gradient across the most feasible solutions and keeps away from the local optimum, even with an unknown gradient or feasible region. 53 | 54 | Please have a look at the API documentation for more information. 55 | 56 | ### Gradient-based Methods 57 | 58 | For more straightforward functions, for example, if the 1st derivative function is known, gradient-based methods are recommended for the fastest speed. Such as [OSQP](https://osqp.org/). 
59 | 
--------------------------------------------------------------------------------
/src/solver.rs:
--------------------------------------------------------------------------------
1 | use crate::prelude::*;
2 | use alloc::vec::Vec;
3 | 
4 | /// A public API for using optimization methods.
5 | ///
6 | /// Users can simply obtain their solution and see the result.
7 | ///
8 | /// + The method is a type that implemented [`Algorithm`].
9 | /// + The objective function is a type that implement [`ObjFunc`].
10 | /// + A basic algorithm data is hold by [`Ctx`].
11 | ///
12 | /// The builder of this type can infer the algorithm by [`AlgCfg::Algorithm`].
13 | ///
14 | /// Please use [`Solver::build()`] method to start a task.
15 | ///
16 | /// The settings are defined in the [`SolverBuilder`] type.
17 | #[must_use = "please call `Solver::best_parameters()` or other methods to get the answer"]
18 | pub struct Solver {
19 |     ctx: Ctx,
20 |     seed: Seed,
21 | }
22 | 
23 | impl Solver {
24 |     pub(crate) fn new(ctx: Ctx, seed: Seed) -> Self {
25 |         Self { ctx, seed }
26 |     }
27 | 
28 |     /// Get the reference of the objective function.
29 |     ///
30 |     /// It's useful when you need to get the preprocessed data from the
31 |     /// initialization process, which is stored in the objective function.
32 |     pub fn func(&self) -> &F {
33 |         &self.ctx.func
34 |     }
35 | 
36 |     /// Get the reference of the best set.
37 |     ///
38 |     /// Use [`Solver::as_best()`] to get the best parameters and the fitness
39 |     /// value directly.
40 |     pub fn as_best_set(&self) -> &BestCon {
41 |         &self.ctx.best
42 |     }
43 | 
44 |     /// Get the reference of the best parameters and the fitness value.
45 |     pub fn as_best(&self) -> (&[f64], &F::Ys) {
46 |         self.ctx.best.as_result()
47 |     }
48 | 
49 |     /// Get the reference of the best parameters.
50 |     pub fn as_best_xs(&self) -> &[f64] {
51 |         self.as_best().0
52 |     }
53 | 
54 |     /// Get the reference of the best fitness value. 
55 | pub fn as_best_fit(&self) -> &F::Ys { 56 | self.as_best().1 57 | } 58 | 59 | /// Get the final best fitness value. 60 | pub fn get_best_eval(&self) -> ::Eval { 61 | self.as_best_fit().eval() 62 | } 63 | 64 | /// Get the final best element. 65 | pub fn into_result(self) -> P 66 | where 67 | F: ObjFunc>, 68 | P: MaybeParallel + Clone + 'static, 69 | { 70 | self.into_err_result().1 71 | } 72 | 73 | /// Get the fitness value and the final result. 74 | pub fn into_err_result(self) -> (Fit::Eval, P) 75 | where 76 | F: ObjFunc>, 77 | P: MaybeParallel + Clone + 'static, 78 | { 79 | let (f, p, _) = self.into_err_result_func(); 80 | (f, p) 81 | } 82 | 83 | /// Get the fitness value, final result and the objective function. 84 | pub fn into_err_result_func(self) -> (Fit::Eval, P, F) 85 | where 86 | F: ObjFunc>, 87 | P: MaybeParallel + Clone + 'static, 88 | { 89 | let (f, p) = self.ctx.best.into_result_fit().into_err_result(); 90 | (f, p, self.ctx.func) 91 | } 92 | 93 | /// Seed of the random number generator. 94 | pub fn seed(&self) -> Seed { 95 | self.seed 96 | } 97 | 98 | /// Get the pool from the last status. 99 | pub fn pool(&self) -> &[Vec] { 100 | &self.ctx.pool 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | #![doc(hidden)] 2 | use crate::prelude::*; 3 | 4 | const OFFSET: f64 = 7.; 5 | 6 | /// An example for doctest. 7 | pub struct TestObj; 8 | 9 | impl TestObj { 10 | /// A dummy constructor. 11 | pub const fn new() -> Self { 12 | Self 13 | } 14 | } 15 | 16 | impl Bounded for TestObj { 17 | fn bound(&self) -> &[[f64; 2]] { 18 | &[[-50., 50.]; 4] 19 | } 20 | } 21 | 22 | impl ObjFunc for TestObj { 23 | type Ys = WithProduct; 24 | 25 | fn fitness(&self, xs: &[f64]) -> Self::Ys { 26 | let y = OFFSET + xs[0] * xs[0] + 8. 
* xs[1] * xs[1] + xs[2] * xs[2] + xs[3] * xs[3]; 27 | WithProduct::new(y, y) 28 | } 29 | } 30 | 31 | /// A multi-objective example for doctest. 32 | pub struct TestMO; 33 | 34 | impl TestMO { 35 | /// A dummy constructor. 36 | pub const fn new() -> Self { 37 | Self 38 | } 39 | } 40 | 41 | impl Bounded for TestMO { 42 | fn bound(&self) -> &[[f64; 2]] { 43 | &[[-50., 50.]; 2] 44 | } 45 | } 46 | 47 | #[derive(Clone)] 48 | pub struct TestMOFit { 49 | cost: f64, 50 | weight: f64, 51 | } 52 | 53 | impl Fitness for TestMOFit { 54 | type Best = Pareto; 55 | type Eval = f64; 56 | 57 | fn is_dominated(&self, rhs: &Self) -> bool { 58 | self.cost <= rhs.cost && self.weight <= rhs.weight 59 | } 60 | 61 | fn eval(&self) -> Self::Eval { 62 | self.cost.max(self.weight) 63 | } 64 | } 65 | 66 | impl ObjFunc for TestMO { 67 | type Ys = WithProduct; 68 | 69 | fn fitness(&self, xs: &[f64]) -> Self::Ys { 70 | let ys = TestMOFit { cost: xs[0] * xs[0], weight: xs[1] * xs[1] }; 71 | WithProduct::new(ys, ()) 72 | } 73 | } 74 | 75 | #[cfg(test)] 76 | fn test() -> Solver 77 | where 78 | S: AlgCfg + Default, 79 | { 80 | let mut report = alloc::vec::Vec::new(); 81 | let s = Solver::build(S::default(), TestObj) 82 | .seed(0) 83 | .task(|ctx| ctx.best.as_result_fit().eval() - OFFSET < 1e-20) 84 | .callback(|ctx| report.push(ctx.best.get_eval())) 85 | .solve(); 86 | assert!(!report.is_empty()); 87 | assert_eq!(s.get_best_eval(), OFFSET); 88 | s 89 | } 90 | 91 | #[cfg(test)] 92 | macro_rules! 
assert_xs { 93 | ($case:expr) => { 94 | for x in $case.as_best_xs() { 95 | assert!(x.abs() < 2.1e-8, "x: {x}"); 96 | } 97 | }; 98 | } 99 | 100 | #[test] 101 | fn de() { 102 | assert_xs!(test::()); 103 | } 104 | 105 | #[test] 106 | fn pso() { 107 | assert_xs!(test::()); 108 | } 109 | 110 | #[test] 111 | fn fa() { 112 | assert_xs!(test::()); 113 | } 114 | 115 | #[test] 116 | fn rga() { 117 | assert_xs!(test::()); 118 | } 119 | 120 | #[test] 121 | fn tlbo() { 122 | assert_xs!(test::()); 123 | } 124 | 125 | #[cfg(feature = "rayon")] 126 | #[test] 127 | fn test_rng() { 128 | let mut rng1 = Rng::new(SeedOpt::U64(0)); 129 | rng1.stream(10); 130 | let mut rng2 = rng1.clone(); 131 | for _ in 0..200 { 132 | let non_parallel = rng1 133 | .stream(2500) 134 | .into_iter() 135 | .map(|mut rng| rng.rand()) 136 | .collect::>(); 137 | let parallel = rng2 138 | .stream(2500) 139 | .into_par_iter() 140 | .map(|mut rng| rng.rand()) 141 | .collect::>(); 142 | assert_eq!(non_parallel, parallel); 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /src/methods/pso.rs: -------------------------------------------------------------------------------- 1 | //! # Particle Swarm Optimization 2 | //! 3 | //! 4 | use crate::prelude::*; 5 | use alloc::vec::Vec; 6 | 7 | const DEF: Pso = Pso { cognition: 2.05, social: 2.05, velocity: 1.3 }; 8 | 9 | /// Particle Swarm Optimization settings. 
10 | #[derive(Clone, PartialEq)] 11 | #[cfg_attr(feature = "clap", derive(clap::Args))] 12 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 13 | #[cfg_attr(feature = "serde", serde(default))] 14 | pub struct Pso { 15 | /// Cognition factor 16 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.cognition))] 17 | pub cognition: f64, 18 | /// Social factor 19 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.social))] 20 | pub social: f64, 21 | /// Velocity factor 22 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.velocity))] 23 | pub velocity: f64, 24 | } 25 | 26 | impl Pso { 27 | /// Constant default value. 28 | pub const fn new() -> Self { 29 | DEF 30 | } 31 | 32 | impl_builders! { 33 | /// Cognition factor. 34 | fn cognition(f64) 35 | /// Social factor. 36 | fn social(f64) 37 | /// Moving velocity. 38 | fn velocity(f64) 39 | } 40 | } 41 | 42 | impl Default for Pso { 43 | fn default() -> Self { 44 | DEF 45 | } 46 | } 47 | 48 | impl AlgCfg for Pso { 49 | type Algorithm = Method; 50 | fn algorithm(self) -> Self::Algorithm { 51 | Method { pso: self, past: Vec::new(), past_y: Vec::new() } 52 | } 53 | } 54 | 55 | /// Algorithm of the Particle Swarm Optimization. 
56 | pub struct Method { 57 | pso: Pso, 58 | past: Vec>, 59 | past_y: Vec, 60 | } 61 | 62 | impl core::ops::Deref for Method { 63 | type Target = Pso; 64 | 65 | fn deref(&self) -> &Self::Target { 66 | &self.pso 67 | } 68 | } 69 | 70 | impl Algorithm for Method { 71 | fn init(&mut self, ctx: &mut Ctx, _: &mut Rng) { 72 | self.past = ctx.pool.clone(); 73 | self.past_y = ctx.pool_y.clone(); 74 | } 75 | 76 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 77 | let rng = rng.stream(ctx.pop_num()); 78 | let cognition = self.cognition; 79 | let social = self.social; 80 | let velocity = self.velocity; 81 | #[cfg(not(feature = "rayon"))] 82 | let iter = rng.into_iter(); 83 | #[cfg(feature = "rayon")] 84 | let iter = rng.into_par_iter(); 85 | iter.zip(&mut ctx.pool) 86 | .zip(&mut ctx.pool_y) 87 | .zip(&mut self.past) 88 | .zip(&mut self.past_y) 89 | .for_each(|((((mut rng, xs), ys), past), past_y)| { 90 | let alpha = rng.ub(cognition); 91 | let beta = rng.ub(social); 92 | let best = ctx.best.sample_xs(&mut rng); 93 | for s in 0..ctx.func.dim() { 94 | let v = velocity * xs[s] + alpha * (past[s] - xs[s]) + beta * (best[s] - xs[s]); 95 | xs[s] = ctx.func.clamp(s, v); 96 | } 97 | *ys = ctx.func.fitness(xs); 98 | if ys.is_dominated(&*past_y) { 99 | *past = xs.clone(); 100 | *past_y = ys.clone(); 101 | } 102 | }); 103 | ctx.find_best(); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/algorithm.rs: -------------------------------------------------------------------------------- 1 | use crate::prelude::*; 2 | 3 | /// Algorithm configurations. A trait for preparing the algorithm. 4 | /// 5 | /// The setting type is actually a builder of the [`AlgCfg::Algorithm`] type. 6 | /// 7 | /// Please note that the setting should not overlap with the [`SolverBuilder`]. 8 | pub trait AlgCfg { 9 | /// Associated algorithm. 10 | type Algorithm: Algorithm + 'static; 11 | /// Create the algorithm. 
12 | fn algorithm(self) -> Self::Algorithm; 13 | /// Default population number. 14 | fn pop_num() -> usize { 15 | 200 16 | } 17 | } 18 | 19 | /// The methods of the metaheuristic algorithms. 20 | /// 21 | /// 1. Implement [`AlgCfg`] trait then indicate to a "method" type. 22 | /// 1. Implement `Algorithm` trait on the "method" type. 23 | /// 24 | /// Usually, the "method" type that implements this trait will not leak from the 25 | /// API. All most common dataset is store in the [`Ctx`] type. So the "method" 26 | /// type is used to store the additional data if any. 27 | /// 28 | /// ``` 29 | /// use metaheuristics_nature::prelude::*; 30 | /// 31 | /// /// A setting with fields. 32 | /// #[derive(Default)] 33 | /// pub struct MySetting1 { 34 | /// my_option: u32, 35 | /// } 36 | /// 37 | /// /// The implementation of the structure with fields. 38 | /// impl AlgCfg for MySetting1 { 39 | /// type Algorithm = Method; 40 | /// fn algorithm(self) -> Self::Algorithm { 41 | /// Method /* inherit setting */ 42 | /// } 43 | /// } 44 | /// 45 | /// /// No setting. 46 | /// #[derive(Default)] 47 | /// pub struct MySetting2; 48 | /// 49 | /// /// The implementation of a tuple-like structure. 50 | /// impl AlgCfg for MySetting2 { 51 | /// type Algorithm = Method; 52 | /// fn algorithm(self) -> Self::Algorithm { 53 | /// Method 54 | /// } 55 | /// } 56 | /// 57 | /// /// The type implements our algorithm. 58 | /// pub struct Method; 59 | /// 60 | /// impl Algorithm for Method { 61 | /// fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 62 | /// /* implement the method */ 63 | /// } 64 | /// } 65 | /// ``` 66 | /// 67 | /// The complete algorithm will be implemented by the [`Solver`](crate::Solver) 68 | /// type automatically. All you have to do is implement the "initialization" 69 | /// method and "generation" method, which are corresponded to the 70 | /// [`Algorithm::init()`] and [`Algorithm::generation()`] respectively. 
71 | /// 72 | /// The generic type `F: ObjFunc` is the objective function marker, which is 73 | /// used to allow storing the types that are related to the objective function 74 | /// for the implementor `Self`. An actual example is 75 | /// [`crate::methods::pso::Method`]. 76 | pub trait Algorithm: MaybeParallel { 77 | /// Initialization implementation. 78 | /// 79 | /// The information of the [`Ctx`] can be obtained or modified at this phase 80 | /// preliminarily. 81 | /// 82 | /// The default behavior is do nothing. 83 | #[inline] 84 | #[allow(unused_variables)] 85 | fn init(&mut self, ctx: &mut Ctx, rng: &mut Rng) {} 86 | 87 | /// Processing implementation of each generation. 88 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng); 89 | } 90 | 91 | /// Implement for `Box>`. 92 | /// 93 | /// See also [`SolverBox`]. 94 | impl + ?Sized> Algorithm for alloc::boxed::Box { 95 | #[inline] 96 | fn init(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 97 | self.as_mut().init(ctx, rng); 98 | } 99 | 100 | #[inline] 101 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 102 | self.as_mut().generation(ctx, rng); 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /src/methods/fa.rs: -------------------------------------------------------------------------------- 1 | //! # Firefly Algorithm 2 | //! 3 | //! 4 | //! 5 | //! This method require exponential function. 6 | use crate::prelude::*; 7 | use alloc::vec::Vec; 8 | use core::iter::zip; 9 | 10 | /// Algorithm of the Firefly Algorithm. 11 | pub type Method = Fa; 12 | 13 | const DEF: Fa = Fa { alpha: 1., beta_min: 1., gamma: 0.01 }; 14 | 15 | /// Firefly Algorithm settings. 
16 | #[derive(Clone, PartialEq)] 17 | #[cfg_attr(feature = "clap", derive(clap::Args))] 18 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 19 | #[cfg_attr(feature = "serde", serde(default))] 20 | pub struct Fa { 21 | /// Alpha factor 22 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.alpha))] 23 | pub alpha: f64, 24 | /// Min beta value 25 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.beta_min))] 26 | pub beta_min: f64, 27 | /// Gamma factor 28 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.gamma))] 29 | pub gamma: f64, 30 | } 31 | 32 | impl Fa { 33 | /// Constant default value. 34 | pub const fn new() -> Self { 35 | DEF 36 | } 37 | 38 | impl_builders! { 39 | /// Alpha factor. 40 | fn alpha(f64) 41 | /// Minimum beta factor. 42 | fn beta_min(f64) 43 | /// Gamma factor. 44 | fn gamma(f64) 45 | } 46 | } 47 | 48 | impl Default for Fa { 49 | fn default() -> Self { 50 | DEF 51 | } 52 | } 53 | 54 | impl AlgCfg for Fa { 55 | type Algorithm = Method; 56 | fn algorithm(self) -> Self::Algorithm { 57 | self 58 | } 59 | fn pop_num() -> usize { 60 | 80 61 | } 62 | } 63 | 64 | impl Method { 65 | fn move_firefly( 66 | &self, 67 | ctx: &Ctx, 68 | rng: &mut Rng, 69 | i: usize, 70 | j: usize, 71 | ) -> (Vec, F::Ys) { 72 | let (i, j) = if ctx.pool_y[j].is_dominated(&ctx.pool_y[i]) { 73 | (i, j) 74 | } else { 75 | (j, i) 76 | }; 77 | let r = zip(&ctx.pool[i], &ctx.pool[j]) 78 | .map(|(a, b)| a - b) 79 | .fold(0., |acc, x| acc + x * x); 80 | let beta = self.beta_min * (-self.gamma * r).exp(); 81 | let xs = zip(ctx.bound(), zip(&ctx.pool[i], &ctx.pool[j])) 82 | .map(|(&[min, max], (a, b))| { 83 | let step = self.alpha * (max - min) * rng.range(-0.5..0.5); 84 | let surround = a + beta * (b - a); 85 | (surround + step).clamp(min, max) 86 | }) 87 | .collect::>(); 88 | let ys = ctx.fitness(&xs); 89 | (xs, ys) 90 | } 91 | } 92 | 93 | impl Algorithm for Method { 94 | fn generation(&mut self, ctx: &mut Ctx, rng: 
&mut Rng) {
95 |         // Move fireflies
96 |         let mut pool = ctx.pool.clone();
97 |         let mut pool_y = ctx.pool_y.clone();
98 |         let rng = rng.stream(ctx.pop_num());
99 |         #[cfg(not(feature = "rayon"))]
100 |         let iter = rng.into_iter();
101 |         #[cfg(feature = "rayon")]
102 |         let iter = rng.into_par_iter();
103 |         iter.zip(&mut pool)
104 |             .zip(&mut pool_y)
105 |             .enumerate()
106 |             .for_each(|(i, ((mut rng, xs), ys))| {
107 |                 for j in i + 1..ctx.pop_num() {
108 |                     let (xs_new, ys_new) = self.move_firefly(ctx, &mut rng, i, j);
109 |                     if ys_new.is_dominated(ys) {
110 |                         *xs = xs_new;
111 |                         *ys = ys_new;
112 |                     }
113 |                 }
114 |             });
115 |         ctx.pool = pool;
116 |         ctx.pool_y = pool_y;
117 |         ctx.find_best();
118 |         self.alpha *= 0.95;
119 |     }
120 | }
121 | 
--------------------------------------------------------------------------------
/src/obj_func.rs:
--------------------------------------------------------------------------------
1 | use crate::prelude::*;
2 | 
3 | /// A problem is well bounded.
4 | ///
5 | /// Provide constant array reference or dynamic slice for the variables.
6 | pub trait Bounded: MaybeParallel {
7 |     /// The upper bound and lower bound in `[[lower, upper]; number_of_vars]`
8 |     /// form.
9 |     ///
10 |     /// This function should be cheap.
11 |     fn bound(&self) -> &[[f64; 2]];
12 | 
13 |     /// Get the number of variables (dimension) of the problem.
14 |     #[inline]
15 |     fn dim(&self) -> usize {
16 |         self.bound().len()
17 |     }
18 | 
19 |     /// Get the upper bound and the lower bound values.
20 |     #[inline]
21 |     fn bound_of(&self, s: usize) -> [f64; 2] {
22 |         self.bound()[s]
23 |     }
24 | 
25 |     /// Get the width between the upper bound and the lower bound.
26 |     fn bound_width(&self, s: usize) -> f64 {
27 |         let [min, max] = self.bound_of(s);
28 |         max - min
29 |     }
30 | 
31 |     /// Get the upper bound and the lower bound as a range.
32 |     ///
33 |     /// The variable is constrained by lower <= x <= upper. 
34 | fn bound_range(&self, s: usize) -> core::ops::RangeInclusive { 35 | let [min, max] = self.bound_of(s); 36 | min..=max 37 | } 38 | 39 | /// Get the lower bound. 40 | #[inline] 41 | fn lb(&self, s: usize) -> f64 { 42 | self.bound_of(s)[0] 43 | } 44 | 45 | /// Get the upper bound. 46 | #[inline] 47 | fn ub(&self, s: usize) -> f64 { 48 | self.bound_of(s)[1] 49 | } 50 | 51 | /// Check the bounds of the index `s` with the value `v`, and set the value 52 | /// to max and min if out of bound. 53 | fn clamp(&self, s: usize, v: f64) -> f64 { 54 | let [min, max] = self.bound_of(s); 55 | v.clamp(min, max) 56 | } 57 | } 58 | 59 | /// A trait for the objective function. 60 | /// 61 | /// ``` 62 | /// use metaheuristics_nature::{Bounded, ObjFunc}; 63 | /// 64 | /// struct MyFunc; 65 | /// 66 | /// impl Bounded for MyFunc { 67 | /// fn bound(&self) -> &[[f64; 2]] { 68 | /// &[[0., 50.]; 3] 69 | /// } 70 | /// } 71 | /// 72 | /// impl ObjFunc for MyFunc { 73 | /// type Ys = f64; 74 | /// 75 | /// fn fitness(&self, x: &[f64]) -> Self::Ys { 76 | /// x[0] * x[0] + x[1] * x[1] + x[2] * x[2] 77 | /// } 78 | /// } 79 | /// ``` 80 | /// 81 | /// The objective function returns fitness value that used to evaluate the 82 | /// objective. The lower bound and upper bound represents the number of 83 | /// variables at the same time. 84 | /// 85 | /// This trait is designed as immutable and there should only has shared data. 86 | pub trait ObjFunc: Bounded { 87 | /// Type of the fitness value. 88 | /// 89 | /// # Wrappers 90 | /// 91 | /// There are some wrappers for the fitness value: [`WithProduct`] and 92 | /// [`MakeSingle`]. 93 | type Ys: Fitness; 94 | 95 | /// Return fitness, the smaller value represents a good result. 96 | /// 97 | /// # How to design the fitness value? 98 | /// 99 | /// Regularly, the evaluation value **should not** lower than zero, 100 | /// because it is not easy to control with multiplication, 101 | /// and a negative infinity can directly break the result. 
102 | /// Instead, a positive floating-point value is the better choice, 103 | /// and zero is the best result. 104 | /// 105 | /// On the other hand, some markers can help you to compare with other designs; 106 | /// please see [`Fitness`] for more information. 107 | /// 108 | /// # Penalty 109 | /// 110 | /// Positive infinity represents the worst, or an illogical, 111 | /// result. In fact, the searching area (also called the feasible solution) 112 | /// should keep the not-so-bad results, instead of evaluating them as the 113 | /// worst one, because we can keep the searching inspection around 114 | /// the best result to find our potential winner. 115 | /// 116 | /// In order to distinguish how bad a result is, we can add a penalty 117 | /// value, which represents the "fault" of the result. 118 | /// 119 | /// Under most circumstances, the result is not good enough, which shows in 120 | /// its fitness value. But sometimes a result is worse than our normal 121 | /// results; if we mark it as the worst one (infinity), it will become 122 | /// a great "wall", which is not suitable for us to search across. 123 | /// 124 | /// Therefore, we use a secondary evaluation function to measure the result against 125 | /// other requirements; we call it a "constraint" or "penalty function". 126 | /// The penalty value is usually multiplied by a weight factor to increase its 127 | /// influence. 128 | /// 129 | /// # Adaptive Value 130 | /// 131 | /// Sometimes a value that adjusts with the convergence state can help to restrict 132 | /// the searching. The adaptive value can be set by hiding its mutability 133 | /// with [`std::cell::Cell`], but this is not recommended. Please use the adaptive 134 | /// value from the algorithm, not from the objective function.
135 | fn fitness(&self, xs: &[f64]) -> Self::Ys; 136 | } 137 | -------------------------------------------------------------------------------- /src/fitness.rs: -------------------------------------------------------------------------------- 1 | use crate::prelude::*; 2 | use alloc::sync::Arc; 3 | 4 | /// Trait for dominance comparison. 5 | /// 6 | /// By default, the trait is implemented for types that implement `PartialOrd + 7 | /// Clone + 'static`, which means a clonable non-lifetime type comparable with 8 | /// `a < b` is equivalent to [`a.is_dominated(b)`](Fitness::is_dominated) for 9 | /// using single objective. 10 | /// 11 | /// # Example 12 | /// 13 | /// Single objective problems can simply use the `f32`/`f64` number type. 14 | /// 15 | /// Multi-objective problems can specify the [`Pareto`] container as 16 | /// [`Fitness::Best`] and implement [`Fitness::eval()`] to decide the final 17 | /// fitness value. 18 | /// 19 | /// ``` 20 | /// use metaheuristics_nature::{pareto::Pareto, Fitness}; 21 | /// 22 | /// #[derive(Clone)] 23 | /// struct MyObject { 24 | /// cost: f64, 25 | /// weight: f64, 26 | /// } 27 | /// 28 | /// impl Fitness for MyObject { 29 | /// type Best = Pareto; 30 | /// type Eval = f64; 31 | /// fn is_dominated(&self, rhs: &Self) -> bool { 32 | /// self.cost <= rhs.cost && self.weight <= rhs.weight 33 | /// } 34 | /// fn eval(&self) -> Self::Eval { 35 | /// self.cost.max(self.weight) 36 | /// } 37 | /// } 38 | /// ``` 39 | pub trait Fitness: MaybeParallel + Clone + 'static { 40 | /// The best element container. 41 | /// + Use [`SingleBest`] for single objective. 42 | /// + Use [`Pareto`] for multi-objective. 43 | type Best: Best; 44 | /// A value to compare the final fitness value. 45 | type Eval: PartialOrd + 'static; 46 | /// Check if `self` dominates `rhs`. 47 | fn is_dominated(&self, rhs: &Self) -> bool; 48 | /// Evaluate the final fitness value. 
49 | /// 50 | /// Used in [`Best::as_result()`] and [`Best::update()`] when reaching the 51 | /// limit. 52 | fn eval(&self) -> Self::Eval; 53 | } 54 | 55 | impl Fitness for T { 56 | type Best = SingleBest; 57 | type Eval = Self; 58 | fn is_dominated(&self, rhs: &Self) -> bool { 59 | self < rhs 60 | } 61 | fn eval(&self) -> Self::Eval { 62 | self.clone() 63 | } 64 | } 65 | 66 | /// A [`Fitness`] type carrying a multi-objective [`Fitness`] value. Make it 67 | /// become a single objective task via using [`Fitness::eval()`]. 68 | /// 69 | /// This wrapper type is overrided [`Fitness::Best`] to [`SingleBest`]. A 70 | /// multi-objective fitness type can be tested in single mode by setting 71 | /// [`ObjFunc::Ys`] to `MakeSingle` and wrapping the final result with 72 | /// `MakeSingle(MyMOFit { .. })`. 73 | #[derive(Clone, Debug)] 74 | #[repr(transparent)] 75 | pub struct MakeSingle(pub Y) 76 | where 77 | Y::Eval: Fitness; 78 | 79 | impl Fitness for MakeSingle 80 | where 81 | Y::Eval: Fitness, 82 | { 83 | type Best = SingleBest; 84 | type Eval = Y::Eval; 85 | fn is_dominated(&self, rhs: &Self) -> bool { 86 | self.eval().is_dominated(&rhs.eval()) 87 | } 88 | #[inline] 89 | fn eval(&self) -> Self::Eval { 90 | self.0.eval() 91 | } 92 | } 93 | 94 | /// A [`Fitness`] type carrying final results. 95 | /// 96 | /// You can use [`Solver::as_best_xs()`] / [`Solver::as_best_fit()`] / 97 | /// [`Solver::get_best_eval()`] to access product field. 98 | #[derive(Clone, Debug)] 99 | pub struct WithProduct { 100 | ys: Y, 101 | product: Arc

, 102 | } 103 | 104 | impl WithProduct { 105 | /// Create a product from an existing [`Arc`] object, where `P` can be 106 | /// unknown size. 107 | pub fn new_from_arc(ys: Y, product: Arc

) -> Self { 108 | Self { ys, product } 109 | } 110 | 111 | /// Get the reference to the final result. 112 | pub fn as_result(&self) -> &P { 113 | self.product.as_ref() 114 | } 115 | } 116 | 117 | impl WithProduct { 118 | /// Create a product. 119 | pub fn new(ys: Y, product: P) -> Self { 120 | Self::new_from_arc(ys, Arc::new(product)) 121 | } 122 | 123 | /// Get the fitness value. 124 | pub fn ys(&self) -> Y 125 | where 126 | Y: Clone, 127 | { 128 | self.ys.clone() 129 | } 130 | 131 | /// Consume and get the final result. 132 | pub fn into_result(self) -> P 133 | where 134 | P: Clone, 135 | { 136 | Arc::unwrap_or_clone(self.product) 137 | } 138 | 139 | /// Get the fitness value and the final result. 140 | pub fn into_err_result(self) -> (Y::Eval, P) 141 | where 142 | P: Clone, 143 | Y: Fitness, 144 | { 145 | (self.ys.eval(), Arc::unwrap_or_clone(self.product)) 146 | } 147 | } 148 | 149 | impl Fitness for WithProduct 150 | where 151 | P: MaybeParallel + Clone + 'static, 152 | { 153 | type Best = Y::Best; 154 | type Eval = Y::Eval; 155 | fn is_dominated(&self, rhs: &Self) -> bool { 156 | self.ys.is_dominated(&rhs.ys) 157 | } 158 | fn eval(&self) -> Self::Eval { 159 | self.ys.eval() 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /src/methods/rga.rs: -------------------------------------------------------------------------------- 1 | //! # Real-coded Genetic Algorithm 2 | //! 3 | //! Aka Real-valued Genetic Algorithm. 4 | //! 5 | //! 6 | //! 7 | //! This method require floating point power function. 8 | use crate::prelude::*; 9 | use alloc::vec::Vec; 10 | use core::iter::zip; 11 | 12 | /// Algorithm of the Real-coded Genetic Algorithm. 13 | pub type Method = Rga; 14 | 15 | const DEF: Rga = Rga { cross: 0.95, mutate: 0.05, win: 0.95, delta: 5. }; 16 | 17 | /// Real-coded Genetic Algorithm settings. 
18 | #[derive(Clone, PartialEq)] 19 | #[cfg_attr(feature = "clap", derive(clap::Args))] 20 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 21 | #[cfg_attr(feature = "serde", serde(default))] 22 | pub struct Rga { 23 | /// Crossover rate 24 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.cross))] 25 | pub cross: f64, 26 | /// Mutation rate 27 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.mutate))] 28 | pub mutate: f64, 29 | /// Win rate 30 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.win))] 31 | pub win: f64, 32 | /// Delta 33 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.delta))] 34 | pub delta: f64, 35 | } 36 | 37 | impl Rga { 38 | /// Constant default value. 39 | pub const fn new() -> Self { 40 | DEF 41 | } 42 | 43 | impl_builders! { 44 | /// Crossing probability. 45 | fn cross(f64) 46 | /// Mutation probability. 47 | fn mutate(f64) 48 | /// Winning probability. 49 | fn win(f64) 50 | /// Delta factor. 51 | fn delta(f64) 52 | } 53 | } 54 | 55 | impl Default for Rga { 56 | fn default() -> Self { 57 | DEF 58 | } 59 | } 60 | 61 | impl AlgCfg for Rga { 62 | type Algorithm = Method; 63 | fn algorithm(self) -> Self::Algorithm { 64 | self 65 | } 66 | fn pop_num() -> usize { 67 | 500 68 | } 69 | } 70 | 71 | impl Method { 72 | fn get_delta(&self, gen: u64, rng: &mut Rng, y: f64) -> f64 { 73 | let r = if gen < 100 { gen as f64 / 100. } else { 1. }; 74 | rng.rand() * y * (1. 
- r).powf(self.delta) 75 | } 76 | } 77 | 78 | impl Algorithm for Method { 79 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 80 | // Select 81 | let mut pool = ctx.pool.clone(); 82 | let mut pool_y = ctx.pool_y.clone(); 83 | for (xs, ys) in zip(&mut pool, &mut pool_y) { 84 | let [a, b] = rng.array(0..ctx.pop_num()); 85 | let i = if ctx.pool_y[a].is_dominated(&ctx.pool_y[b]) { 86 | a 87 | } else { 88 | b 89 | }; 90 | if rng.maybe(self.win) { 91 | *xs = ctx.pool[i].clone(); 92 | *ys = ctx.pool_y[i].clone(); 93 | } 94 | } 95 | ctx.pool = pool; 96 | ctx.pool_y = pool_y; 97 | { 98 | let i = rng.ub(ctx.pop_num()); 99 | let (xs, ys) = ctx.best.sample(rng); 100 | ctx.set_from(i, xs.to_vec(), ys.clone()); 101 | } 102 | // Crossover 103 | for i in (0..ctx.pop_num() - 1).step_by(2) { 104 | if !rng.maybe(self.cross) { 105 | continue; 106 | } 107 | #[cfg(not(feature = "rayon"))] 108 | let iter = rng.stream(3).into_iter(); 109 | #[cfg(feature = "rayon")] 110 | let iter = rng.stream(3).into_par_iter(); 111 | let mut ret: [_; 3] = iter 112 | .enumerate() 113 | .map(|(id, mut rng)| { 114 | let xs = zip(ctx.bound(), zip(&ctx.pool[i], &ctx.pool[i + 1])) 115 | .map(|(&[min, max], (a, b))| { 116 | let v = match id { 117 | 0 => 0.5 * (a + b), 118 | 1 => 1.5 * a - 0.5 * b, 119 | _ => -0.5 * a + 1.5 * b, 120 | }; 121 | rng.clamp(v, min..=max) 122 | }) 123 | .collect::>(); 124 | let ys = ctx.fitness(&xs); 125 | (ys, xs) 126 | }) 127 | .collect::>() 128 | .try_into() 129 | .unwrap_or_else(|_| unreachable!()); 130 | ret.sort_unstable_by(|(a, _), (b, _)| a.eval().partial_cmp(&b.eval()).unwrap()); 131 | let [(t1_f, t1_x), (t2_f, t2_x), ..] 
= ret; 132 | ctx.set_from(i, t1_x, t1_f); 133 | ctx.set_from(i + 1, t2_x, t2_f); 134 | } 135 | // Mutate 136 | let dim = ctx.dim(); 137 | for (xs, ys) in zip(&mut ctx.pool, &mut ctx.pool_y) { 138 | if !rng.maybe(self.mutate) { 139 | continue; 140 | } 141 | let s = rng.ub(dim); 142 | if rng.maybe(0.5) { 143 | xs[s] += self.get_delta(ctx.gen, rng, ctx.func.ub(s) - xs[s]); 144 | } else { 145 | xs[s] -= self.get_delta(ctx.gen, rng, xs[s] - ctx.func.lb(s)); 146 | } 147 | *ys = ctx.func.fitness(xs); 148 | } 149 | ctx.find_best(); 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/random.rs: -------------------------------------------------------------------------------- 1 | //! Random number generator module. 2 | use alloc::vec::Vec; 3 | use rand::{ 4 | distributions::{ 5 | uniform::{SampleRange, SampleUniform}, 6 | Distribution, Standard, 7 | }, 8 | Rng as _, SeedableRng as _, 9 | }; 10 | use rand_chacha::ChaCha8Rng as ChaCha; 11 | 12 | /// The seed type of the ChaCha algorithm. 13 | pub type Seed = [u8; 32]; 14 | 15 | /// The seed option. 16 | /// 17 | /// Can be converted from `Option`, `u64`, and [`Seed`]. 18 | #[derive(Copy, Clone)] 19 | pub enum SeedOpt { 20 | /// Seed from non-crypto u64 21 | U64(u64), 22 | /// Crypto seed series (32 bytes) 23 | Seed(Seed), 24 | /// Auto-decided crypto seed 25 | Entropy, 26 | } 27 | 28 | impl From> for SeedOpt { 29 | fn from(opt: Option) -> Self { 30 | match opt { 31 | Some(seed) => Self::U64(seed), 32 | None => Self::Entropy, 33 | } 34 | } 35 | } 36 | 37 | impl From for SeedOpt { 38 | fn from(seed: u64) -> Self { 39 | Self::U64(seed) 40 | } 41 | } 42 | 43 | impl From for SeedOpt { 44 | fn from(seed: Seed) -> Self { 45 | Self::Seed(seed) 46 | } 47 | } 48 | 49 | /// An uniformed random number generator. 50 | #[derive(Clone, Debug)] 51 | pub struct Rng { 52 | rng: ChaCha, 53 | } 54 | 55 | impl Rng { 56 | /// Create generator by a given seed. 
57 | /// If none, create the seed from CPU random function. 58 | pub fn new(seed: SeedOpt) -> Self { 59 | let rng = match seed { 60 | SeedOpt::Seed(seed) => ChaCha::from_seed(seed), 61 | SeedOpt::U64(seed) => ChaCha::seed_from_u64(seed), 62 | SeedOpt::Entropy => ChaCha::from_entropy(), 63 | }; 64 | Self { rng } 65 | } 66 | 67 | /// Seed of this generator. 68 | #[inline] 69 | pub fn seed(&self) -> Seed { 70 | self.rng.get_seed() 71 | } 72 | 73 | /// Stream for parallel threading. 74 | /// 75 | /// Use the iterators `.zip()` method to fork this RNG set. 76 | pub fn stream(&mut self, n: usize) -> Vec { 77 | // Needs to "run" the RNG to avoid constantly opening new branches 78 | let stream = self.rng.get_stream(); 79 | self.rng.set_stream(stream.wrapping_add(n as _)); 80 | (0..n) 81 | .map(|i| { 82 | let mut rng = self.clone(); 83 | rng.rng.set_stream(stream.wrapping_add(i as _)); 84 | rng 85 | }) 86 | .collect() 87 | } 88 | 89 | /// A low-level access to the RNG type. 90 | /// 91 | /// Please import necessary traits first. 92 | pub fn gen_with(&mut self, f: impl FnOnce(&mut ChaCha) -> R) -> R { 93 | f(&mut self.rng) 94 | } 95 | 96 | /// Generate a random value by standard distribution. 97 | pub fn gen(&mut self) -> T 98 | where 99 | Standard: Distribution, 100 | { 101 | self.rng.gen() 102 | } 103 | 104 | /// Generate a classic random value between `0..1` (exclusive range). 105 | #[inline] 106 | pub fn rand(&mut self) -> f64 { 107 | self.ub(1.) 108 | } 109 | 110 | /// Generate a random boolean by positive (`true`) factor. 111 | #[inline] 112 | pub fn maybe(&mut self, p: f64) -> bool { 113 | self.rng.gen_bool(p) 114 | } 115 | 116 | /// Generate a random value by range. 117 | #[inline] 118 | pub fn range(&mut self, range: R) -> T 119 | where 120 | T: SampleUniform, 121 | R: SampleRange, 122 | { 123 | self.rng.gen_range(range) 124 | } 125 | 126 | /// Sample from a distribution. 
127 | #[inline] 128 | pub fn sample(&mut self, distr: D) -> T 129 | where 130 | D: Distribution, 131 | { 132 | self.rng.sample(distr) 133 | } 134 | 135 | /// Generate a random value by upper bound (exclusive range). 136 | /// 137 | /// The lower bound is zero. 138 | #[inline] 139 | pub fn ub(&mut self, ub: U) -> U 140 | where 141 | U: Default + SampleUniform, 142 | core::ops::Range: SampleRange, 143 | { 144 | self.range(U::default()..ub) 145 | } 146 | 147 | /// Generate a random value by range. 148 | #[inline] 149 | pub fn clamp(&mut self, v: T, range: R) -> T 150 | where 151 | T: SampleUniform + PartialOrd, 152 | R: SampleRange + core::ops::RangeBounds, 153 | { 154 | if range.contains(&v) { 155 | v 156 | } else { 157 | self.range(range) 158 | } 159 | } 160 | 161 | /// Sample with Gaussian distribution. 162 | #[inline] 163 | pub fn normal(&mut self, mean: F, std: F) -> F 164 | where 165 | F: num_traits::Float, 166 | rand_distr::StandardNormal: Distribution, 167 | { 168 | self.sample(rand_distr::Normal::new(mean, std).unwrap()) 169 | } 170 | 171 | /// Shuffle a slice. 172 | pub fn shuffle(&mut self, s: &mut S) { 173 | s.shuffle(&mut self.rng); 174 | } 175 | 176 | /// Choose a random value from the slice. 177 | pub fn choose<'a, S: rand::seq::SliceRandom + ?Sized>(&mut self, s: &'a S) -> &'a S::Item { 178 | s.choose(&mut self.rng).expect("Empty slice") 179 | } 180 | 181 | /// Generate a random array with no-repeat values. 182 | pub fn array(&mut self, candi: C) -> [A; N] 183 | where 184 | A: Default + Copy + PartialEq + SampleUniform, 185 | C: IntoIterator, 186 | { 187 | let mut candi = candi.into_iter().collect::>(); 188 | self.shuffle(candi.as_mut_slice()); 189 | candi[..N].try_into().expect("candi.len() < N") 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | //! 
# Terminologies 3 | //! 4 | //! For unifying the terms, in this documentation, 5 | //! 6 | //! + "Iteration" is called "generation". (Avoid confusion with iterators) 7 | //! + "Function" that evaluates the design is called "objective function". 8 | //! + "Return value" of the objective function is called "fitness". 9 | //! 10 | //! # Algorithms 11 | //! 12 | //! There are two traits [`Algorithm`] and [`AlgCfg`]. 13 | //! The previous is used to design the optimization method, 14 | //! and the latter is the setting interface. 15 | //! 16 | //! [`Solver`] is a simple interface for obtaining the solution, or analyzing 17 | //! the result. This type allows you to use the pre-defined methods without 18 | //! importing any traits. 19 | //! 20 | //! All provided methods are listed in the module [`methods`]. 21 | //! 22 | //! For making your owned method, please see [`prelude`]. 23 | //! 24 | //! # Objective Function 25 | //! 26 | //! For a quick demo with callable object, please see [`Fx`]. 27 | //! 28 | //! You can define your question as an objective function through implementing 29 | //! [`ObjFunc`], and then the upper bound, lower bound, and an objective 30 | //! function [`ObjFunc::fitness()`] returns [`Fitness`] should be defined. 31 | //! 32 | //! # Random Function 33 | //! 34 | //! This crate uses a 64bit ChaCha algorithm ([`random::Rng`]) to generate 35 | //! uniform random values. Before that, a random seed is required. The seed is 36 | //! generated by `getrandom` crate, please see its support platform. 37 | //! 38 | //! # Features 39 | //! 40 | //! The crate features: 41 | //! + `std`: Default feature. Enable standard library function, such as timing 42 | //! and threading. If `std` is disabled, crate "libm" will be enabled for the 43 | //! math functions. 44 | //! + `rayon`: Enable parallel computation via `rayon`. Disable it for the 45 | //! platform that doesn't supported threading, or if your objective function 46 | //! is not complicate enough. 
This feature require `std` feature. 47 | //! + `clap`: Add CLI argument support for the provided algorithms and their 48 | //! options. 49 | //! 50 | //! # Compatibility 51 | //! 52 | //! If you are using this crate for providing objective function, 53 | //! other downstream crates of yours may have some problems with compatibility. 54 | //! 55 | //! The most important thing is using a stable version, specifying the major 56 | //! version number. Then re-export (`pub use`) this crate for the downstream 57 | //! crates. 58 | //! 59 | //! This crate does the same things on `rand` and `rayon`. 60 | #![cfg_attr(doc_cfg, feature(doc_auto_cfg))] 61 | #![cfg_attr(not(feature = "std"), no_std)] 62 | extern crate alloc; 63 | #[cfg(not(feature = "std"))] 64 | extern crate core as std; 65 | pub use rand; 66 | #[cfg(feature = "rayon")] 67 | pub use rayon; 68 | 69 | pub use self::{ 70 | algorithm::*, ctx::*, fitness::*, fx_func::*, methods::*, obj_func::*, solver::*, 71 | solver_builder::*, 72 | }; 73 | 74 | /// A tool macro used to generate multiple builder functions (methods). 75 | /// 76 | /// For example, 77 | /// 78 | /// ``` 79 | /// # use metaheuristics_nature::impl_builders; 80 | /// # type Ty = bool; 81 | /// # struct S { 82 | /// # name1: Ty, 83 | /// # name2: Ty, 84 | /// # } 85 | /// impl S { 86 | /// impl_builders! { 87 | /// /// Doc 1 88 | /// fn name1(Ty) 89 | /// /// Doc 2 90 | /// fn name2(Ty) 91 | /// } 92 | /// } 93 | /// ``` 94 | /// 95 | /// will become 96 | /// 97 | /// ``` 98 | /// # type Ty = bool; 99 | /// # struct S { 100 | /// # name1: Ty, 101 | /// # name2: Ty, 102 | /// # } 103 | /// impl S { 104 | /// /// Doc 1 105 | /// pub fn name1(mut self, name1: Ty) -> Self { 106 | /// self.name1 = name1; 107 | /// self 108 | /// } 109 | /// /// Doc 2 110 | /// pub fn name2(mut self, name2: Ty) -> Self { 111 | /// self.name2 = name2; 112 | /// self 113 | /// } 114 | /// } 115 | /// ``` 116 | #[macro_export] 117 | macro_rules! 
impl_builders { 118 | ($($(#[$meta:meta])* fn $name:ident($ty:ty))+) => {$( 119 | $(#[$meta])* 120 | pub fn $name(self, $name: $ty) -> Self { 121 | Self { $name, ..self } 122 | } 123 | )+}; 124 | } 125 | 126 | /// A prelude module for algorithm implementation. 127 | /// 128 | /// This module includes all items of this crate, some hidden types, 129 | /// and external items from "ndarray" and "rayon" (if `rayon` feature enabled). 130 | pub mod prelude { 131 | pub use super::*; 132 | pub use crate::{pareto::*, random::*}; 133 | 134 | #[cfg(feature = "rayon")] 135 | #[doc(no_inline)] 136 | pub use crate::rayon::prelude::*; 137 | #[cfg(not(feature = "std"))] 138 | pub use num_traits::Float as _; 139 | } 140 | 141 | mod algorithm; 142 | mod ctx; 143 | mod fitness; 144 | mod fx_func; 145 | pub mod methods; 146 | mod obj_func; 147 | pub mod pareto; 148 | pub mod random; 149 | mod solver; 150 | mod solver_builder; 151 | pub mod tests; 152 | 153 | /// A marker trait for parallel computation. 154 | /// 155 | /// Require `Sync + Send` if the `rayon` feature is enabled, otherwise require 156 | /// nothing. 157 | #[cfg(not(feature = "rayon"))] 158 | pub trait MaybeParallel {} 159 | #[cfg(not(feature = "rayon"))] 160 | impl MaybeParallel for T {} 161 | 162 | /// A marker trait for parallel computation. 163 | /// 164 | /// Require `Sync + Send` if the `rayon` feature is enabled, otherwise require 165 | /// nothing. 166 | #[cfg(feature = "rayon")] 167 | pub trait MaybeParallel: Sync + Send {} 168 | #[cfg(feature = "rayon")] 169 | impl MaybeParallel for T {} 170 | 171 | #[cfg(feature = "rayon")] 172 | macro_rules! maybe_send_box { 173 | ($($traits:tt)+) => { 174 | Box 175 | }; 176 | } 177 | #[cfg(not(feature = "rayon"))] 178 | macro_rules! 
maybe_send_box { 179 | ($($traits:tt)+) => { 180 | Box 181 | }; 182 | } 183 | pub(crate) use maybe_send_box; 184 | -------------------------------------------------------------------------------- /src/methods/de.rs: -------------------------------------------------------------------------------- 1 | //! # Differential Evolution 2 | //! 3 | //! 4 | use self::Strategy::*; 5 | use crate::prelude::*; 6 | use alloc::{boxed::Box, vec::Vec}; 7 | 8 | /// Algorithm of the Differential Evolution. 9 | pub type Method = De; 10 | type Func = Box, &[f64], usize) -> f64>; 11 | 12 | const DEF: De = De { strategy: C1F1, f: 0.6, cross: 0.9 }; 13 | 14 | /// The Differential Evolution strategy. 15 | /// 16 | /// Each strategy has different formulas on the recombination. 17 | /// 18 | /// # Variable formula 19 | /// 20 | /// This formula decide how to generate new variable *n*. 21 | /// Where *vi* is the random indicator on the individuals, 22 | /// they are different from each other. 23 | /// 24 | /// + *f1*: best{n} + F * (v0{n} - v1{n}) 25 | /// + *f2*: v0{n} + F * (v1{n} - v2{n}) 26 | /// + *f3*: self{n} + F * (best{n} - self{n} + v0{n} - v1{n}) 27 | /// + *f4*: best{n} + F * (v0{n} + v1{n} - v2{n} - v3{n}) 28 | /// + *f5*: v4{n} + F * (v0{n} + v1{n} - v2{n} - v3{n}) 29 | /// 30 | /// # Crossover formula 31 | /// 32 | /// + *c1*: Continue crossover in order until end with probability. 33 | /// + *c2*: Each variable has independent probability. 
34 | #[derive(Default, Copy, Clone, PartialEq, Eq)] 35 | #[cfg_attr(feature = "clap", derive(clap::ValueEnum))] 36 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 37 | pub enum Strategy { 38 | /// *f1* + *c1* 39 | #[default] 40 | C1F1, 41 | /// *f2* + *c1* 42 | C1F2, 43 | /// *f3* + *c1* 44 | C1F3, 45 | /// *f4* + *c1* 46 | C1F4, 47 | /// *f5* + *c1* 48 | C1F5, 49 | /// *f1* + *c2* 50 | C2F1, 51 | /// *f2* + *c2* 52 | C2F2, 53 | /// *f3* + *c2* 54 | C2F3, 55 | /// *f4* + *c2* 56 | C2F4, 57 | /// *f5* + *c2* 58 | C2F5, 59 | } 60 | 61 | impl Strategy { 62 | /// A list of all strategies. 63 | pub const LIST: [Self; 10] = [C1F1, C1F2, C1F3, C1F4, C1F5, C2F1, C2F2, C2F3, C2F4, C2F5]; 64 | } 65 | 66 | /// Differential Evolution settings. 67 | #[derive(Clone, PartialEq)] 68 | #[cfg_attr(feature = "clap", derive(clap::Args))] 69 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 70 | #[cfg_attr(feature = "serde", serde(default))] 71 | pub struct De { 72 | /// Strategy 73 | #[cfg_attr(feature = "clap", clap(long, value_enum, default_value_t = DEF.strategy))] 74 | pub strategy: Strategy, 75 | /// F factor in the formula 76 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.f))] 77 | pub f: f64, 78 | /// Crossover rate 79 | #[cfg_attr(feature = "clap", clap(long, default_value_t = DEF.cross))] 80 | pub cross: f64, 81 | } 82 | 83 | impl De { 84 | /// Constant default value. 85 | pub const fn new() -> Self { 86 | DEF 87 | } 88 | 89 | impl_builders! { 90 | /// Strategy of the formula. 91 | fn strategy(Strategy) 92 | /// F factor. 93 | fn f(f64) 94 | /// Crossing probability. 
95 | fn cross(f64) 96 | } 97 | } 98 | 99 | impl Default for De { 100 | fn default() -> Self { 101 | DEF 102 | } 103 | } 104 | 105 | impl AlgCfg for De { 106 | type Algorithm = Method; 107 | fn algorithm(self) -> Self::Algorithm { 108 | self 109 | } 110 | fn pop_num() -> usize { 111 | 400 112 | } 113 | } 114 | 115 | impl Method { 116 | fn formula(&self, ctx: &Ctx, rng: &mut Rng) -> Func { 117 | let f = self.f; 118 | match self.strategy { 119 | C1F1 | C2F1 => { 120 | let [v0, v1] = rng.array(0..ctx.pop_num()); 121 | let best = ctx.best.sample_xs(rng).to_vec(); 122 | Box::new(move |ctx, _, s| best[s] + f * (ctx.pool[v0][s] - ctx.pool[v1][s])) 123 | } 124 | C1F2 | C2F2 => Box::new({ 125 | let [v0, v1, v2] = rng.array(0..ctx.pop_num()); 126 | move |ctx, _, s| ctx.pool[v0][s] + f * (ctx.pool[v1][s] - ctx.pool[v2][s]) 127 | }), 128 | C1F3 | C2F3 => Box::new({ 129 | let [v0, v1] = rng.array(0..ctx.pop_num()); 130 | let best = ctx.best.sample_xs(rng).to_vec(); 131 | move |ctx, xs, s| xs[s] + f * (best[s] - xs[s] + ctx.pool[v0][s] - ctx.pool[v1][s]) 132 | }), 133 | C1F4 | C2F4 => Box::new({ 134 | let [v0, v1, v2, v3] = rng.array(0..ctx.pop_num()); 135 | let best = ctx.best.sample_xs(rng).to_vec(); 136 | move |ctx, _, s| { 137 | best[s] 138 | + f * (ctx.pool[v0][s] + ctx.pool[v1][s] 139 | - ctx.pool[v2][s] 140 | - ctx.pool[v3][s]) 141 | } 142 | }), 143 | C1F5 | C2F5 => Box::new({ 144 | let [v0, v1, v2, v3, v4] = rng.array(0..ctx.pop_num()); 145 | move |ctx, _, s| { 146 | ctx.pool[v4][s] 147 | + f * (ctx.pool[v0][s] + ctx.pool[v1][s] 148 | - ctx.pool[v2][s] 149 | - ctx.pool[v3][s]) 150 | } 151 | }), 152 | } 153 | } 154 | 155 | fn c1(&self, ctx: &Ctx, rng: &mut Rng, xs: &mut [f64], formula: Func) 156 | where 157 | F: ObjFunc, 158 | { 159 | let dim = ctx.dim(); 160 | for (i, s) in (0..dim).cycle().skip(rng.ub(dim)).take(dim).enumerate() { 161 | // At last one variables is modified 162 | if i >= 1 && !rng.maybe(self.cross) { 163 | break; 164 | } 165 | xs[s] = 
rng.clamp(formula(ctx, xs, s), ctx.bound_range(s)); 166 | } 167 | } 168 | 169 | fn c2(&self, ctx: &Ctx, rng: &mut Rng, xs: &mut [f64], formula: Func) 170 | where 171 | F: ObjFunc, 172 | { 173 | // At least one variable is modified 174 | let sss = rng.ub(ctx.dim()); 175 | for s in 0..ctx.dim() { 176 | if sss == s || rng.maybe(self.cross) { 177 | xs[s] = rng.clamp(formula(ctx, xs, s), ctx.bound_range(s)); 178 | } 179 | } 180 | } 181 | } 182 | 183 | impl Algorithm for Method { 184 | fn generation(&mut self, ctx: &mut Ctx, rng: &mut Rng) { 185 | let mut pool = ctx.pool.clone(); 186 | let mut pool_y = ctx.pool_y.clone(); 187 | let rng = rng.stream(ctx.pop_num()); 188 | #[cfg(not(feature = "rayon"))] 189 | let iter = rng.into_iter(); 190 | #[cfg(feature = "rayon")] 191 | let iter = rng.into_par_iter(); 192 | let (xs, ys): (Vec<_>, Vec<_>) = iter 193 | .zip(&mut pool) 194 | .zip(&mut pool_y) 195 | .filter_map(|((mut rng, xs), ys)| { 196 | // Generate Vector 197 | let formula = self.formula(ctx, &mut rng); 198 | // Recombination 199 | let mut xs_trial = xs.clone(); 200 | match self.strategy { 201 | C1F1 | C1F2 | C1F3 | C1F4 | C1F5 => { 202 | self.c1(ctx, &mut rng, &mut xs_trial, formula) 203 | } 204 | C2F1 | C2F2 | C2F3 | C2F4 | C2F5 => { 205 | self.c2(ctx, &mut rng, &mut xs_trial, formula) 206 | } 207 | } 208 | let ys_trial = ctx.fitness(&xs_trial); 209 | if ys_trial.is_dominated(ys) { 210 | *xs = xs_trial; 211 | *ys = ys_trial; 212 | Some((&*xs, &*ys)) 213 | } else { 214 | None 215 | } 216 | }) 217 | .unzip(); 218 | ctx.best.update_all(xs, ys); 219 | ctx.pool = pool; 220 | ctx.pool_y = pool_y; 221 | } 222 | } 223 | -------------------------------------------------------------------------------- /src/pareto.rs: -------------------------------------------------------------------------------- 1 | //! Single/Multi-objective best containers. 2 | use crate::prelude::*; 3 | use alloc::vec::Vec; 4 | use core::iter::zip; 5 | 6 | /// Single best element container. 
7 | #[derive(Debug)] 8 | pub struct SingleBest { 9 | xs: Option>, 10 | ys: Option, 11 | } 12 | 13 | impl SingleBest { 14 | /// Get the current best element. 15 | pub fn get_eval(&self) -> ::Eval { 16 | Best::get_eval(self) 17 | } 18 | 19 | /// Get the current best design variables. 20 | pub fn get_xs(&self) -> &[f64] { 21 | Best::get_xs(self) 22 | } 23 | } 24 | 25 | /// Pareto front container for multi-objective optimization. 26 | #[derive(Debug)] 27 | pub struct Pareto { 28 | xs: Vec>, 29 | ys: Vec, 30 | limit: usize, 31 | } 32 | 33 | impl Pareto { 34 | /// Get the current best element. 35 | pub fn get_eval(&self) -> ::Eval { 36 | Best::get_eval(self) 37 | } 38 | 39 | /// Get the current best design variables. 40 | pub fn get_xs(&self) -> &[f64] { 41 | Best::get_xs(self) 42 | } 43 | 44 | /// Get the number of best elements. 45 | pub fn len(&self) -> usize { 46 | self.xs.len() 47 | } 48 | 49 | /// Check if the best elements are empty. 50 | pub fn is_empty(&self) -> bool { 51 | self.xs.is_empty() 52 | } 53 | 54 | /// Get the slice of best fitness values. 55 | pub fn as_pareto(&self) -> &[T] { 56 | &self.ys 57 | } 58 | 59 | fn update_no_limit(&mut self, xs: &[f64], ys: &T) { 60 | // Remove dominated solutions 61 | let mut has_dominated = false; 62 | for i in (0..self.xs.len()).rev() { 63 | let ys_curr = &self.ys[i]; 64 | if ys.is_dominated(ys_curr) { 65 | has_dominated = true; 66 | self.xs.swap_remove(i); 67 | self.ys.swap_remove(i); 68 | } else if !has_dominated && ys_curr.is_dominated(ys) { 69 | return; 70 | } 71 | } 72 | // Add the new solution 73 | self.xs.push(xs.to_vec()); 74 | self.ys.push(ys.clone()); 75 | } 76 | } 77 | 78 | impl Pareto> 79 | where 80 | P: MaybeParallel + Clone + 'static, 81 | Y: Fitness, 82 | { 83 | /// Convert the product into the pareto front. 84 | pub fn pareto_from_product(&self) -> Vec { 85 | self.ys.iter().map(|p| p.ys()).collect() 86 | } 87 | } 88 | 89 | /// A trait for best element container. 
90 | pub trait Best: MaybeParallel { 91 | /// The type of the best element 92 | type Item: Fitness; 93 | /// Create a new best element container. 94 | fn from_limit(limit: usize) -> Self; 95 | /// Update the best element. 96 | fn update(&mut self, xs: &[f64], ys: &Self::Item); 97 | /// Update the best elements from a batch. 98 | fn update_all<'a, Ix, Iy>(&mut self, pool: Ix, pool_y: Iy) 99 | where 100 | Ix: IntoIterator>, 101 | Iy: IntoIterator, 102 | { 103 | zip(pool, pool_y).for_each(|(xs, ys)| self.update(xs, ys)); 104 | } 105 | /// Sample a random best element. 106 | fn sample(&self, rng: &mut Rng) -> (&[f64], &Self::Item); 107 | /// Sample a random design variables. 108 | /// 109 | /// # Panics 110 | /// 111 | /// Panics if the best element is not available. 112 | fn sample_xs(&self, rng: &mut Rng) -> &[f64] { 113 | self.sample(rng).0 114 | } 115 | /// Get the current best element. 116 | fn as_result(&self) -> (&[f64], &Self::Item); 117 | /// Get the current best fitness value. 118 | fn as_result_fit(&self) -> &Self::Item { 119 | self.as_result().1 120 | } 121 | /// Convert the best element into the target item in the final stage. 122 | /// 123 | /// See also [`Best::as_result_fit()`] for getting its reference. 124 | fn into_result_fit(self) -> Self::Item; 125 | /// Get the current best design variables. 126 | fn get_xs(&self) -> &[f64] { 127 | self.as_result().0 128 | } 129 | /// Get the current best evaluation value. 
130 | fn get_eval(&self) -> ::Eval { 131 | self.as_result_fit().eval() 132 | } 133 | } 134 | 135 | impl Best for SingleBest { 136 | type Item = T; 137 | 138 | fn from_limit(_limit: usize) -> Self { 139 | Self { xs: None, ys: None } 140 | } 141 | 142 | fn update(&mut self, xs: &[f64], ys: &Self::Item) { 143 | if let (Some(best), Some(best_f)) = (&mut self.xs, &mut self.ys) { 144 | if ys.is_dominated(best_f) { 145 | *best = xs.to_vec(); 146 | *best_f = ys.clone(); 147 | } 148 | } else { 149 | self.xs = Some(xs.to_vec()); 150 | self.ys = Some(ys.clone()); 151 | } 152 | } 153 | 154 | fn sample(&self, _rng: &mut Rng) -> (&[f64], &Self::Item) { 155 | self.as_result() 156 | } 157 | 158 | fn as_result(&self) -> (&[f64], &Self::Item) { 159 | (self.xs.as_deref()) 160 | .zip(self.ys.as_ref()) 161 | .expect("No best element available") 162 | } 163 | 164 | fn into_result_fit(self) -> Self::Item { 165 | self.ys.expect("No best element available") 166 | } 167 | } 168 | 169 | impl Best for Pareto { 170 | type Item = T; 171 | 172 | fn from_limit(limit: usize) -> Self { 173 | let xs = Vec::with_capacity(limit + 1); 174 | let ys = Vec::with_capacity(limit + 1); 175 | Self { xs, ys, limit } 176 | } 177 | 178 | fn update(&mut self, xs: &[f64], ys: &Self::Item) { 179 | self.update_no_limit(xs, ys); 180 | // Prune the solution set 181 | if self.xs.len() > self.limit { 182 | let (i, _) = (self.ys.iter().map(T::eval).enumerate()) 183 | .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap()) 184 | .unwrap(); 185 | self.xs.swap_remove(i); 186 | self.ys.swap_remove(i); 187 | } 188 | } 189 | 190 | fn update_all<'a, Ix, Iy>(&mut self, pool: Ix, pool_y: Iy) 191 | where 192 | Ix: IntoIterator>, 193 | Iy: IntoIterator, 194 | { 195 | for (xs, ys) in zip(pool, pool_y) { 196 | self.update_no_limit(xs, ys); 197 | } 198 | if self.xs.len() <= self.limit { 199 | return; 200 | } 201 | // Prune the solution set 202 | let mut ind = (0..self.xs.len()).collect::>(); 203 | #[cfg(not(feature = "rayon"))] 204 | 
ind.sort_unstable_by(|i, j| self.ys[*i].eval().partial_cmp(&self.ys[*j].eval()).unwrap()); 205 | #[cfg(feature = "rayon")] 206 | ind.par_sort_unstable_by(|i, j| { 207 | self.ys[*i].eval().partial_cmp(&self.ys[*j].eval()).unwrap() 208 | }); 209 | // No copied vector sort 210 | for idx in 0..self.xs.len() { 211 | if ind[idx] != usize::MAX { 212 | let mut curr_idx = idx; 213 | loop { 214 | let tar_idx = ind[curr_idx]; 215 | ind[curr_idx] = usize::MAX; 216 | if ind[tar_idx] == usize::MAX { 217 | break; 218 | } 219 | self.xs.swap(curr_idx, tar_idx); 220 | self.ys.swap(curr_idx, tar_idx); 221 | curr_idx = tar_idx; 222 | } 223 | } 224 | } 225 | self.xs.truncate(self.limit); 226 | self.ys.truncate(self.limit); 227 | } 228 | 229 | fn sample(&self, rng: &mut Rng) -> (&[f64], &Self::Item) { 230 | let i = rng.ub(self.xs.len()); 231 | (&self.xs[i], &self.ys[i]) 232 | } 233 | 234 | fn as_result(&self) -> (&[f64], &Self::Item) { 235 | match zip(&self.xs, &self.ys) 236 | .map(|(xs, ys)| (xs, ys, ys.eval())) 237 | .min_by(|(.., a), (.., b)| a.partial_cmp(b).unwrap()) 238 | { 239 | Some((xs, ys, _)) => (xs, ys), 240 | None => panic!("No best element available"), 241 | } 242 | } 243 | 244 | fn into_result_fit(self) -> Self::Item { 245 | (self.ys.into_iter()) 246 | .map(|ys| (ys.eval(), ys)) 247 | .min_by(|(a, _), (b, _)| a.partial_cmp(b).unwrap()) 248 | .map(|(_, ys)| ys) 249 | .expect("No best element available") 250 | } 251 | } 252 | -------------------------------------------------------------------------------- /src/solver_builder.rs: -------------------------------------------------------------------------------- 1 | use crate::prelude::*; 2 | use alloc::{boxed::Box, vec::Vec}; 3 | 4 | /// A [`SolverBuilder`] that use a boxed algorithm. 5 | /// 6 | /// Generated by [`Solver::build_boxed()`] method. 
7 | pub type SolverBox<'a, F> = SolverBuilder<'a, maybe_send_box!(Algorithm), F>; 8 | 9 | type PoolFunc<'a> = 10 | maybe_send_box!(Fn(usize, core::ops::RangeInclusive, &mut Rng) -> f64 + 'a); 11 | 12 | /// Initial pool generating options. 13 | /// 14 | /// Use [`SolverBuilder::init_pool()`] to set this option. 15 | pub enum Pool<'a, F: ObjFunc> { 16 | /// A ready-made pool and its fitness values. 17 | Ready { 18 | /// Pool 19 | pool: Vec>, 20 | /// Fitness values 21 | pool_y: Vec, 22 | }, 23 | /// Generate the pool uniformly with a filter function to check the 24 | /// validity. 25 | /// 26 | /// This filter function returns true if the design variables are valid. 27 | #[allow(clippy::type_complexity)] 28 | UniformBy(maybe_send_box!(Fn(&[f64]) -> bool + 'a)), 29 | /// Generate the pool with a specific function. 30 | /// 31 | /// The function signature is `fn(s, min..max, &rng) -> value` 32 | /// + `s` is the index of the variable 33 | /// + `min..max` is the range of the variable 34 | /// + `rng` is the random number generator 35 | /// 36 | /// Two examples are [`uniform_pool()`] and [`gaussian_pool()`]. 37 | /// 38 | /// ``` 39 | /// use metaheuristics_nature::{gaussian_pool, Pool, Rga, Solver}; 40 | /// # use metaheuristics_nature::tests::TestObj as MyFunc; 41 | /// 42 | /// let pool = Pool::Func(Box::new(gaussian_pool(&[0.; 4], &[1.; 4]))); 43 | /// let s = Solver::build(Rga::default(), MyFunc::new()) 44 | /// .seed(0) 45 | /// .task(|ctx| ctx.gen == 20) 46 | /// .init_pool(pool) 47 | /// .solve(); 48 | /// ``` 49 | Func(PoolFunc<'a>), 50 | } 51 | 52 | /// Collect configuration and build the solver. 53 | /// 54 | /// This type is created by [`Solver::build()`] method. 55 | /// 56 | /// + First, setting a fixed seed with [`SolverBuilder::seed()`] method to get a 57 | /// determined result is highly recommended. 58 | /// + Next is [`SolverBuilder::task()`] method with a termination condition. 
59 | /// + Finally, call [`SolverBuilder::solve()`] method to start the algorithm. 60 | #[allow(clippy::type_complexity)] 61 | #[must_use = "solver builder do nothing unless call the \"solve\" method"] 62 | pub struct SolverBuilder<'a, A: Algorithm, F: ObjFunc> { 63 | func: F, 64 | algorithm: A, 65 | pop_num: usize, 66 | pareto_limit: usize, 67 | seed: SeedOpt, 68 | pool: Pool<'a, F>, 69 | task: maybe_send_box!(FnMut(&Ctx) -> bool + 'a), 70 | callback: maybe_send_box!(FnMut(&Ctx) + 'a), 71 | } 72 | 73 | impl<'a, A: Algorithm, F: ObjFunc> SolverBuilder<'a, A, F> { 74 | impl_builders! { 75 | /// Population number. 76 | /// 77 | /// # Default 78 | /// 79 | /// If not changed by the algorithm setting, the default number is 200. 80 | fn pop_num(usize) 81 | } 82 | 83 | /// Pareto front limit. 84 | /// 85 | /// It is not working for single-objective optimization. 86 | /// 87 | /// ``` 88 | /// use metaheuristics_nature::{Rga, Solver}; 89 | /// # use metaheuristics_nature::tests::TestMO as MyFunc; 90 | /// 91 | /// let s = Solver::build(Rga::default(), MyFunc::new()) 92 | /// .seed(0) 93 | /// .task(|ctx| ctx.gen == 20) 94 | /// .pareto_limit(10) 95 | /// .solve(); 96 | /// ``` 97 | /// 98 | /// # Default 99 | /// 100 | /// By default, there is no limit. The limit is set to `usize::MAX`. 101 | pub fn pareto_limit(self, pareto_limit: usize) -> Self 102 | where 103 | F::Ys: Fitness = Pareto>, 104 | { 105 | Self { pareto_limit, ..self } 106 | } 107 | 108 | /// Set a fixed random seed to get a determined result. 109 | /// 110 | /// # Default 111 | /// 112 | /// By default, the random seed is auto-decided so you cannot reproduce the 113 | /// result. Please print the seed via [`Solver::seed()`] method to get the 114 | /// seed that used in the algorithm. 115 | pub fn seed(self, seed: impl Into) -> Self { 116 | Self { seed: seed.into(), ..self } 117 | } 118 | 119 | /// Initialize the pool with the pool option. 
120 | /// 121 | /// # Default 122 | /// 123 | /// By default, the pool is generated by the uniform distribution 124 | /// [`uniform_pool()`]. 125 | pub fn init_pool(self, pool: Pool<'a, F>) -> Self { 126 | Self { pool, ..self } 127 | } 128 | 129 | /// Termination condition. 130 | /// 131 | /// The task function will be check each iteration, breaks if the return is 132 | /// true. 133 | /// 134 | /// ``` 135 | /// use metaheuristics_nature::{Rga, Solver}; 136 | /// # use metaheuristics_nature::tests::TestObj as MyFunc; 137 | /// 138 | /// let s = Solver::build(Rga::default(), MyFunc::new()) 139 | /// .seed(0) 140 | /// .task(|ctx| ctx.gen == 20) 141 | /// .solve(); 142 | /// ``` 143 | /// 144 | /// # Default 145 | /// 146 | /// By default, the algorithm will iterate 200 generation. 147 | pub fn task<'b, C>(self, task: C) -> SolverBuilder<'b, A, F> 148 | where 149 | 'a: 'b, 150 | C: FnMut(&Ctx) -> bool + Send + 'b, 151 | { 152 | SolverBuilder { task: Box::new(task), ..self } 153 | } 154 | 155 | /// Set callback function. 156 | /// 157 | /// Callback function allows to change an outer mutable variable in each 158 | /// iteration. 159 | /// 160 | /// ``` 161 | /// use metaheuristics_nature::{Rga, Solver}; 162 | /// # use metaheuristics_nature::tests::TestObj as MyFunc; 163 | /// 164 | /// let mut report = Vec::with_capacity(20); 165 | /// let s = Solver::build(Rga::default(), MyFunc::new()) 166 | /// .seed(0) 167 | /// .task(|ctx| ctx.gen == 20) 168 | /// .callback(|ctx| report.push(ctx.best.get_eval())) 169 | /// .solve(); 170 | /// ``` 171 | /// 172 | /// # Default 173 | /// 174 | /// By default, this function does nothing. 175 | pub fn callback<'b, C>(self, callback: C) -> SolverBuilder<'b, A, F> 176 | where 177 | 'a: 'b, 178 | C: FnMut(&Ctx) + Send + 'b, 179 | { 180 | SolverBuilder { callback: Box::new(callback), ..self } 181 | } 182 | 183 | /// Create the task and run the algorithm, which may takes a lot of time. 
184 | /// 185 | /// Generation `ctx.gen` is start from 1, initialized at 0. 186 | /// 187 | /// # Panics 188 | /// 189 | /// Panics before starting the algorithm if the following conditions are 190 | /// met: 191 | /// + The dimension size is zero. 192 | /// + The lower bound is greater than the upper bound. 193 | /// + Using the [`Pool::Ready`] option and the pool size or dimension size 194 | /// is not consistent. 195 | pub fn solve(self) -> Solver { 196 | let Self { 197 | func, 198 | mut algorithm, 199 | pop_num, 200 | pareto_limit, 201 | seed, 202 | pool, 203 | mut task, 204 | mut callback, 205 | } = self; 206 | assert!(func.dim() != 0, "Dimension should be greater than 0"); 207 | assert!( 208 | func.bound().iter().all(|[lb, ub]| lb <= ub), 209 | "Lower bound should be less than upper bound" 210 | ); 211 | let mut rng = Rng::new(seed); 212 | let mut ctx = match pool { 213 | Pool::Ready { pool, pool_y } => { 214 | assert_eq!(pool.len(), pool_y.len(), "Pool size mismatched"); 215 | let dim = func.dim(); 216 | pool.iter() 217 | .for_each(|xs| assert!(xs.len() == dim, "Pool dimension mismatched")); 218 | Ctx::from_parts(func, pareto_limit, pool, pool_y) 219 | } 220 | Pool::UniformBy(filter) => { 221 | let dim = func.dim(); 222 | let mut pool = Vec::with_capacity(pop_num); 223 | let rand_f = uniform_pool(); 224 | while pool.len() < pop_num { 225 | let xs = (0..dim) 226 | .map(|s| rand_f(s, func.bound_range(s), &mut rng)) 227 | .collect::>(); 228 | if filter(&xs) { 229 | pool.push(xs); 230 | } 231 | } 232 | Ctx::from_pool(func, pareto_limit, pool) 233 | } 234 | Pool::Func(f) => { 235 | let dim = func.dim(); 236 | let pool = (0..pop_num) 237 | .map(|_| { 238 | (0..dim) 239 | .map(|s| f(s, func.bound_range(s), &mut rng)) 240 | .collect() 241 | }) 242 | .collect(); 243 | Ctx::from_pool(func, pareto_limit, pool) 244 | } 245 | }; 246 | algorithm.init(&mut ctx, &mut rng); 247 | loop { 248 | callback(&ctx); 249 | if task(&ctx) { 250 | break; 251 | } 252 | ctx.gen += 1; 253 
| algorithm.generation(&mut ctx, &mut rng); 254 | } 255 | Solver::new(ctx, rng.seed()) 256 | } 257 | } 258 | 259 | impl Solver { 260 | /// Start to build a solver. Take a setting and setup the configurations. 261 | /// 262 | /// The signature is something like `Solver::build(Rga::default(), 263 | /// MyFunc::new())`. Please check the [`SolverBuilder`] type, it will help 264 | /// you choose your configuration. 265 | /// 266 | /// If all things are well-setup, call [`SolverBuilder::solve()`]. 267 | /// 268 | /// The default value of each option can be found in their document. 269 | /// 270 | /// Use [`Solver::build_boxed()`] for dynamic dispatching. 271 | pub fn build(cfg: A, func: F) -> SolverBuilder<'static, A::Algorithm, F> { 272 | Self::build_default(cfg.algorithm(), A::pop_num(), func) 273 | } 274 | 275 | /// Start to build a solver with a boxed algorithm, the dynamic dispatching. 276 | /// 277 | /// This method allows you to choose the algorithm at runtime and mix them 278 | /// with the same type. 279 | /// 280 | /// ``` 281 | /// use metaheuristics_nature as mh; 282 | /// # use metaheuristics_nature::tests::TestObj as MyFunc; 283 | /// 284 | /// # let use_ga = true; 285 | /// let s = if use_ga { 286 | /// mh::Solver::build_boxed(mh::Rga::default(), MyFunc::new()) 287 | /// } else { 288 | /// mh::Solver::build_boxed(mh::De::default(), MyFunc::new()) 289 | /// }; 290 | /// ``` 291 | /// 292 | /// Use [`Solver::build()`] for optimized memory allocation and access. 
293 | pub fn build_boxed(cfg: A, func: F) -> SolverBox<'static, F> { 294 | Self::build_default(Box::new(cfg.algorithm()), A::pop_num(), func) 295 | } 296 | 297 | fn build_default>( 298 | algorithm: A, 299 | pop_num: usize, 300 | func: F, 301 | ) -> SolverBuilder<'static, A, F> { 302 | SolverBuilder { 303 | func, 304 | algorithm, 305 | pop_num, 306 | pareto_limit: usize::MAX, 307 | seed: SeedOpt::Entropy, 308 | pool: Pool::Func(Box::new(uniform_pool())), 309 | task: Box::new(|ctx| ctx.gen == 200), 310 | callback: Box::new(|_| ()), 311 | } 312 | } 313 | } 314 | 315 | /// A function generates a uniform pool. 316 | /// 317 | /// See also [`gaussian_pool()`], [`Pool::Func`], and 318 | /// [`SolverBuilder::init_pool()`]. 319 | pub fn uniform_pool() -> PoolFunc<'static> { 320 | Box::new(move |_, range, rng| rng.range(range)) 321 | } 322 | 323 | /// A function generates a Gaussian pool. 324 | /// 325 | /// Where `mean` is the mean value, `std` is the standard deviation. 326 | /// 327 | /// See also [`uniform_pool()`], [`Pool::Func`], and 328 | /// [`SolverBuilder::init_pool()`]. 329 | /// 330 | /// # Panics 331 | /// 332 | /// Panic when the lengths of `mean` and `std` are not the same. 333 | pub fn gaussian_pool<'a>(mean: &'a [f64], std: &'a [f64]) -> PoolFunc<'a> { 334 | assert_eq!(mean.len(), std.len()); 335 | Box::new(move |s, _, rng| rng.normal(mean[s], std[s])) 336 | } 337 | --------------------------------------------------------------------------------