├── LICENSE
├── README.md
├── pyMultiobjective
├── __init__.py
├── algorithm
│ ├── __init__.py
│ ├── c_n_ii.py
│ ├── ctaea.py
│ ├── grea.py
│ ├── hype.py
│ ├── ibea_fc.py
│ ├── ibea_hv.py
│ ├── moead.py
│ ├── n_ii.py
│ ├── n_iii.py
│ ├── naemo.py
│ ├── omopso.py
│ ├── paes.py
│ ├── rvea.py
│ ├── s_ii.py
│ ├── smpso.py
│ ├── sms_emoa.py
│ └── u_n_iii.py
├── test_functions
│ ├── __init__.py
│ └── mult_many.py
└── util
│ ├── __init__.py
│ ├── graphs.py
│ └── indicators.py
└── setup.py
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright © 2023 by Valdecy Pereira
2 |
3 | pyMultiobjective is free software: you can redistribute it and/or modify
4 | it under the terms of the GNU General Public License as published by
5 | the Free Software Foundation, either version 3 of the License, or
6 | (at your option) any later version.
7 |
8 | pyMultiobjective is distributed in the hope that it will be useful,
9 | but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | GNU General Public License for more details.
12 |
13 | You should have received a copy of the GNU General Public License
14 | along with pyMultiobjective. If not, see <https://www.gnu.org/licenses/>.
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # pyMultiobjective
2 |
3 | ## Introduction
4 |
5 | A python library for the following Multiobjective Optimization Algorithms or Many Objectives Optimization Algorithms: **C-NSGA II** (Clustered Non-Dominated Sorting Genetic Algorithm II); **CTAEA** (Constrained Two Archive Evolutionary Algorithm); **GrEA** (Grid-based Evolutionary Algorithm); **HypE** (Hypervolume Estimation Multiobjective Optimization Algorithm); **IBEA-FC** (Indicator-Based Evolutionary Algorithm with Fast Comparison Indicator); **IBEA-HV** (Indicator-Based Evolutionary Algorithm with Hypervolume Indicator); **MOEA/D** (Multiobjective Evolutionary Algorithm Based on Decomposition); **NAEMO** (Neighborhood-sensitive Archived Evolutionary Many-objective Optimization); **NSGA II** (Non-Dominated Sorting Genetic Algorithm II); **NSGA III** (Non-Dominated Sorting Genetic Algorithm III); **OMOPSO** (Optimized Multiobjective Particle Swarm Optimization); **PAES** (Pareto Archived Evolution Strategy) with Fast Non-Dominance Sorting); **RVEA** (Reference Vector Guided Evolutionary Algorithm); **SMPSO** (Speed-Constrained Multiobjective Particle Swarm Optimization); **SMS-EMOA** (S-Metric Selection Evolutionary Multiobjective Optimization Algorithm); **SPEA2** (Strength Pareto Evolutionary Algorithm 2); **U-NSGA III** (Unified Non-Dominated Sorting Genetic Algorithm III).
6 |
7 | ## Usage
8 |
9 | 1. Install
10 | ```bash
11 | pip install pyMultiobjective
12 | ```
13 |
14 | 2. Import
15 | ```py3
16 |
17 | # Import NSGA III
18 | from pyMultiobjective.algorithm import non_dominated_sorting_genetic_algorithm_III
19 |
20 | # Import Test Functions. Available Test Functions: Dent, DTLZ1, DTLZ2, DTLZ3, DTLZ4, DTLZ5, DTLZ6, DTLZ7, Fonseca-Fleming, Kursawe, Poloni, Schaffer1, Schaffer2, ZDT1, ZDT2, ZDT3, ZDT4, ZDT6, Viennet1, Viennet2, Viennet3
21 | from pyMultiobjective.test_functions import dent_f1, dent_f2
22 |
23 | # OR Define your Own Custom Function. The function input should be a list of values,
24 | # each value represents a dimension (x1, x2, ...xn) of the problem.
25 |
26 | # Run NSGA III
27 | parameters = {
28 | 'references': 5,
29 | 'min_values': (-5, -5),
30 | 'max_values': (5, 5),
31 | 'mutation_rate': 0.1,
32 | 'generations': 1500,
33 | 'mu': 1,
34 | 'eta': 1,
35 | 'k': 2,
36 | 'verbose': True
37 | }
38 | sol = non_dominated_sorting_genetic_algorithm_III(list_of_functions = [dent_f1, dent_f2], **parameters)
39 |
40 | # Import Graphs
41 | from pyMultiobjective.util import graphs
42 |
43 | # Plot Solution - Scatter Plot
44 | parameters = {
45 | 'min_values': (-5, -5),
46 | 'max_values': (5, 5),
47 | 'step': (0.1, 0.1),
48 | 'solution': sol,
49 | 'show_pf': True,
50 | 'show_pts': True,
51 | 'show_sol': True,
52 | 'pf_min': True, # True = Minimum Pareto Front; False = Maximum Pareto Front
53 | 'custom_pf': [], # Input a custom Pareto Front(numpy array where each column is an Objective Function)
54 | 'view': 'browser'
55 | }
56 | graphs.plot_mooa_function(list_of_functions = [dent_f1, dent_f2], **parameters)
57 |
58 | # Plot Solution - Parallel Plot
59 | parameters = {
60 | 'min_values': (-5, -5),
61 | 'max_values': (5, 5),
62 | 'step': (0.1, 0.1),
63 | 'solution': sol,
64 | 'show_pf': True,
65 | 'pf_min': True, # True = Minimum Pareto Front; False = Maximum Pareto Front
66 | 'custom_pf': [], # Input a custom Pareto Front(numpy array where each column is an Objective Function)
67 | 'view': 'browser'
68 | }
69 | graphs.parallel_plot(list_of_functions = [dent_f1, dent_f2], **parameters)
70 |
71 | # Plot Solution - Andrews Plot
72 | parameters = {
73 | 'min_values': (-5, -5),
74 | 'max_values': (5, 5),
75 | 'step': (0.1, 0.1),
76 | 'solution': sol,
77 | 'normalize': True,
78 | 'size_x': 15,
79 | 'size_y': 15,
80 | 'show_pf': True,
81 | 'pf_min': True, # True = Minimum Pareto Front; False = Maximum Pareto Front
82 | 'custom_pf': [] # Input a custom Pareto Front(numpy array where each column is an Objective Function)
83 | }
84 | graphs.andrews_plot(list_of_functions = [dent_f1, dent_f2], **parameters)
85 |
86 | # Import Performance Indicators. Available Performance Indicators: GD, GD+, IGD, IGD+, Maximum Spread, Spacing and Hypervolume
87 | from pyMultiobjective.util import indicators
88 |
89 | parameters = {
90 | 'min_values': (-5, -5),
91 | 'max_values': (5, 5),
92 | 'step': (0.1, 0.1),
93 | 'solution': sol,
94 | 'pf_min': True, # True = Minimum Pareto Front; False = Maximum Pareto Front
95 | 'custom_pf': [] # Input a custom Pareto Front(numpy array where each column is an Objective Function)
96 | }
97 | gd = indicators.gd_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
98 | gdp = indicators.gd_plus_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
99 | igd = indicators.igd_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
100 | igdp = indicators.igd_plus_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
101 | ms = indicators.ms_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
102 | sp = indicators.sp_indicator(list_of_functions = [dent_f1, dent_f2], **parameters)
103 |
104 | print('GD = ', gd)
105 | print('GDP = ', gdp)
106 | print('IGD = ', igd)
107 | print('IGDP = ', igdp)
108 | print('MS = ', ms)
109 | print('SP = ', sp)
110 |
111 |
112 | parameters = {
113 | 'solution': sol,
114 | 'n_objs': 2,
115 | 'ref_point': [], # A Reference Point. If empty, an arbitrary Reference Point will be Used
116 | }
117 | hypervolume = indicators.hv_indicator(**parameters)
118 | print('Hypervolume = ', hypervolume)
119 |
120 | ```
121 |
122 | 3. Try it in **Colab**
123 | - C-NSGA II ([ Colab Demo ](https://colab.research.google.com/drive/1sXxCWV6dDmNXmes7RDka4OqKOtM0t9YX?usp=sharing)) ( [ Original Paper ](https://open.metu.edu.tr/bitstream/handle/11511/69040/12625931.pdf))
124 | - CTAEA ([ Colab Demo ](https://colab.research.google.com/drive/1IC5m7JfmhT0ihWBhziQdfyq1PAHrmW1p?usp=sharing)) ( [ Original Paper ](https://doi.org/10.48550/arXiv.2103.06382))
125 | - GrEA ([ Colab Demo ](https://colab.research.google.com/drive/1H2w77kCGUj33qI7uIE-e68999zy1L8tf?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1109/TEVC.2012.2227145))
126 | - HypE ([ Colab Demo ](https://colab.research.google.com/drive/1cpIWZTECKfyf9jp_iiSuOJaWcWjy_NCr?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1162/EVCO_a_00009))
127 | - IBEA-FC ([ Colab Demo ](https://colab.research.google.com/drive/1BBD0nWaE5SqL5n2Jpa_fDYgkWGSpy8xu?usp=sharing)) ( [ Original Paper ](https://www.simonkuenzli.ch/docs/ZK04.pdf))
128 | - IBEA-HV ([ Colab Demo ](https://colab.research.google.com/drive/1XoiEAR3xpx0DbivrSp_QEFA32xm_R1lk?usp=sharing)) ( [ Original Paper ](https://www.simonkuenzli.ch/docs/ZK04.pdf))
129 | - MOEA/D ([ Colab Demo ](https://colab.research.google.com/drive/1BP2qM9coiOTq28ZYeQEqxHSCHBeh3-Io?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1109/TEVC.2007.892759))
130 | - NAEMO ([ Colab Demo ](https://colab.research.google.com/drive/1ctVjjOKhLQ1DqQJ0ozcvp2pClmbwBg8O?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1016/j.swevo.2018.12.002))
131 | - NSGA II ([ Colab Demo ](https://colab.research.google.com/drive/1aD1uiJOCezCG6lotMAQENGas4abEO3_6?usp=sharing)) ( [ Original Paper ](http://dx.doi.org/10.1109/4235.996017))
132 | - NSGA III ([ Colab Demo ](https://colab.research.google.com/drive/18zcEdU3NNplFiXAqH8g-oSrEhWB-uqQN?usp=sharing)) ( [ Original Paper ](http://dx.doi.org/10.1109/TEVC.2013.2281535))
133 | - OMOPSO ([ Colab Demo ](https://colab.research.google.com/drive/1cvSZllLYhU6UvuFM7KgDvb1YaNLZVU32?usp=sharing)) ( [ Original Paper ](http://dx.doi.org/10.1007/978-3-540-31880-4_35))
134 | - PAES ([ Colab Demo ](https://colab.research.google.com/drive/1iz5Q9CYiLpyYEKJzd0KwQrGrZykr49TX?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1109/CEC.1999.781913))
135 | - RVEA ([ Colab Demo ](https://colab.research.google.com/drive/1KYYAsMM52P6lxHRk5a9P8yrnRhwCgT5i?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1109/TEVC.2016.2519378))
136 | - SMPSO ([ Colab Demo ](https://colab.research.google.com/drive/17m9AT9ORHvVqeqaRjBga1XCEuyG1EPzz?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1109/MCDM.2009.4938830))
137 | - SMS-EMOA ([ Colab Demo ](https://colab.research.google.com/drive/1hCAW70vVRC-NXmkHPUdX_gK2aADyliQS?usp=sharing)) ( [ Original Paper ](https://doi.org/10.1016/j.ejor.2006.08.008))
138 | - SPEA2 ([ Colab Demo ](https://colab.research.google.com/drive/1OrxJxxAMSpKu_xSWc9UQlPOeM_mmVHmW?usp=sharing)) ( [ Original Paper ](https://kdd.cs.ksu.edu/Courses/CIS830/Handouts/P8.pdf))
139 | - U-NSGA III ([ Colab Demo ](https://colab.research.google.com/drive/1-AO_S6OlqzbA54DlMFBDGEL-wHh9hayH?usp=sharing)) ( [ Original Paper ](https://www.egr.msu.edu/~kdeb/papers/c2014022.pdf))
140 |
141 | 4. Test Functions
142 | - Dent ( [ Paper ](https://doi.org/10.1007/978-3-319-44003-3_12)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Dent.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1VTHJMmFUHw97tLu5jkTQCkupzj7VA2yp?usp=sharing))
143 | - DTLZ1 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ1.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1ENkr2yDACfRwX1ZIidwC15T_YWJrDMtk?usp=sharing))
144 | - DTLZ2 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ2.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1pmh6-4jWfQ2eXOzkUm2oydvKc5c0NALz?usp=sharing))
145 | - DTLZ3 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ3.txt) ) ( [ Plot ](https://colab.research.google.com/drive/10pmPlqgrkwAcjA15jWSimgIH5rTWraZN?usp=sharing))
146 | - DTLZ4 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ4.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1PbGRqbLI-wFWg1Orvr0X7Reh5er0vNdW?usp=sharing))
147 | - DTLZ5 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ5.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1mF6UR_yXBSC3E4vHbgfAHFxA5z3rVCWx?usp=sharing))
148 | - DTLZ6 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ6.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1U4E__uPcb5zdztFr8EiQqdegn53Xq7oX?usp=sharing))
149 | - DTLZ7 ( [ Paper ](https://doi.org/10.1109/CEC.2002.1007032)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/DTLZ7.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1mNK9WXdYTKP8NWYACXBeTZwWml1M4woZ?usp=sharing))
150 | - Fonseca-Fleming ( [ Paper ](https://doi.org/10.1162/evco.1995.3.1.1)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Fonseca_Fleming.txt) ) ( [ Plot ](https://colab.research.google.com/drive/14LS3MNRwmgbq9ZqA6K7vfbBqMJE4hH4v?usp=sharing))
151 | - Kursawe ( [ Paper ](https://doi.org/10.1007/BFb0029752)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Kursawe.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1bG15YQiRjVX9r4mtEpnt9D-IA2Cjx7Q_?usp=sharing))
152 | - Poloni ( [ Paper ](https://www.researchgate.net/publication/243686783_Hybrid_GA_for_multi_objective_aerodynamic_shape_optimization)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Poloni.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1USBnNWf-UayqZERHq5PM6WWExcwOrOin?usp=sharing))
153 | - Schaffer1 ( [ Paper ](https://www.researchgate.net/publication/236443691_Some_Experiments_in_Machine_Learning_Using_Vector_Evaluated_Genetic_Algorithms)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Schaffer1.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1XIpcbZOHJq1xneYiyxCeb-ZhK9QZ3-81?usp=sharing))
154 | - Schaffer2 ( [ Paper ](https://www.researchgate.net/publication/236443691_Some_Experiments_in_Machine_Learning_Using_Vector_Evaluated_Genetic_Algorithms)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Schaffer2.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1X5-c9e5wAss9pZE6xOUGt-cVhZTeQ50K?usp=sharing))
155 | - ZDT1 ( [ Paper ](https://doi.org/10.1162/106365600568202)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/ZDT1.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1Cvzz5o1KWT9vNEyeirUa_pH8jsyOTp8d?usp=sharing))
156 | - ZDT2 ( [ Paper ](https://doi.org/10.1162/106365600568202)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/ZDT2.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1AvXi0fV7PEdcByDDaPCyHP1CyARf3hQV?usp=sharing))
157 | - ZDT3 ( [ Paper ](https://doi.org/10.1162/106365600568202)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/ZDT3.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1XTq9CRYKDu5KPKMAY7w9edF05wJHXhFB?usp=sharing))
158 | - ZDT4 ( [ Paper ](https://doi.org/10.1162/106365600568202)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/ZDT4.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1pEQxM18HD2ZnwU4E7hXY3IYIvq7QrJ0U?usp=sharing))
159 | - ZDT6 ( [ Paper ](https://doi.org/10.1162/106365600568202)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/ZDT6.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1xAG_8N-K-X83DNj24tWDNbb15q_KDi7V?usp=sharing))
160 | - Viennet1 ( [ Paper ](https://doi.org/10.1080/00207729608929211)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Viennet1.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1JqtvAZw1Mh3VmEURZANtZBgYmd07j9vN?usp=sharing))
161 | - Viennet2 ( [ Paper ](https://doi.org/10.1080/00207729608929211)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Viennet2.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1tUFYPj9A3herBXg4rSylfBCP2jpDXIm8?usp=sharing))
162 | - Viennet3 ( [ Paper ](https://doi.org/10.1080/00207729608929211)) ( [Pareto Front](https://github.com/Valdecy/Datasets/raw/master/Pareto%20Front/Viennet3.txt) ) ( [ Plot ](https://colab.research.google.com/drive/1okdsDM01DTuvg6VG_hozU42_KqScBcz7?usp=sharing))
163 |
164 | 5. Performance Indicators
165 | - GD ( [ Paper ](https://apps.dtic.mil/sti/pdfs/ADA364478.pdf))
166 | - GD+ ( [ Paper ](https://doi.org/10.1007/978-3-319-15892-1_8))
167 | - IGD ( [ Paper ](https://doi.org/10.1007/978-3-540-24694-7_71))
168 | - IGD+ ( [ Paper ](https://doi.org/10.1007/978-3-319-15892-1_8))
169 | - Maximum Spread ( [ Paper ](https://doi.org/10.1162/106365600568202))
170 | - Spacing ( [ Paper ](https://doi.org/10.1109/TEVC.2006.882428))
171 | - Hypervolume ( [ Paper ](https://scholar.afit.edu/cgi/viewcontent.cgi?article=6130&context=etd))
172 |
173 | # Single Objective Optimization
174 | For Single Objective Optimization try [pyMetaheuristic](https://github.com/Valdecy/pyMetaheuristic)
175 |
176 | # TSP (Travelling Salesman Problem)
177 | For Travelling Salesman Problems try [pyCombinatorial](https://github.com/Valdecy/pyCombinatorial)
178 |
179 | # Acknowledgement
180 | This section is dedicated to all the people that helped to improve or correct the code. Thank you very much!
181 |
182 | * Wei Chen (07.AUGUST.2019) - AFRL Summer Intern/Rising Senior at Stony Brook University.
183 |
--------------------------------------------------------------------------------
/pyMultiobjective/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/__init__.py:
--------------------------------------------------------------------------------
1 | from .c_n_ii import clustered_non_dominated_sorting_genetic_algorithm_II
2 | from .ctaea import constrained_two_archive_evolutionary_algorithm
3 | from .grea import grid_based_evolutionary_algorithm
4 | from .hype import hypervolume_estimation_mooa
5 | from .ibea_fc import indicator_based_evolutionary_algorithm_fc
6 | from .ibea_hv import indicator_based_evolutionary_algorithm_hv
7 | from .moead import multiobjective_evolutionary_algorithm_based_on_decomposition
8 | from .n_ii import non_dominated_sorting_genetic_algorithm_II
9 | from .n_iii import non_dominated_sorting_genetic_algorithm_III
10 | from .naemo import neighborhood_sensitive_archived_evolutionary_many_objective_optimization
11 | from .omopso import optimized_multiobjective_particle_swarm_optimization
12 | from .paes import pareto_archived_evolution_strategy
13 | from .rvea import reference_vector_guided_evolutionary_algorithm
14 | from .s_ii import strength_pareto_evolutionary_algorithm_2
15 | from .smpso import speed_constrained_multiobjective_particle_swarm_optimization
16 | from .sms_emoa import s_metric_selection_evolutionary_multiobjective_optimization_algorithm
17 | from .u_n_iii import unified_non_dominated_sorting_genetic_algorithm_III
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/c_n_ii.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: C-NSGA-II (Clustered Non-Dominated Sorting Genetic Algorithm II)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: c_n_ii.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
21 | # Function 1
def func_1():
    # Placeholder objective; callers supply real objective functions at run time.
    return None
24 |
25 | # Function 2
def func_2():
    # Placeholder objective; callers supply real objective functions at run time.
    return None
28 |
29 | ############################################################################
30 |
31 | # Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Build a random starting population.

    Each row holds the decision variables (drawn uniformly from the
    [min, max] box) followed by the objective values obtained by
    evaluating every function in `list_of_functions` on those variables.
    """
    n_vars = len(min_values)
    n_objs = len(list_of_functions)
    population = np.zeros((population_size, n_vars + n_objs))
    for row in range(0, population_size):
        for col in range(0, n_vars):
            population[row, col] = random.uniform(min_values[col], max_values[col])
        for k in range(1, n_objs + 1):
            population[row, -k] = list_of_functions[-k](list(population[row, 0:n_vars]))
    return population
40 |
41 | ############################################################################
42 |
43 | # Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """Rank rows of `population` into Pareto fronts (minimization).

    Only the last `number_of_functions` columns (the objective values) are
    compared. Returns an (N, 1) array where 1 marks the best, non-dominated
    front, 2 the next front, and so on.

    Note: the comparison is `<=` on all objectives, so rows with identical
    objectives count each other as dominated and share the same front.
    """
    size     = population.shape[0]
    objs     = population[:, -number_of_functions:]
    dominated = [[] for _ in range(size)]   # dominated[p]: individuals p dominates
    counters  = [0] * size                  # counters[p]: how many individuals dominate p
    fronts    = [[]]
    for p in range(size):
        for q in range(size):
            if (objs[p] <= objs[q]).all():
                if q not in dominated[p]:
                    dominated[p].append(q)
            elif (objs[q] <= objs[p]).all():
                counters[p] += 1
        if counters[p] == 0 and p not in fronts[0]:
            fronts[0].append(p)
    level = 0
    while fronts[level]:
        upcoming = []
        for p in fronts[level]:
            for q in dominated[p]:
                counters[q] -= 1
                if counters[q] == 0 and q not in upcoming:
                    upcoming.append(q)
        level += 1
        fronts.append(upcoming)
    fronts.pop()   # the loop always appends one trailing empty front
    rank = np.zeros((size, 1))
    for depth, members in enumerate(fronts):
        for member in members:
            rank[member, 0] = depth + 1
    return rank
80 |
81 | # Function: Sort Population by Rank
def sort_population_by_rank(population, rank):
    """Reorder `population` (and `rank`) by ascending front rank; returns both."""
    order = np.argsort(rank[:, 0], axis = 0).tolist()
    return population[order, :], rank[order, :]
87 |
88 | ############################################################################
89 |
90 | # Function: Dominance
def dominance_function(solution_1, solution_2, number_of_functions = 2):
    """Pareto dominance check (minimization).

    True when solution_1 is no worse than solution_2 on every one of the
    last `number_of_functions` entries (equal solutions also return True).
    """
    return all(solution_1[-k] <= solution_2[-k] for k in range(1, number_of_functions + 1))
102 |
103 | # Function: Raw Fitness
def raw_fitness_function(population, number_of_functions = 2):
    """SPEA2-style raw fitness.

    An individual's strength is the number of individuals it dominates; its
    raw fitness is the sum of the strengths of everyone dominating it
    (0 means non-dominated, larger is worse).
    """
    size        = population.shape[0]
    strength    = np.zeros((size, 1))
    raw_fitness = np.zeros((size, 1))
    for i in range(0, size):
        for j in range(0, size):
            if (i != j and dominance_function(solution_1 = population[i,:], solution_2 = population[j,:], number_of_functions = number_of_functions)):
                strength[i, 0] = strength[i, 0] + 1
    for i in range(0, size):
        for j in range(0, size):
            if (i != j and dominance_function(solution_1 = population[i,:], solution_2 = population[j,:], number_of_functions = number_of_functions)):
                raw_fitness[j, 0] = raw_fitness[j, 0] + strength[i, 0]
    return raw_fitness
118 |
119 | # Function: Build Distance Matrix
def euclidean_distance(coordinates):
    """Pairwise Euclidean distance matrix for the row vectors in `coordinates`."""
    points  = coordinates
    stacked = points.reshape(np.prod(points.shape[:-1]), 1, points.shape[-1])
    diff    = stacked - points
    return np.sqrt(np.einsum('ijk,ijk->ij', diff, diff)).squeeze()
124 |
125 | # Function: Fitness
def fitness_calculation(population, raw_fitness, number_of_functions = 2):
    """SPEA2-style final fitness: raw fitness plus a density term.

    The density term is 1/(d_k + 2), where d_k is the distance (in objective
    space) to the k-th nearest neighbour and k = int(sqrt(N)) - 1. With k = 0
    the self-distance (0) is used, giving a constant 0.5 density.
    """
    k       = int(len(population)**(1/2)) - 1
    n_rows  = population.shape[0]
    fitness = np.zeros((n_rows, 1))
    distance = euclidean_distance(population[:, population.shape[1] - number_of_functions:])
    for i in range(0, n_rows):
        # Sorting rows by column i then transposing puts the k-th smallest
        # distance to individual i at [i, k].
        by_closeness = (distance[distance[:, i].argsort()]).T
        fitness[i, 0] = raw_fitness[i, 0] + 1/(by_closeness[i, k] + 2)
    return fitness
134 |
135 | # Function: Selection
def roulette_wheel(fitness_new):
    """Select one population index by fitness-proportional (roulette-wheel) sampling.

    `fitness_new` is an (N, 1) array of SPEA2 fitness values where LOWER is
    better, so each value is inverted into a positive selection share before
    building the cumulative distribution. Returns the chosen row index.

    BUG FIX: the original read the freshly-zeroed working array `fitness`
    instead of `fitness_new` when computing the shares, which gave every
    individual an identical share and made selection uniformly random.
    """
    fitness = np.zeros((fitness_new.shape[0], 2))
    for i in range(0, fitness.shape[0]):
        # Invert (lower fitness -> larger share); the abs(min) shift keeps the
        # denominator positive even if a value were negative.
        fitness[i, 0] = 1/(1 + fitness_new[i, 0] + abs(fitness_new[:, 0].min()))
    fit_sum = fitness[:, 0].sum()
    fitness[0, 1] = fitness[0, 0]
    for i in range(1, fitness.shape[0]):
        fitness[i, 1] = fitness[i, 0] + fitness[i - 1, 1]
    fitness[:, 1] = fitness[:, 1]/fit_sum
    ix = 0
    # Uniform draw in [0, 1]; named `draw` so it no longer shadows the `random` module.
    draw = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    for i in range(0, fitness.shape[0]):
        if (draw <= fitness[i, 1]):
            ix = i
            break
    return ix
153 |
154 | ############################################################################
155 |
156 | # Function: Offspring
def breeding(population, fitness, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """Create offspring via roulette-wheel parent selection and SBX crossover.

    Two parents are drawn with `roulette_wheel`; each offspring variable is a
    simulated-binary-crossover (SBX) blend of the parents, clipped to the
    [min, max] box, and the objective columns are re-evaluated afterwards.
    `mu` is the SBX distribution index (larger -> children closer to parents).
    """
    offspring = np.copy(population)
    n_vars    = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        parent_1, parent_2 = roulette_wheel(fitness), roulette_wheel(fitness)
        while parent_1 == parent_2:
            # BUG FIX: re-draw over the FULL population. The original used
            # range(0, len(population) - 1), which could never select the
            # last individual as the replacement second parent.
            parent_2 = random.sample(range(0, len(population)), 1)[0]
        for j in range(0, n_vars):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor: contracting branch for rand <= 0.5, expanding otherwise.
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i, j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i, j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i, -k] = list_of_functions[-k](offspring[i, 0:n_vars])
    return offspring
181 |
182 | # Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Apply polynomial mutation in place to each decision variable.

    Each variable mutates with probability `mutation_rate`; `eta` is the
    distribution index (larger -> smaller perturbations). Objective columns
    are re-evaluated after each row is mutated.
    """
    n_objs = len(list_of_functions)
    n_vars = offspring.shape[1] - n_objs
    for row in range(0, offspring.shape[0]):
        for col in range(0, n_vars):
            probability = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if probability < mutation_rate:
                rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                if rand <= 0.5:
                    delta = (2*rand_d)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for k in range(1, n_objs + 1):
            offspring[row, -k] = list_of_functions[-k](offspring[row, 0:n_vars])
    return offspring
201 |
202 | ############################################################################
203 |
204 | # C-NSGA II Function
def clustered_non_dominated_sorting_genetic_algorithm_II(population_size = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, verbose = True):
    """Run the C-NSGA-II main loop and return the final population.

    Each iteration stacks parents and offspring, ranks the union by fast
    non-dominated sorting, truncates back to `population_size`, then breeds
    (SBX, index `mu`) and mutates (polynomial, index `eta`) new offspring.
    Note the loop body executes `generations` + 1 times, matching the
    original `while count <= generations` behaviour.
    """
    n_objs     = len(list_of_functions)
    population = initial_population(population_size = population_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    offspring  = initial_population(population_size = population_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    for generation in range(0, generations + 1):
        if verbose:
            print('Generation = ', generation)
        population       = np.vstack([population, offspring])
        rank             = fast_non_dominated_sorting(population, number_of_functions = n_objs)
        population, rank = sort_population_by_rank(population, rank)
        population, rank = population[0:population_size, :], rank[0:population_size, :]
        raw_fitness      = raw_fitness_function(population, number_of_functions = n_objs)
        fitness          = fitness_calculation(population, raw_fitness, number_of_functions = n_objs)
        offspring        = breeding(population, fitness, mu = mu, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
        offspring        = mutation(offspring, mutation_rate = mutation_rate, eta = eta, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    return population
222 |
223 | ############################################################################
224 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/ctaea.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: CTAEA (Constrained Two Archive Evolutionary Algorithm)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: ctaea.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
22 | # Function 1
def func_1():
    # Stand-in objective; real objective functions are passed in by the caller.
    return None
25 |
26 | # Function 2
def func_2():
    # Stand-in objective; real objective functions are passed in by the caller.
    return None
29 |
30 | ############################################################################
31 |
32 | # Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Create a random population: decision variables sampled uniformly from
    the [min, max] box, followed by the evaluated objective columns."""
    dim   = len(min_values)
    n_obj = len(list_of_functions)
    population = np.zeros((population_size, dim + n_obj))
    for idx in range(0, population_size):
        # Variables are drawn in column order, matching the original call sequence.
        population[idx, :dim] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        for k in range(1, n_obj + 1):
            population[idx, -k] = list_of_functions[-k](list(population[idx, :dim]))
    return population
41 |
42 | ############################################################################
43 |
44 | # Function: Dominance
def dominance_function(solution_1, solution_2, number_of_functions = 2):
    """Pareto dominance check (minimization) over the trailing objective entries.

    Returns False as soon as solution_1 is strictly worse on any objective;
    equal solutions therefore return True.
    """
    for k in range(1, number_of_functions + 1):
        if solution_1[-k] > solution_2[-k]:
            return False
    return True
56 |
57 | # Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """Assign a non-domination level to each row of `population` (minimization).

    Compares only the last `number_of_functions` columns and returns an
    (N, 1) array: 1 for the best (non-dominated) front, 2 for the next, etc.
    Rows with identical objectives dominate each other and share a front.
    """
    pop_size = population.shape[0]
    scores   = population[:, -number_of_functions:]
    dominates    = {p: [] for p in range(pop_size)}   # individuals p dominates
    dominated_by = [0] * pop_size                     # count of individuals dominating p
    current = []
    for p in range(pop_size):
        for q in range(pop_size):
            if (scores[p] <= scores[q]).all():
                if q not in dominates[p]:
                    dominates[p].append(q)
            elif (scores[q] <= scores[p]).all():
                dominated_by[p] += 1
        if dominated_by[p] == 0 and p not in current:
            current.append(p)
    fronts = []
    while current:
        fronts.append(current)
        successors = []
        for p in current:
            for q in dominates[p]:
                dominated_by[q] -= 1
                if dominated_by[q] == 0 and q not in successors:
                    successors.append(q)
        current = successors
    rank = np.zeros((pop_size, 1))
    for level, members in enumerate(fronts):
        rank[members, 0] = level + 1
    return rank
94 |
# Function: Sort Population by Rank
def sort_population_by_rank(ca, rank, rp = 'none'):
    # rp == 'none': reorder the whole archive by ascending rank.
    # otherwise   : keep only the rows whose rank is <= rp (e.g. rp = 1 keeps
    #               just the first non-dominated front).
    if (rp == 'none'):
        keep = np.argsort(rank[:, 0], axis = 0).tolist()
    else:
        keep = np.where(rank <= rp)[0].tolist()
    return ca[keep, :]
104 |
105 | ############################################################################
106 |
# Function: Offspring
def breeding(ca, da, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2], size = 5):
    # Generates 'size' offspring over the joint CA + DA pool with SBX
    # crossover. Parent 1 comes from the archive whose first front holds the
    # larger share of the pool; parent 2 is drawn from CA with that same
    # probability, otherwise from DA.
    # Fix: parents are sampled from range(0, len(archive)) — the previous
    # range(0, len(archive) - 1) could never select the last archive member.
    offspring   = np.zeros((size, ca.shape[1]))
    cada        = np.vstack([ca, da])
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    rank_ca     = fast_non_dominated_sorting(ca, number_of_functions = len(list_of_functions))
    rank_ca     = rank_ca[rank_ca == 1]
    p_ca        = rank_ca.shape[0]/(ca.shape[0] + da.shape[0])
    rank_da     = fast_non_dominated_sorting(da, number_of_functions = len(list_of_functions))
    rank_da     = rank_da[rank_da == 1]
    p_da        = rank_da.shape[0]/(ca.shape[0] + da.shape[0])
    for i in range(0, offspring.shape[0]):
        if (p_ca > p_da):
            parent_1 = random.sample(range(0, len(ca)), 1)[0]
        else:
            parent_1 = random.sample(range(0, len(da)), 1)[0] + ca.shape[0]
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand < p_ca):
            parent_2 = random.sample(range(0, len(ca)), 1)[0]
        else:
            parent_2 = random.sample(range(0, len(da)), 1)[0] + ca.shape[0]
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            # Simulated Binary Crossover (SBX) with distribution index 'mu'.
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*cada[parent_1, j] + (1 - b_offspring)*cada[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*cada[parent_1, j] + (1 + b_offspring)*cada[parent_2, j])/2, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
147 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    # Polynomial mutation (distribution index 'eta') applied in place to each
    # decision variable with probability 'mutation_rate'; objective columns
    # are re-evaluated for every individual afterwards.
    n_vars = offspring.shape[1] - len(list_of_functions)
    for row in range(0, offspring.shape[0]):
        for col in range(0, n_vars):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                u = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                v = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                if (u <= 0.5):
                    delta = (2*v)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - v))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for f in range(1, len(list_of_functions) + 1):
            offspring[row, -f] = list_of_functions[-f](offspring[row, 0:n_vars])
    return offspring
167 |
168 | ############################################################################
169 |
# Function: Reference Points
def reference_points(M, p):
    # Das-Dennis style simplex-lattice points: every row has M non-negative
    # coordinates in steps of 1/p that sum to exactly 1.
    def expand(partial, left, depth):
        if (depth == M - 1):
            partial[depth] = left / (1.0 * p)
            return [partial]
        out = []
        for step in range(0, left + 1):
            partial[depth] = step / p
            out.extend(expand(partial.copy(), left - step, depth + 1))
        return out
    return np.array(expand(np.zeros(M), p, 0))
184 |
# Function: Normalize Objective Functions
def normalization(ca, number_of_functions):
    # Normalizes (in place) the last M columns of 'ca' using the ideal point
    # and the intercepts of the hyperplane through M extreme points
    # (achievement-scalarizing style, as in NSGA-III). 'ca' is mutated AND returned.
    M = number_of_functions
    z_min = np.min(ca[:,-M:], axis = 0)
    ca[:,-M:] = ca[:,-M:] - z_min
    w = np.zeros((M, M)) + 0.0000001  # near-axis weight vectors for extreme-point search
    np.fill_diagonal(w, 1)
    z_max = []
    for i in range(0, M):
        z_max.append(np.argmin(np.max(ca[:,-M:]/w[i], axis = 1)))
    if ( len(z_max) != len(set(z_max)) or M == 1):
        # Degenerate case (duplicated extreme points, or single objective):
        # fall back to the per-objective maxima.
        a = np.max(ca[:,-M:], axis = 0)
    else:
        k = np.ones((M, 1))
        z_max = np.vstack((ca[z_max,-M:]))
        # Hyperplane intercepts; may raise LinAlgError for a singular matrix.
        a = np.matrix.dot(np.linalg.inv(z_max), k)
        a = (1/a).reshape(1, M)
    # NOTE(review): objectives were already shifted by z_min above, so dividing
    # by (a - z_min) instead of 'a' looks suspicious — confirm against the
    # NSGA-III normalization before changing.
    ca[:,-M:] = ca[:,-M:] /(a - z_min)
    return ca
204 |
# Function: Distance from Point (p3) to a Line (p1, p2)
def point_to_line(p1, p2, p3):
    # Rows of p3 are points; rows of (p2 - p1) are line directions through the
    # shifted origin. Returns the (points x lines) matrix of orthogonal distances.
    direction  = p2 - p1
    projection = np.dot(p3, direction.T)/np.linalg.norm(direction.T, axis = 0)
    magnitude  = np.linalg.norm(p3, axis = 1)
    magnitude  = np.repeat(magnitude[:, np.newaxis], projection.shape[1], axis = 1)
    return np.sqrt(magnitude**2 - projection**2)
214 |
# Function: Association to Reference Point
def association(ca, weights, M):
    # Assigns each individual to its closest reference line and keeps, per
    # reference, the single closest individual. Returns the selected rows of
    # 'ca' and a dict mapping reference index -> [selected row index].
    p = copy.deepcopy(ca)  # normalization mutates its argument, so work on a copy
    p = normalization(p, M)
    p1 = np.zeros((1, M))
    p2 = weights
    p3 = p[:,-M:]
    d = point_to_line(p1, p2, p3) # Matrix (Population, Reference)
    idx = np.argmin(d, axis = 1)  # closest reference line per individual
    # NOTE: [[]] * n repeats ONE shared empty list; safe here only because
    # entries are replaced via update(), never mutated in place.
    niche = dict( zip( np.arange(weights.shape[0]), [[]] * weights.shape[0]) )
    n_ca = dict( zip( np.arange(weights.shape[0]), [[]] * weights.shape[0]) )
    idx_u = set(idx)
    for i in idx_u:
        niche.update({i: list(np.where(idx == i)[0])})
    idx_ = []
    for i in range(0, weights.shape[0]):
        if (len(niche[i]) != 0):
            individual = niche[i]
            # Member of this niche with the smallest distance to its line.
            idx_adp = np.argmin(np.amin(d[individual,:], axis = 1))
            idx_.append( individual[idx_adp] )
            n_ca.update({i: [individual[idx_adp]]})
    return ca[idx_ , :], n_ca
237 |
# Function: Update CA
def update_ca(ca, offspring, weights, M):
    # Convergence archive update: merge offspring, keep only the first
    # non-dominated front, niche by reference-line association, then truncate
    # to at most one individual per reference point.
    ca = np.vstack([ca, offspring])
    rank = fast_non_dominated_sorting(ca, M)
    ca = sort_population_by_rank(ca, rank, 1)  # rp = 1 -> first front only
    ca,_ = association(ca, weights, M)
    return ca[:weights.shape[0],:]
245 |
# Function: Update DA
def update_da(ca, da, offspring, weights, M):
    # Diversity archive update: for each reference point keep the CA
    # representative when it dominates the DA one, otherwise the DA
    # representative; fall back to whichever archive has a representative and
    # drop the reference entirely when neither does.
    da = np.vstack([da, offspring])
    _, n_da = association(da, weights, M)
    _, n_ca = association(ca, weights, M)
    s = np.full((weights.shape[0], ca.shape[1]), float('+inf'))
    empty_refs = []
    for ref in range(0, weights.shape[0]):
        has_ca = len(n_ca[ref]) != 0
        has_da = len(n_da[ref]) != 0
        if (has_ca and has_da):
            if (dominance_function(ca[n_ca[ref][0], :], da[n_da[ref][0], :], M)):
                s[ref, :] = ca[n_ca[ref][0], :]
            else:
                s[ref, :] = da[n_da[ref][0], :]
        elif (has_da):
            s[ref, :] = da[n_da[ref][0], :]
        elif (has_ca):
            s[ref, :] = ca[n_ca[ref][0], :]
        else:
            empty_refs.append(ref)
    return np.delete(s, empty_refs, axis = 0)
268 |
269 | ############################################################################
270 |
# CTAEA Function
def constrained_two_archive_evolutionary_algorithm(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, k = 4, verbose = True):
    # Main CTAEA loop: co-evolves a convergence archive (CA) and a diversity
    # archive (DA) over a simplex-lattice reference hyperplane and returns the
    # DA. 'references' sets the lattice density p (floored at 5); population
    # size is k times the number of reference points.
    count = 0
    references = max(5, references)
    M = len(list_of_functions)
    weights = reference_points(M = M, p = references)
    size = k*weights.shape[0]
    ca = initial_population(size, min_values, max_values, list_of_functions)
    da = initial_population(size, min_values, max_values, list_of_functions)
    print('Total Number of Points on Reference Hyperplane: ', int(weights.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        offspring = breeding(ca, da, min_values, max_values, mu, list_of_functions, size)
        offspring = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        # NOTE(review): offspring are merged into CA here AND again inside
        # update_ca; the double merge looks redundant — confirm intent.
        ca = np.vstack([ca, offspring])
        ca, _ = association(ca, weights, M)
        ca = update_ca(ca, offspring, weights, M)
        da = update_da(ca, da, offspring, weights, M)
        count = count + 1
    return da
292 |
293 | ############################################################################
294 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/grea.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: GrEA (Grid-based Evolutionary Algorithm)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiojective, File: grea.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    # Placeholder objective function; callers supply real objective functions
    # through the 'list_of_functions' parameters of the routines below.
    return

# Function 2
def func_2():
    # Placeholder objective function (see func_1).
    return
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    # Builds a (population_size, n_vars + n_objs) matrix: decision variables
    # first (uniform random within their bounds), objective values last.
    n_vars = len(min_values)
    n_objs = len(list_of_functions)
    population = np.zeros((population_size, n_vars + n_objs))
    for row in range(0, population_size):
        for col in range(0, n_vars):
            population[row, col] = random.uniform(min_values[col], max_values[col])
        variables = list(population[row, 0:n_vars])
        for f in range(1, n_objs + 1):
            population[row, -f] = list_of_functions[-f](variables)
    return population
40 |
41 | ############################################################################
42 |
# Function: Dominance Solution 1 over Solution 2
def dominance_function(solution_1, solution_2, number_of_functions = 2):
    # Weak Pareto dominance: True when every one of the last
    # 'number_of_functions' entries of solution_1 is <= the matching entry of
    # solution_2 (note: two identical solutions "dominate" each other).
    satisfied = 0
    for idx in range(1, number_of_functions + 1):
        if (solution_1[-idx] <= solution_2[-idx]):
            satisfied = satisfied + 1
    return satisfied == number_of_functions
55 |
# Function: Offspring
def breeding(population, population_size, k, grid, gcd, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    # Generates population_size*k offspring. Parents are chosen by binary
    # tournament: Pareto dominance, then grid dominance, then grid crowding
    # distance (smaller is better), then a coin flip. Variables are recombined
    # with SBX crossover and objective columns are re-evaluated.
    # Fix: candidate indices are sampled from range(0, len(population)) — the
    # previous range(0, len(population) - 1) could never select the last
    # individual and raised ValueError for a population of size 2.
    offspring   = np.zeros((population_size*k, population.shape[1]))
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        if (len(population) > 4):
            i1, i2, i3, i4 = random.sample(range(0, len(population)), 4)
            if (dominance_function(population[i1, :], population[i2, :], len(list_of_functions)) or grid_dominance_function(grid[i1, :], grid[i2, :])):
                parent_1 = i1
            elif (dominance_function(population[i2, :], population[i1, :], len(list_of_functions)) or grid_dominance_function(grid[i2, :], grid[i1, :])):
                parent_1 = i2
            elif (gcd[i1] < gcd[i2]):
                parent_1 = i1
            elif (gcd[i1] > gcd[i2]):
                parent_1 = i2
            elif (int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1) > 0.5):
                parent_1 = i1
            else:
                parent_1 = i2
            if (dominance_function(population[i3, :], population[i4, :], len(list_of_functions)) or grid_dominance_function(grid[i3, :], grid[i4, :])):
                parent_2 = i3
            elif (dominance_function(population[i4, :], population[i3, :], len(list_of_functions)) or grid_dominance_function(grid[i4, :], grid[i3, :])):
                parent_2 = i4
            elif (gcd[i3] < gcd[i4]):
                parent_2 = i3
            elif (gcd[i3] > gcd[i4]):
                parent_2 = i4
            elif (int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1) > 0.5):
                parent_2 = i3
            else:
                parent_2 = i4
        else:
            parent_1, parent_2 = random.sample(range(0, len(population)), 2)
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_1, parent_2 = parent_2, parent_1
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            # Simulated Binary Crossover (SBX) with distribution index 'mu'.
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for m in range (1, len(list_of_functions) + 1):
            offspring[i,-m] = list_of_functions[-m](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
111 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    # Polynomial mutation (distribution index 'eta') applied in place to each
    # decision variable with probability 'mutation_rate'; objective columns
    # are re-evaluated for every individual afterwards.
    n_vars = offspring.shape[1] - len(list_of_functions)
    for row in range(0, offspring.shape[0]):
        for col in range(0, n_vars):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                u = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                v = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                if (u <= 0.5):
                    delta = (2*v)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - v))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for f in range(1, len(list_of_functions) + 1):
            offspring[row, -f] = list_of_functions[-f](offspring[row, 0:n_vars])
    return offspring
131 |
132 | ############################################################################
133 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    # Sorts the population into non-dominated fronts (Deb et al., NSGA-II).
    # Dominance is the weak form over the last 'number_of_functions' columns,
    # so every individual (weakly) dominates itself; those self-entries in S
    # cancel out during front propagation and never produce a zero count.
    # Returns a (pop_size, 1) array of ranks, where rank 1 is the best front.
    # Fix: removed the dead list-based 'rank' bookkeeping that was built and
    # partially filled, then discarded and rebuilt from 'front' at the end.
    pop_size = population.shape[0]
    S     = [[] for _ in range(0, pop_size)]  # S[p]: individuals weakly dominated by p
    n     = [0 for _ in range(0, pop_size)]   # n[p]: how many individuals dominate p
    front = [[]]
    for p in range(0, pop_size):
        for q in range(0, pop_size):
            if ((population[p,-number_of_functions:] <= population[q,-number_of_functions:]).all()):
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-number_of_functions:] <= population[p,-number_of_functions:]).all()):
                n[p] = n[p] + 1
        if (n[p] == 0 and p not in front[0]):
            front[0].append(p)
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if (n[q] == 0 and q not in Q):
                    Q.append(q)
        i = i + 1
        front.append(Q)
    del front[-1]  # the loop always appends one trailing empty front
    rank = np.zeros((pop_size, 1))
    for f in range(0, len(front)):
        for member in front[f]:
            rank[member, 0] = f + 1
    return rank
171 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank, rp = 'none'):
    # rp == 'none': reorder the whole population by ascending rank.
    # otherwise   : keep only the rows whose rank is <= rp (e.g. rp = 1 keeps
    #               just the first non-dominated front).
    if (rp == 'none'):
        keep = np.argsort(rank[:, 0], axis = 0).tolist()
    else:
        keep = np.where(rank <= rp)[0].tolist()
    return population[keep, :]
181 |
182 | ############################################################################
183 |
# Function: Grid
def grid_location(population, divisions = 9, list_of_functions = [func_1, func_2]):
    # Maps each individual's objective vector to integer grid coordinates.
    # NOTE(review): GrEA widens the upper bound (max + (max-min)/(2*divisions));
    # here 'upper' SUBTRACTS the half-box instead — confirm before changing,
    # since grid_coordinate_point_distance repeats the exact same formula.
    M = len(list_of_functions)
    grid = np.zeros((population.shape[0], M))
    f_min = np.amin(population[:, population.shape[1]-M:], axis = 0)
    f_max = np.amax(population[:, population.shape[1]-M:], axis = 0)
    half_box  = [( f_max[j] - f_min[j] )/(2*divisions) for j in range(0, M)]
    lower     = [f_min[j] - half_box[j] for j in range(0, M)]
    upper     = [f_max[j] - half_box[j] for j in range(0, M)]
    hyper_box = [(upper[j] - lower[j])/divisions for j in range(0, M)]
    for i in range(0, grid.shape[0]):
        for j in range(0, grid.shape[1]):
            grid[i, j] = np.floor((population[i, population.shape[1]-M+j] - lower[j])/hyper_box[j])
    return grid
197 |
# Function: GD
def grid_diff(grid):
    # Pairwise grid difference: gd[i, j] is the L1 (Manhattan) distance
    # between the grid coordinate vectors of individuals i and j
    # (symmetric, zero diagonal).
    pairwise = np.abs(grid[:, None, :] - grid[None, :, :])
    gd = pairwise.sum(axis = 2).astype(float)
    return gd
207 |
# Function: GR
def grid_rank(grid):
    # Grid ranking: the sum of an individual's grid coordinates
    # (lower is better).
    return np.sum(grid, axis = 1).astype(float)
214 |
# Function: GR Adjustment
def grid_rank_adjustment(offspring, ix, grid, gr, gd, M):
    # GrEA grid-rank adjustment after selecting individual 'ix': columns of
    # e_g_ng_n_pd track (0) equal grid cell, (1) grid-dominated by ix,
    # (2) unused flag, (3) grid-neighbor of ix, (4) punishment degree.
    # NOTE(review): several lines below look defective — flagged inline;
    # confirm against the GrEA paper before changing behavior.
    e_g_ng_n_pd = np.zeros((offspring.shape[0], 5))
    # NOTE(review): the array has offspring.shape[0] rows, but every loop here
    # iterates range(offspring.shape[1]) (the COLUMN count) — suspected typo
    # for shape[0].
    for i in range(0, offspring.shape[1]):
        if (gd[i, ix] == 0):
            e_g_ng_n_pd[i,0] = 1
        if (grid_dominance_function(grid[i,:], grid[ix,:])):
            e_g_ng_n_pd[i,1] = 1
        else:
            # NOTE(review): assigns 0 to a column that is initialized to 0 —
            # possibly meant to set a different flag/value.
            e_g_ng_n_pd[i,2] = 0
        if (gd[i, ix] < M):
            e_g_ng_n_pd[i,3] = 1
    for i in range(0, offspring.shape[1]):
        if (e_g_ng_n_pd[i,0] == 1):
            gr[i] = gr[i] + M + 2
        if (e_g_ng_n_pd[i,1] == 1):
            gr[i] = gr[i] + M
        if (e_g_ng_n_pd[i,0] == 0 and e_g_ng_n_pd[i,2] == 1):
            # NOTE(review): '==' is a comparison, not an assignment — this
            # statement has no effect (suspected '=').
            e_g_ng_n_pd[i,-1] == 0
    for i in range(0, offspring.shape[1]):
        # NOTE(review): the condition tests column 2 twice — the second test
        # is probably meant to be a different column (e.g. 3).
        if (e_g_ng_n_pd[i,2] == 1 and e_g_ng_n_pd[i,2] == 1 and e_g_ng_n_pd[i,0] == 0):
            if (e_g_ng_n_pd[i,-1] <= M - gd[i, ix]):
                # NOTE(review): '==' again — no-op; suspected '=' so the
                # punishment degree is actually stored.
                e_g_ng_n_pd[i,-1] == M - gd[i, ix]
                for j in range(0, offspring.shape[1]):
                    if (e_g_ng_n_pd[j,-1] < e_g_ng_n_pd[i,-1]):
                        e_g_ng_n_pd[j,-1] = e_g_ng_n_pd[i,-1]
    for i in range(0, offspring.shape[1]):
        if (e_g_ng_n_pd[i,3] == 1 and e_g_ng_n_pd[i,0] == 0):
            gr[i] = gr[i] + e_g_ng_n_pd[i,-1]
    return gr
245 |
# Function: GCD
def grid_crowding_distance(gd, list_of_functions):
    # Grid crowding distance (GrEA): GCD(i) = sum over grid neighbors j
    # (those with grid difference GD(i, j) < M) of (M - GD(i, j)).
    # Fix: the accumulator previously added (M - gcd[i]) — referencing itself
    # instead of gd[i, j] — which collapsed every non-isolated individual's
    # GCD to the constant M, erasing the crowding information.
    M = len(list_of_functions)
    gcd = np.zeros((gd.shape[0]))
    for i in range(0, gcd.shape[0]):
        for j in range(0, gd.shape[1]):
            if (gd[i, j] < M and i != j):
                gcd[i] = gcd[i] + (M - gd[i, j])
    return gcd
255 |
# Function: GCPD
def grid_coordinate_point_distance(population, grid, divisions = 9, list_of_functions = [func_1, func_2]):
    # Grid coordinate point distance (GrEA): normalized Euclidean distance of
    # each individual's objective vector from the lower-left corner of its own
    # hyperbox, i.e. sqrt(sum_j ((f_j - (lower_j + grid_j*box_j)) / box_j)^2).
    # Fix: the corner was computed as (lower - grid*box); GrEA's GCPD uses
    # (lower + grid*box), so the sign of the grid offset was inverted.
    # NOTE(review): 'upper' subtracts the half-box where the paper adds it —
    # kept as-is for consistency with grid_location; confirm before changing.
    M = len(list_of_functions)
    gcpd = np.zeros((population.shape[0]))
    min_f = np.amin(population[:,population.shape[1]-M:], axis = 0)
    max_f = np.amax(population[:,population.shape[1]-M:], axis = 0)
    lower = [min_f[i] - ( max_f[i] - min_f[i] )/(2*divisions) for i in range(0, M)]
    upper = [max_f[i] - ( max_f[i] - min_f[i] )/(2*divisions) for i in range(0, M)]
    hyper_box = [(upper[i] - lower[i])/divisions for i in range(0, len(lower))]
    for i in range(0, gcpd.shape[0]):
        value = 0
        for j in range(0, M):
            value = value + ((population[i,population.shape[1]-M+j] - (lower[j] + grid[i,j]*hyper_box[j]))/hyper_box[j])**2
        gcpd[i] = value**(1/2)
    return gcpd
271 |
# Function: Grid Dominance Solution 1 over Solution 2
def grid_dominance_function(solution_1, solution_2):
    # Weak grid dominance: True when every grid coordinate of solution_1 is
    # <= the matching coordinate of solution_2 (equal cells dominate each other).
    total = solution_1.shape[0]
    not_worse = sum(1 for idx in range(0, total) if solution_1[idx] <= solution_2[idx])
    return not_worse == total
284 |
285 | ############################################################################
286 |
# Function: Find Best Solution Q
def find_best(offspring, q, ix, ix_list, grid, gr, gd, gcd, gcpd, M):
    # Picks the next archive candidate by comparing every unselected
    # individual against the current pick 'ix' lexicographically:
    # grid rank, then grid crowding distance, then grid coordinate point
    # distance (smaller wins at each level).
    # NOTE(review): each candidate is compared to ix rather than to the
    # running best, so the LAST qualifying candidate wins — confirm intent.
    for i in range(0, offspring.shape[0]):
        if (i not in ix_list):
            if (gr[i] < gr[ix]):
                q = np.copy(offspring[i, :])
            elif (gr[i] == gr[ix]):
                if (gcd[i] < gcd[ix]):
                    q = np.copy(offspring[i, :])
                elif (gcd[i] == gcd[ix]):
                    if (gcpd[i] < gcpd[ix]):
                        q = np.copy(offspring[i, :])
    # Penalize grid neighbors of ix: 'gcd' is mutated in place here.
    for i in range(0, offspring.shape[0]):
        if (gd[i, ix] < M):
            gcd[i] = gcd[i] + (M - gd[i, ix])
    gr = grid_rank_adjustment(offspring, ix, grid, gr, gd, M)
    return q, gr, gcd
304 |
# Function: Select Archive
def selection(offspring_, grid, gr, gd, gcd, gcpd, population_size, M):
    # Builds the environmental-selection archive, filling it from the last row
    # backwards: seed with the first member of the first non-dominated front,
    # then repeatedly take the best remaining candidate via find_best.
    # Rows whose candidate repeats an already-selected index are deleted, so
    # the returned archive may have fewer than population_size rows.
    offspring = np.copy(offspring_)
    archive = np.zeros((population_size, offspring.shape[1]))
    rank = fast_non_dominated_sorting(offspring, M)
    q = sort_population_by_rank(offspring, rank, rp = 1)[0, :]
    ix_list = []
    # np.where(offspring == q)[0][0]: first row index whose entries match q
    # element-wise (row lookup by value).
    ix = np.where(offspring == q )[0][0]
    ix_list.append(ix)
    archive[-1,:] = np.copy(q)
    for i in range(archive.shape[0]-2, -1, -1):
        q, gr, gcd = find_best(offspring, q, ix, ix_list, grid, gr, gd, gcd, gcpd, M)
        ix = np.where(offspring == q)[0][0]
        if (ix not in ix_list):
            ix_list.append(ix)
            archive[i,:] = np.copy(q)
        else:
            archive = np.delete(archive, i, 0)
    return archive
324 |
325 | ############################################################################
326 |
# GrEA Function
def grid_based_evolutionary_algorithm(population_size = 5, divisions = 10, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, k = 4, verbose = True):
    # Main GrEA loop: each generation computes the grid statistics (location,
    # rank, difference, crowding distance, coordinate point distance) for the
    # current population and its offspring, selects an archive from each,
    # merges them, and keeps the first non-dominated front of the combined
    # pool (truncated to population_size*k rows) as the next population.
    count = 0
    M = len(list_of_functions)
    population = initial_population(population_size*k, min_values, max_values, list_of_functions)
    while (count <= generations - 1):
        if (verbose == True):
            print('Generation = ', count)
        grid_p   = grid_location(population, divisions, list_of_functions)
        gr_p     = grid_rank(grid_p)
        gd_p     = grid_diff(grid_p)
        gcd_p    = grid_crowding_distance(gd_p, list_of_functions)
        gcpd_p   = grid_coordinate_point_distance(population, grid_p, divisions, list_of_functions)
        archive_p = selection(population, grid_p, gr_p, gd_p, gcd_p, gcpd_p, population_size, M)
        offspring = breeding(population, population_size, k, grid_p, gcd_p, min_values, max_values, mu, list_of_functions)
        offspring = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        grid_o   = grid_location(offspring, divisions, list_of_functions)
        gr_o     = grid_rank(grid_o)
        gd_o     = grid_diff(grid_o)
        gcd_o    = grid_crowding_distance(gd_o, list_of_functions)
        gcpd_o   = grid_coordinate_point_distance(offspring, grid_o, divisions, list_of_functions)
        archive_o = selection(offspring, grid_o, gr_o, gd_o, gcd_o, gcpd_o, population_size, M)
        archive = np.vstack([archive_p, archive_o])
        population = np.vstack([archive, offspring])[0:population_size*k,:]
        rank = fast_non_dominated_sorting(population, M)
        population = sort_population_by_rank(population, rank, rp = 1)  # keep first front
        count = count + 1
    return population
355 |
356 | ############################################################################
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/hype.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: HypE (Hypervolume Estimation Multiobjective Optimization Algorithm)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2022). Project: pyMultiojective, File: hype.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import pygmo as pg
18 | import random
19 | import os
20 |
21 | ############################################################################
22 |
# Function 1
def func_1():
    # Placeholder objective function; callers supply real objective functions
    # through the 'list_of_functions' parameters of the routines below.
    return

# Function 2
def func_2():
    # Placeholder objective function (see func_1).
    return
30 |
31 | ############################################################################
32 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    # Builds a (population_size, n_vars + n_objs) matrix: decision variables
    # first (uniform random within their bounds), objective values last.
    n_vars = len(min_values)
    n_objs = len(list_of_functions)
    population = np.zeros((population_size, n_vars + n_objs))
    for row in range(0, population_size):
        for col in range(0, n_vars):
            population[row, col] = random.uniform(min_values[col], max_values[col])
        variables = list(population[row, 0:n_vars])
        for f in range(1, n_objs + 1):
            population[row, -f] = list_of_functions[-f](variables)
    return population
42 |
43 | ############################################################################
44 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2], size = 5):
    # Generates 'size' offspring via SBX crossover between two randomly drawn
    # parents (order shuffled by a coin flip); objective columns are
    # re-evaluated for every child.
    # Fix: parents are drawn from range(0, len(population)) — the previous
    # range(0, len(population) - 1) silently excluded the last individual.
    offspring   = np.zeros((size, population.shape[1]))
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        if (len(population) - 1 >= 3):
            i1, i2 = random.sample(range(0, len(population)), 2)
        elif (len(population) - 1 == 0):
            i1 = 0
            i2 = 0
        else:
            i1 = 0
            i2 = 1
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_1 = i1
            parent_2 = i2
        else:
            parent_1 = i2
            parent_2 = i1
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            # Simulated Binary Crossover (SBX) with distribution index 'mu'.
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
84 |
85 | # Function: Mutation
86 | def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
87 | d_mutation = 0
88 | for i in range (0, offspring.shape[0]):
89 | for j in range(0, offspring.shape[1] - len(list_of_functions)):
90 | probability = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
91 | if (probability < mutation_rate):
92 | rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
93 | rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
94 | if (rand <= 0.5):
95 | d_mutation = 2*(rand_d)
96 | d_mutation = d_mutation**(1/(eta + 1)) - 1
97 | elif (rand > 0.5):
98 | d_mutation = 2*(1 - rand_d)
99 | d_mutation = 1 - d_mutation**(1/(eta + 1))
100 | offspring[i,j] = np.clip((offspring[i,j] + d_mutation), min_values[j], max_values[j])
101 | for k in range (1, len(list_of_functions) + 1):
102 | offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
103 | return offspring
104 |
105 | ############################################################################
106 |
# Function: Reference Points
def reference_points(M, p):
    # Das-Dennis style simplex-lattice points: every row has M non-negative
    # coordinates in steps of 1/p that sum to exactly 1.
    def expand(partial, left, depth):
        if (depth == M - 1):
            partial[depth] = left / p
            return [partial]
        out = []
        for step in range(0, left + 1):
            partial[depth] = step / p
            out.extend(expand(partial.copy(), left - step, depth + 1))
        return out
    return np.array(expand(np.zeros(M), p, 0))
121 |
# Function: Normalize Objective Functions
def normalization(population, number_of_functions):
    # Normalizes (in place) the last M columns of 'population' using the ideal
    # point and the intercepts of the hyperplane through M extreme points
    # (achievement-scalarizing style, as in NSGA-III). The array is mutated
    # AND returned.
    M = number_of_functions
    z_min = np.min(population[:,-M:], axis = 0)
    population[:,-M:] = population[:,-M:] - z_min
    w = np.zeros((M, M)) + 0.0000001  # near-axis weight vectors for extreme-point search
    np.fill_diagonal(w, 1)
    z_max = []
    for i in range(0, M):
        z_max.append(np.argmin(np.max(population[:,-M:]/w[i], axis = 1)))
    if ( len(z_max) != len(set(z_max)) or M == 1):
        # Degenerate case (duplicated extreme points, or single objective):
        # fall back to the per-objective maxima.
        a = np.max(population[:,-M:], axis = 0)
    else:
        k = np.ones((M, 1))
        z_max = np.vstack((population[z_max,-M:]))
        # Hyperplane intercepts; may raise LinAlgError for a singular matrix.
        a = np.matrix.dot(np.linalg.inv(z_max), k)
        a = (1/a).reshape(1, M)
    # NOTE(review): objectives were already shifted by z_min above, so dividing
    # by (a - z_min) instead of 'a' looks suspicious — confirm against the
    # NSGA-III normalization before changing.
    population[:,-M:] = population[:,-M:] /(a - z_min)
    return population
141 |
# Function: Distance from Point (p3) to a Line (p1, p2).
def point_to_line(p1, p2, p3):
    # Rows of p3 are points; rows of (p2 - p1) are line directions through the
    # shifted origin. Returns the (points x lines) matrix of orthogonal distances.
    direction  = p2 - p1
    projection = np.dot(p3, direction.T)/np.linalg.norm(direction.T, axis = 0)
    magnitude  = np.linalg.norm(p3, axis = 1)
    magnitude  = np.repeat(magnitude[:, np.newaxis], projection.shape[1], axis = 1)
    return np.sqrt(magnitude**2 - projection**2)
151 |
# Function: Association
def association(srp, population, z_max, number_of_functions):
    # Selects the surviving indices: each individual is assigned to its
    # closest reference line, and within each occupied line the individual
    # with the largest hypervolume contribution (computed via pygmo against
    # the running reference point z_max) is kept.
    M = number_of_functions
    p = copy.deepcopy(population)  # normalization mutates its argument
    p = normalization(p, M)
    p1 = np.zeros((1, M))
    p2 = srp
    p3 = p[:,-M:]
    g = point_to_line(p1, p2, p3) # Matrix (Population, Reference)
    idx = []
    arg = np.argmin(g, axis = 1)  # closest reference line per individual
    hv_c = pg.hypervolume(p[:,-M:])
    z = np.max(p[:,-M:], axis = 0)
    if any(z > z_max):
        z_max = np.maximum(z_max,z)  # local update only; caller keeps its own z_max
    hv = hv_c.contributions(z_max)
    d = 1/(hv + 0.0000000000000001)  # epsilon avoids division by zero; smaller d = larger contribution
    for ind in np.unique(arg).tolist():
        f = [i[0] for i in np.argwhere(arg == ind).tolist()]
        idx.append(f[d[f].argsort()[0]])
    # NOTE(review): the literal 5 looks like it should track the number of
    # reference points (srp.shape[0]) rather than a hard-coded minimum — confirm.
    if (len(idx) < 5):
        idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
        idx = idx[:5]
    return idx
176 |
177 | ############################################################################
178 |
# HypE Function
def hypervolume_estimation_mooa(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 5, mu = 1, eta = 1, k = 4, verbose = True):
    # Main HypE loop: environmental selection keeps, per reference line, the
    # individual with the largest hypervolume contribution; z_max tracks the
    # worst objective values seen so far as the hypervolume reference point.
    # 'references' sets the lattice density p (floored at 5); population size
    # is k times the number of reference points.
    count = 0
    references = max(5, references)
    M = len(list_of_functions)
    srp = reference_points(M = M, p = references)
    size = k*srp.shape[0]
    population = initial_population(size, min_values, max_values, list_of_functions)
    offspring = initial_population(size, min_values, max_values, list_of_functions)
    z_max = np.max(population[:,-M:], axis = 0)
    print('Total Number of Points on Reference Hyperplane: ', int(srp.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        population = np.vstack([population, offspring])
        # Monotonically widen the hypervolume reference point.
        z_max = np.vstack([z_max, np.max(population[:,-M:], axis = 0)])
        z_max = np.max(z_max, axis = 0)
        idx = association(srp, population, z_max, M)
        population = population[idx, :]
        population = population[:size,:]
        offspring = breeding(population, min_values, max_values, mu, list_of_functions, size)
        offspring = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        count = count + 1
    return population[:srp.shape[0], :]
203 |
204 | ############################################################################
205 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/ibea_fc.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: IBEA-FC (Indicator-Based Evolutionary Algorithm with Fast Comparison Indicator)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2022). Project: pyMultiobjective, File: ibea_fc.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
24 |
# Function 2
def func_2():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build the initial population: each row holds decision variables drawn
    uniformly within [min_values, max_values], followed by one column per
    objective evaluated on those variables. Shape:
    (population_size, len(min_values) + len(list_of_functions)).
    """
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        variables = list(population[row, :n_var])
        for m, objective in enumerate(list_of_functions):
            population[row, n_var + m] = objective(variables)
    return population
40 |
41 | ############################################################################
42 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2], size = 5):
    """
    Create 'size' offspring via simulated binary crossover (SBX) with
    distribution index 'mu', then re-evaluate the objective columns.
    Fix: parents are sampled from range(0, len(population)); the original
    range(0, len(population) - 1) could never select the last individual.
    """
    offspring   = np.zeros((size, population.shape[1]))
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        if (len(population) >= 2):
            i1, i2 = random.sample(range(0, len(population)), 2)
        else:
            i1 = 0
            i2 = 0
        # Randomly swap which sampled index is parent 1 vs parent 2.
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_1 = i1
            parent_2 = i2
        else:
            parent_1 = i2
            parent_2 = i1
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        # Re-evaluate objectives on the new decision variables.
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
82 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index 'eta') gene by gene with
    probability 'mutation_rate', then re-evaluate the objective columns.
    """
    for row in range(0, offspring.shape[0]):
        for col in range(0, offspring.shape[1] - len(list_of_functions)):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                # Polynomial perturbation in [-1, 1].
                if (rand <= 0.5):
                    delta = (2*rand_d)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for k in range(1, len(list_of_functions) + 1):
            offspring[row, -k] = list_of_functions[-k](offspring[row, 0:offspring.shape[1] - len(list_of_functions)])
    return offspring
102 |
103 | ############################################################################
104 |
# Function: Pareto Front
def pareto_front_points(pts, pf_min = True):
    """
    Boolean mask of the non-dominated rows of 'pts' (True = on the front).
    pf_min = True treats the objectives as minimized, False as maximized.
    Duplicates of a point already on the front are excluded.
    """
    def flag_front(points, minimize):
        keep = np.zeros(points.shape[0], dtype = np.bool_)
        for i in range(0, points.shape[0]):
            cost = points[i, :]
            if minimize:
                no_better  = np.logical_not(np.any(points > cost, axis = 1))
                some_worse = np.any(points < cost, axis = 1)
            else:
                no_better  = np.logical_not(np.any(points < cost, axis = 1))
                some_worse = np.any(points > cost, axis = 1)
            dominators = np.logical_and(no_better, some_worse)
            # Skip exact duplicates of a point already flagged.
            if np.any(keep) and np.any(np.all(points[keep] == cost, axis = 1)):
                continue
            dominators[i] = False  # a point never dominates itself
            if not np.any(dominators):
                keep[i] = True
        return keep
    # Scan points in a standardized order, then scatter the flags back.
    order      = np.argsort(((pts - pts.mean(axis = 0))/(pts.std(axis = 0) + 1e-7)).sum(axis = 1))
    sorted_pts = pts[order]
    front      = flag_front(sorted_pts, pf_min)
    result     = np.empty_like(front)
    result[order] = front
    return result
129 |
130 | ############################################################################
131 |
# Function: Fitness Selection
def selection(population, population_size, M, z_min, z_max):
    """
    Keep individuals whose normalized objective sum (additive epsilon
    indicator against [z_min, z_max]) is at most 1, truncated to
    'population_size' rows.
    """
    normalized = (population[:, -M:] - z_min)/(z_max - z_min)
    eps        = normalized.sum(axis = 1)
    survivors  = np.where(eps <= 1)[0].tolist()
    return population[survivors, :][:population_size, :]
140 |
141 | ############################################################################
142 |
# IBEA-FC Function
def indicator_based_evolutionary_algorithm_fc(population_size = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, verbose = True):
    """
    Run IBEA with the fast (additive epsilon) comparison indicator: breed and
    mutate, merge with the parents, refresh the ideal/nadir estimates, apply
    epsilon-based selection, and keep only the Pareto-front members.
    """
    M          = len(list_of_functions)
    population = initial_population(population_size, min_values, max_values, list_of_functions)
    z_min      = np.min(population[:,-M:], axis = 0)
    z_max      = np.max(population[:,-M:], axis = 0)
    for generation in range(0, generations):
        if (verbose == True):
            print('Generation = ', generation)
        offspring  = breeding(population, min_values, max_values, mu, list_of_functions, population_size)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        population = np.vstack([population, offspring])
        z_min      = np.min(np.vstack([z_min, np.min(population[:,-M:], axis = 0)]), axis = 0)
        z_max      = np.max(np.vstack([z_max, np.max(population[:,-M:], axis = 0)]), axis = 0)
        population = selection(population, population_size, M, z_min, z_max)
        pf_mask    = pareto_front_points(population[:, -M:], pf_min = True)
        population = population[pf_mask, :]
    return population
165 |
166 | ############################################################################
167 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/ibea_hv.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: IBEA-HV (Indicator-Based Evolutionary Algorithm with Hypervolume Indicator)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2022). Project: pyMultiobjective, File: ibea_hv.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import pygmo as pg
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1
def func_1():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
25 |
# Function 2
def func_2():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build the initial population: each row holds decision variables drawn
    uniformly within [min_values, max_values], followed by one column per
    objective evaluated on those variables. Shape:
    (population_size, len(min_values) + len(list_of_functions)).
    """
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        variables = list(population[row, :n_var])
        for m, objective in enumerate(list_of_functions):
            population[row, n_var + m] = objective(variables)
    return population
41 |
42 | ############################################################################
43 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2], size = 5):
    """
    Create 'size' offspring via simulated binary crossover (SBX) with
    distribution index 'mu', then re-evaluate the objective columns.
    Fix: parents are sampled from range(0, len(population)); the original
    range(0, len(population) - 1) could never select the last individual.
    """
    offspring   = np.zeros((size, population.shape[1]))
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        if (len(population) >= 2):
            i1, i2 = random.sample(range(0, len(population)), 2)
        else:
            i1 = 0
            i2 = 0
        # Randomly swap which sampled index is parent 1 vs parent 2.
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_1 = i1
            parent_2 = i2
        else:
            parent_1 = i2
            parent_2 = i1
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        # Re-evaluate objectives on the new decision variables.
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
83 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index 'eta') gene by gene with
    probability 'mutation_rate', then re-evaluate the objective columns.
    """
    for row in range(0, offspring.shape[0]):
        for col in range(0, offspring.shape[1] - len(list_of_functions)):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                # Polynomial perturbation in [-1, 1].
                if (rand <= 0.5):
                    delta = (2*rand_d)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for k in range(1, len(list_of_functions) + 1):
            offspring[row, -k] = list_of_functions[-k](offspring[row, 0:offspring.shape[1] - len(list_of_functions)])
    return offspring
103 |
104 | ############################################################################
105 |
# Function: Fitness Selection
def selection(population, population_size, M, z_max):
    """
    Rank individuals by exclusive hypervolume contribution (pygmo) with
    respect to the reference point z_max, drop zero-contribution individuals,
    and keep the best 'population_size'.
    """
    contributions = pg.hypervolume(population[:, -M:]).contributions(z_max)
    order         = np.argsort(contributions, axis = 0)[::-1]
    keep          = [i for i in order if contributions[i] > 0]
    return population[keep, :][:population_size, :]
115 |
116 | ############################################################################
117 |
# IBEA-HV Function
def indicator_based_evolutionary_algorithm_hv(population_size = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, verbose = True):
    """
    Run IBEA with the hypervolume indicator: breed and mutate a double-sized
    population, merge with the parents, refresh the nadir estimate z_max, and
    keep the individuals with the largest hypervolume contributions.
    """
    M          = len(list_of_functions)
    population = initial_population(2*population_size, min_values, max_values, list_of_functions)
    z_max      = np.max(population[:,-M:], axis = 0)
    for generation in range(0, generations):
        if (verbose == True):
            print('Generation = ', generation)
        offspring  = breeding(population, min_values, max_values, mu, list_of_functions, 2*population_size)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        population = np.vstack([population, offspring])
        z_max      = np.max(np.vstack([z_max, np.max(population[:,-M:], axis = 0)]), axis = 0)
        population = selection(population, population_size, M, z_max)
    return population
135 |
136 | ############################################################################
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/moead.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: MOEA/D (Multiobjective Evolutionary Algorithm Based on Decomposition)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: moead.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
24 |
# Function 2
def func_2():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build the initial population: each row holds decision variables drawn
    uniformly within [min_values, max_values], followed by one column per
    objective evaluated on those variables. Shape:
    (population_size, len(min_values) + len(list_of_functions)).
    """
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        variables = list(population[row, :n_var])
        for m, objective in enumerate(list_of_functions):
            population[row, n_var + m] = objective(variables)
    return population
40 |
41 | ############################################################################
42 |
# Function: Offspring
def breeding(population, neighbours, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """
    Create one offspring per individual via SBX, mating only within the first
    'neighbours' candidate indices (the individual itself excluded), then
    re-evaluate the objective columns.
    """
    offspring = np.copy(population)
    for i in range(0, offspring.shape[0]):
        candidates = [c for c in range(0, population.shape[0]) if c != i][:neighbours]
        i1, i2, i3, i4 = random.sample(candidates, 4)
        draw     = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        parent_1 = i1 if (draw > 0.5) else i2
        draw     = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        parent_2 = i3 if (draw > 0.5) else i4
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                beta = (2*rand_b)**(1/(mu + 1))
            else:
                beta = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + beta)*population[parent_1, j] + (1 - beta)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - beta)*population[parent_1, j] + (1 + beta)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i, 0:offspring.shape[1] - len(list_of_functions)])
    return offspring
81 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index 'eta') gene by gene with
    probability 'mutation_rate', then re-evaluate the objective columns.
    """
    for row in range(0, offspring.shape[0]):
        for col in range(0, offspring.shape[1] - len(list_of_functions)):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                # Polynomial perturbation in [-1, 1].
                if (rand <= 0.5):
                    delta = (2*rand_d)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for k in range(1, len(list_of_functions) + 1):
            offspring[row, -k] = list_of_functions[-k](offspring[row, 0:offspring.shape[1] - len(list_of_functions)])
    return offspring
101 |
102 | ############################################################################
103 |
# Function: Reference Points
def reference_points(M, p):
    """
    Das-Dennis simplex-lattice reference points: every M-dimensional vector
    with non-negative components in steps of 1/p that sum to 1. Returns an
    array of shape (C(p + M - 1, M - 1), M).
    """
    def expand(partial, remaining, dim):
        # Fill dimension 'dim'; 'remaining' steps of 1/p are left to distribute.
        if dim == M - 1:
            partial[dim] = remaining / p
            return [partial]
        rows = []
        for step in range(0, remaining + 1):
            partial[dim] = step / p
            rows.extend(expand(partial.copy(), remaining - step, dim + 1))
        return rows
    return np.array(expand(np.zeros(M), p, 0))
118 |
# Function: Selection
def selection(population, offspring, M, weights, theta):
    """
    Environmental selection by PBI (penalty-based boundary intersection)
    decomposition: for each individual and weight vector,
    d1 = projection length of (f - z*) onto the weight direction and
    d2 = perpendicular distance to that direction; pbi = d1 + theta*d2.
    The best individual per weight vector survives first; remaining slots are
    filled in pbi order, truncated to one row per weight vector.
    Fix: d1 was previously np.linalg.norm of the OUTER product of (f - z*) and
    w divided by |w|, which equals |f - z*| and ignores the weight direction;
    it is now the proper projection (f - z*)·w / |w| per the MOEA/D PBI definition.
    """
    z_min      = np.min(np.vstack([population[:,-M:], offspring[:,-M:]]), axis = 0)
    population = np.vstack([population, offspring])
    pbi        = np.zeros((population.shape[0], weights.shape[0]))
    for i in range(0, population.shape[0]):
        f = population[i,-M:] - z_min
        for j in range(0, weights.shape[0]):
            w_norm   = np.linalg.norm(weights[j,:])
            d1       = np.abs(np.dot(f, weights[j,:]))/w_norm
            d2       = np.linalg.norm(f - d1*(weights[j,:]/w_norm))
            pbi[i,j] = d1 + theta*d2
    idx = []
    arg = np.argmin(pbi, axis = 1)
    d   = np.amin(pbi, axis = 1)
    for ind in np.unique(arg).tolist():
        # Best individual among those closest to weight vector 'ind'.
        members = [i[0] for i in np.argwhere(arg == ind).tolist()]
        idx.append(members[d[members].argsort()[0]])
    # Pad with the remaining individuals, then truncate to the number of weights.
    idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
    population = population[idx, :]
    population = population[:weights.shape[0], :]
    return population
139 |
140 | ############################################################################
141 |
# MOEA/D Function
def multiobjective_evolutionary_algorithm_based_on_decomposition(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, theta = 4, k = 4, verbose = True):
    """
    Run MOEA/D: build a Das-Dennis weight lattice, evolve a population of
    k times the number of weight vectors, and each generation breed within
    neighbourhoods, mutate, and select by PBI decomposition.
    """
    references = max(5, references)
    M          = len(list_of_functions)
    weights    = reference_points(M = M, p = references)
    size       = k*weights.shape[0]
    neighbours = max(4, int(size//10))
    population = initial_population(size, min_values, max_values, list_of_functions)
    print('Total Number of Points on Reference Hyperplane: ', int(weights.shape[0]), ' Population Size: ', int(size))
    for generation in range(0, generations + 1):
        if (verbose == True):
            print('Generation = ', generation)
        offspring  = breeding(population, neighbours, min_values, max_values, mu, list_of_functions)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        population = selection(population, offspring, M, weights, theta)
    return population
160 |
161 | ############################################################################
162 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/n_ii.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: NSGA-II (Non-Dominated Sorting Genetic Algorithm II)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: n_ii.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1
def func_1():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
25 |
# Function 2
def func_2():
    """Placeholder objective; real objective functions are passed in via 'list_of_functions'."""
    return None
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build the initial population: each row holds decision variables drawn
    uniformly within [min_values, max_values], followed by one column per
    objective evaluated on those variables. Shape:
    (population_size, len(min_values) + len(list_of_functions)).
    """
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        variables = list(population[row, :n_var])
        for m, objective in enumerate(list_of_functions):
            population[row, n_var + m] = objective(variables)
    return population
41 |
42 | ############################################################################
43 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """
    Deb's fast non-dominated sorting over the last 'number_of_functions'
    columns (minimization). Returns an (n, 1) array of 1-based front ranks.
    Fixes: dominance now requires all objectives <= AND at least one strictly
    <, so an individual is never inserted into its own dominated set and
    identical points no longer count as dominating each other (the old check
    produced spurious S entries and negative domination counters); the dead
    first 'rank' list, overwritten before use, was removed.
    """
    M     = number_of_functions
    S     = [[] for _ in range(0, population.shape[0])]
    front = [[]]
    n     = [0 for _ in range(0, population.shape[0])]
    for p in range(0, population.shape[0]):
        for q in range(0, population.shape[0]):
            if (p == q):
                continue
            if ((population[p,-M:] <= population[q,-M:]).all() and (population[p,-M:] < population[q,-M:]).any()):
                # p dominates q.
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-M:] <= population[p,-M:]).all() and (population[q,-M:] < population[p,-M:]).any()):
                # q dominates p.
                n[p] = n[p] + 1
        if (n[p] == 0):
            if (p not in front[0]):
                front[0].append(p)
    # Peel fronts: members of the current front release the individuals they dominate.
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if (n[q] == 0):
                    if (q not in Q):
                        Q.append(q)
        i = i + 1
        front.append(Q)
    del front[len(front)-1]
    rank = np.zeros((population.shape[0], 1))
    for i in range(0, len(front)):
        for j in range(0, len(front[i])):
            rank[front[i][j], 0] = i + 1
    return rank
81 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank):
    """Reorder population (and its rank column) by ascending front rank; returns both."""
    order = np.argsort(rank[:, 0], axis = 0).tolist()
    return population[order, :], rank[order, :]
88 |
# Function: Crowding Distance (Adapted from PYMOO)
def crowding_distance_function(pop, M):
    """
    Crowding distance of each individual over the last M (objective) columns.
    Returns an (n, 1) column vector; boundary points receive the large finite
    value 1e+11 instead of infinity.
    Fix: the small-population branch returned a 1-D array while the main
    branch returns (n, 1); callers index the result with [:, 0]
    (e.g. crowded_comparison_operator), so the 1-D shape crashed. Both
    branches now return (n, 1).
    """
    infinity = 1e+11
    population = copy.deepcopy(pop[:,-M:])
    population = population.reshape((pop.shape[0], M))
    if (population.shape[0] <= 2):
        # Too few points for neighbour gaps; all are boundary points.
        return np.full((population.shape[0], 1), infinity)
    else:
        arg_1 = np.argsort(population, axis = 0, kind = 'mergesort')
        population = population[arg_1, np.arange(M)]
        # Gap to the previous/next neighbour per objective, with +/- inf sentinels.
        dist = np.concatenate([population, np.full((1, M), np.inf)]) - np.concatenate([np.full((1, M), -np.inf), population])
        # Propagate neighbour gaps across runs of duplicate values.
        idx = np.where(dist == 0)
        a = np.copy(dist)
        b = np.copy(dist)
        for i, j in zip(*idx):
            a[i, j] = a[i - 1, j]
        for i, j in reversed(list(zip(*idx))):
            b[i, j] = b[i + 1, j]
        # Normalize by each objective's range; degenerate (zero-range) objectives contribute 0.
        norm = np.max(population, axis = 0) - np.min(population, axis = 0)
        norm[norm == 0] = np.nan
        a, b = a[:-1]/norm, b[1:]/norm
        a[np.isnan(a)] = 0.0
        b[np.isnan(b)] = 0.0
        # Undo the per-objective sort and average the neighbour gaps.
        arg_2 = np.argsort(arg_1, axis = 0)
        crowding = np.sum(a[arg_2, np.arange(M)] + b[arg_2, np.arange(M)], axis = 1) / M
        crowding[np.isinf(crowding)] = infinity
        return crowding.reshape((-1, 1))
117 |
# Function: Crowded Comparison Operator
def crowded_comparison_operator(rank, crowding_distance, individual_1 = 0, individual_2 = 1):
    """True when individual_1 beats individual_2: lower front rank wins, and
    ties are broken by the larger crowding distance."""
    r1, r2 = rank[individual_1, 0], rank[individual_2, 0]
    if r1 < r2:
        return True
    if r1 == r2 and crowding_distance[individual_1, 0] > crowding_distance[individual_2, 0]:
        return True
    return False
124 |
# Function: Offspring
def breeding(population, rank, crowding_distance, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """
    Generate one offspring per individual using binary tournaments (front
    rank, ties broken by crowding distance) for parent selection and SBX for
    recombination, then re-evaluate the objective columns.
    Fix: tournament candidates are drawn from range(0, len(population)); the
    original range(0, len(population) - 1) could never select the last
    individual as a parent.
    """
    offspring   = np.copy(population)
    parent_1    = 0
    parent_2    = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        i1, i2, i3, i4 = random.sample(range(0, len(population)), 4)
        # Tournament for parent 1; coin flip when neither candidate wins.
        if (crowded_comparison_operator(rank, crowding_distance, individual_1 = i1, individual_2 = i2) == True):
            parent_1 = i1
        elif (crowded_comparison_operator(rank, crowding_distance, individual_1 = i2, individual_2 = i1) == True):
            parent_1 = i2
        else:
            rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            parent_1 = i1 if (rand > 0.5) else i2
        # Tournament for parent 2.
        if (crowded_comparison_operator(rank, crowding_distance, individual_1 = i3, individual_2 = i4) == True):
            parent_2 = i3
        elif (crowded_comparison_operator(rank, crowding_distance, individual_1 = i4, individual_2 = i3) == True):
            parent_2 = i4
        else:
            rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            parent_2 = i3 if (rand > 0.5) else i4
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
170 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index 'eta') gene by gene with
    probability 'mutation_rate', then re-evaluate the objective columns.
    """
    for row in range(0, offspring.shape[0]):
        for col in range(0, offspring.shape[1] - len(list_of_functions)):
            chance = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (chance < mutation_rate):
                rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                # Polynomial perturbation in [-1, 1].
                if (rand <= 0.5):
                    delta = (2*rand_d)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        for k in range(1, len(list_of_functions) + 1):
            offspring[row, -k] = list_of_functions[-k](offspring[row, 0:offspring.shape[1] - len(list_of_functions)])
    return offspring
190 |
191 | ############################################################################
192 |
# NSGA II Function
def non_dominated_sorting_genetic_algorithm_II(population_size = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, verbose = True):
    """
    Run NSGA-II: each generation merges parents and offspring, sorts by
    non-dominated front, truncates to 'population_size', computes crowding
    distances, and breeds/mutates the survivors. Returns the final population.
    """
    population = initial_population(population_size = population_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    offspring  = initial_population(population_size = population_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    for generation in range(0, generations + 1):
        if (verbose == True):
            print('Generation = ', generation)
        population        = np.vstack([population, offspring])
        rank              = fast_non_dominated_sorting(population, number_of_functions = len(list_of_functions))
        population, rank  = sort_population_by_rank(population, rank)
        population, rank  = population[0:population_size,:], rank[0:population_size,:]
        crowding_distance = crowding_distance_function(population, len(list_of_functions))
        offspring         = breeding(population, rank, crowding_distance, mu = mu, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
        offspring         = mutation(offspring, mutation_rate = mutation_rate, eta = eta, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    return population
210 |
211 | ############################################################################
212 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/n_iii.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: NSGA-III (Non-Dominated Sorting Genetic Algorithm III)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: n_iii.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1: placeholder objective. Users supply their own callable taking
# the list of decision variables and returning a scalar objective value.
def func_1():
    return

# Function 2: placeholder objective (same contract as func_1).
def func_2():
    return
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Create a random population: one row per individual, decision variables
    drawn uniformly inside [min, max] followed by their objective values."""
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        for col in range(0, n_var):
            population[row, col] = random.uniform(min_values[col], max_values[col])
        for m in range(1, n_obj + 1):
            population[row, -m] = list_of_functions[-m](list(population[row, 0:population.shape[1] - n_obj]))
    return population
41 |
42 | ############################################################################
43 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """Deb's fast non-dominated sorting.

    The last 'number_of_functions' columns of 'population' hold the objective
    values (minimization). Returns an (n, 1) array with the 1-based front
    index of each individual (1 = non-dominated front).

    Fix: the original kept a parallel 'rank' list whose entries were written
    during the sweep and then discarded when 'rank' was rebound to np.zeros;
    that dead bookkeeping is removed (output is unchanged).

    Note: dominance is tested with '<=' on every objective, so duplicated
    points weakly dominate each other and land in the same front.
    """
    size  = population.shape[0]
    S     = [[] for _ in range(0, size)]    # indices dominated by each p
    n     = [0  for _ in range(0, size)]    # how many individuals dominate p
    front = [[]]
    for p in range(0, size):
        for q in range(0, size):
            if ((population[p,-number_of_functions:] <= population[q,-number_of_functions:]).all()):
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-number_of_functions:] <= population[p,-number_of_functions:]).all()):
                n[p] = n[p] + 1
        if (n[p] == 0 and p not in front[0]):
            front[0].append(p)
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if (n[q] == 0 and q not in Q):
                    Q.append(q)
        i = i + 1
        front.append(Q)
    del front[len(front) - 1]               # drop the trailing empty front
    rank = np.zeros((size, 1))
    for i in range(0, len(front)):
        for j in range(0, len(front[i])):
            rank[front[i][j], 0] = i + 1
    return rank
81 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank, rp = 'none'):
    """Order/filter the population by Pareto front index.

    rp == 'none': return the whole population sorted by ascending rank.
    Otherwise keep only individuals whose rank is <= rp, relaxing the cut-off
    one front at a time until at least 5 individuals survive."""
    if rp == 'none':
        order      = np.argsort(rank[:, 0], axis = 0).tolist()
        population = population[order, :]
    else:
        keep  = np.where(rank <= rp)[0].tolist()
        relax = 1
        while len(keep) < 5:
            keep  = np.where(rank <= rp + relax)[0].tolist()
            relax = relax + 1
        population = population[keep, :]
    return population
95 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """Generate one offspring per individual by simulated binary crossover (SBX).

    Two parents are picked by coin-flips among four distinct random
    candidates; each decision variable is recombined with distribution index
    'mu', clipped to its bounds, and the objective columns are re-evaluated.

    Fix: candidates were drawn from range(0, len(population) - 1), so the last
    individual could never be selected as a parent; the full range is used now.
    """
    offspring = np.copy(population)
    for i in range (0, offspring.shape[0]):
        i1, i2, i3, i4 = random.sample(range(0, len(population)), 4)
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        parent_1 = i1 if (rand > 0.5) else i2
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        parent_2 = i3 if (rand > 0.5) else i4
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor
            if (rand <= 0.5):
                b_offspring = (2*rand_b)**(1/(mu + 1))
            else:
                b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        # refresh the objective columns of the new individual
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
131 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Polynomial mutation: perturb each decision variable of 'offspring' with
    probability 'mutation_rate' (distribution index 'eta'), clip the result to
    the variable bounds and refresh the objective columns in place."""
    n_var = offspring.shape[1] - len(list_of_functions)
    draw  = lambda: int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    for row in range(0, offspring.shape[0]):
        for col in range(0, n_var):
            # mutate this gene only with probability 'mutation_rate'
            if (draw() < mutation_rate):
                u = draw()
                v = draw()
                if (u <= 0.5):
                    delta = (2*v)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - v))**(1/(eta + 1))
                offspring[row, col] = np.clip(offspring[row, col] + delta, min_values[col], max_values[col])
        # re-evaluate every objective for the (possibly) mutated individual
        for m in range(1, len(list_of_functions) + 1):
            offspring[row, -m] = list_of_functions[-m](offspring[row, 0:n_var])
    return offspring
151 |
152 | ############################################################################
153 |
# Function: Reference Points
def reference_points(M, p):
    """Das-Dennis simplex-lattice design: every M-dimensional weight vector
    whose components lie in {0, 1/p, ..., 1} and sum to one.
    Returns an array of shape (C(M + p - 1, p), M)."""
    def generator(r_points, M, Q, T, D):
        points = []
        if (D == M - 1):
            # the last coordinate takes whatever budget is left
            r_points[D] = Q / T
            points.append(r_points)
        else:
            for i in range(Q + 1):
                r_points[D] = i / T
                points.extend(generator(r_points.copy(), M, Q - i, T, D + 1))
        return points
    return np.array(generator(np.zeros(M), M, p, p, 0))
168 |
# Function: Normalize Objective Functions
def normalization(population, number_of_functions):
    """Normalize the objective columns of 'population' in place (NSGA-III style
    adaptive normalization): shift by the ideal point, then scale by the
    intercepts of the hyperplane through the extreme points (falling back to
    the column maxima when the extremes are degenerate)."""
    M = number_of_functions
    # ideal point (column-wise minimum of the objectives)
    z_min = np.min(population[:,-M:], axis = 0)
    population[:,-M:] = population[:,-M:] - z_min
    # near-axis weight vectors for the achievement scalarizing function
    w = np.zeros((M, M)) + 0.0000001
    np.fill_diagonal(w, 1)
    z_max = []
    for i in range(0, M):
        # extreme solution for objective i: minimizer of the ASF along axis i
        z_max.append(np.argmin(np.max(population[:,-M:]/w[i], axis = 1)))
    if ( len(z_max) != len(set(z_max)) or M == 1):
        # duplicated extremes (or a single objective): use the nadir estimate
        a = np.max(population[:,-M:], axis = 0)
    else:
        # hyperplane intercepts: solve z_max . a = 1, intercepts are 1/a
        k = np.ones((M, 1))
        z_max = np.vstack((population[z_max,-M:]))
        a = np.matrix.dot(np.linalg.inv(z_max), k)
        a = (1/a).reshape(1, M)
    # NOTE(review): objectives were already shifted by z_min above, so dividing
    # by (a - z_min) re-applies the shift to the scale — confirm against the
    # NSGA-III paper before changing.
    population[:,-M:] = population[:,-M:] /(a - z_min)
    return population
188 |
# Function: Distance from Point (p3) to a Line (p1, p2).
def point_to_line(p1, p2, p3):
    """Perpendicular distance of every row of p3 to each line with direction
    p2 - p1. Returns a (len(p3), len(p2)) matrix.

    As used in this file p1 is the zero vector; p3 is NOT translated by p1, so
    the result is only a true point-to-line distance when p1 is the origin."""
    direction  = p2 - p1
    projection = np.dot(p3, direction.T)/np.linalg.norm(direction.T, axis = 0)
    length     = np.linalg.norm(p3, axis = 1)
    length     = np.array([length,]*projection.shape[1]).transpose()
    # Pythagoras: distance^2 = |p3|^2 - projection^2
    return np.sqrt(length**2 - projection**2)
198 |
# Function: Association
def association(srp, population, number_of_functions):
    """Return, for each reference direction in 'srp', the index of the closest
    individual (perpendicular distance in normalized objective space); pads
    with unassigned indices so that at least 5 indices come back."""
    M = number_of_functions
    # work on a copy: normalization mutates its argument in place
    p = copy.deepcopy(population)
    p = normalization(p, M)
    p1 = np.zeros((1, M))
    p2 = srp
    p3 = p[:,-M:]
    g = point_to_line(p1, p2, p3) # Matrix (Population, Reference)
    idx = []
    arg = np.argmin(g, axis = 1)
    d = np.amin(g, axis = 1)
    for ind in np.unique(arg).tolist():
        # individuals whose nearest reference is 'ind'; keep the closest one
        f = [i[0] for i in np.argwhere(arg == ind).tolist()]
        idx.append(f[d[f].argsort()[0]])
    if (len(idx) < 5):
        # pad with leftover individuals, then cap at exactly 5
        idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
        idx = idx[:5]
    return idx
218 |
# Function: Sort Population by Association
def sort_population_by_association(srp, population, number_of_functions):
    """Keep only the individuals associated with a reference direction,
    ordered as returned by 'association'."""
    chosen = association(srp, population, number_of_functions)
    return population[chosen, :]
225 |
226 | ############################################################################
227 |
# NSGA III Function
def non_dominated_sorting_genetic_algorithm_III(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 5, mu = 1, eta = 1, k = 4, verbose = True):
    """Run NSGA-III and return the individuals associated with the reference
    points (at most one row per reference direction).

    'references' controls the Das-Dennis lattice density (at least 5) and 'k'
    scales the working population (k * number of reference points).

    Fix: the initial status line used to print even with verbose = False; it
    is now gated by the 'verbose' flag like the per-generation output.
    """
    count      = 0
    references = max(5, references)
    M          = len(list_of_functions)
    srp        = reference_points(M = M, p = references)
    size       = k*srp.shape[0]
    population = initial_population(size, min_values, max_values, list_of_functions)
    offspring  = initial_population(size, min_values, max_values, list_of_functions)
    if (verbose == True):
        print('Total Number of Points on Reference Hyperplane: ', int(srp.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        population = np.vstack([population, offspring])
        rank       = fast_non_dominated_sorting(population, number_of_functions = M)
        population = sort_population_by_rank(population, rank, rp = 1)
        population = sort_population_by_association(srp, population, number_of_functions = M)
        population = population[0:size,:]
        offspring  = breeding(population, min_values, max_values, mu, list_of_functions)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        count      = count + 1
    return population[ :srp.shape[0], :]
250 |
251 | ############################################################################
252 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/naemo.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: NAEMO (Neighborhood-sensitive Archived Evolutionary Many-objective Optimization)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: naemo.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1: placeholder objective. Users supply their own callable taking
# the list of decision variables and returning a scalar objective value.
def func_1():
    return

# Function 2: placeholder objective (same contract as func_1).
def func_2():
    return
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Create a random population: one row per individual, decision variables
    drawn uniformly inside [min, max] followed by their objective values."""
    n_var      = len(min_values)
    n_obj      = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(0, population_size):
        for col in range(0, n_var):
            population[row, col] = random.uniform(min_values[col], max_values[col])
        for m in range(1, n_obj + 1):
            population[row, -m] = list_of_functions[-m](list(population[row, 0:population.shape[1] - n_obj]))
    return population
40 |
41 | ############################################################################
42 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """Deb's fast non-dominated sorting.

    The last 'number_of_functions' columns of 'population' hold the objective
    values (minimization). Returns an (n, 1) array with the 1-based front
    index of each individual (1 = non-dominated front).

    Fix: the original kept a parallel 'rank' list whose entries were written
    during the sweep and then discarded when 'rank' was rebound to np.zeros;
    that dead bookkeeping is removed (output is unchanged).

    Note: dominance is tested with '<=' on every objective, so duplicated
    points weakly dominate each other and land in the same front.
    """
    size  = population.shape[0]
    S     = [[] for _ in range(0, size)]    # indices dominated by each p
    n     = [0  for _ in range(0, size)]    # how many individuals dominate p
    front = [[]]
    for p in range(0, size):
        for q in range(0, size):
            if ((population[p,-number_of_functions:] <= population[q,-number_of_functions:]).all()):
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-number_of_functions:] <= population[p,-number_of_functions:]).all()):
                n[p] = n[p] + 1
        if (n[p] == 0 and p not in front[0]):
            front[0].append(p)
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if (n[q] == 0 and q not in Q):
                    Q.append(q)
        i = i + 1
        front.append(Q)
    del front[len(front) - 1]               # drop the trailing empty front
    rank = np.zeros((size, 1))
    for i in range(0, len(front)):
        for j in range(0, len(front[i])):
            rank[front[i][j], 0] = i + 1
    return rank
80 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank, rp = 'none'):
    """Order the population by ascending front index when rp == 'none';
    otherwise keep only the individuals whose rank does not exceed rp."""
    if rp == 'none':
        order = np.argsort(rank[:, 0], axis = 0).tolist()
        return population[order, :]
    keep = np.where(rank <= rp)[0].tolist()
    return population[keep, :]
90 |
91 | ############################################################################
92 |
# Function: Offspring
def breeding(archive, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], k = 4):
    """Produce k children per niche of 'archive' (a list of lists of 1-D
    individuals). Niches with two or more members are recombined by SBX with
    a random distribution index; single-member niches are polynomially
    mutated on a random subset of decision variables.

    Fixes over the previous revision:
      * children are written to row 'count' (one row per child) instead of
        repeatedly overwriting row 'i' and leaving later rows at zero;
      * parents are taken from inside niche i (archive[i][parent]) instead of
        indexing other niches (archive[parent][0]), which could raise
        IndexError when the parent index exceeded the niche count;
      * a mutated child starts from its parent's genes rather than from zeros.

    NOTE(review): mu ~ N(0, 5) can make the SBX exponent 1/(mu + 1) blow up
    when mu is close to -1 — inherited from the original implementation.
    """
    n_var     = archive[0][0].shape[0] - len(list_of_functions)
    offspring = np.zeros((k*len(archive), archive[0][0].shape[0]))
    count     = 0
    for i in range (0, len(archive)):
        for _ in range(0, k):
            if (len(archive[i]) > 2):
                i1, i2 = random.sample(range(0, len(archive[i])), 2)
            elif (len(archive[i]) == 2):
                i1, i2 = 0, 1
            else:
                i1, i2 = 0, 0
            rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            if (rand > 0.5 and i1 != i2):
                parent_1, parent_2 = i1, i2
            else:
                parent_1, parent_2 = i2, i1
            # random subset of decision variables to mutate (single-parent case)
            n   = random.randrange(n_var) + 1
            dim = random.sample(range(0, n_var), n)
            for j in range(0, n_var):
                if (i1 != i2):
                    # SBX crossover between two members of niche i
                    rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                    rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                    rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                    mu     = np.random.normal(0, 5, 1)[0]
                    if (rand <= 0.5):
                        b_offspring = (2*rand_b)**(1/(mu + 1))
                    else:
                        b_offspring = (1/(2*(1 - rand_b)))**(1/(mu + 1))
                    if (rand_c >= 0.5):
                        offspring[count, j] = np.clip(((1 + b_offspring)*archive[i][parent_1][j] + (1 - b_offspring)*archive[i][parent_2][j])/2, min_values[j], max_values[j])
                    else:
                        offspring[count, j] = np.clip(((1 - b_offspring)*archive[i][parent_1][j] + (1 + b_offspring)*archive[i][parent_2][j])/2, min_values[j], max_values[j])
                else:
                    # single parent: copy the gene, then mutate it if selected
                    offspring[count, j] = archive[i][i1][j]
                    if (j in dim):
                        eta    = np.random.normal(0, 0.1, 1)[0]
                        rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                        rand_d = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                        if (rand <= 0.5):
                            d_mutation = (2*rand_d)**(1/(eta + 1)) - 1
                        else:
                            d_mutation = 1 - (2*(1 - rand_d))**(1/(eta + 1))
                        offspring[count, j] = np.clip((offspring[count, j] + d_mutation), min_values[j], max_values[j])
            # evaluate the objectives of the finished child
            for m in range (1, len(list_of_functions) + 1):
                offspring[count, -m] = list_of_functions[-m](offspring[count, 0:n_var])
            count = count + 1
    return offspring
151 |
152 | ############################################################################
153 |
# Function: Reference Points
def reference_points(M, p):
    """Das-Dennis simplex-lattice design: every M-dimensional weight vector
    whose components lie in {0, 1/p, ..., 1} and sum to one.
    Returns an array of shape (C(M + p - 1, p), M)."""
    def generator(r_points, M, Q, T, D):
        points = []
        if (D == M - 1):
            # the last coordinate takes whatever budget is left
            r_points[D] = Q / T
            points.append(r_points)
        else:
            for i in range(Q + 1):
                r_points[D] = i / T
                points.extend(generator(r_points.copy(), M, Q - i, T, D + 1))
        return points
    return np.array(generator(np.zeros(M), M, p, p, 0))
168 |
# Function: Association to Reference Point
def association(population, weights, M, theta = 5, solution = 'best'):
    """Assign every individual to its closest reference vector using the PBI
    (penalty-based boundary intersection) scalarization.

    Returns (population, nc): with solution == 'best', the rows of the best
    member per non-empty niche; otherwise one list of members per niche,
    sorted by PBI value. 'nc' maps niche index -> selected member indices."""
    # ideal point of the current objective values
    z_min = np.min(population[:,-M:], axis = 0)
    pbi = np.zeros((population.shape[0], weights.shape[0]))
    for i in range(0, population.shape[0]):
        for j in range(0, weights.shape[0]):
            # d1: projection length on the reference direction; d2: distance to it
            d1 = np.linalg.norm(np.dot((population[i,-M:].reshape(1, M) - z_min).T, weights[j,:].reshape(1, M) ))/np.linalg.norm(weights[j,:])
            d2 = np.linalg.norm(population[i,-M:] - z_min - d1*(weights[j,:]/np.linalg.norm(weights[j,:])))
            pbi[i,j] = d1 + theta*d2
    arg = np.argmin(pbi, axis = 1)
    d = np.amin(pbi, axis = 1)
    # NOTE: the [[]] * n idiom shares one list object, but every non-empty
    # niche is rebound via dict.update below, so no aliasing bug manifests.
    niche = dict( zip( np.arange(weights.shape[0]), [[]] * weights.shape[0]) )
    nc = dict( zip( np.arange(weights.shape[0]), [[]] * weights.shape[0]) )
    idx_u = set(arg)
    idx_ = []
    for i in idx_u:
        # members of niche i ordered by increasing PBI value
        X = list(np.where(arg == i)[0])
        Y = list(d[X])
        Z = [x for _, x in sorted(zip(Y, X))]
        niche.update({i: Z})
    for i in range(0, weights.shape[0]):
        if (len(niche[i]) != 0 and solution == 'best'):
            individual = niche[i]
            idx_adp = np.argmin(d[individual])
            idx_.append( individual[idx_adp] )
            nc.update({i: [individual[idx_adp]]})
        elif(len(niche[i]) != 0 and solution != 'best'):
            individual = niche[i]
            idx_.append( individual )
            nc.update({i: [individual]})
    if (solution == 'best'):
        population = population[idx_ , :]
    else:
        # return the full membership of every niche instead of single rows
        archive = [[] for _ in range(0, weights.shape[0])]
        for i in range(0, weights.shape[0]):
            idx = niche[i]
            if (len(idx) != 0):
                for j in range(0, len(idx)):
                    archive[i].append(population[idx[j],:])
        population = archive
    return population, nc
210 |
# Function: Create Arquive for Each Reference Vector
def create_arquive(weights, size, min_values, max_values, list_of_functions, theta, k):
    """Seed one archive (list of individuals) per reference vector.

    Runs k rounds of: sample a large random population, associate the best
    individual to each reference vector (PBI distance, see 'association') and
    append it to that vector's archive. Any archive still empty afterwards is
    filled with one fresh random individual so every niche has a member."""
    archive = [[] for _ in range(0, weights.shape[0])]
    for _ in range(0, k) :
        candidates = initial_population(size*15, min_values, max_values, list_of_functions)
        candidates, nc = association(candidates, weights, len(list_of_functions), theta)
        # 'candidates' rows follow the order of the selected niches, so 'j'
        # counts the skipped niches seen so far to realign key -> row.
        # NOTE(review): 'j' advances when a niche is skipped for being full as
        # well as when nc[key] is empty — verify the offset is intended.
        j = 0
        for key in nc:
            if (len(nc[key]) != 0 and len(archive[key]) <= k):
                archive[key].append(candidates[key-j,:])
            else:
                j = j + 1
    for i in range(0, len(archive)):
        if (len(archive[i]) == 0):
            candidate = initial_population(1, min_values, max_values, list_of_functions)
            archive[i].append(candidate[0,:])
    return archive
228 |
# Function: Select Only Non-Dominated Individuals
def clean_arquive(archive, M, k):
    """Return a new archive that keeps, per niche, only the rank-1
    (non-dominated) members, capped at k + 1 entries per niche."""
    new_archive = [[] for _ in range(0, len(archive))]
    for i, niche in enumerate(archive):
        rank = fast_non_dominated_sorting(np.asarray(niche), M)
        for j in range(0, rank.shape[0]):
            if (rank[j, 0] == 1 and len(new_archive[i]) <= k):
                new_archive[i].append(niche[j])
    return new_archive
238 |
239 | ############################################################################
240 |
# NAEMO Function neighborhood_sensitive_archived_evolutionary_many_objective_optimization
def neighborhood_sensitive_archived_evolutionary_many_objective_optimization(references = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, theta = 5, k = 4, verbose = True):
    """Run NAEMO: keep one archive of non-dominated solutions per reference
    direction, breed inside each niche, re-associate the offspring, and return
    the best individual per reference point.

    Fix: the initial status line used to print even with verbose = False; it
    is now gated by the 'verbose' flag like the per-generation output.
    """
    count      = 0
    references = max(5, references)
    M          = len(list_of_functions)
    weights    = reference_points(M = M, p = references)
    size       = k*weights.shape[0]
    archive    = create_arquive(weights, size, min_values, max_values, list_of_functions, theta, k)
    if (verbose == True):
        print('Total Number of Points on Reference Hyperplane: ', int(weights.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        archive   = clean_arquive(archive, M, k)
        offspring = breeding(archive, min_values, max_values, list_of_functions, k)
        # solution = '' returns the full niche membership of the offspring
        arch_1, _ = association(offspring, weights, M, theta, solution = '')
        archive   = [archive[i] + arch_1[i] for i in range(0, len(archive)) ]
        count     = count + 1
    archive     = clean_arquive(archive, M, k)
    archive     = [item for sublist in archive for item in sublist]
    solution    = np.array(archive)
    solution, _ = association(solution, weights, M, theta)
    return solution
263 |
264 | ############################################################################
265 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/omopso.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: OMOPSO (Optimized Multiobjective Particle Swarm Optimization)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2022). Project: pyMultiobjective, File: omopso.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1: placeholder objective. Users supply their own callable taking
# the list of decision variables and returning a scalar objective value.
def func_1():
    return

# Function 2: placeholder objective (same contract as func_1).
def func_2():
    return
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_position(swarm_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Create a random swarm: decision variables drawn uniformly inside their
    bounds, followed by the corresponding objective values."""
    n_var    = len(min_values)
    n_obj    = len(list_of_functions)
    position = np.zeros((swarm_size, n_var + n_obj))
    for row in range(0, swarm_size):
        for col in range(0, n_var):
            position[row, col] = random.uniform(min_values[col], max_values[col])
        for m in range(1, n_obj + 1):
            position[row, -m] = list_of_functions[-m](list(position[row, 0:position.shape[1] - n_obj]))
    return position
41 |
42 | ############################################################################
43 |
# Function: Velocity
def velocity_vector(position, leaders, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Build candidate particles: each row is a PSO-style combination of the
    particle and a randomly chosen leader, clipped to the bounds, with the
    objective columns re-evaluated.

    Fix: the leader index used to be sampled from range(0, len(leaders) - 1),
    so the last leader could never be selected.

    NOTE(review): both acceleration terms pull toward the same leader (there
    is no personal-best term) — inherited from the original implementation.
    """
    r1   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    r2   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    w    = np.random.uniform(low = -0.5, high = 0.5, size = 1)[0]
    c1   = np.random.uniform(low = -2.0, high = 2.0, size = 1)[0]
    c2   = np.random.uniform(low = -2.5, high = 2.5, size = 1)[0]
    vel_ = np.zeros((position.shape[0], position.shape[1]))
    for i in range(0, vel_.shape[0]):
        if (leaders.shape[0] > 2):
            ind_1 = random.randrange(leaders.shape[0])
        else:
            ind_1 = 0
        for j in range(0, len(min_values)):
            vel_[i,j] = np.clip(w*position[i,j] + c1*r1*(leaders[ind_1, j] - position[i,j]) + c2*r2*(leaders[ind_1, j] - position[i,j]), min_values[j], max_values[j])
        for k in range (1, len(list_of_functions) + 1):
            vel_[i,-k] = list_of_functions[-k](list(vel_[i,0:vel_.shape[1]-len(list_of_functions)]))
    return vel_
62 |
# Function: Update Position
def update_position(position, velocity, M):
    """Replace a particle by its candidate ('velocity' row) whenever the
    current particle does not weakly dominate the candidate."""
    for idx in range(0, position.shape[0]):
        keeps_old = dominance_function(solution_1 = position[idx,:], solution_2 = velocity[idx,:], number_of_functions = M)
        if (keeps_old == False):
            position[idx, :] = np.copy(velocity[idx,:])
    return position
69 |
70 | ############################################################################
71 |
# Function: Dominance
def dominance_function(solution_1, solution_2, number_of_functions = 2):
    """True when solution_1 weakly dominates solution_2, i.e. every one of the
    last 'number_of_functions' values of solution_1 is <= its counterpart."""
    return all(solution_1[-k] <= solution_2[-k] for k in range(1, number_of_functions + 1))
84 |
# Function: Crowding Distance (Adapted from PYMOO)
def crowding_distance_function(pop, M):
    """Crowding distance of every individual over the last M (objective)
    columns of 'pop'.

    Returns a flat array of +inf when there are at most two individuals,
    otherwise an (n, 1) column vector — callers handle both shapes."""
    position = copy.deepcopy(pop[:,-M:])
    position = position.reshape((pop.shape[0], M))
    if (position.shape[0] <= 2):
        # boundary-only fronts are maximally spread by definition
        return np.full( position.shape[0], float('+inf'))
    else:
        # sort each objective column independently (stable sort)
        arg_1 = np.argsort( position, axis = 0, kind = 'mergesort')
        position = position[arg_1, np.arange(M)]
        # gap to the previous/next neighbour per objective, +/- inf at the ends
        dist = np.concatenate([ position, np.full((1, M), np.inf)]) - np.concatenate([np.full((1, M), -np.inf), position])
        idx = np.where(dist == 0)
        a = np.copy(dist)
        b = np.copy(dist)
        # propagate gaps across ties so duplicates share a neighbour distance
        for i, j in zip(*idx):
            a[i, j] = a[i - 1, j]
        for i, j in reversed(list(zip(*idx))):
            b[i, j] = b[i + 1, j]
        # normalize by the objective ranges; NaN marks degenerate (flat) columns
        norm = np.max( position, axis = 0) - np.min(position, axis = 0)
        norm[norm == 0] = np.nan
        a, b = a[:-1]/norm, b[1:]/norm
        a[np.isnan(a)] = 0.0
        b[np.isnan(b)] = 0.0
        # undo the per-column sort and average the two neighbour gaps
        arg_2 = np.argsort(arg_1, axis = 0)
        crowding = np.sum(a[arg_2, np.arange(M)] + b[arg_2, np.arange(M)], axis = 1) / M
        crowding[np.isinf(crowding)] = float('+inf')
        crowding = crowding.reshape((-1,1))
        return crowding
112 |
# Function: Pareto Front
def pareto_front_points(pts, pf_min = True):
    """Boolean mask of the Pareto-efficient rows of 'pts'.

    pf_min = True treats all columns as minimized, otherwise maximized.
    Duplicated efficient points are flagged only once (the first occurrence
    in the internally sorted order keeps the flag)."""
    def pareto_front(pts, pf_min):
        pf = np.zeros(pts.shape[0], dtype = np.bool_)
        for i in range(0, pts.shape[0]):
            cost = pts[i, :]
            if (pf_min == True):
                # g_cost: rows that are nowhere worse; b_cost: rows better somewhere
                g_cost = np.logical_not(np.any(pts > cost, axis = 1))
                b_cost = np.any(pts < cost, axis = 1)
            else:
                g_cost = np.logical_not(np.any(pts < cost, axis = 1))
                b_cost = np.any(pts > cost, axis = 1)
            dominated = np.logical_and(g_cost, b_cost)
            if (np.any(pf) == True):
                # skip exact duplicates of points already flagged as efficient
                if (np.any(np.all(pts[pf] == cost, axis = 1)) == True):
                    continue
            if not (np.any(dominated[:i]) == True or np.any(dominated[i + 1 :]) == True):
                pf[i] = True
        return pf
    # scan in order of normalized objective sum, then scatter the flags back
    # to the original row order via the inverse of the sort permutation
    idx = np.argsort(((pts - pts.mean(axis = 0))/(pts.std(axis = 0) + 1e-7)).sum(axis = 1))
    pts = pts[idx]
    pf = pareto_front(pts, pf_min)
    pf[idx] = pf.copy()
    return pf
137 |
138 | ############################################################################
139 |
# Function: Leaders Selection
def selection_leaders(swarm_size, M, leaders, velocity, position):
    """Rebuild the leaders archive: pool old leaders, candidate particles and
    positions, keep the Pareto front, order by crowding distance and truncate
    to at most 'swarm_size' unique rows."""
    leaders = np.vstack([leaders, np.unique(velocity, axis = 0), position])
    idx = pareto_front_points(leaders[:, -M:], pf_min = True)
    # idx is a boolean mask; len(idx) > 0 only guards the empty-pool case
    if (len(idx) > 0):
        leaders = leaders[idx, :]
    crowding = crowding_distance_function(leaders, M)
    # descending crowding distance (prefer sparse regions)
    arg = np.argsort(crowding , axis = 0)[::-1].tolist()
    try:
        # crowding may be an (n, 1) column -> argsort yields [[i], ...]
        arg = [i[0] for i in arg ]
    except:
        # or a flat array when there are few rows -> already plain indices
        arg = [i for i in arg ]
    if (len(arg) > 0):
        leaders = leaders[arg, :]
    # NOTE(review): np.unique re-sorts rows lexicographically, discarding the
    # crowding order just established — verify the truncation is as intended.
    leaders = np.unique(leaders, axis = 0)
    leaders = leaders[:swarm_size, :]
    return leaders
157 |
# Function: Epsilon Dominance
def selection_dominance(eps_dom, position, M):
    """Merge 'position' into the epsilon archive and keep only the points on
    the Pareto front of the union (duplicate rows removed first)."""
    merged = np.unique(np.vstack([eps_dom, position]), axis = 0)
    mask   = pareto_front_points(merged[:, -M:], pf_min = True)
    return merged[mask, :]
166 |
167 | ############################################################################
168 |
# Function: Mutation
def mutation(position, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """Polynomial mutation of the swarm: perturb each decision variable with
    probability 'mutation_rate' (distribution index 'eta'), clip the result to
    the variable bounds and refresh the objective columns in place."""
    n_var = position.shape[1] - len(list_of_functions)
    draw  = lambda: int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    for row in range(0, position.shape[0]):
        for col in range(0, n_var):
            # mutate this coordinate only with probability 'mutation_rate'
            if (draw() < mutation_rate):
                u = draw()
                v = draw()
                if (u <= 0.5):
                    delta = (2*v)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - v))**(1/(eta + 1))
                position[row, col] = np.clip(position[row, col] + delta, min_values[col], max_values[col])
        # re-evaluate every objective for the (possibly) mutated particle
        for m in range(1, len(list_of_functions) + 1):
            position[row, -m] = list_of_functions[-m](position[row, 0:n_var])
    return position
188 |
189 | ############################################################################
190 |
# OMOPSO Function
def optimized_multiobjective_particle_swarm_optimization(swarm_size = 5, min_values = [-5,-5], max_values = [5,5], iterations = 500, list_of_functions = [func_1, func_2], mutation_rate = 0.1, eta = 3, verbose = True):
    """Run OMOPSO and return the epsilon-dominance archive (or the leaders
    archive when the epsilon archive ends up empty)."""
    M        = len(list_of_functions)
    position = initial_position(swarm_size, min_values, max_values, list_of_functions)
    velocity = initial_position(swarm_size, min_values, max_values, list_of_functions)
    leaders  = initial_position(swarm_size, min_values, max_values, list_of_functions)
    eps_dom  = initial_position(swarm_size, min_values, max_values, list_of_functions)
    counter  = 0
    while (counter <= iterations):
        if (verbose == True):
            print('Generation = ', counter)
        position = update_position(position, velocity, M)
        position = mutation(position, mutation_rate, eta, min_values, max_values, list_of_functions)
        velocity = velocity_vector(position, leaders, min_values, max_values, list_of_functions)
        leaders  = selection_leaders(swarm_size, M, leaders, velocity, position)
        eps_dom  = selection_dominance(eps_dom, np.vstack([position, velocity, leaders]), M)
        if (eps_dom.shape[0] > swarm_size):
            # archive overflow: keep the most spread-out members
            crowding = crowding_distance_function(eps_dom, M)
            order    = np.argsort(crowding , axis = 0)[::-1].tolist()
            try:
                order = [i[0] for i in order ]
            except:
                order = [i for i in order ]
            if (len(order) > 0):
                eps_dom = eps_dom[order, :]
            eps_dom = eps_dom[:swarm_size, :]
        counter = counter + 1
    if (len(eps_dom) == 0):
        return leaders
    else:
        return eps_dom
222 |
223 | ############################################################################
224 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/paes.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: PAES (Pareto Archived Evolution Strategy)
8 |
9 | # Citation:
# PEREIRA, V. (2021). Project: pyMultiobjective, File: paes.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return

# Function 2
def func_2():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random initial population.

    Each row holds the decision variables, uniformly sampled inside the box
    bounds, followed by one column per objective evaluated on those variables.
    """
    n_var = len(min_values)
    n_obj = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        genes = list(population[row, :n_var])
        for k in range(1, n_obj + 1):
            population[row, -k] = list_of_functions[-k](genes)
    return population
40 |
41 | ############################################################################
42 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """
    Rank individuals with Deb's fast non-dominated sorting.

    Dominance is tested on the last `number_of_functions` columns with a weak
    (<=) componentwise comparison, so duplicated points dominate each other.
    Returns an (n, 1) array where 1 marks the first (best) front, 2 the
    second, and so on.
    """
    size = population.shape[0]
    objs = population[:, -number_of_functions:]
    dominated_by = [[] for _ in range(size)]   # dominated_by[p]: indices p (weakly) dominates
    dom_count = [0] * size                     # how many solutions dominate p
    fronts = [[]]
    for p in range(size):
        for q in range(size):
            if (objs[p] <= objs[q]).all():
                if q not in dominated_by[p]:
                    dominated_by[p].append(q)
            elif (objs[q] <= objs[p]).all():
                dom_count[p] = dom_count[p] + 1
        if dom_count[p] == 0 and p not in fronts[0]:
            fronts[0].append(p)
    level = 0
    while fronts[level]:
        next_front = []
        for p in fronts[level]:
            for q in dominated_by[p]:
                dom_count[q] = dom_count[q] - 1
                if dom_count[q] == 0 and q not in next_front:
                    next_front.append(q)
        level = level + 1
        fronts.append(next_front)
    fronts.pop()
    rank = np.zeros((size, 1))
    for level, members in enumerate(fronts):
        for m in members:
            rank[m, 0] = level + 1
    return rank
80 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank):
    """Return (population, rank) reordered so the best (lowest) ranks come first."""
    order = np.argsort(rank[:, 0], axis = 0).tolist()
    return population[order, :], rank[order, :]
87 |
88 | ############################################################################
89 |
# Function: Mutation
def mutation(population, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Polynomial mutation of a copy of the population, then objective re-evaluation.

    The local mutation_rate of 2 makes the per-gene probability test always
    succeed, so every gene of every offspring is mutated (PAES is a pure
    mutation-driven strategy).
    """
    def rand01():
        # uniform draw in [0, 1] from OS entropy
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    mutation_rate = 2
    offspring = np.copy(population)
    n_var = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        for j in range(0, n_var):
            if rand01() < mutation_rate:
                u, r = rand01(), rand01()
                if u <= 0.5:
                    delta = (2 * r) ** (1 / (eta + 1)) - 1
                else:
                    delta = 1 - (2 * (1 - r)) ** (1 / (eta + 1))
                offspring[i, j] = np.clip(offspring[i, j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i, -k] = list_of_functions[-k](offspring[i, 0:n_var])
    return offspring
111 |
112 | ############################################################################
113 |
# PAES Function
def pareto_archived_evolution_strategy(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, eta = 1, verbose = True):
    """
    Run PAES: a (mu + lambda)-style loop of mutation plus non-dominated sorting.

    Each generation stacks parents and mutated offspring, ranks the union by
    Pareto front, and keeps the best `population_size` rows. Rows hold the
    decision variables followed by one column per objective.
    """
    count = 0
    M = len(list_of_functions)
    population = initial_population(population_size, min_values, max_values, list_of_functions)
    offspring = mutation(population, eta, min_values, max_values, list_of_functions)
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        # survivor selection: rank parents + offspring, truncate to the best rows
        population = np.vstack([population, offspring])
        rank = fast_non_dominated_sorting(population, number_of_functions = M)
        population, _ = sort_population_by_rank(population, rank)
        population = population[0:population_size,:]
        offspring = mutation(population, eta, min_values, max_values, list_of_functions)
        count = count + 1
    return population
130 |
131 | ##############################################################################
132 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/rvea.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: RVEA (Reference Vector Guided Evolutionary Algorithm)
8 |
9 | # Citation:
# PEREIRA, V. (2021). Project: pyMultiobjective, File: rvea.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return

# Function 2
def func_2():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random initial population.

    Each row holds the decision variables, uniformly sampled inside the box
    bounds, followed by one column per objective evaluated on those variables.
    """
    n_var = len(min_values)
    n_obj = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        genes = list(population[row, :n_var])
        for k in range(1, n_obj + 1):
            population[row, -k] = list_of_functions[-k](genes)
    return population
40 |
41 | ############################################################################
42 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2], size = 5):
    """
    Create `size` offspring by simulated binary crossover (SBX).

    Two parents are drawn per child (in random order), each gene is recombined
    with a spread factor controlled by `mu`, clipped to the box bounds, and
    the objective columns are re-evaluated.

    NOTE(review): parents are sampled from range(0, len(population) - 1), so
    the last row can never be picked when sampling — looks like an off-by-one;
    kept as-is to preserve behavior.
    """
    def rand01():
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_obj = len(list_of_functions)
    offspring = np.zeros((size, population.shape[1]))
    n_var = offspring.shape[1] - n_obj
    for child in range(0, size):
        if len(population) - 1 >= 3:
            i1, i2 = random.sample(range(0, len(population) - 1), 2)
        elif len(population) - 1 == 0:
            i1, i2 = 0, 0
        else:
            i1, i2 = 0, 1
        if rand01() > 0.5:
            parent_1, parent_2 = i1, i2
        else:
            parent_1, parent_2 = i2, i1
        for j in range(0, n_var):
            u, rand_b, coin = rand01(), rand01(), rand01()
            if u <= 0.5:
                beta = (2 * rand_b) ** (1 / (mu + 1))
            else:
                beta = (1 / (2 * (1 - rand_b))) ** (1 / (mu + 1))
            if coin >= 0.5:
                gene = ((1 + beta) * population[parent_1, j] + (1 - beta) * population[parent_2, j]) / 2
            else:
                gene = ((1 - beta) * population[parent_1, j] + (1 + beta) * population[parent_2, j]) / 2
            offspring[child, j] = np.clip(gene, min_values[j], max_values[j])
        for k in range(1, n_obj + 1):
            offspring[child, -k] = list_of_functions[-k](offspring[child, 0:n_var])
    return offspring
82 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    In-place polynomial mutation: each gene mutates with probability
    `mutation_rate` using a polynomial step of index `eta`, clipped to the
    bounds; the objective columns are then re-evaluated.
    """
    def rand01():
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_var = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        for j in range(0, n_var):
            if rand01() < mutation_rate:
                u, r = rand01(), rand01()
                if u <= 0.5:
                    delta = (2 * r) ** (1 / (eta + 1)) - 1
                else:
                    delta = 1 - (2 * (1 - r)) ** (1 / (eta + 1))
                offspring[i, j] = np.clip(offspring[i, j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i, -k] = list_of_functions[-k](offspring[i, 0:n_var])
    return offspring
102 |
103 | ############################################################################
104 |
# Function: Reference Points
def reference_points(M, p):
    """
    Das–Dennis simplex-lattice reference points: every M-dimensional weight
    vector whose components are non-negative multiples of 1/p summing to 1.
    """
    def build(point, left, depth):
        # 'left' is the integer budget still to distribute over dims depth..M-1
        if depth == M - 1:
            point[depth] = left / (1.0 * p)
            return [point]
        rows = []
        for i in range(left + 1):
            point[depth] = i / p
            rows.extend(build(point.copy(), left - i, depth + 1))
        return rows
    return np.array(build(np.zeros(M), p, 0))
119 |
# Function: Nearest Vectors
def nearest_vectors(weights):
    """
    For each (unit) reference vector return the angle to its closest
    neighbour: the second-largest cosine in each row of the Gram matrix,
    clipped to [0, 1] before arccos.
    """
    gram = np.dot(weights, weights.T)
    second_best = np.sort(gram, axis = 1)[:, -2]
    return np.arccos(np.clip(second_best, 0, 1))
125 |
126 | # Function: Angle Penalized Distance Selection
127 | def selection(population, offspring, M, weights, neighbours, alpha, t, t_max):
128 | population = np.vstack([population, offspring])
129 | z_min = np.min(population[:,-M:], axis = 0)
130 | f = population[:,-M:] - z_min
131 | cos = np.dot(f, weights.T) / ( np.linalg.norm(f, axis = 1).reshape(-1, 1) + 1e-21 )
132 | arc_c = np.arccos( np.clip(cos, 0, 1) )
133 | idx = np.argmax(cos, axis = 1)
134 | niche = dict( zip( np.arange(weights.shape[0]), [[]] * weights.shape[0]) )
135 | idx_u = set(idx)
136 | for i in idx_u:
137 | niche.update({i: list(np.where(idx == i)[0])})
138 | idx_ = []
139 | for i in range(0, weights.shape[0]):
140 | if (len(niche[i]) != 0):
141 | individual = niche[i]
142 | arc_c_ind = arc_c[individual, i]
143 | arc_c_ind = arc_c_ind / neighbours[i]
144 | d = np.linalg.norm(population[individual, -M:] - z_min, axis = 1) * (1 + M * ((t / t_max) ** alpha ) * arc_c_ind)
145 | idx_adp = np.argmin(d)
146 | idx_.append( individual[idx_adp] )
147 | return population[idx_ , :]
148 |
# Function: Adaptation
def adaptation(population, weights, weights_, M):
    """
    Rescale the base reference vectors by the current objective ranges and
    re-normalize each row; neighbour angles are recomputed for the new set.
    Returns (weights, neighbours).
    """
    obj = population[:, -M:]
    span = np.max(obj, axis = 0) - np.min(obj, axis = 0)
    scaled = weights_ * span
    scaled = scaled / np.linalg.norm(scaled, axis = 1).reshape(-1, 1)
    return scaled, nearest_vectors(scaled)
157 |
158 | ############################################################################
159 |
# RVEA Function
def reference_vector_guided_evolutionary_algorithm(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, k = 4, alpha = 2, fr = 0.2, verbose = True):
    """
    Run RVEA and return the head of the final population (one row per
    reference vector). Rows hold decision variables followed by one column
    per objective. `k` scales the population size relative to the number of
    reference points; `fr` controls the reference-vector adaptation schedule.
    """
    count = 0
    references = max(5, references)
    M = len(list_of_functions)
    weights = reference_points(M = M, p = references)
    # NOTE(review): this divides by the Frobenius norm of the whole matrix,
    # not per row; adaptation() later normalizes row-wise — confirm intent.
    weights = weights/np.linalg.norm(weights)
    weights_ = np.copy(weights)
    neighbours = nearest_vectors(weights)
    size = k*weights.shape[0]
    population = initial_population(size, min_values, max_values, list_of_functions)
    print('Total Number of Points on Reference Hyperplane: ', int(weights.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        offspring = breeding(population, min_values, max_values, mu, list_of_functions, size)
        offspring = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        population = selection(population, offspring, M, weights, neighbours, alpha, count, generations)
        # NOTE(review): (count/generations) // fr == 0 holds for every
        # generation with count < fr*generations and never afterwards, so
        # adaptation runs only in the early fraction of the run; the RVEA
        # paper uses a periodic trigger — verify this condition.
        if ( (count/generations) // fr == 0 and count != 0):
            weights, neighbours = adaptation(population, weights, weights_, M)
        count = count + 1
    return population[:weights.shape[0],:]
182 |
183 | ##############################################################################
184 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/s_ii.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: SPEA2 (Strength Pareto Evolutionary Algorithm 2)
8 |
9 | # Citation:
# PEREIRA, V. (2021). Project: pyMultiobjective, File: s_ii.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import numpy as np
16 | import random
17 | import os
18 |
19 | ############################################################################
20 |
# Function 1
def func_1():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return

# Function 2
def func_2():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return
28 |
29 | ############################################################################
30 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random initial population.

    Each row holds the decision variables, uniformly sampled inside the box
    bounds, followed by one column per objective evaluated on those variables.
    """
    n_var = len(min_values)
    n_obj = len(list_of_functions)
    population = np.zeros((population_size, n_var + n_obj))
    for row in range(population_size):
        population[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        genes = list(population[row, :n_var])
        for k in range(1, n_obj + 1):
            population[row, -k] = list_of_functions[-k](genes)
    return population
40 |
41 | ############################################################################
42 |
# Function: Dominance
def dominance_function(solution_1, solution_2, number_of_functions = 2):
    """
    Weak Pareto dominance on the trailing objective columns: True when every
    objective of solution_1 is <= the matching objective of solution_2
    (equal vectors therefore dominate each other).
    """
    return all(solution_1[-k] <= solution_2[-k] for k in range(1, number_of_functions + 1))
55 |
# Function: Raw Fitness
def raw_fitness_function(population, number_of_functions = 2):
    """
    SPEA2 raw fitness: strength[i] counts how many solutions i dominates;
    raw_fitness[j] sums the strengths of every solution dominating j
    (0 means j is non-dominated; higher is worse).
    """
    size = population.shape[0]
    strength = np.zeros((size, 1))
    raw_fitness = np.zeros((size, 1))
    # first pass: strength of every individual
    for i in range(0, size):
        strength[i, 0] = sum(1 for j in range(0, size)
                             if i != j and dominance_function(solution_1 = population[i,:], solution_2 = population[j,:], number_of_functions = number_of_functions))
    # second pass: accumulate the strengths of each individual's dominators
    for i in range(0, size):
        for j in range(0, size):
            if i != j and dominance_function(solution_1 = population[i,:], solution_2 = population[j,:], number_of_functions = number_of_functions):
                raw_fitness[j, 0] = raw_fitness[j, 0] + strength[i, 0]
    return raw_fitness
71 |
# Function: Build Distance Matrix
def euclidean_distance(coordinates):
    """Pairwise Euclidean distance matrix of the row vectors in `coordinates`."""
    pts = coordinates
    col = pts.reshape(np.prod(pts.shape[:-1]), 1, pts.shape[-1])
    diff = col - pts
    return np.sqrt(np.einsum('ijk,ijk->ij', diff, diff)).squeeze()
77 |
# Function: Fitness
def fitness_calculation(population, raw_fitness, number_of_functions = 2):
    """
    SPEA2 fitness: raw fitness plus a density term 1/(sigma + 2) derived from
    a k-th neighbour distance in objective space, with k = floor(sqrt(N)) - 1.
    Returns an (N, 1) column; lower is better.
    """
    k = int(len(population)**(1/2)) - 1
    fitness = np.zeros((population.shape[0], 1))
    # pairwise distances over the objective columns only
    distance = euclidean_distance(population[:,population.shape[1]-number_of_functions:])
    for i in range(0, fitness.shape[0]):
        # NOTE(review): rows are sorted by distance-to-i, transposed, then
        # indexed [i, k]; presumably this is meant to pick i's k-th nearest
        # neighbour distance — verify the indexing against the SPEA2 paper.
        distance_ordered = (distance[distance[:,i].argsort()]).T
        fitness[i,0] = raw_fitness[i,0] + 1/(distance_ordered[i,k] + 2)
    return fitness
87 |
# Function: Sort Population by Fitness
def sort_population_by_fitness(population, fitness):
    """Return (population, fitness) reordered by ascending fitness (lower is better)."""
    order = np.argsort(fitness[:, -1]).tolist()
    return population[order, :], fitness[order, :]
94 |
# Function: Selection
def roulette_wheel(fitness_new):
    """
    Fitness-proportional (roulette-wheel) selection of one index.

    Lower SPEA2 fitness is better, so each value is transformed with
    1/(1 + f + |min(f)|) before building the cumulative distribution; the
    |min| shift keeps every transformed value positive even for negative
    fitness. Returns the index of the selected individual.

    Bug fixed: the transformation previously read from the freshly zeroed
    work array instead of `fitness_new`, which made every individual equally
    likely regardless of its fitness. Also renamed the local `random`, which
    shadowed the random module.
    """
    fitness = np.zeros((fitness_new.shape[0], 2))
    shift = abs(fitness_new[:,0].min())
    for i in range(0, fitness.shape[0]):
        fitness[i,0] = 1/(1 + fitness_new[i,0] + shift)
    fit_sum = fitness[:,0].sum()
    # cumulative, then normalized to [0, 1]
    fitness[0,1] = fitness[0,0]
    for i in range(1, fitness.shape[0]):
        fitness[i,1] = (fitness[i,0] + fitness[i-1,1])
    for i in range(0, fitness.shape[0]):
        fitness[i,1] = fitness[i,1]/fit_sum
    ix = 0
    rand_uniform = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    for i in range(0, fitness.shape[0]):
        if (rand_uniform <= fitness[i, 1]):
            ix = i
            break
    return ix
113 |
114 | ############################################################################
115 |
# Function: Offspring
def breeding(population, fitness, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """
    SBX crossover producing one child per population row.

    Parents are picked by roulette-wheel selection on `fitness`; if both picks
    coincide, the second parent is redrawn uniformly. Genes are recombined
    with a spread factor controlled by `mu`, clipped to the bounds, and the
    objective columns re-evaluated.
    """
    def rand01():
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    offspring = np.copy(population)
    n_var = offspring.shape[1] - len(list_of_functions)
    for child in range(0, offspring.shape[0]):
        parent_1, parent_2 = roulette_wheel(fitness), roulette_wheel(fitness)
        while parent_1 == parent_2:
            parent_2 = random.sample(range(0, len(population) - 1), 1)[0]
        for j in range(0, n_var):
            u, rand_b, coin = rand01(), rand01(), rand01()
            if u <= 0.5:
                beta = (2 * rand_b) ** (1 / (mu + 1))
            else:
                beta = (1 / (2 * (1 - rand_b))) ** (1 / (mu + 1))
            if coin >= 0.5:
                gene = ((1 + beta) * population[parent_1, j] + (1 - beta) * population[parent_2, j]) / 2
            else:
                gene = ((1 - beta) * population[parent_1, j] + (1 + beta) * population[parent_2, j]) / 2
            offspring[child, j] = np.clip(gene, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[child, -k] = list_of_functions[-k](offspring[child, 0:n_var])
    return offspring
141 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    In-place polynomial mutation: each gene mutates with probability
    `mutation_rate` using a polynomial step of index `eta`, clipped to the
    bounds; the objective columns are then re-evaluated.
    """
    def rand01():
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_var = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        for j in range(0, n_var):
            if rand01() < mutation_rate:
                u, r = rand01(), rand01()
                if u <= 0.5:
                    delta = (2 * r) ** (1 / (eta + 1)) - 1
                else:
                    delta = 1 - (2 * (1 - r)) ** (1 / (eta + 1))
                offspring[i, j] = np.clip(offspring[i, j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i, -k] = list_of_functions[-k](offspring[i, 0:n_var])
    return offspring
161 |
162 | ############################################################################
163 |
# SPEA-2 Function
def strength_pareto_evolutionary_algorithm_2(population_size = 5, archive_size = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 50, mu = 1, eta = 1, verbose = True):
    """
    Run SPEA2 and return the final archive.

    Each generation merges population and archive, computes SPEA2 fitness
    (raw fitness + density), truncates the sorted pool into the next
    population and archive, and breeds/mutates the population. Rows hold
    decision variables followed by one column per objective.
    """
    count = 0
    population = initial_population(population_size = population_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    archive = initial_population(population_size = archive_size, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        population = np.vstack([population, archive])
        raw_fitness = raw_fitness_function(population, number_of_functions = len(list_of_functions))
        fitness = fitness_calculation(population, raw_fitness, number_of_functions = len(list_of_functions))
        population, fitness = sort_population_by_fitness(population, fitness)
        # NOTE(review): fitness is truncated to archive_size while population
        # keeps population_size rows; breeding's roulette wheel therefore only
        # ever selects parents among the first archive_size rows when
        # archive_size < population_size — confirm this is intended.
        population, archive, fitness = population[0:population_size,:], population[0:archive_size,:], fitness[0:archive_size,:]
        population = breeding(population, fitness, mu = mu, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
        population = mutation(population, mutation_rate = mutation_rate, eta = eta, min_values = min_values, max_values = max_values, list_of_functions = list_of_functions)
        count = count + 1
    return archive
181 |
182 | ############################################################################
183 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/smpso.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: SMPSO (Speed-Constrained Multiobjective Particle Swarm Optimization)
8 |
9 | # Citation:
# PEREIRA, V. (2021). Project: pyMultiobjective, File: smpso.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1
def func_1():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return

# Function 2
def func_2():
    """Placeholder objective; callers pass real objective callables via list_of_functions."""
    return
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_position(swarm_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random swarm: each row holds the particle's decision variables,
    uniformly sampled inside the box bounds, followed by one column per
    objective evaluated on those variables.
    """
    n_var = len(min_values)
    n_obj = len(list_of_functions)
    position = np.zeros((swarm_size, n_var + n_obj))
    for row in range(swarm_size):
        position[row, :n_var] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
        genes = list(position[row, :n_var])
        for k in range(1, n_obj + 1):
            position[row, -k] = list_of_functions[-k](genes)
    return position
41 |
42 | ############################################################################
43 |
# Function: Update Position
def update_position(position, velocity, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Move every particle by its velocity, clip to the box bounds, and
    re-evaluate the objective columns in place.
    """
    n_var = len(min_values)
    for i in range(0, position.shape[0]):
        for j in range(0, n_var):
            position[i, j] = np.clip(position[i, j] + velocity[i, j], min_values[j], max_values[j])
        genes = list(position[i, 0:position.shape[1] - len(list_of_functions)])
        for k in range(1, len(list_of_functions) + 1):
            position[i, -k] = list_of_functions[-k](genes)
    return position
52 |
# Function: Initialize Velocity
def initial_velocity(position, min_values = [-5,-5], max_values = [5,5]):
    """Random initial velocities, one row per particle, sampled uniformly inside the bounds."""
    rows = position.shape[0]
    cols = len(min_values)
    velocity = np.zeros((rows, cols))
    for i in range(rows):
        velocity[i, :] = [random.uniform(lo, hi) for lo, hi in zip(min_values, max_values)]
    return velocity
60 |
# Function: Velocity
def velocity_vector(position, velocity_, archive, M, min_values = [-5,-5], max_values = [5,5]):
    """
    SMPSO constriction-based velocity update.

    Draws inertia w and acceleration coefficients c1, c2 at random, applies
    Clerc's constriction factor chi when c1 + c2 > 4 (chi is 1 otherwise),
    pulls every particle toward two leaders sampled from the external archive
    (the more crowded of the two picks becomes the primary leader), and clips
    each velocity component to +/- half the variable's range.

    Bug fixed: c1 was left as a 1-element ndarray (missing the [0] taken for
    c2), which relied on deprecated NumPy coercion of size-1 arrays to
    scalars both in the `c1 + c2 > 4` test and in the per-component
    assignment below.
    """
    r1 = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    r2 = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    w = np.random.uniform(low = 0.1, high = 0.5, size = 1)[0]
    c1 = np.random.uniform(low = 1.5, high = 2.5, size = 1)[0]
    c2 = np.random.uniform(low = 1.5, high = 2.5, size = 1)[0]
    phi = 0
    if (c1 + c2 > 4):
        phi = c1 + c2
    else:
        phi = 0
    # Clerc & Kennedy constriction coefficient (equals 1 when phi == 0)
    chi = 2 / (2 - phi - ( (phi**2) - 4*phi )**(1/2))
    velocity = np.zeros((position.shape[0], velocity_.shape[1]))
    crowding = crowding_distance_function(archive, M)
    delta = [(max_values[i] - min_values[i])/2 for i in range(0, len(min_values))]
    if (archive.shape[0] > 2):
        ind_1, ind_2 = random.sample(range(0, len(archive) - 1), 2)
        # keep the leader with the larger crowding distance as the primary guide
        if (crowding[ind_1,0] < crowding[ind_2,0]):
            ind_1, ind_2 = ind_2, ind_1
    else:
        ind_1 = 0
        ind_2 = 0
    for i in range(0, velocity.shape[0]):
        for j in range(0, velocity.shape[1]):
            velocity[i,j] = (w*velocity_[i,j] + c1*r1*(archive[ind_1, j] - position[i,j]) + c2*r2*(archive[ind_2, j] - position[i,j]))*chi
            velocity[i,j] = np.clip(velocity[i,j], -delta[j], delta[j])
    return velocity
89 |
90 | ############################################################################
91 |
# Function: Crowding Distance (Adapted from PYMOO)
def crowding_distance_function(pop, M):
    """
    Crowding distance over each row's last M columns (adapted from pymoo).

    Returns an (n, 1) column of distances for n > 2; for n <= 2 every point
    is a boundary point and a flat 1-D array filled with 1e11 is returned
    (note the differing shape in that branch).
    """
    infinity = 1e+11
    position = copy.deepcopy(pop[:,-M:])
    position = position.reshape((pop.shape[0], M))
    if (position.shape[0] <= 2):
        return np.full( position.shape[0], infinity)
    else:
        # sort each objective column independently
        arg_1 = np.argsort( position, axis = 0, kind = 'mergesort')
        position = position[arg_1, np.arange(M)]
        # neighbour gaps per column, padded with +/- inf at the extremes
        dist = np.concatenate([ position, np.full((1, M), np.inf)]) - np.concatenate([np.full((1, M), -np.inf), position])
        idx = np.where(dist == 0)
        a = np.copy(dist)
        b = np.copy(dist)
        # propagate the gap of the previous/next distinct value across ties
        for i, j in zip(*idx):
            a[i, j] = a[i - 1, j]
        for i, j in reversed(list(zip(*idx))):
            b[i, j] = b[i + 1, j]
        # normalize by each column's range (NaN marks a constant column)
        norm = np.max( position, axis = 0) - np.min(position, axis = 0)
        norm[norm == 0] = np.nan
        a, b = a[:-1]/norm, b[1:]/norm
        a[np.isnan(a)] = 0.0
        b[np.isnan(b)] = 0.0
        # undo the per-column sort, then average the two one-sided gaps
        arg_2 = np.argsort(arg_1, axis = 0)
        crowding = np.sum(a[arg_2, np.arange(M)] + b[arg_2, np.arange(M)], axis = 1) / M
        crowding[np.isinf(crowding)] = infinity
        crowding = crowding.reshape((-1,1))
        return crowding
120 |
# Function: Pareto Front
def pareto_front_points(pts, pf_min = True):
    """
    Boolean mask of the Pareto-efficient rows of `pts`.

    pf_min selects minimization (True) or maximization (False). Duplicates of
    a point already accepted into the front are excluded. Rows are scanned in
    a normalized-sum order so most dominated points are rejected early; the
    mask is mapped back to the original row order before returning.
    """
    def efficient_mask(points, minimize):
        mask = np.zeros(points.shape[0], dtype = np.bool_)
        for i in range(0, points.shape[0]):
            cost = points[i, :]
            if minimize:
                as_good = np.logical_not(np.any(points > cost, axis = 1))
                strictly_better = np.any(points < cost, axis = 1)
            else:
                as_good = np.logical_not(np.any(points < cost, axis = 1))
                strictly_better = np.any(points > cost, axis = 1)
            # dominators[j] is True when row j strictly dominates cost
            dominators = np.logical_and(as_good, strictly_better)
            if np.any(mask) and np.any(np.all(points[mask] == cost, axis = 1)):
                continue
            if not (np.any(dominators[:i]) or np.any(dominators[i + 1:])):
                mask[i] = True
        return mask
    order = np.argsort(((pts - pts.mean(axis = 0))/(pts.std(axis = 0) + 1e-7)).sum(axis = 1))
    pf = efficient_mask(pts[order], pf_min)
    pf[order] = pf.copy()
    return pf
145 |
146 | ############################################################################
147 |
# Function: Epsilon Dominance
def selection_dominance(eps_dom, position, M):
    """
    Merge the archive with new rows, drop exact duplicates, and keep only the
    Pareto-efficient rows (minimization on the last M columns).
    """
    merged = np.unique(np.vstack([eps_dom, position]), axis = 0)
    keep = pareto_front_points(merged[:, -M:], pf_min = True)
    return merged[keep, :]
156 |
157 | ############################################################################
158 |
# Function: Mutation
def mutation(position, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    In-place polynomial mutation of each particle's decision variables
    (per-gene probability `mutation_rate`, distribution index `eta`),
    followed by re-evaluation of the objective columns.
    """
    def rand01():
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_var = position.shape[1] - len(list_of_functions)
    for i in range(0, position.shape[0]):
        for j in range(0, n_var):
            if rand01() < mutation_rate:
                u, r = rand01(), rand01()
                if u <= 0.5:
                    delta = (2 * r) ** (1 / (eta + 1)) - 1
                else:
                    delta = 1 - (2 * (1 - r)) ** (1 / (eta + 1))
                position[i, j] = np.clip(position[i, j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            position[i, -k] = list_of_functions[-k](position[i, 0:n_var])
    return position
178 |
179 | ############################################################################
180 |
# SMPSO Function
def speed_constrained_multiobjective_particle_swarm_optimization(swarm_size = 5, min_values = [-5,-5], max_values = [5,5], iterations = 50, list_of_functions = [func_1, func_2], mutation_rate = 0.1, eta = 3, verbose = True):
    """
    Run SMPSO and return the epsilon-dominance archive of non-dominated rows.

    Every row holds the decision variables followed by one column per
    objective; the archive is capped at `swarm_size` rows, discarding the most
    crowded members first.
    """
    count = 0
    M = len(list_of_functions)
    position = initial_position(swarm_size, min_values, max_values, list_of_functions)
    eps_dom = initial_position(swarm_size, min_values, max_values, list_of_functions)
    velocity = initial_velocity(position, min_values, max_values)
    while (count <= iterations):
        if (verbose == True):
            print('Generation = ', count)
        position = update_position(position, velocity, min_values, max_values, list_of_functions)
        position = mutation(position, mutation_rate, eta, min_values, max_values, list_of_functions)
        eps_dom = selection_dominance(eps_dom, position, M)
        if (eps_dom.shape[0] > swarm_size):
            # archive overflow: keep the rows with the largest crowding distance
            crowding = crowding_distance_function(eps_dom, M)
            arg = np.argsort(crowding , axis = 0)[::-1].tolist()
            try:
                arg = [i[0] for i in arg ]
            except:
                arg = [i for i in arg ]
            if (len(arg) > 0):
                eps_dom = eps_dom[arg, :]
            eps_dom = eps_dom[:swarm_size, :]
        velocity = velocity_vector(position, velocity, eps_dom, M, min_values, max_values)
        count = count + 1
    return eps_dom
207 |
208 | ############################################################################
209 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/sms_emoa.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: SMS-EMOA (S-Metric Selection Evolutionary Multiobjective Optimization Algorithm)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: sms_emoa.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import pygmo as pg
18 | import random
19 | import os
20 |
21 | ############################################################################
22 |
# Function 1
def func_1():
    """Placeholder objective function; supply a real objective via list_of_functions."""
    return None
26 |
# Function 2
def func_2():
    """Placeholder objective function; supply a real objective via list_of_functions."""
    return None
30 |
31 | ############################################################################
32 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random population matrix.

    Each row stores the decision variables (drawn uniformly inside the bounds)
    followed by the corresponding objective values.
    """
    n_vars = len(min_values)
    n_objs = len(list_of_functions)
    population = np.zeros((population_size, n_vars + n_objs))
    for i in range(0, population_size):
        population[i, :n_vars] = [random.uniform(min_values[j], max_values[j]) for j in range(0, n_vars)]
        for k in range(1, n_objs + 1):
            population[i,-k] = list_of_functions[-k](list(population[i, 0:n_vars]))
    return population
42 |
43 | ############################################################################
44 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """
    Rank a population by Pareto dominance (minimization).

    Objective values are read from the last number_of_functions columns.
    Returns a (rows, 1) array where 1 is the first (non-dominated) front,
    2 the second, and so on.
    """
    # S[p]: solutions dominated by p; n[p]: how many solutions dominate p.
    S = [[] for i in range(0, population.shape[0])]
    front = [[]]
    n = [0 for i in range(0, population.shape[0])]
    rank = [0 for i in range(0, population.shape[0])]
    for p in range(0, population.shape[0]):
        S[p] = []
        n[p] = 0
        for q in range(0, population.shape[0]):
            # NOTE(review): a weak '<=' on all objectives is used, so duplicated
            # points land in each other's S sets — presumably intentional; verify.
            if ((population[p,-number_of_functions:] <= population[q,-number_of_functions:]).all()):
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-number_of_functions:] <= population[p,-number_of_functions:]).all()):
                n[p] = n[p] + 1
        if (n[p] == 0):
            rank[p] = 0
            if (p not in front[0]):
                front[0].append(p)
    # Peel off successive fronts: removing front i frees the members of front i+1.
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if(n[q] == 0):
                    rank[q] = i+1
                    if q not in Q:
                        Q.append(q)
        i = i+1
        front.append(Q)
    # The loop always appends one trailing empty front; drop it.
    del front[len(front)-1]
    # Re-encode ranks as a column vector, 1-based by front index.
    rank = np.zeros((population.shape[0], 1))
    for i in range(0, len(front)):
        for j in range(0, len(front[i])):
            rank[front[i][j], 0] = i + 1
    return rank
82 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank, rp = 'none'):
    """
    Order a population by non-dominated rank.

    rp = 'none': stable sort of all rows by ascending rank.
    rp = number: keep only rows whose rank <= rp, relaxing the threshold
    until at least 5 rows (or the whole population) are retained.
    """
    control = 1
    if (rp == 'none'):
        idx = np.argsort(rank[:,0], axis = 0).tolist()
        population = population[idx,:]
    else:
        idx = np.where(rank <= rp)[0].tolist()
        # Bug fix: cap the target at the population size; a population with
        # fewer than 5 rows previously made this loop spin forever.
        target = min(5, rank.shape[0])
        while (len(idx) < target):
            idx = np.where(rank <= rp + control)[0].tolist()
            control = control + 1
        population = population[idx,:]
    return population
96 |
97 | ############################################################################
98 |
# Function: Offspring
def breeding(population, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """
    Produce an offspring population via simulated binary crossover (SBX).

    Two parents are drawn at random for each offspring; mu is the SBX
    distribution index. Objective columns are re-evaluated per offspring.
    """
    offspring = np.copy(population)
    parent_1 = 0
    parent_2 = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        # Bug fix: sample over the full index range. range(0, len(population) - 1)
        # silently excluded the last individual from ever being a parent.
        i1, i2, i3, i4 = random.sample(range(0, len(population)), 4)
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_1 = i1
        else:
            parent_1 = i2
        rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
        if (rand > 0.5):
            parent_2 = i3
        else:
            parent_2 = i4
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                b_offspring = 2*(rand_b)
                b_offspring = b_offspring**(1/(mu + 1))
            elif (rand > 0.5):
                b_offspring = 1/(2*(1 - rand_b))
                b_offspring = b_offspring**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
134 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index eta) to the decision
    variables, in place, then re-evaluate the objective columns.
    """
    def draw():
        # Uniform sample in [0, 1] taken from the OS entropy pool.
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_vars = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        for j in range(0, n_vars):
            if (draw() < mutation_rate):
                flip  = draw()
                level = draw()
                if (flip <= 0.5):
                    delta = (2*level)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - level))**(1/(eta + 1))
                offspring[i,j] = np.clip(offspring[i,j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:n_vars])
    return offspring
154 |
155 | ############################################################################
156 |
# Function: Reference Points
def reference_points(M, p):
    """
    Das-Dennis reference points: every M-dimensional vector whose components
    are multiples of 1/p and sum to 1, as a (count, M) array.
    """
    def build(point, left, depth):
        if depth == M - 1:
            # Last coordinate absorbs whatever budget is left.
            point[depth] = left / p
            return [point]
        out = []
        for i in range(0, left + 1):
            point[depth] = i / p
            out.extend(build(point.copy(), left - i, depth + 1))
        return out
    return np.array(build(np.zeros(M), p, 0))
171 |
# Function: Normalize Objective Functions
def normalization(population, number_of_functions):
    """
    Normalize the objective columns in place (NSGA-III style).

    Translates objectives by the ideal point, then scales by intercepts of
    the hyperplane through the extreme points. Mutates and returns population.
    """
    M = number_of_functions
    # Ideal point: per-objective minimum.
    z_min = np.min(population[:,-M:], axis = 0)
    population[:,-M:] = population[:,-M:] - z_min
    # Axis weight vectors (small epsilon off-axis) for the achievement scalarizing function.
    w = np.zeros((M, M)) + 0.0000001
    np.fill_diagonal(w, 1)
    z_max = []
    for i in range(0, M):
        # Index of the extreme solution along objective axis i.
        z_max.append(np.argmin(np.max(population[:,-M:]/w[i], axis = 1)))
    if ( len(z_max) != len(set(z_max)) or M == 1):
        # Degenerate extremes: fall back to the per-objective maximum (nadir estimate).
        a = np.max(population[:,-M:], axis = 0)
    else:
        # Hyperplane intercepts: solve Z a = 1 for the extreme-point matrix Z.
        k = np.ones((M, 1))
        z_max = np.vstack((population[z_max,-M:]))
        a = np.matrix.dot(np.linalg.inv(z_max), k)
        a = (1/a).reshape(1, M)
    # NOTE(review): objectives were already translated by z_min above, yet the
    # scale divides by (a - z_min) rather than a — looks suspicious; confirm intent.
    population[:,-M:] = population[:,-M:] /(a - z_min)
    return population
191 |
# Function: Distance from Point (p3) to a Line (p1, p2).
def point_to_line(p1, p2, p3):
    """
    Perpendicular distance from each row of p3 to each line through p1 and a
    row of p2; returns a (points, lines) matrix.
    NOTE: the hypotenuse uses the norm of p3 itself, so p1 is assumed to be
    the origin (as it is at every call site).
    """
    direction  = p2 - p1
    projection = np.dot(p3, direction.T)/np.linalg.norm(direction.T, axis = 0)
    length     = np.linalg.norm(p3, axis = 1)
    length     = np.tile(length.reshape(-1, 1), (1, projection.shape[1]))
    # Pythagoras: distance^2 = |p3|^2 - projection^2.
    return np.sqrt(length**2 - projection**2)
201 |
# Function: Association
def association(srp, population, z_max, number_of_functions):
    """
    Select surviving row indices: per reference direction in srp, keep the
    member with the largest hypervolume contribution (smallest d). Works on a
    normalized deep copy, so population itself is untouched.
    """
    M = number_of_functions
    p = copy.deepcopy(population)
    p = normalization(p, M)
    p1 = np.zeros((1, M))
    p2 = srp
    p3 = p[:,-M:]
    g = point_to_line(p1, p2, p3) # Matrix (Population, Reference)
    idx = []
    # Nearest reference direction per population member.
    arg = np.argmin(g, axis = 1)
    hv_c = pg.hypervolume(p[:,-M:])
    z = np.max(p[:,-M:], axis = 0)
    # Grow the reference point if any normalized objective exceeds it.
    if any(z > z_max):
        z_max = np.maximum(z_max,z)
    hv = hv_c.contributions(z_max)
    # Small hypervolume contribution -> large d -> less preferred.
    d = 1/(hv + 0.0000000000000001)
    for ind in np.unique(arg).tolist():
        f = [i[0] for i in np.argwhere(arg == ind).tolist()]
        idx.append(f[d[f].argsort()[0]])
    # Pad with the remaining indices so at least 5 rows survive.
    # NOTE(review): unlike the u_n_iii variant, indices beyond 5 are not
    # re-appended here, so the caller may receive fewer rows than 'size' — verify.
    if (len(idx) < 5):
        idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
        idx = idx[:5]
    return idx
226 |
227 | ############################################################################
228 |
# SMS-EMOA Function
def s_metric_selection_evolutionary_multiobjective_optimization_algorithm(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 5, mu = 1, eta = 1, k = 4, verbose = True):
    """
    Run SMS-EMOA; returns the first srp.shape[0] rows of the final population
    (decision variables followed by objective values).
    """
    count      = 0
    references = max(5, references)
    M          = len(list_of_functions)
    srp        = reference_points(M = M, p = references)
    size       = k*srp.shape[0]
    population = initial_population(size, min_values, max_values, list_of_functions)
    offspring  = initial_population(size, min_values, max_values, list_of_functions)
    z_max      = np.max(population[:,-M:], axis = 0)
    if (verbose == True):
        # Consistency fix: this status line now honors the verbose flag.
        print('Total Number of Points on Reference Hyperplane: ', int(srp.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        population = np.vstack([population, offspring])
        rank       = fast_non_dominated_sorting(population, M)
        population = sort_population_by_rank(population, rank)
        # Track the worst point seen so far for hypervolume contributions.
        z_max      = np.vstack([z_max, np.max(population[:,-M:], axis = 0)])
        z_max      = np.max(z_max, axis = 0)
        idx        = association(srp, population, z_max, M)
        population = population[idx, :]
        population = population[0:size,:]
        offspring  = breeding(population, min_values, max_values, mu, list_of_functions)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        count      = count + 1
    return population[:srp.shape[0], :]
255 |
256 | ############################################################################
257 |
--------------------------------------------------------------------------------
/pyMultiobjective/algorithm/u_n_iii.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Course: Metaheuristics
7 | # Lesson: U-NSGA-III (Unified Non-Dominated Sorting Genetic Algorithm III)
8 |
9 | # Citation:
10 | # PEREIRA, V. (2021). Project: pyMultiobjective, File: u_n_iii.py, GitHub repository:
11 |
12 | ############################################################################
13 |
14 | # Required Libraries
15 | import copy
16 | import numpy as np
17 | import random
18 | import os
19 |
20 | ############################################################################
21 |
# Function 1
def func_1():
    """Placeholder objective function; supply a real objective via list_of_functions."""
    return None
25 |
# Function 2
def func_2():
    """Placeholder objective function; supply a real objective via list_of_functions."""
    return None
29 |
30 | ############################################################################
31 |
# Function: Initialize Variables
def initial_population(population_size = 5, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Build a random population matrix.

    Each row stores the decision variables (drawn uniformly inside the bounds)
    followed by the corresponding objective values.
    """
    n_vars = len(min_values)
    n_objs = len(list_of_functions)
    population = np.zeros((population_size, n_vars + n_objs))
    for i in range(0, population_size):
        population[i, :n_vars] = [random.uniform(min_values[j], max_values[j]) for j in range(0, n_vars)]
        for k in range(1, n_objs + 1):
            population[i,-k] = list_of_functions[-k](list(population[i, 0:n_vars]))
    return population
41 |
42 | ############################################################################
43 |
# Function: Fast Non-Dominated Sorting
def fast_non_dominated_sorting(population, number_of_functions = 2):
    """
    Rank a population by Pareto dominance (minimization).

    Objective values are read from the last number_of_functions columns.
    Returns a (rows, 1) array where 1 is the first (non-dominated) front,
    2 the second, and so on.
    """
    # S[p]: solutions dominated by p; n[p]: how many solutions dominate p.
    S = [[] for i in range(0, population.shape[0])]
    front = [[]]
    n = [0 for i in range(0, population.shape[0])]
    rank = [0 for i in range(0, population.shape[0])]
    for p in range(0, population.shape[0]):
        S[p] = []
        n[p] = 0
        for q in range(0, population.shape[0]):
            # NOTE(review): a weak '<=' on all objectives is used, so duplicated
            # points land in each other's S sets — presumably intentional; verify.
            if ((population[p,-number_of_functions:] <= population[q,-number_of_functions:]).all()):
                if (q not in S[p]):
                    S[p].append(q)
            elif ((population[q,-number_of_functions:] <= population[p,-number_of_functions:]).all()):
                n[p] = n[p] + 1
        if (n[p] == 0):
            rank[p] = 0
            if (p not in front[0]):
                front[0].append(p)
    # Peel off successive fronts: removing front i frees the members of front i+1.
    i = 0
    while (front[i] != []):
        Q = []
        for p in front[i]:
            for q in S[p]:
                n[q] = n[q] - 1
                if(n[q] == 0):
                    rank[q] = i+1
                    if q not in Q:
                        Q.append(q)
        i = i+1
        front.append(Q)
    # The loop always appends one trailing empty front; drop it.
    del front[len(front)-1]
    # Re-encode ranks as a column vector, 1-based by front index.
    rank = np.zeros((population.shape[0], 1))
    for i in range(0, len(front)):
        for j in range(0, len(front[i])):
            rank[front[i][j], 0] = i + 1
    return rank
81 |
# Function: Sort Population by Rank
def sort_population_by_rank(population, rank, rp = 'none'):
    """
    Order a population by non-dominated rank.

    rp = 'none': stable sort of all rows by ascending rank.
    rp = number: keep only rows whose rank <= rp, relaxing the threshold
    until at least 5 rows (or the whole population) are retained.
    """
    control = 1
    if rp == 'none':
        idx = np.argsort(rank[:,0], axis = 0).tolist()
        population = population[idx,:]
    else:
        idx = np.where(rank <= rp)[0].tolist()
        # Bug fix: cap the target at the population size; a population with
        # fewer than 5 rows previously made this loop spin forever.
        target = min(5, rank.shape[0])
        while (len(idx) < target):
            idx = np.where(rank <= rp + control)[0].tolist()
            control = control + 1
        population = population[idx,:]
    return population
95 |
96 | ############################################################################
97 |
# Function: Offspring
def breeding(population, srp, min_values = [-5,-5], max_values = [5,5], mu = 1, list_of_functions = [func_1, func_2]):
    """
    Produce offspring via SBX after U-NSGA-III niched binary tournaments.

    Parents are chosen by rank, then by distance to the reference set srp,
    with a random tie-break. mu is the SBX distribution index.
    """
    offspring = np.copy(population)
    parent_1 = 0
    parent_2 = 1
    b_offspring = 0
    for i in range (0, offspring.shape[0]):
        # Bug fix: sample over the full index range. range(0, len(population) - 1)
        # silently excluded the last individual from ever being a parent.
        i1, i2, i3, i4 = random.sample(range(0, len(population)), 4)
        rank_ = fast_non_dominated_sorting(population[[i1, i2, i3, i4], :], len(list_of_functions))
        # Distance of each candidate to its closest reference direction.
        p1 = np.zeros((1, len(list_of_functions)))
        p2 = srp
        p3 = population[[i1, i2, i3, i4],-len(list_of_functions):]
        d_ = point_to_line(p1, p2, p3)
        d_ = np.amin(d_, axis = 1)
        # Tournament 1: i1 vs i2 (rank, then niche distance, then coin flip).
        if (rank_[0][0] < rank_[1][0]):
            parent_1 = i1
        elif (rank_[1][0] < rank_[0][0]):
            parent_1 = i2
        elif (rank_[0][0] == rank_[1][0]):
            if (d_[0] < d_[1]):
                parent_1 = i1
            elif(d_[1] < d_[0]):
                parent_1 = i2
            elif (d_[1] == d_[0]):
                rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                if (rand > 0.5):
                    parent_1 = i1
                else:
                    parent_1 = i2
        # Tournament 2: i3 vs i4 (same criteria).
        if (rank_[2][0] < rank_[3][0]):
            parent_2 = i3
        elif (rank_[3][0] < rank_[2][0]):
            parent_2 = i4
        elif (rank_[2][0] == rank_[3][0]):
            if (d_[2] < d_[3]):
                parent_2 = i3
            elif(d_[3] < d_[2]):
                parent_2 = i4
            elif (d_[2] == d_[3]):
                rand = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
                if (rand > 0.5):
                    parent_2 = i3
                else:
                    parent_2 = i4
        for j in range(0, offspring.shape[1] - len(list_of_functions)):
            rand   = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_b = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            rand_c = int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
            # SBX spread factor.
            if (rand <= 0.5):
                b_offspring = 2*(rand_b)
                b_offspring = b_offspring**(1/(mu + 1))
            elif (rand > 0.5):
                b_offspring = 1/(2*(1 - rand_b))
                b_offspring = b_offspring**(1/(mu + 1))
            if (rand_c >= 0.5):
                offspring[i,j] = np.clip(((1 + b_offspring)*population[parent_1, j] + (1 - b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
            else:
                offspring[i,j] = np.clip(((1 - b_offspring)*population[parent_1, j] + (1 + b_offspring)*population[parent_2, j])/2, min_values[j], max_values[j])
        for k in range (1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:offspring.shape[1]-len(list_of_functions)])
    return offspring
159 |
# Function: Mutation
def mutation(offspring, mutation_rate = 0.1, eta = 1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2]):
    """
    Apply polynomial mutation (distribution index eta) to the decision
    variables, in place, then re-evaluate the objective columns.
    """
    def draw():
        # Uniform sample in [0, 1] taken from the OS entropy pool.
        return int.from_bytes(os.urandom(8), byteorder = 'big') / ((1 << 64) - 1)
    n_vars = offspring.shape[1] - len(list_of_functions)
    for i in range(0, offspring.shape[0]):
        for j in range(0, n_vars):
            if (draw() < mutation_rate):
                flip  = draw()
                level = draw()
                if (flip <= 0.5):
                    delta = (2*level)**(1/(eta + 1)) - 1
                else:
                    delta = 1 - (2*(1 - level))**(1/(eta + 1))
                offspring[i,j] = np.clip(offspring[i,j] + delta, min_values[j], max_values[j])
        for k in range(1, len(list_of_functions) + 1):
            offspring[i,-k] = list_of_functions[-k](offspring[i,0:n_vars])
    return offspring
179 |
180 | ############################################################################
181 |
# Function: Reference Points
def reference_points(M, p):
    """
    Das-Dennis reference points: every M-dimensional vector whose components
    are multiples of 1/p and sum to 1, as a (count, M) array.
    """
    def build(point, left, depth):
        if depth == M - 1:
            # Last coordinate absorbs whatever budget is left.
            point[depth] = left / p
            return [point]
        out = []
        for i in range(0, left + 1):
            point[depth] = i / p
            out.extend(build(point.copy(), left - i, depth + 1))
        return out
    return np.array(build(np.zeros(M), p, 0))
196 |
# Function: Normalize Objective Functions
def normalization(population, number_of_functions):
    """
    Normalize the objective columns in place (NSGA-III style).

    Translates objectives by the ideal point, then scales by intercepts of
    the hyperplane through the extreme points. Mutates and returns population.
    """
    M = number_of_functions
    # Ideal point: per-objective minimum.
    z_min = np.min(population[:,-M:], axis = 0)
    population[:,-M:] = population[:,-M:] - z_min
    # Axis weight vectors (small epsilon off-axis) for the achievement scalarizing function.
    w = np.zeros((M, M)) + 0.0000001
    np.fill_diagonal(w, 1)
    z_max = []
    for i in range(0, M):
        # Index of the extreme solution along objective axis i.
        z_max.append(np.argmin(np.max(population[:,-M:]/w[i], axis = 1)))
    if ( len(z_max) != len(set(z_max)) or M == 1):
        # Degenerate extremes: fall back to the per-objective maximum (nadir estimate).
        a = np.max(population[:,-M:], axis = 0)
    else:
        # Hyperplane intercepts: solve Z a = 1 for the extreme-point matrix Z.
        k = np.ones((M, 1))
        z_max = np.vstack((population[z_max,-M:]))
        a = np.matrix.dot(np.linalg.inv(z_max), k)
        a = (1/a).reshape(1, M)
    # NOTE(review): objectives were already translated by z_min above, yet the
    # scale divides by (a - z_min) rather than a — looks suspicious; confirm intent.
    population[:,-M:] = population[:,-M:] /(a - z_min)
    return population
216 |
# Function: Distance from Point (p3) to a Line (p1, p2)
def point_to_line(p1, p2, p3):
    """
    Perpendicular distance from each row of p3 to each line through p1 and a
    row of p2; returns a (points, lines) matrix.
    NOTE: the hypotenuse uses the norm of p3 itself, so p1 is assumed to be
    the origin (as it is at every call site).
    """
    direction  = p2 - p1
    projection = np.dot(p3, direction.T)/np.linalg.norm(direction.T, axis = 0)
    length     = np.linalg.norm(p3, axis = 1)
    length     = np.tile(length.reshape(-1, 1), (1, projection.shape[1]))
    # Pythagoras: distance^2 = |p3|^2 - projection^2.
    return np.sqrt(length**2 - projection**2)
226 |
# Function: Association
def association(population, srp, M):
    """
    Build a survival ordering of row indices: for each reference direction in
    srp, the closest member comes first; remaining indices are appended so the
    result is a permutation of all rows. Works on a normalized deep copy, so
    population itself is untouched.
    """
    p = copy.deepcopy(population)
    p = normalization(p, M)
    p1 = np.zeros((1, M))
    p2 = srp
    p3 = p[:,-M:]
    g = point_to_line(p1, p2, p3) # Matrix (Population, Reference)
    idx = []
    # Nearest reference direction and its distance, per population member.
    arg = np.argmin(g, axis = 1)
    d = np.amin(g, axis = 1)
    for ind in np.unique(arg).tolist():
        f = [i[0] for i in np.argwhere(arg == ind).tolist()]
        # Keep the member closest to this reference direction.
        idx.append(f[d[f].argsort()[0]])
    # NOTE(review): this branch only reorders the first 5 slots when fewer than
    # 5 niches were filled; the final extend below makes idx a full permutation
    # either way — looks redundant but is kept as-is.
    if (len(idx) < 5):
        idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
        idx = idx[:5]
    idx.extend([x for x in list(range(0, population.shape[0])) if x not in idx])
    return idx
246 |
# Function: Sort Population by Association
def sort_population_by_association(srp, population, number_of_functions):
    """Reorder the population rows by their niche association to the reference set srp."""
    order = association(population, srp, number_of_functions)
    return population[order, :]
253 |
254 | ############################################################################
255 |
# U-NSGA III Function
def unified_non_dominated_sorting_genetic_algorithm_III(references = 5, mutation_rate = 0.1, min_values = [-5,-5], max_values = [5,5], list_of_functions = [func_1, func_2], generations = 5, mu = 1, eta = 1, k = 4, rp = 'none', verbose = True):
    """
    Run U-NSGA-III; returns the first srp.shape[0] rows of the final
    population (decision variables followed by objective values).
    """
    count      = 0
    references = max(5, references)
    M          = len(list_of_functions)
    srp        = reference_points(M = M, p = references)
    size       = k*srp.shape[0]
    population = initial_population(size, min_values, max_values, list_of_functions)
    offspring  = initial_population(size, min_values, max_values, list_of_functions)
    if (verbose == True):
        # Consistency fix: this status line now honors the verbose flag.
        print('Total Number of Points on Reference Hyperplane: ', int(srp.shape[0]), ' Population Size: ', int(size))
    while (count <= generations):
        if (verbose == True):
            print('Generation = ', count)
        population = np.vstack([population, offspring])
        rank       = fast_non_dominated_sorting(population, number_of_functions = M)
        population = sort_population_by_rank(population, rank, rp)
        population = sort_population_by_association(srp, population, number_of_functions = M)
        population = population[0:size,:]
        offspring  = breeding(population, srp, min_values, max_values, mu, list_of_functions)
        offspring  = mutation(offspring, mutation_rate, eta, min_values, max_values, list_of_functions)
        count      = count + 1
    return population[ :srp.shape[0], :]
278 |
279 | ############################################################################
280 |
--------------------------------------------------------------------------------
/pyMultiobjective/test_functions/__init__.py:
--------------------------------------------------------------------------------
1 | from .mult_many import *
2 |
--------------------------------------------------------------------------------
/pyMultiobjective/util/__init__.py:
--------------------------------------------------------------------------------
1 | from .graphs import *
2 | from .indicators import *
3 |
--------------------------------------------------------------------------------
/pyMultiobjective/util/indicators.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 |
3 | # Created by: Prof. Valdecy Pereira, D.Sc.
4 | # UFF - Universidade Federal Fluminense (Brazil)
5 | # email: valdecy.pereira@gmail.com
6 | # Multivariate Indicators
7 |
8 | # Citation:
9 | # PEREIRA, V. (2022). GitHub repository:
10 |
11 | ############################################################################
12 |
13 | # Required Libraries
14 | import itertools
15 | import numpy as np
16 | import pygmo as pg
17 |
18 | from scipy import spatial
19 |
20 | ############################################################################
21 |
22 | # Available Indicators:
23 |
24 | # GD (https://apps.dtic.mil/sti/pdfs/ADA364478.pdf)
25 | # GD+ (https://doi.org/10.1007/978-3-319-15892-1_8)
26 | # IGD (https://doi.org/10.1007/978-3-540-24694-7_71)
27 | # IGD+ (https://doi.org/10.1007/978-3-319-15892-1_8)
28 | # MS (https://doi.org/10.1162/106365600568202)
29 | # SP (https://doi.org/10.1109/TEVC.2006.882428)
30 | # Hypervolume (https://scholar.afit.edu/cgi/viewcontent.cgi?article=6130&context=etd)
31 |
32 | ############################################################################
33 |
34 | # Helper Functions
35 |
# Functon: Generate Data Points
def generate_points(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], pf_min = True):
    """
    Evaluate each objective on a rectangular grid of decision variables.

    Returns an array whose first len(min_values) columns are the grid
    coordinates and remaining columns the objective values. pf_min is
    accepted for signature compatibility but not used here.
    """
    axes = [np.arange(min_values[j], max_values[j] + step[j], step[j]) for j in range(0, len(min_values))]
    grid = list(itertools.product(*axes))
    n_var = len(min_values)
    front = np.zeros((len(grid), n_var + len(list_of_functions)))
    front[:, :n_var] = np.array(grid, dtype = np.dtype('float'))
    for j, fn in enumerate(list_of_functions):
        front[:, n_var + j] = [fn(item) for item in grid]
    return front
49 |
# Function: Pareto Front
def pareto_front_points(pts, pf_min = True):
    """
    Boolean mask of the non-dominated rows of pts.

    pf_min selects minimization (True) or maximization (False). Duplicates of
    an already-accepted point are excluded, so each front point appears once.
    """
    def pareto_front(pts, pf_min):
        pf = np.zeros(pts.shape[0], dtype = np.bool_)
        for i in range(0, pts.shape[0]):
            cost = pts[i, :]
            if (pf_min == True):
                # g_cost: rows no worse than cost everywhere; b_cost: rows better somewhere.
                g_cost = np.logical_not(np.any(pts > cost, axis = 1))
                b_cost = np.any(pts < cost, axis = 1)
            else:
                g_cost = np.logical_not(np.any(pts < cost, axis = 1))
                b_cost = np.any(pts > cost, axis = 1)
            dominated = np.logical_and(g_cost, b_cost)
            if (np.any(pf) == True):
                # Skip exact duplicates of points already on the front.
                if (np.any(np.all(pts[pf] == cost, axis = 1)) == True):
                    continue
            # Keep the point only if no other row dominates it.
            if not (np.any(dominated[:i]) == True or np.any(dominated[i + 1 :]) == True):
                pf[i] = True
        return pf
    # Pre-sorting by a standardized score fixes which duplicate is kept;
    # the mask is mapped back to the original row order afterwards.
    idx = np.argsort(((pts - pts.mean(axis = 0))/(pts.std(axis = 0) + 1e-7)).sum(axis = 1))
    pts = pts[idx]
    pf = pareto_front(pts, pf_min)
    pf[idx] = pf.copy()
    return pf
74 |
75 | ############################################################################
76 |
77 | # GD - Generational Distance
78 |
# Function: GD
def gd_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Generational Distance: GD = sqrt(sum d_i^2)/n with d_i the Euclidean
    distance from each solution point to its nearest Pareto-front point
    (Van Veldhuizen & Lamont, p = 2). Lower is better; 0 means every
    solution point lies on the front. custom_pf, when given, is used as the
    objective-space front directly.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    if (len(custom_pf) > 0):
        front = np.copy(custom_pf)
    else:
        front = generate_points(min_values, max_values, list_of_functions, step, pf_min)
        pf = pareto_front_points(pts = front[:,len(min_values):], pf_min = pf_min)
        front = front[pf, len(min_values):]
    d_i = [ ( spatial.KDTree(front).query(sol[i,:]) ) for i in range(0, sol.shape[0]) ]
    d = [item[0] for item in d_i]
    # Bug fix: distances must be squared before summing (p = 2); the previous
    # sqrt(sum(d))/n matched no standard GD definition and was inconsistent
    # with the GD+/IGD+ implementations below.
    gd = np.sqrt(sum(x**2 for x in d))/len(d)
    return gd
95 |
96 | ############################################################################
97 |
98 | # GD+ - Generational Distance Plus
99 |
# Function: GD+
def gd_plus_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Generational Distance Plus: GD+ = sqrt(sum d+_i^2)/n where, for
    minimization, d+ = sqrt(sum_j max(a_j - z_j, 0)^2) against the nearest
    front point z (Ishibuchi et al., 2015). custom_pf, when given, is used
    as the objective-space front directly.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    if (len(custom_pf) > 0):
        front = np.copy(custom_pf)
    else:
        front = generate_points(min_values, max_values, list_of_functions, step, pf_min)
        pf = pareto_front_points(pts = front[:,len(min_values):], pf_min = pf_min)
        front = front[pf, len(min_values):]
    d_i = [ ( spatial.KDTree(front).query(sol[i,:]) ) for i in range(0, sol.shape[0]) ]
    idx = [item[1] for item in d_i]
    # Bug fix: d+ is the Euclidean norm of the component-wise clipped
    # difference, not the single largest component.
    s = [np.sum(np.maximum(sol[i,:] - front[idx[i],:], 0)**2) for i in range(0, sol.shape[0])]
    gdp = np.sqrt(sum(s))/len(s)
    return gdp
117 |
118 | ############################################################################
119 |
120 | # IGD - Inverted Generational Distance
121 |
# Function: IGD
def igd_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Inverted Generational Distance: IGD = sqrt(sum d_i^2)/n with d_i the
    distance from each Pareto-front point to its nearest solution point
    (p = 2). Lower is better. custom_pf, when given, is used as the
    objective-space front directly.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    if (len(custom_pf) > 0):
        front = np.copy(custom_pf)
    else:
        front = generate_points(min_values, max_values, list_of_functions, step, pf_min)
        pf = pareto_front_points(pts = front[:,len(min_values):], pf_min = pf_min)
        front = front[pf, len(min_values):]
    d_i = [ ( spatial.KDTree(sol).query(front[i,:]) ) for i in range(0, front.shape[0]) ]
    d = [item[0] for item in d_i]
    # Bug fix: distances must be squared before summing (p = 2), matching the
    # standard IGD definition and the IGD+ implementation below.
    igd = np.sqrt(sum(x**2 for x in d))/len(d)
    return igd
138 |
139 | ############################################################################
140 |
141 | # IGD+ - Inverted Generational Distance Plus
142 |
# Function: IGD+
def igd_plus_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Inverted Generational Distance Plus: IGD+ = sqrt(sum d+_i^2)/n where,
    for minimization, d+ = sqrt(sum_j max(a_j - z_j, 0)^2) from each front
    point z to its nearest solution point a (Ishibuchi et al., 2015).
    custom_pf, when given, is used as the objective-space front directly.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    if (len(custom_pf) > 0):
        front = np.copy(custom_pf)
    else:
        front = generate_points(min_values, max_values, list_of_functions, step, pf_min)
        pf = pareto_front_points(pts = front[:,len(min_values):], pf_min = pf_min)
        front = front[pf, len(min_values):]
    d_i = [ ( spatial.KDTree(sol).query(front[i,:]) ) for i in range(0, front.shape[0]) ]
    idx = [item[1] for item in d_i]
    # Bug fix: d+ is the Euclidean norm of the component-wise clipped
    # difference, not the single largest component.
    s = [np.sum(np.maximum(sol[idx[i],:] - front[i,:], 0)**2) for i in range(0, front.shape[0])]
    igdp = np.sqrt(sum(s))/len(s)
    return igdp
160 |
161 | ############################################################################
162 |
163 | # MS - Maximum Spread
164 |
# Function: Maximum Spread
def ms_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Maximum Spread: RMS over objectives of the solution's covered extent
    relative to the Pareto front's extent. 1 means the solution spans the
    whole front. custom_pf, when given, is used as the front directly.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    if (len(custom_pf) > 0):
        front = np.copy(custom_pf)
    else:
        front = generate_points(min_values, max_values, list_of_functions, step, pf_min)
        pf = pareto_front_points(pts = front[:,len(min_values):], pf_min = pf_min)
        front = front[pf, len(min_values):]
    s_max, s_min = np.max(sol, axis = 0), np.min(sol, axis = 0)
    f_max, f_min = np.max(front, axis = 0), np.min(front, axis = 0)
    # Overlap of [s_min, s_max] with [f_min, f_max], normalized per objective.
    terms = [((min(s_max[i], f_max[i]) - max(s_min[i], f_min[i]))/(f_max[i] - f_min[i]))**2 for i in range(0, len(list_of_functions))]
    return np.sqrt(sum(terms)/len(list_of_functions))
186 |
187 | # SP - Spacing
188 |
# Function: Spacing
def sp_indicator(min_values = [-5, -5], max_values = [5, 5], list_of_functions = [], step = [0.1, 0.1], solution = [], custom_pf = [], pf_min = True):
    """
    Spacing (Schott): standard deviation of each solution point's distance
    to its nearest neighbor. 0 means the points are evenly spaced. The
    front-related parameters are accepted for signature compatibility only.
    """
    if (solution.shape[1] > len(min_values)):
        sol = solution[:,len(min_values):]
    elif (solution.shape[1] == len(min_values)):
        sol = np.copy(solution)
    nearest = np.zeros(sol.shape[0])
    for i in range(0, sol.shape[0]):
        nearest[i] = min(np.linalg.norm(sol[i] - sol[j]) for j in range(0, sol.shape[0]) if i != j)
    mean_d = np.mean(nearest)
    return np.sqrt(np.sum((nearest - mean_d)**2)/sol.shape[0])
201 |
202 | ############################################################################
203 |
204 | # Hypervolume (S-Metric)
205 |
206 | # Function: Hypervolume
# Function: Hypervolume
def hv_indicator(solution = [], n_objs = 3, ref_point = [], normalize = False):
    """
    Hypervolume (S-Metric) indicator, computed with pygmo.

    - solution  : array whose last 'n_objs' columns (or all columns) hold objective values.
    - n_objs    : number of objectives.
    - ref_point : reference point; when empty, the per-objective maxima of the
                  solutions are used. Components inside the solution boundary are
                  corrected (with a printed warning). The caller's list is no
                  longer mutated in place (it previously was).
    - normalize : when True, objectives are min-max scaled to [0, 1] and the
                  reference point is forced to (1, ..., 1).

    Raises ValueError when 'solution' has fewer columns than 'n_objs'
    (previously a confusing UnboundLocalError).
    """
    if (solution.shape[1] > n_objs):
        sol = solution[:, -n_objs:]
    elif (solution.shape[1] == n_objs):
        sol = np.copy(solution)
    else:
        raise ValueError('solution has fewer columns than n_objs')
    if (normalize == True):
        z_min = np.min(sol, axis = 0)
        z_max = np.max(sol, axis = 0)
        # Small epsilon avoids division by zero on constant objectives.
        sol       = np.clip((sol - z_min)/(z_max - z_min + 0.000000001), 0, 1)
        ref_point = [1]*n_objs
    if (len(ref_point) == 0):
        # Default: worst (maximum) observed value in each objective.
        ref_point = [np.max(sol[:,j]) for j in range(0, sol.shape[1])]
    else:
        # Work on a copy so corrections do not mutate the caller's list.
        ref_point = list(ref_point)
        for j in range(0, len(ref_point)):
            if (ref_point[j] < np.max(sol[:,j])):
                print('Reference Point is Invalid: Outside Boundary')
                print('Correcting Position', j, '; Reference Point Value', ref_point[j], 'was changed to', np.max(sol[:,j]))
                print('')
                ref_point[j] = np.max(sol[:,j])
    print('Used Reference Point: ', ref_point, '; Normalization Procedure: ', normalize)
    print('')
    hv_c = pg.hypervolume(sol)
    hv   = hv_c.compute(ref_point)
    return hv
231 |
232 | ############################################################################
233 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages
from pathlib import Path

# Reuse the README as the PyPI long description. Read explicitly as UTF-8 so
# installation does not depend on the platform's default locale encoding.
this_directory = Path(__file__).parent
long_description = (this_directory / 'README.md').read_text(encoding='utf-8')

setup(
    name='pymultiobjective',
    version='1.5.7',
    license='GPL-3.0-or-later',  # SPDX identifier matching the bundled GPLv3+ LICENSE
    author='Valdecy Pereira',
    author_email='valdecy.pereira@gmail.com',
    url='https://github.com/Valdecy/pyMultiobjective',
    packages=find_packages(),
    install_requires=[
        'matplotlib',
        'numpy',
        'pandas',
        'plotly',
        'pygmo',
        'scipy'
    ],
    description='A python library for Multiobjective Optimization Algorithms or Many Objectives Optimization Algorithms',
    long_description=long_description,
    long_description_content_type='text/markdown',
)
27 |
--------------------------------------------------------------------------------