├── README.md ├── LICENSE ├── NGS_WOA.py ├── table(NGS_WOA).csv └── main.py /README.md: -------------------------------------------------------------------------------- 1 | # improved-WOA-based-on-nonlinear-adaptive-weight-and-golden-sine-operator 2 | J. Zhang and J. Wang, "Improved Whale Optimization Algorithm Based on Nonlinear Adaptive Weight and Golden Sine Operator", IEEE Access, vol. 8, pp. 77013-77048, 2020. 3 | 4 | https://doi.org/10.1109/ACCESS.2020.2989445 5 | 6 | 1. Reworked the test script. 7 | 8 | 2. For the test script, see my other project: https://github.com/ZongSingHuang/Metaheuristic-benchmark 9 | 10 | 3. Because evaluating every test function takes too long, only the first 36 test functions are tested. 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 ZongSingHuang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /NGS_WOA.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sat Sep 5 02:03:07 2020 4 | 5 | @author: ZongSing_NB 6 | 7 | Main reference: 8 | https://doi.org/10.1109/ACCESS.2020.2989445 9 | """ 10 | 11 | import numpy as np 12 | import matplotlib.pyplot as plt 13 | 14 | class NGS_WOA(): 15 | def __init__(self, fitness, D=30, P=20, G=500, ub=1, lb=0, 16 | b=1, a_max=2, a_min=0, l_max=1, l_min=-1): 17 | self.fitness = fitness 18 | self.D = D 19 | self.P = P 20 | self.G = G 21 | self.ub = ub 22 | self.lb = lb 23 | self.a_max = a_max 24 | self.a_min = a_min 25 | self.l_max = l_max 26 | self.l_min = l_min 27 | self.b = b 28 | 29 | self.gbest_X = np.zeros([self.D]) 30 | self.gbest_F = np.inf 31 | self.loss_curve = np.zeros(self.G) 32 | 33 | def opt(self): 34 | # initialization: golden-section coefficients for Eq. (20) and the random population 35 | tao = (np.sqrt(5)-1)/2 36 | m1 = -2*np.pi + (1-tao)*2*np.pi 37 | m2 = -2*np.pi + tao*2*np.pi 38 | self.X = np.random.uniform(low=self.lb, high=self.ub, size=[self.P, self.D]) 39 | 40 | # main loop 41 | for g in range(self.G): 42 | # evaluate fitness 43 | F = self.fitness(self.X) 44 | 45 | # update the global best 46 | if np.min(F) < self.gbest_F: 47 | idx = F.argmin() 48 | self.gbest_X = self.X[idx].copy() 49 | self.gbest_F = F.min() 50 | 51 | # record the convergence curve 52 | self.loss_curve[g] = self.gbest_F 53 | 54 | # update positions 55 | a = self.a_max - (self.a_max-self.a_min)*(g/self.G) 56 | 57 | if g<0.5*self.G: 58 | C1 = 0.5*(1 + np.cos(np.pi*g/self.G))**0.5 # (8) 59 | else: 60 | C1 = 0.5*(1 - np.cos(np.pi+(np.pi*g/self.G)))**0.5 # (8) 61 | 62 | for i in range(self.P): 63 | p = np.random.uniform() 64 | r1 = np.random.uniform() 65 | r2 = np.random.uniform() 66 | A = 2*a*r1 - a 67 | C = 2*r2 68 | l = np.random.uniform(low=self.l_min, high=self.l_max) 69 | 70 | if p<0.5: 71 | if np.abs(A)<1: 72 | self.X[i, :] = self.gbest_X - C1*A*np.abs(C*self.gbest_X-self.X[i, :]) # (9) 73 | else: 74 | X_rand = self.X[np.random.randint(low=0, high=self.P, size=self.D), :] 75 | X_rand = np.diag(X_rand).copy() # each dimension comes from a randomly picked individual 76 | self.X[i, :] = X_rand - C1*A*np.abs(C*X_rand-self.X[i, :]) # (10) 77 | else: 78 | D = np.abs(self.gbest_X - self.X[i, :]) 79 | self.X[i, :] = D*np.exp(self.b*l)*C1*np.cos(2*np.pi*l) + self.gbest_X # (11) 80 | 81 | r3 = np.random.uniform() 82 | r4 = np.random.uniform() 83 | self.X = self.X*np.abs(np.sin(r3)) + r4*np.sin(r3)*np.abs(m1*self.gbest_X-m2*self.X) # (20) golden sine operator 84 | 85 | # boundary handling 86 | self.X = np.clip(self.X, self.lb, self.ub) 87 | 88 | def plot_curve(self): 89 | plt.figure() 90 | plt.title('loss curve ['+str(round(self.loss_curve[-1], 3))+']') 91 | plt.plot(self.loss_curve, label='loss') 92 | plt.grid() 93 | plt.legend() 94 | plt.show() 95 | -------------------------------------------------------------------------------- /table(NGS_WOA).csv: -------------------------------------------------------------------------------- 1 | ,Sphere,Rastrigin,Ackley,Griewank,Schwefel P2.22,Rosenbrock,Schwefel P2.21,Quartic,Schwefel P1.2,Penalized 1,Penalized 2,Schwefel P2.26,Step,Kowalik,Shekel Foxholes,Goldstein-Price,Shekel 5,Branin,Hartmann 3,Shekel 7,Shekel 10,Six-Hump Camel-Back,Hartmann 6,Zakharov,Sum Squares,Alpine,Michalewicz,Exponential,Schaffer,Bent Cigar,Bohachevsky 1,Elliptic,Drop Wave,Cosine Mixture,Ellipsoidal,Levy and Montalvo 1 2 | 
avg,3.816278753566717e-275,0.0,4.440892098500626e-16,0.0,2.252329965524796e-140,28.130127708691216,3.1362319696973467e-135,8.415589407792053e-05,6.669067815569157e-242,0.707757942527468,0.8488804486568846,-8076.778799091627,0.0,0.0005300641005836464,10.96981716345694,9.494688456152682,-5.532048397496397,0.41056426326365364,-3.82345512028649,-6.552286276832332,-5.406151492495087,-1.0143815972922403,-3.1549852887039833,1.6209903458659372e-206,4.035977431758866e-290,3.0586383610466974e-149,-1.737093243010994,-1.0,0.0024558835426790504,1.975798177524656e-289,0.0,1.4296209571435148e-284,-1.0,-3.0,706.2954424302046,0.5950693312669824 3 | std,0.0,0.0,0.0,0.0,1.5766309157022098e-139,0.3818755430838633,2.1953623365111357e-134,8.298404211913617e-05,0.0,0.43213828453607783,0.5147579907502531,2761.5857365805664,0.0,0.00019215219826976976,3.403371959377181,19.90604253884917,1.4056310679140709,0.025198568852465724,0.04068652889651061,2.1086116701041857,1.1239878123739098,0.017626569434290227,0.14474155773337188,0.0,0.0,1.900375567194972e-148,0.1907134831915006,0.0,6.437653007061671e-08,0.0,0.0,0.0,0.0,0.0,190.66980760338325,0.385170089578209 4 | worst,1.9076698724745556e-273,0.0,4.440892098500626e-16,0.0,1.1261649406467932e-138,28.794057422517298,1.568115955254768e-133,0.0004094668248721366,3.266049649544142e-240,1.5969811553848772,2.9709274328530397,-2903.090881013612,0.0,0.0011695295981366807,12.670505812010852,84.25164641721648,-4.822411169673318,0.5014296912378349,-3.648454220697793,-4.997895783594115,-4.974959376226633,-0.987063291915659,-2.702199680825557,8.104951729329452e-205,2.0173202440890423e-288,1.3557875562973854e-147,-0.9999994021738478,-1.0,0.0024562321994519576,5.36496601701073e-288,0.0,7.148007824273175e-283,-1.0,-3.0,1267.4677038235016,1.5684733862283846 5 | best,0.0,0.0,4.440892098500626e-16,0.0,6.432516955420668e-184,27.172770257880067,4.283207608117957e-180,6.756419322733026e-06,2.7245003e-317,0.13062994964782468,0.10804670739545902,-12277.74247147766,0.0,0.00031156906514502467,1.9925767853225864,3.000166035788154,-10.15204470167904,0.3978874681813611,-3.862752194973508,-10.382488036868429,-9.617206031920192,-1.0316260787520335,-3.314680637024011,9.605911327437144e-301,0.0,3.0375657261761245e-187,-1.801301265340518,-1.0,0.0024558582053127997,0.0,0.0,0.0,-1.0,-3.0,420.47442551368,0.0789358361354642 6 | ideal,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-12569.486618173014,0.0,0.0003074861,0.9980038377944493,3.0,-10.1532,0.39788735772973816,-3.86278214782076,-10.1532,-10.1532,-1.031628453489877,-3.32236801141551,0.0,0.0,0.0,-1.8013034100985532,-1.0,0.0,0.0,0.0,0.0,-1.0,-3.0,0.0,0.0 7 | time,0.24977438449859618,0.2466186761856079,0.256217303276062,0.255497088432312,0.2500271224975586,0.2558607625961304,0.2500514221191406,0.26409293174743653,0.249175386428833,0.2860322570800781,0.2834654378890991,0.25185178756713866,0.2516146516799927,0.4249714803695679,0.4603325033187866,0.2505946111679077,0.27666252613067627,0.24310900688171386,0.2670699167251587,0.27935194969177246,0.267568302154541,0.24257126331329346,0.2691986656188965,0.26419556617736817,0.2527842426300049,0.2550568962097168,0.2510243606567383,0.24989691734313965,0.24001694679260255,0.2516100072860718,0.23970240116119385,0.25561676025390623,0.2388588523864746,0.251142897605896,0.2481221914291382,0.26846018314361575 8 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | 
Created on Thu Jul 16 21:43:03 2020 4 | 5 | @author: ZongSing_NB 6 | """ 7 | 8 | import time 9 | import functools 10 | 11 | import numpy as np 12 | import pandas as pd 13 | 14 | from NGS_WOA import NGS_WOA 15 | import benchmark 16 | import bound_X 17 | import ideal_F 18 | import dimension 19 | 20 | D = 30 21 | G = 500 22 | P = 30 23 | run_times = 50 24 | table = pd.DataFrame(np.zeros([6, 36]), index=['avg', 'std', 'worst', 'best', 'ideal', 'time']) 25 | loss_curves = np.zeros([G, 36]) 26 | F_table = np.zeros([run_times, 36]) 27 | for t in range(run_times): 28 | item = 0 29 | ub = bound_X.Sphere()[1]*np.ones(dimension.Sphere(D)) 30 | lb = bound_X.Sphere()[0]*np.ones(dimension.Sphere(D)) 31 | optimizer = NGS_WOA(fitness=benchmark.Sphere, 32 | D=dimension.Sphere(D), P=P, G=G, ub=ub, lb=lb) 33 | st = time.time() 34 | optimizer.opt() 35 | ed = time.time() 36 | F_table[t, item] = optimizer.gbest_F 37 | table[item]['avg'] += optimizer.gbest_F 38 | table[item]['time'] += ed - st 39 | table[item]['ideal'] = ideal_F.Sphere() 40 | loss_curves[:, item] += optimizer.loss_curve 41 | 42 | 43 | item = item + 1 44 | ub = bound_X.Rastrigin()[1]*np.ones(dimension.Rastrigin(D)) 45 | lb = bound_X.Rastrigin()[0]*np.ones(dimension.Rastrigin(D)) 46 | optimizer = NGS_WOA(fitness=benchmark.Rastrigin, 47 | D=dimension.Rastrigin(D), P=P, G=G, ub=ub, lb=lb) 48 | st = time.time() 49 | optimizer.opt() 50 | ed = time.time() 51 | F_table[t, item] = optimizer.gbest_F 52 | table[item]['avg'] += optimizer.gbest_F 53 | table[item]['time'] += ed - st 54 | table[item]['ideal'] = ideal_F.Rastrigin() 55 | loss_curves[:, item] += optimizer.loss_curve 56 | 57 | 58 | item = item + 1 59 | ub = bound_X.Ackley()[1]*np.ones(dimension.Ackley(D)) 60 | lb = bound_X.Ackley()[0]*np.ones(dimension.Ackley(D)) 61 | optimizer = NGS_WOA(fitness=benchmark.Ackley, 62 | D=dimension.Ackley(D), P=P, G=G, ub=ub, lb=lb) 63 | st = time.time() 64 | optimizer.opt() 65 | ed = time.time() 66 | F_table[t, item] = optimizer.gbest_F 67 | table[item]['avg'] += optimizer.gbest_F 68 | table[item]['time'] += ed - st 69 | table[item]['ideal'] = ideal_F.Ackley() 70 | loss_curves[:, item] += optimizer.loss_curve 71 | 72 | 73 | item = item + 1 74 | ub = bound_X.Griewank()[1]*np.ones(dimension.Griewank(D)) 75 | lb = bound_X.Griewank()[0]*np.ones(dimension.Griewank(D)) 76 | optimizer = NGS_WOA(fitness=benchmark.Griewank, 77 | D=dimension.Griewank(D), P=P, G=G, ub=ub, lb=lb) 78 | st = time.time() 79 | optimizer.opt() 80 | ed = time.time() 81 | F_table[t, item] = optimizer.gbest_F 82 | table[item]['avg'] += optimizer.gbest_F 83 | table[item]['time'] += ed - st 84 | table[item]['ideal'] = ideal_F.Griewank() 85 | loss_curves[:, item] += optimizer.loss_curve 86 | 87 | 88 | item = item + 1 89 | ub = bound_X.Schwefel_P222()[1]*np.pi*np.ones(dimension.Schwefel_P222(D)) 90 | lb = bound_X.Schwefel_P222()[0]*np.pi*np.ones(dimension.Schwefel_P222(D)) 91 | optimizer = NGS_WOA(fitness=benchmark.Schwefel_P222, 92 | D=dimension.Schwefel_P222(D), P=P, G=G, ub=ub, lb=lb) 93 | st = time.time() 94 | optimizer.opt() 95 | ed = time.time() 96 | F_table[t, item] = optimizer.gbest_F 97 | table[item]['avg'] += optimizer.gbest_F 98 | table[item]['time'] += ed - st 99 | table[item]['ideal'] = ideal_F.Schwefel_P222() 100 | loss_curves[:, item] += optimizer.loss_curve 101 | 102 | 103 | item = item + 1 104 | ub = bound_X.Rosenbrock()[1]*np.ones(dimension.Rosenbrock(D)) 105 | lb = bound_X.Rosenbrock()[0]*np.ones(dimension.Rosenbrock(D)) 106 | optimizer = NGS_WOA(fitness=benchmark.Rosenbrock, 107 | 
D=dimension.Rosenbrock(D), P=P, G=G, ub=ub, lb=lb) 108 | st = time.time() 109 | optimizer.opt() 110 | ed = time.time() 111 | F_table[t, item] = optimizer.gbest_F 112 | table[item]['avg'] += optimizer.gbest_F 113 | table[item]['time'] += ed - st 114 | table[item]['ideal'] = ideal_F.Rosenbrock() 115 | loss_curves[:, item] += optimizer.loss_curve 116 | 117 | 118 | item = item + 1 119 | ub = bound_X.Sehwwefel_P221()[1]*np.ones(dimension.Sehwwefel_P221(D)) 120 | lb = bound_X.Sehwwefel_P221()[0]*np.ones(dimension.Sehwwefel_P221(D)) 121 | optimizer = NGS_WOA(fitness=benchmark.Sehwwefel_P221, 122 | D=dimension.Sehwwefel_P221(D), P=P, G=G, ub=ub, lb=lb) 123 | st = time.time() 124 | optimizer.opt() 125 | ed = time.time() 126 | F_table[t, item] = optimizer.gbest_F 127 | table[item]['avg'] += optimizer.gbest_F 128 | table[item]['time'] += ed - st 129 | table[item]['ideal'] = ideal_F.Sehwwefel_P221() 130 | loss_curves[:, item] += optimizer.loss_curve 131 | 132 | 133 | item = item + 1 134 | ub = bound_X.Quartic()[1]*np.ones(dimension.Quartic(D)) 135 | lb = bound_X.Quartic()[0]*np.ones(dimension.Quartic(D)) 136 | optimizer = NGS_WOA(fitness=benchmark.Quartic, 137 | D=dimension.Quartic(D), P=P, G=G, ub=ub, lb=lb) 138 | st = time.time() 139 | optimizer.opt() 140 | ed = time.time() 141 | F_table[t, item] = optimizer.gbest_F 142 | table[item]['avg'] += optimizer.gbest_F 143 | table[item]['time'] += ed - st 144 | table[item]['ideal'] = ideal_F.Quartic() 145 | loss_curves[:, item] += optimizer.loss_curve 146 | 147 | 148 | item = item + 1 149 | ub = bound_X.Schwefel_P12()[1]*np.ones(dimension.Schwefel_P12(D)) 150 | lb = bound_X.Schwefel_P12()[0]*np.ones(dimension.Schwefel_P12(D)) 151 | optimizer = NGS_WOA(fitness=benchmark.Schwefel_P12, 152 | D=dimension.Schwefel_P12(D), P=P, G=G, ub=ub, lb=lb) 153 | st = time.time() 154 | optimizer.opt() 155 | ed = time.time() 156 | F_table[t, item] = optimizer.gbest_F 157 | table[item]['avg'] += optimizer.gbest_F 158 | table[item]['time'] += ed - st 159 | table[item]['ideal'] = ideal_F.Schwefel_P12() 160 | loss_curves[:, item] += optimizer.loss_curve 161 | 162 | 163 | item = item + 1 164 | ub = bound_X.Penalized1()[1]*np.ones(dimension.Penalized1(D)) 165 | lb = bound_X.Penalized1()[0]*np.ones(dimension.Penalized1(D)) 166 | optimizer = NGS_WOA(fitness=benchmark.Penalized1, 167 | D=dimension.Penalized1(D), P=P, G=G, ub=ub, lb=lb) 168 | st = time.time() 169 | optimizer.opt() 170 | ed = time.time() 171 | F_table[t, item] = optimizer.gbest_F 172 | table[item]['avg'] += optimizer.gbest_F 173 | table[item]['time'] += ed - st 174 | table[item]['ideal'] = ideal_F.Penalized1() 175 | loss_curves[:, item] += optimizer.loss_curve 176 | 177 | 178 | item = item + 1 179 | ub = bound_X.Penalized2()[1]*np.ones(dimension.Penalized2(D)) 180 | lb = bound_X.Penalized2()[0]*np.ones(dimension.Penalized2(D)) 181 | optimizer = NGS_WOA(fitness=benchmark.Penalized2, 182 | D=dimension.Penalized2(D), P=P, G=G, ub=ub, lb=lb) 183 | st = time.time() 184 | optimizer.opt() 185 | ed = time.time() 186 | F_table[t, item] = optimizer.gbest_F 187 | table[item]['avg'] += optimizer.gbest_F 188 | table[item]['time'] += ed - st 189 | table[item]['ideal'] = ideal_F.Penalized2() 190 | loss_curves[:, item] += optimizer.loss_curve 191 | 192 | 193 | item = item + 1 194 | ub = bound_X.Schwefel_226()[1]*np.ones(dimension.Schwefel_226(D)) 195 | lb = bound_X.Schwefel_226()[0]*np.ones(dimension.Schwefel_226(D)) 196 | optimizer = NGS_WOA(fitness=benchmark.Schwefel_226, 197 | D=dimension.Schwefel_226(dimension.Schwefel_226(D)), 
P=P, G=G, ub=ub, lb=lb) 198 | st = time.time() 199 | optimizer.opt() 200 | ed = time.time() 201 | F_table[t, item] = optimizer.gbest_F 202 | table[item]['avg'] += optimizer.gbest_F 203 | table[item]['time'] += ed - st 204 | table[item]['ideal'] = ideal_F.Schwefel_226(D) 205 | loss_curves[:, item] += optimizer.loss_curve 206 | 207 | 208 | item = item + 1 209 | ub = bound_X.Step()[1]*np.ones(dimension.Step(D)) 210 | lb = bound_X.Step()[0]*np.ones(dimension.Step(D)) 211 | optimizer = NGS_WOA(fitness=benchmark.Step, 212 | D=dimension.Step(D), P=P, G=G, ub=ub, lb=lb) 213 | st = time.time() 214 | optimizer.opt() 215 | ed = time.time() 216 | F_table[t, item] = optimizer.gbest_F 217 | table[item]['avg'] += optimizer.gbest_F 218 | table[item]['time'] += ed - st 219 | table[item]['ideal'] = ideal_F.Step() 220 | loss_curves[:, item] += optimizer.loss_curve 221 | 222 | 223 | item = item + 1 224 | ub = bound_X.Kowalik()[1]*np.ones(dimension.Kowalik()) 225 | lb = bound_X.Kowalik()[0]*np.ones(dimension.Kowalik()) 226 | optimizer = NGS_WOA(fitness=benchmark.Kowalik, 227 | D=dimension.Kowalik(), P=P, G=G, ub=ub, lb=lb) 228 | st = time.time() 229 | optimizer.opt() 230 | ed = time.time() 231 | F_table[t, item] = optimizer.gbest_F 232 | table[item]['avg'] += optimizer.gbest_F 233 | table[item]['time'] += ed - st 234 | table[item]['ideal'] = ideal_F.Kowalik() 235 | loss_curves[:, item] += optimizer.loss_curve 236 | 237 | 238 | item = item + 1 239 | ub = bound_X.ShekelFoxholes()[1]*np.ones(dimension.ShekelFoxholes()) 240 | lb = bound_X.ShekelFoxholes()[0]*np.ones(dimension.ShekelFoxholes()) 241 | optimizer = NGS_WOA(fitness=benchmark.ShekelFoxholes, 242 | D=dimension.ShekelFoxholes(), P=P, G=G, ub=ub, lb=lb) 243 | st = time.time() 244 | optimizer.opt() 245 | ed = time.time() 246 | F_table[t, item] = optimizer.gbest_F 247 | table[item]['avg'] += optimizer.gbest_F 248 | table[item]['time'] += ed - st 249 | table[item]['ideal'] = ideal_F.ShekelFoxholes() 250 | loss_curves[:, item] += optimizer.loss_curve 251 | 252 | 253 | item = item + 1 254 | ub = bound_X.GoldsteinPrice()[1]*np.ones(dimension.GoldsteinPrice()) 255 | lb = bound_X.GoldsteinPrice()[0]*np.ones(dimension.GoldsteinPrice()) 256 | optimizer = NGS_WOA(fitness=benchmark.GoldsteinPrice, 257 | D=dimension.GoldsteinPrice(), P=P, G=G, ub=ub, lb=lb) 258 | st = time.time() 259 | optimizer.opt() 260 | ed = time.time() 261 | F_table[t, item] = optimizer.gbest_F 262 | table[item]['avg'] += optimizer.gbest_F 263 | table[item]['time'] += ed - st 264 | table[item]['ideal'] = ideal_F.GoldsteinPrice() 265 | loss_curves[:, item] += optimizer.loss_curve 266 | 267 | 268 | item = item + 1 269 | Shekel5 = functools.partial(benchmark.Shekel, m=5) 270 | ub = bound_X.Shekel()[1]*np.ones(dimension.Shekel()) 271 | lb = bound_X.Shekel()[0]*np.ones(dimension.Shekel()) 272 | optimizer = NGS_WOA(fitness=Shekel5, 273 | D=dimension.Shekel(), P=P, G=G, ub=ub, lb=lb) 274 | st = time.time() 275 | optimizer.opt() 276 | ed = time.time() 277 | F_table[t, item] = optimizer.gbest_F 278 | table[item]['avg'] += optimizer.gbest_F 279 | table[item]['time'] += ed - st 280 | table[item]['ideal'] = ideal_F.Shekel() 281 | loss_curves[:, item] += optimizer.loss_curve 282 | 283 | 284 | item = item + 1 285 | ub = bound_X.Branin()[2:]*np.ones(dimension.Branin()) 286 | lb = bound_X.Branin()[:2]*np.ones(dimension.Branin()) 287 | optimizer = NGS_WOA(fitness=benchmark.Branin, 288 | D=dimension.Branin(), P=P, G=G, ub=ub, lb=lb) 289 | st = time.time() 290 | optimizer.opt() 291 | ed = time.time() 292 | 
F_table[t, item] = optimizer.gbest_F 293 | table[item]['avg'] += optimizer.gbest_F 294 | table[item]['time'] += ed - st 295 | table[item]['ideal'] = ideal_F.Branin() 296 | loss_curves[:, item] += optimizer.loss_curve 297 | 298 | 299 | item = item + 1 300 | ub = bound_X.Hartmann3()[1]*np.ones(dimension.Hartmann3()) 301 | lb = bound_X.Hartmann3()[0]*np.ones(dimension.Hartmann3()) 302 | optimizer = NGS_WOA(fitness=benchmark.Hartmann3, 303 | D=dimension.Hartmann3(), P=P, G=G, ub=ub, lb=lb) 304 | st = time.time() 305 | optimizer.opt() 306 | ed = time.time() 307 | F_table[t, item] = optimizer.gbest_F 308 | table[item]['avg'] += optimizer.gbest_F 309 | table[item]['time'] += ed - st 310 | table[item]['ideal'] = ideal_F.Hartmann3() 311 | loss_curves[:, item] += optimizer.loss_curve 312 | 313 | 314 | item = item + 1 315 | Shekel7 = functools.partial(benchmark.Shekel, m=7) 316 | ub = bound_X.Shekel()[1]*np.ones(dimension.Shekel()) 317 | lb = bound_X.Shekel()[0]*np.ones(dimension.Shekel()) 318 | optimizer = NGS_WOA(fitness=Shekel7, 319 | D=dimension.Shekel(), P=P, G=G, ub=ub, lb=lb) 320 | st = time.time() 321 | optimizer.opt() 322 | ed = time.time() 323 | F_table[t, item] = optimizer.gbest_F 324 | table[item]['avg'] += optimizer.gbest_F 325 | table[item]['time'] += ed - st 326 | table[item]['ideal'] = ideal_F.Shekel() 327 | loss_curves[:, item] += optimizer.loss_curve 328 | 329 | 330 | item = item + 1 331 | Shekel10 = functools.partial(benchmark.Shekel, m=10) 332 | ub = bound_X.Shekel()[1]*np.ones(dimension.Shekel()) 333 | lb = bound_X.Shekel()[0]*np.ones(dimension.Shekel()) 334 | optimizer = NGS_WOA(fitness=Shekel10, 335 | D=dimension.Shekel(), P=P, G=G, ub=ub, lb=lb) 336 | st = time.time() 337 | optimizer.opt() 338 | ed = time.time() 339 | F_table[t, item] = optimizer.gbest_F 340 | table[item]['avg'] += optimizer.gbest_F 341 | table[item]['time'] += ed - st 342 | table[item]['ideal'] = ideal_F.Shekel() 343 | loss_curves[:, item] += optimizer.loss_curve 344 | 345 | 346 | item = item + 1 347 | ub = bound_X.SixHumpCamelBack()[1]*np.ones(dimension.SixHumpCamelBack()) 348 | lb = bound_X.SixHumpCamelBack()[0]*np.ones(dimension.SixHumpCamelBack()) 349 | optimizer = NGS_WOA(fitness=benchmark.SixHumpCamelBack, 350 | D=dimension.SixHumpCamelBack(), P=P, G=G, ub=ub, lb=lb) 351 | st = time.time() 352 | optimizer.opt() 353 | ed = time.time() 354 | F_table[t, item] = optimizer.gbest_F 355 | table[item]['avg'] += optimizer.gbest_F 356 | table[item]['time'] += ed - st 357 | table[item]['ideal'] = ideal_F.SixHumpCamelBack() 358 | loss_curves[:, item] += optimizer.loss_curve 359 | 360 | 361 | item = item + 1 362 | ub = bound_X.Hartmann6()[1]*np.ones(dimension.Hartmann6()) 363 | lb = bound_X.Hartmann6()[0]*np.ones(dimension.Hartmann6()) 364 | optimizer = NGS_WOA(fitness=benchmark.Hartmann6, 365 | D=dimension.Hartmann6(), P=P, G=G, ub=ub, lb=lb) 366 | st = time.time() 367 | optimizer.opt() 368 | ed = time.time() 369 | F_table[t, item] = optimizer.gbest_F 370 | table[item]['avg'] += optimizer.gbest_F 371 | table[item]['time'] += ed - st 372 | table[item]['ideal'] = ideal_F.Hartmann6() 373 | loss_curves[:, item] += optimizer.loss_curve 374 | 375 | 376 | item = item + 1 377 | ub = bound_X.Zakharov()[1]*np.ones(dimension.Zakharov(D)) 378 | lb = bound_X.Zakharov()[0]*np.ones(dimension.Zakharov(D)) 379 | optimizer = NGS_WOA(fitness=benchmark.Zakharov, 380 | D=dimension.Zakharov(D), P=P, G=G, ub=ub, lb=lb) 381 | st = time.time() 382 | optimizer.opt() 383 | ed = time.time() 384 | F_table[t, item] = optimizer.gbest_F 
385 | table[item]['avg'] += optimizer.gbest_F 386 | table[item]['time'] += ed - st 387 | table[item]['ideal'] = ideal_F.Zakharov() 388 | loss_curves[:, item] += optimizer.loss_curve 389 | 390 | 391 | item = item + 1 392 | ub = bound_X.SumSquares()[1]*np.ones(dimension.SumSquares(D)) 393 | lb = bound_X.SumSquares()[0]*np.ones(dimension.SumSquares(D)) 394 | optimizer = NGS_WOA(fitness=benchmark.SumSquares, 395 | D=dimension.SumSquares(D), P=P, G=G, ub=ub, lb=lb) 396 | st = time.time() 397 | optimizer.opt() 398 | ed = time.time() 399 | F_table[t, item] = optimizer.gbest_F 400 | table[item]['avg'] += optimizer.gbest_F 401 | table[item]['time'] += ed - st 402 | table[item]['ideal'] = ideal_F.SumSquares() 403 | loss_curves[:, item] += optimizer.loss_curve 404 | 405 | 406 | item = item + 1 407 | ub = bound_X.Alpine()[1]*np.ones(dimension.Alpine(D)) 408 | lb = bound_X.Alpine()[0]*np.ones(dimension.Alpine(D)) 409 | optimizer = NGS_WOA(fitness=benchmark.Alpine, 410 | D=dimension.Alpine(D), P=P, G=G, ub=ub, lb=lb) 411 | st = time.time() 412 | optimizer.opt() 413 | ed = time.time() 414 | F_table[t, item] = optimizer.gbest_F 415 | table[item]['avg'] += optimizer.gbest_F 416 | table[item]['time'] += ed - st 417 | table[item]['ideal'] = ideal_F.Alpine() 418 | loss_curves[:, item] += optimizer.loss_curve 419 | 420 | 421 | item = item + 1 422 | ub = bound_X.Michalewicz()[1]*np.ones(dimension.Michalewicz()) 423 | lb = bound_X.Michalewicz()[0]*np.ones(dimension.Michalewicz()) 424 | optimizer = NGS_WOA(fitness=benchmark.Michalewicz, 425 | D=dimension.Michalewicz(), P=P, G=G, ub=ub, lb=lb) 426 | st = time.time() 427 | optimizer.opt() 428 | ed = time.time() 429 | F_table[t, item] = optimizer.gbest_F 430 | table[item]['avg'] += optimizer.gbest_F 431 | table[item]['time'] += ed - st 432 | table[item]['ideal'] = ideal_F.Michalewicz(dimension.Michalewicz()) 433 | loss_curves[:, item] += optimizer.loss_curve 434 | 435 | 436 | item = item + 1 437 | ub = bound_X.Exponential()[1]*np.ones(dimension.Exponential(D)) 438 | lb = bound_X.Exponential()[0]*np.ones(dimension.Exponential(D)) 439 | optimizer = NGS_WOA(fitness=benchmark.Exponential, 440 | D=dimension.Exponential(D), P=P, G=G, ub=ub, lb=lb) 441 | st = time.time() 442 | optimizer.opt() 443 | ed = time.time() 444 | F_table[t, item] = optimizer.gbest_F 445 | table[item]['avg'] += optimizer.gbest_F 446 | table[item]['time'] += ed - st 447 | table[item]['ideal'] = ideal_F.Exponential() 448 | loss_curves[:, item] += optimizer.loss_curve 449 | 450 | 451 | item = item + 1 452 | ub = bound_X.Schaffer()[1]*np.ones(dimension.Schaffer()) 453 | lb = bound_X.Schaffer()[0]*np.ones(dimension.Schaffer()) 454 | optimizer = NGS_WOA(fitness=benchmark.Schaffer, 455 | D=dimension.Schaffer(), P=P, G=G, ub=ub, lb=lb) 456 | st = time.time() 457 | optimizer.opt() 458 | ed = time.time() 459 | F_table[t, item] = optimizer.gbest_F 460 | table[item]['avg'] += optimizer.gbest_F 461 | table[item]['time'] += ed - st 462 | table[item]['ideal'] = ideal_F.Schaffer() 463 | loss_curves[:, item] += optimizer.loss_curve 464 | 465 | 466 | item = item + 1 467 | ub = bound_X.BentCigar()[1]*np.ones(dimension.BentCigar(D)) 468 | lb = bound_X.BentCigar()[0]*np.ones(dimension.BentCigar(D)) 469 | optimizer = NGS_WOA(fitness=benchmark.BentCigar, 470 | D=dimension.BentCigar(D), P=P, G=G, ub=ub, lb=lb) 471 | st = time.time() 472 | optimizer.opt() 473 | ed = time.time() 474 | F_table[t, item] = optimizer.gbest_F 475 | table[item]['avg'] += optimizer.gbest_F 476 | table[item]['time'] += ed - st 477 | 
table[item]['ideal'] = ideal_F.BentCigar() 478 | loss_curves[:, item] += optimizer.loss_curve 479 | 480 | 481 | item = item + 1 482 | ub = bound_X.Bohachevsky1()[1]*np.ones(dimension.Bohachevsky1()) 483 | lb = bound_X.Bohachevsky1()[0]*np.ones(dimension.Bohachevsky1()) 484 | optimizer = NGS_WOA(fitness=benchmark.Bohachevsky1, 485 | D=dimension.Bohachevsky1(), P=P, G=G, ub=ub, lb=lb) 486 | st = time.time() 487 | optimizer.opt() 488 | ed = time.time() 489 | F_table[t, item] = optimizer.gbest_F 490 | table[item]['avg'] += optimizer.gbest_F 491 | table[item]['time'] += ed - st 492 | table[item]['ideal'] = ideal_F.Bohachevsky1() 493 | loss_curves[:, item] += optimizer.loss_curve 494 | 495 | 496 | item = item + 1 497 | ub = bound_X.Elliptic()[1]*np.ones(dimension.Elliptic(D)) 498 | lb = bound_X.Elliptic()[0]*np.ones(dimension.Elliptic(D)) 499 | optimizer = NGS_WOA(fitness=benchmark.Elliptic, 500 | D=dimension.Elliptic(D), P=P, G=G, ub=ub, lb=lb) 501 | st = time.time() 502 | optimizer.opt() 503 | ed = time.time() 504 | F_table[t, item] = optimizer.gbest_F 505 | table[item]['avg'] += optimizer.gbest_F 506 | table[item]['time'] += ed - st 507 | table[item]['ideal'] = ideal_F.Elliptic() 508 | loss_curves[:, item] += optimizer.loss_curve 509 | 510 | 511 | item = item + 1 512 | ub = bound_X.DropWave()[1]*np.ones(dimension.DropWave()) 513 | lb = bound_X.DropWave()[0]*np.ones(dimension.DropWave()) 514 | optimizer = NGS_WOA(fitness=benchmark.DropWave, 515 | D=dimension.DropWave(), P=P, G=G, ub=ub, lb=lb) 516 | st = time.time() 517 | optimizer.opt() 518 | ed = time.time() 519 | F_table[t, item] = optimizer.gbest_F 520 | table[item]['avg'] += optimizer.gbest_F 521 | table[item]['time'] += ed - st 522 | table[item]['ideal'] = ideal_F.DropWave() 523 | loss_curves[:, item] += optimizer.loss_curve 524 | 525 | 526 | item = item + 1 527 | ub = bound_X.CosineMixture()[1]*np.ones(dimension.CosineMixture(D)) 528 | lb = bound_X.CosineMixture()[0]*np.ones(dimension.CosineMixture(D)) 529 | optimizer = NGS_WOA(fitness=benchmark.CosineMixture, 530 | D=dimension.CosineMixture(dimension.CosineMixture(D)), P=P, G=G, ub=ub, lb=lb) 531 | st = time.time() 532 | optimizer.opt() 533 | ed = time.time() 534 | F_table[t, item] = optimizer.gbest_F 535 | table[item]['avg'] += optimizer.gbest_F 536 | table[item]['time'] += ed - st 537 | table[item]['ideal'] = ideal_F.CosineMixture(dimension.CosineMixture(D)) 538 | loss_curves[:, item] += optimizer.loss_curve 539 | 540 | 541 | item = item + 1 542 | ub = bound_X.Ellipsoidal(dimension.Ellipsoidal(D))[1]*np.ones(dimension.Ellipsoidal(D)) 543 | lb = bound_X.Ellipsoidal(dimension.Ellipsoidal(D))[0]*np.ones(dimension.Ellipsoidal(D)) 544 | optimizer = NGS_WOA(fitness=benchmark.Ellipsoidal, 545 | D=dimension.Ellipsoidal(D), P=P, G=G, ub=ub, lb=lb) 546 | st = time.time() 547 | optimizer.opt() 548 | ed = time.time() 549 | F_table[t, item] = optimizer.gbest_F 550 | table[item]['avg'] += optimizer.gbest_F 551 | table[item]['time'] += ed - st 552 | table[item]['ideal'] = ideal_F.Ellipsoidal() 553 | loss_curves[:, item] += optimizer.loss_curve 554 | 555 | 556 | item = item + 1 557 | ub = bound_X.LevyandMontalvo1()[1]*np.ones(dimension.LevyandMontalvo1(D)) 558 | lb = bound_X.LevyandMontalvo1()[0]*np.ones(dimension.LevyandMontalvo1(D)) 559 | optimizer = NGS_WOA(fitness=benchmark.LevyandMontalvo1, 560 | D=dimension.LevyandMontalvo1(D), P=P, G=G, ub=ub, lb=lb) 561 | st = time.time() 562 | optimizer.opt() 563 | ed = time.time() 564 | F_table[t, item] = optimizer.gbest_F 565 | table[item]['avg'] += 
optimizer.gbest_F 566 | table[item]['time'] += ed - st 567 | table[item]['ideal'] = ideal_F.LevyandMontalvo1() 568 | loss_curves[:, item] += optimizer.loss_curve 569 | 570 | 571 | print(t+1) 572 | 573 | loss_curves = loss_curves / run_times 574 | loss_curves = pd.DataFrame(loss_curves) 575 | loss_curves.columns = ['Sphere', 'Rastrigin', 'Ackley', 'Griewank', 'Schwefel P2.22', 576 | 'Rosenbrock', 'Schwefel P2.21', 'Quartic', 'Schwefel P1.2', 'Penalized 1', 577 | 'Penalized 2', 'Schwefel P2.26', 'Step', 'Kowalik', 'Shekel Foxholes', 578 | 'Goldstein-Price', 'Shekel 5', 'Branin', 'Hartmann 3', 'Shekel 7', 579 | 'Shekel 10', 'Six-Hump Camel-Back', 'Hartmann 6', 'Zakharov', 'Sum Squares', 580 | 'Alpine', 'Michalewicz', 'Exponential', 'Schaffer', 'Bent Cigar', 581 | 'Bohachevsky 1', 'Elliptic', 'Drop Wave', 'Cosine Mixture', 'Ellipsoidal', 582 | 'Levy and Montalvo 1'] 583 | loss_curves.to_csv('loss_curves(NGS_WOA).csv') 584 | 585 | table.loc[['avg', 'time']] = table.loc[['avg', 'time']] / run_times 586 | table.loc['worst'] = F_table.max(axis=0) 587 | table.loc['best'] = F_table.min(axis=0) 588 | table.loc['std'] = F_table.std(axis=0) 589 | table.columns = ['Sphere', 'Rastrigin', 'Ackley', 'Griewank', 'Schwefel P2.22', 590 | 'Rosenbrock', 'Schwefel P2.21', 'Quartic', 'Schwefel P1.2', 'Penalized 1', 591 | 'Penalized 2', 'Schwefel P2.26', 'Step', 'Kowalik', 'Shekel Foxholes', 592 | 'Goldstein-Price', 'Shekel 5', 'Branin', 'Hartmann 3', 'Shekel 7', 593 | 'Shekel 10', 'Six-Hump Camel-Back', 'Hartmann 6', 'Zakharov', 'Sum Squares', 594 | 'Alpine', 'Michalewicz', 'Exponential', 'Schaffer', 'Bent Cigar', 595 | 'Bohachevsky 1', 'Elliptic', 'Drop Wave', 'Cosine Mixture', 'Ellipsoidal', 596 | 'Levy and Montalvo 1'] 597 | table.to_csv('table(NGS_WOA).csv') --------------------------------------------------------------------------------
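Notes on the two added operators, read directly from NGS_WOA.py (the equation numbers match the comments in the source): the nonlinear adaptive weight of Eq. (8) and the golden sine update of Eq. (20) are

$$
C_1=\begin{cases}\dfrac{1}{2}\sqrt{1+\cos\!\left(\dfrac{\pi g}{G}\right)}, & g<\dfrac{G}{2},\\[1.5ex] \dfrac{1}{2}\sqrt{1-\cos\!\left(\pi+\dfrac{\pi g}{G}\right)}, & g\ge\dfrac{G}{2},\end{cases}
\qquad
X \leftarrow X\,\lvert\sin r_3\rvert + r_4\sin r_3\,\bigl\lvert m_1 X^{*}-m_2 X\bigr\rvert,
$$

where $X^{*}$ is the current global best, $r_3$ and $r_4$ are drawn from $U(0,1)$ in this implementation, $m_1=-2\pi+(1-\tau)\cdot 2\pi$, $m_2=-2\pi+\tau\cdot 2\pi$, and $\tau=(\sqrt{5}-1)/2$.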
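A minimal usage sketch of the NGS_WOA class. This example is not part of the original repository: the sphere function below is a stand-in fitness and the ±100 bounds are illustrative. The fitness callable receives the whole population as a (P, D) array and must return one fitness value per row.

```python
import numpy as np

from NGS_WOA import NGS_WOA


def sphere(X):
    # NGS_WOA evaluates the whole population at once, so X has shape (P, D)
    return np.sum(X**2, axis=1)


D = 30
optimizer = NGS_WOA(fitness=sphere, D=D, P=30, G=500,
                    ub=100*np.ones(D), lb=-100*np.ones(D))
optimizer.opt()

print(optimizer.gbest_F)   # best fitness found over the G iterations
print(optimizer.gbest_X)   # best position found
optimizer.plot_curve()     # plot the recorded convergence curve
```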
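main.py repeats the same measure-and-record block once per test function. The sketch below is only an illustration of how that common pattern could be collapsed into a data-driven loop, under the assumption that each listed function follows the regular interface seen in main.py (bound_X.<name>() returns an indexable (lower, upper) pair, dimension.<name>(D) takes the problem size, ideal_F.<name>() takes no argument). Special cases such as Branin's per-dimension bounds, the Shekel variants, and Schwefel P2.26's ideal value would still need dedicated handling, and the loss-curve aggregation is omitted for brevity.

```python
import time

import numpy as np
import pandas as pd

import benchmark
import bound_X
import dimension
import ideal_F
from NGS_WOA import NGS_WOA

D, P, G, run_times = 30, 30, 500, 50
names = ['Sphere', 'Rastrigin', 'Ackley', 'Griewank']  # subset for illustration

table = pd.DataFrame(np.zeros([6, len(names)]),
                     index=['avg', 'std', 'worst', 'best', 'ideal', 'time'],
                     columns=names)
F_table = np.zeros([run_times, len(names)])

for t in range(run_times):
    for item, name in enumerate(names):
        d = getattr(dimension, name)(D)
        bounds = getattr(bound_X, name)()
        optimizer = NGS_WOA(fitness=getattr(benchmark, name), D=d, P=P, G=G,
                            ub=bounds[1]*np.ones(d), lb=bounds[0]*np.ones(d))
        st = time.time()
        optimizer.opt()
        # accumulate the same statistics that main.py records per function
        F_table[t, item] = optimizer.gbest_F
        table.loc['avg', name] += optimizer.gbest_F
        table.loc['time', name] += time.time() - st
        table.loc['ideal', name] = getattr(ideal_F, name)()

table.loc[['avg', 'time']] = table.loc[['avg', 'time']] / run_times
table.loc['worst'] = F_table.max(axis=0)
table.loc['best'] = F_table.min(axis=0)
table.loc['std'] = F_table.std(axis=0)
table.to_csv('table(NGS_WOA).csv')
```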