├── 1,sun_plot
│   ├── PSO_pred.csv
│   ├── PSO_test.csv
│   ├── TSA_pred.csv
│   ├── TSA_test.csv
│   ├── alllll_result.csv
│   ├── calculate_result.py
│   ├── demoplot.py
│   ├── real.csv
│   ├── regular_pred.csv
│   ├── regular_test.csv
│   └── train.py
├── 1,sun_power
│   ├── PSO_result.csv
│   ├── new_begin_PSO_sun.py
│   ├── new_begin_TSA_sun.py
│   └── testing.xlsx
├── 2,beijing PM2.5
│   ├── PRSA_data_2010.1.1-2014.12.31.csv
│   ├── PSO_result.csv
│   ├── TSA_result.csv
│   ├── lstm_model.pt
│   ├── new_begin_PSO_beijngpm2.5.py
│   └── new_begin_TSA_beijngpm2.5.py
├── 2,beijing_plot
│   ├── PRSA_data_2010.1.1-2014.12.31.csv
│   ├── PSO_pred.csv
│   ├── PSO_test.csv
│   ├── TSA_pred.csv
│   ├── TSA_test.csv
│   ├── alllll_result.csv
│   ├── calculate_result_beijing.py
│   ├── demoplot_beijing.py
│   ├── lstm_model.pt
│   ├── real.csv
│   ├── regular_pred.csv
│   └── train_beijing.py
├── 3,family_plot
│   ├── PSO_pred.csv
│   ├── PSO_test.csv
│   ├── TSA_pred.csv
│   ├── TSA_test.csv
│   ├── alllll_result.csv
│   ├── calculate_result_family.py
│   ├── demoplot_family.py
│   ├── family_power.xlsx
│   ├── lstm_model.pt
│   ├── real.csv
│   ├── regular_pred.csv
│   └── train_family.py
├── 3,family_power
│   ├── PSO_result.csv
│   ├── TSA_result.csv
│   ├── family_power.xlsx
│   ├── lstm_model.pt
│   ├── new_begin_PSO_family_power.py
│   └── new_begin_TSA_family_power.py
├── 4,airplot
│   ├── AirQualityUCI.xlsx
│   ├── PSO_pred.csv
│   ├── PSO_test.csv
│   ├── TSA_pred.csv
│   ├── TSA_test.csv
│   ├── alllll_result.csv
│   ├── calculate_result_air.py
│   ├── demoplot_air.py
│   ├── lstm_model.pt
│   ├── real.csv
│   ├── regular_pred.csv
│   └── train_air.py
├── 4,airquality
│   ├── AirQualityUCI.xlsx
│   ├── PSO_result.csv
│   ├── TSA_result.csv
│   ├── lstm_model.pt
│   ├── new_begin_PSO_AirQuality.py
│   └── new_begin_TSA_AirQuality.py
└── README.md

/1,sun_plot/alllll_result.csv:
--------------------------------------------------------------------------------
,0,1,2
0,0.015118974267269094,0.010267893535251454,0.010551819225987425
1,0.12295923823474629,0.10133061499493357,0.10272204839267676
2,0.07602796154447893,0.060216469501346154,0.06025311923367755
3,4105164746.3855515,3089653642.131452,3232365617.627421
--------------------------------------------------------------------------------
/1,sun_plot/calculate_result.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd

# Ground truth and the three models' predictions (last column of each file).
y = np.array(pd.read_csv("real.csv"))[:, -1]
preds = {
    "regular": np.array(pd.read_csv("regular_pred.csv"))[:, -1],
    "PSO": np.array(pd.read_csv("PSO_pred.csv"))[:, -1],
    "TSA": np.array(pd.read_csv("TSA_pred.csv"))[:, -1],
}

def MSE(y, yhat):
    return np.sum((y - yhat) ** 2) / len(y)

def RMSE(y, yhat):
    return np.sqrt(MSE(y, yhat))

def MAE(y, yhat):
    return np.sum(np.abs(y - yhat)) / len(y)

def MAPE(y, yhat):
    # The 1e-12 term only guards against division by zero; wherever y is
    # exactly 0 the ratio still explodes, which is why row 3 of
    # alllll_result.csv holds values around 4e9.
    return np.sum(np.abs(y - yhat) / (y + 1e-12)) / len(y)

# Rows: MSE, RMSE, MAE, MAPE. Columns: plain LSTM, PSO-LSTM, TSA-LSTM.
metrics = [MSE, RMSE, MAE, MAPE]
res = np.zeros((len(metrics), len(preds)))
for i, metric in enumerate(metrics):
    for j, yhat in enumerate(preds.values()):
        res[i][j] = metric(y, yhat)

pd.DataFrame(res).to_csv('alllll_result.csv')
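A quick sanity check of the layout that calculate_result.py writes, reusing the metric functions defined above; the toy arrays (y_toy, yhat_toy) are illustrative only, not repo data. Rows of alllll_result.csv are MSE, RMSE, MAE, MAPE; columns 0-2 are the plain LSTM, PSO-LSTM, and TSA-LSTM predictions.

y_toy = np.array([0.0, 0.5, 1.0])
yhat_toy = np.array([0.1, 0.4, 0.9])
print(MSE(y_toy, yhat_toy))   # 0.01    -> row 0 of alllll_result.csv
print(RMSE(y_toy, yhat_toy))  # 0.1     -> row 1
print(MAE(y_toy, yhat_toy))   # 0.1     -> row 2
print(MAPE(y_toy, yhat_toy))  # ~3.3e10 -> row 3: y_toy[0] == 0 divides by 1e-12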
--------------------------------------------------------------------------------
/1,sun_plot/demoplot.py:
--------------------------------------------------------------------------------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams['font.sans-serif'] = ['SimHei']  # font with CJK glyphs, in case Chinese labels are used
plt.rcParams['axes.unicode_minus'] = False    # render minus signs correctly with that font

# figure / export resolution
plt.rcParams['figure.dpi'] = 200
plt.rcParams['savefig.dpi'] = 200

df = pd.read_csv("real.csv")
df_PSO = pd.read_csv("PSO_pred.csv")
df_TSA = pd.read_csv('TSA_pred.csv')
df_regular = pd.read_csv('regular_pred.csv')

fig, ax = plt.subplots(figsize=(8 * np.sqrt(2) + 6, 8))  # create the figure

ax.plot(np.linspace(0, df.shape[0] + 1, df.shape[0]), df.iloc[:, -1], label='real value', marker='+')
ax.plot(np.linspace(0, df_regular.shape[0] + 1, df_regular.shape[0]), df_regular.iloc[:, -1], label='LSTM predicted value', marker='2')
ax.plot(np.linspace(0, df_PSO.shape[0] + 1, df_PSO.shape[0]), df_PSO.iloc[:, -1], label='PSO_LSTM predicted value', marker='1')
ax.plot(np.linspace(0, df_TSA.shape[0] + 1, df_TSA.shape[0]), df_TSA.iloc[:, -1], label='TSA_LSTM predicted value', marker='*')

ax.set_xlabel('')                           # x-axis label (left empty)
ax.set_ylabel('normalized target feature')  # y-axis label
ax.set_title('solar power')                 # figure title
ax.legend()                                 # show the legend

plt.show()  # display the figure
--------------------------------------------------------------------------------
/1,sun_plot/real.csv: -------------------------------------------------------------------------------- 1 | ,0 2 | 0,0.0 3 | 1,0.0 4 | 2,0.033972126 5 | 3,0.14808363 6 | 4,0.2909408 7 | 5,0.39808363 8 | 6,0.20296167 9 | 7,0.015679443 10 | 8,0.094076656 11 | 9,0.41986063 12 | 10,0.57926834 13 | 11,0.6994774 14 | 12,0.6228223 15 | 13,0.7212544 16 | 14,0.7848432 17 | 15,0.69599307 18 | 16,0.71428573 19 | 17,0.6149826 20 | 18,0.54442513 21 | 19,0.3379791 22 | 20,0.2543554 23 | 21,0.10191638 24 | 22,0.006968641 25 | 23,0.0 26 | 24,0.0 27 | 25,0.0 28 | 26,0.0 29 | 27,0.0 30 | 28,0.047038328 31 | 29,0.20731707 32 | 30,0.3606272 33 | 31,0.51132405 34 | 32,0.6393728 35 | 33,0.7212544 36 | 34,0.716899 37 | 35,0.7761324 38 | 36,0.76132405 39 | 37,0.59407663 40 | 38,0.62020904 41 | 39,0.7569687 42 | 40,0.73344946 43 | 41,0.72299653 44 | 42,0.6907666 45 | 43,0.6167248 46 | 44,0.53919864 47 | 45,0.37108013 48 | 46,0.23344947 49 | 47,0.09320558 50 | 48,0.0104529625 51 | 49,0.0 52 | 50,0.0 53 | 51,0.0 54 | 52,0.0 55 | 53,0.0 56 | 54,0.007839722 57 | 55,0.0705575 58 | 56,0.13763067 59 | 57,0.24825785 60 | 58,0.3432056 61 | 59,0.510453 62 | 60,0.55662024 63 | 61,0.47735193 64 | 62,0.37543553 65 | 63,0.3170732 66 | 64,0.26219514 67 | 65,0.25174215 68 | 66,0.24303137 69 | 67,0.17073171 70 | 68,0.10714286 71 | 69,0.085365854 72 | 70,0.06968641 73 | 71,0.047038328 74 | 72,0.020034844 75 | 73,0.004355401 76 | 74,0.0 77 | 75,0.0 78 | 76,0.0 79 | 77,0.0 80 | 78,0.0 81 | 79,0.0 82 | 80,0.026132405 83 | 81,0.21341464 84 | 82,0.33275262 85 | 83,0.49128923 86 | 84,0.6533101 87 | 85,0.7055749 88 | 86,0.80487806 89 | 87,0.8353659 90 | 88,0.65505224 91 | 89,0.5296167 92 | 90,0.5 93 | 91,0.58275265 94 | 92,0.61585367 95 | 93,0.6567944 96 | 94,0.6803136 97 | 95,0.7308363 98 | 96,0.6184669 99 | 97,0.44773522 100 | 98,0.25261325 101 | 99,0.12979095 102 | 100,0.008710802 103 | 101,0.0 104 | 102,0.0 105 | 103,0.0 106 | 104,0.0 107 | 105,0.0 108 | 106,0.029616727 109 | 107,0.18379791 110 | 
108,0.3597561 111 | 109,0.5121951 112 | 110,0.651568 113 | 111,0.79616725 114 | 112,0.8527875 115 | 113,0.8684669 116 | 114,0.8292683 117 | 115,0.88414633 118 | 116,0.8789199 119 | 117,0.8554007 120 | 118,0.8554007 121 | 119,0.8379791 122 | 120,0.7273519 123 | 121,0.6707317 124 | 122,0.587108 125 | 123,0.43031362 126 | 124,0.27351916 127 | 125,0.10714286 128 | 126,0.004355401 129 | 127,0.0 130 | 128,0.0 131 | 129,0.0 132 | 130,0.0 133 | 131,0.0 134 | 132,0.031358887 135 | 133,0.1890244 136 | 134,0.38066202 137 | 135,0.54094076 138 | 136,0.65505224 139 | 137,0.7343206 140 | 138,0.8466899 141 | 139,0.9268293 142 | 140,0.94773525 143 | 141,0.95034844 144 | 142,0.95034844 145 | 143,0.9050523 146 | 144,0.8614983 147 | 145,0.8196864 148 | 146,0.6393728 149 | 147,0.39634147 150 | 148,0.20731707 151 | 149,0.13850175 152 | 150,0.064459935 153 | 151,0.013937282 154 | 152,0.0 155 | 153,0.0 156 | 154,0.0 157 | 155,0.0 158 | 156,0.0 159 | 157,0.0 160 | 158,0.025261326 161 | 159,0.19860627 162 | 160,0.39285713 163 | 161,0.54790944 164 | 162,0.67857146 165 | 163,0.78571427 166 | 164,0.8719512 167 | 165,0.9294425 168 | 166,0.9555749 169 | 167,0.9555749 170 | 168,0.9599303 171 | 169,0.94163764 172 | 170,0.8702091 173 | 171,0.8353659 174 | 172,0.75435543 175 | 173,0.62979096 176 | 174,0.5270035 177 | 175,0.27439025 178 | 176,0.078397214 179 | 177,0.007839722 180 | 178,0.0 181 | 179,0.0 182 | 180,0.0 183 | 181,0.0 184 | 182,0.0 185 | 183,0.0 186 | 184,0.004355401 187 | 185,0.0836237 188 | 186,0.24477352 189 | 187,0.37108013 190 | 188,0.4921603 191 | 189,0.5522648 192 | 190,0.60714287 193 | 191,0.49825785 194 | 192,0.66463417 195 | 193,0.6236934 196 | 194,0.57665503 197 | 195,0.6167248 198 | 196,0.61236936 199 | 197,0.54616725 200 | 198,0.6062718 201 | 199,0.5679442 202 | 200,0.46777004 203 | 201,0.35365853 204 | 202,0.22125436 205 | 203,0.07752613 206 | 204,0.00087108015 207 | 205,0.0 208 | 206,0.0 209 | 207,0.0 210 | 208,0.0 211 | 209,0.0 212 | 210,0.020905925 213 | 211,0.108013935 214 | 212,0.27351916 215 | 213,0.42334494 216 | 214,0.3780488 217 | 215,0.2979094 218 | 216,0.33972126 219 | 217,0.4172474 220 | 218,0.46951222 221 | 219,0.55487806 222 | 220,0.57404184 223 | 221,0.5932056 224 | 222,0.46080142 225 | 223,0.63501745 226 | 224,0.58623695 227 | 225,0.4747387 228 | 226,0.44250873 229 | 227,0.33275262 230 | 228,0.15679443 231 | 229,0.05749129 232 | 230,0.00087108015 233 | 231,0.0 234 | 232,0.0 235 | 233,0.0 236 | 234,0.0 237 | 235,0.0 238 | 236,0.00087108015 239 | 237,0.0113240415 240 | 238,0.045296166 241 | 239,0.082752615 242 | 240,0.34843206 243 | 241,0.7813589 244 | 242,0.78048784 245 | 243,0.87456447 246 | 244,0.7299652 247 | 245,0.6402439 248 | 246,0.78745645 249 | 247,0.91550523 250 | 248,0.75958186 251 | 249,0.630662 252 | 250,0.738676 253 | 251,0.7151568 254 | 252,0.6184669 255 | 253,0.49912894 256 | 254,0.27874565 257 | 255,0.078397214 258 | 256,0.00087108015 259 | 257,0.0 260 | 258,0.0 261 | 259,0.0 262 | 260,0.0 263 | 261,0.0 264 | 262,0.008710802 265 | 263,0.06533101 266 | 264,0.27003485 267 | 265,0.43466902 268 | 266,0.597561 269 | 267,0.738676 270 | 268,0.8336237 271 | 269,0.8928572 272 | 270,0.8614983 273 | 271,0.9033101 274 | 272,0.88414633 275 | 273,0.86585367 276 | 274,0.8266551 277 | 275,0.7987805 278 | 276,0.728223 279 | 277,0.65592337 280 | 278,0.5470383 281 | 279,0.3824042 282 | 280,0.21689896 283 | 281,0.05836237 284 | 282,0.0 285 | 283,0.0 286 | 284,0.0 287 | 285,0.0 288 | 286,0.0 289 | 287,0.0 290 | 288,0.009581882 291 | 289,0.06620209 292 | 290,0.23867597 293 | 291,0.3292683 
294 | 292,0.3684669 295 | 293,0.22560976 296 | 294,0.18205576 297 | 295,0.14372823 298 | 296,0.2020906 299 | 297,0.20818816 300 | 298,0.16986063 301 | 299,0.13937283 302 | 300,0.1315331 303 | 301,0.19599304 304 | 302,0.18815331 305 | 303,0.14808363 306 | 304,0.13240418 307 | 305,0.112369336 308 | 306,0.027003484 309 | 307,0.015679443 310 | 308,0.0 311 | 309,0.0 312 | 310,0.0 313 | 311,0.0 314 | 312,0.0 315 | 313,0.0 316 | 314,0.00087108015 317 | 315,0.027003484 318 | 316,0.05226481 319 | 317,0.09320558 320 | 318,0.08449478 321 | 319,0.09581882 322 | 320,0.16811846 323 | 321,0.162892 324 | 322,0.13675958 325 | 323,0.12543555 326 | 324,0.15243903 327 | 325,0.19947736 328 | 326,0.1672474 329 | 327,0.1541812 330 | 328,0.22648084 331 | 329,0.19425087 332 | 330,0.13850175 333 | 331,0.11759582 334 | 332,0.15069686 335 | 333,0.05139373 336 | 334,0.0 337 | 335,0.0 338 | 336,0.0 339 | 337,0.0 340 | 338,0.0 341 | 339,0.0 342 | 340,0.012195122 343 | 341,0.13414635 344 | 342,0.34407666 345 | 343,0.45557493 346 | 344,0.57926834 347 | 345,0.630662 348 | 346,0.695122 349 | 347,0.7412892 350 | 348,0.8344948 351 | 349,0.804007 352 | 350,0.83623695 353 | 351,0.8545296 354 | 352,0.85888505 355 | 353,0.87108016 356 | 354,0.7970383 357 | 355,0.74390244 358 | 356,0.5731708 359 | 357,0.42944252 360 | 358,0.20731707 361 | 359,0.035714287 362 | 360,0.0 363 | 361,0.0 364 | 362,0.0 365 | 363,0.0 366 | 364,0.0 367 | 365,0.0 368 | 366,0.0026132406 369 | 367,0.008710802 370 | 368,0.0034843206 371 | 369,0.0104529625 372 | 370,0.007839722 373 | 371,0.004355401 374 | 372,0.012195122 375 | 373,0.03745645 376 | 374,0.054006968 377 | 375,0.067073174 378 | 376,0.043554008 379 | 377,0.035714287 380 | 378,0.031358887 381 | 379,0.013937282 382 | 380,0.0052264812 383 | 381,0.0026132406 384 | 382,0.00087108015 385 | 383,0.0 386 | 384,0.0 387 | 385,0.0 388 | 386,0.0 389 | 387,0.0 390 | 388,0.0 391 | 389,0.0 392 | 390,0.0 393 | 391,0.0 394 | 392,0.004355401 395 | 393,0.05574913 396 | 394,0.12804878 397 | 395,0.20557491 398 | 396,0.25696865 399 | 397,0.36149827 400 | 398,0.57752615 401 | 399,0.5888502 402 | 400,0.73344946 403 | 401,0.7465157 404 | 402,0.7369338 405 | 403,0.7456446 406 | 404,0.71341467 407 | 405,0.662892 408 | 406,0.58449477 409 | 407,0.52351916 410 | 408,0.3684669 411 | 409,0.25609756 412 | 410,0.12891987 413 | 411,0.020034844 414 | 412,0.0 415 | 413,0.0 416 | 414,0.0 417 | 415,0.0 418 | 416,0.0 419 | 417,0.0 420 | 418,0.0 421 | 419,0.0052264812 422 | 420,0.014808363 423 | 421,0.020905925 424 | 422,0.019163763 425 | 423,0.033101045 426 | 424,0.04442509 427 | 425,0.03745645 428 | 426,0.04878049 429 | 427,0.039198607 430 | 428,0.029616727 431 | 429,0.027003484 432 | 430,0.025261326 433 | 431,0.016550522 434 | 432,0.0104529625 435 | 433,0.004355401 436 | 434,0.0026132406 437 | 435,0.00087108015 438 | 436,0.0 439 | 437,0.0 440 | 438,0.0 441 | 439,0.0 442 | 440,0.0 443 | 441,0.0 444 | 442,0.0 445 | 443,0.0 446 | 444,0.00087108015 447 | 445,0.0104529625 448 | 446,0.019163763 449 | 447,0.08188153 450 | 448,0.14372823 451 | 449,0.39285713 452 | 450,0.57665503 453 | 451,0.7970383 454 | 452,0.8249129 455 | 453,0.8249129 456 | 454,0.8127178 457 | 455,0.880662 458 | 456,0.8667247 459 | 457,0.87108016 460 | 458,0.8466899 461 | 459,0.7029617 462 | 460,0.51829267 463 | 461,0.40331012 464 | 462,0.1445993 465 | 463,0.034843206 466 | 464,0.0 467 | 465,0.0 468 | 466,0.0 469 | 467,0.0 470 | 468,0.0 471 | 469,0.0 472 | 470,0.0026132406 473 | 471,0.0792683 474 | 472,0.24216028 475 | 473,0.39721254 476 | 474,0.56010455 477 | 475,0.7238676 
478 | 476,0.7552265 479 | 477,0.7970383 480 | 478,0.8083624 481 | 479,0.8162021 482 | 480,0.78571427 483 | 481,0.77874565 484 | 482,0.7796167 485 | 483,0.7325784 486 | 484,0.67247385 487 | 485,0.5731708 488 | 486,0.45383275 489 | 487,0.2952962 490 | 488,0.123693384 491 | 489,0.022648083 492 | 490,0.0 493 | 491,0.0 494 | 492,0.0 495 | 493,0.0 496 | 494,0.0 497 | 495,0.0 498 | 496,0.0 499 | 497,0.0017421603 500 | 498,0.0034843206 501 | 499,0.012195122 502 | 500,0.020905925 503 | 501,0.030487806 504 | 502,0.043554008 505 | 503,0.054878052 506 | 504,0.05574913 507 | 505,0.05052265 508 | 506,0.05836237 509 | 507,0.06271777 510 | 508,0.064459935 511 | 509,0.05749129 512 | 510,0.042682927 513 | 511,0.033101045 514 | 512,0.023519164 515 | 513,0.015679443 516 | 514,0.004355401 517 | 515,0.00087108015 518 | 516,0.0 519 | 517,0.0 520 | 518,0.0 521 | 519,0.0 522 | 520,0.0 523 | 521,0.0 524 | 522,0.0017421603 525 | 523,0.06620209 526 | 524,0.1803136 527 | 525,0.37195122 528 | 526,0.5574913 529 | 527,0.6864112 530 | 528,0.630662 531 | 529,0.7674216 532 | 530,0.8196864 533 | 531,0.72299653 534 | 532,0.73606277 535 | 533,0.760453 536 | 534,0.7569687 537 | 535,0.6689896 538 | 536,0.6132404 539 | 537,0.5278746 540 | 538,0.42247388 541 | 539,0.28745645 542 | 540,0.118466906 543 | 541,0.020034844 544 | 542,0.0 545 | 543,0.0 546 | 544,0.0 547 | 545,0.0 548 | 546,0.0 549 | 547,0.0 550 | 548,0.00087108015 551 | 549,0.06794425 552 | 550,0.24477352 553 | 551,0.46341464 554 | 552,0.6428572 555 | 553,0.80139375 556 | 554,0.7996516 557 | 555,0.79616725 558 | 556,0.7778746 559 | 557,0.7221255 560 | 558,0.77700347 561 | 559,0.7552265 562 | 560,0.7273519 563 | 561,0.69599307 564 | 562,0.64372826 565 | 563,0.5609756 566 | 564,0.45296168 567 | 565,0.3170732 568 | 566,0.12108014 569 | 567,0.016550522 570 | 568,0.0 571 | 569,0.0 572 | 570,0.0 573 | 571,0.0 574 | 572,0.0 575 | 573,0.0 576 | 574,0.00087108015 577 | 575,0.075783975 578 | 576,0.24128921 579 | 577,0.44860628 580 | 578,0.56533104 581 | 579,0.55836236 582 | 580,0.5932056 583 | 581,0.46341464 584 | 582,0.41550523 585 | 583,0.38327527 586 | 584,0.42682928 587 | 585,0.46080142 588 | 586,0.60365856 589 | 587,0.49912894 590 | 588,0.3196864 591 | 589,0.20818816 592 | 590,0.14198606 593 | 591,0.10714286 594 | 592,0.06271777 595 | 593,0.008710802 596 | 594,0.0 597 | 595,0.0 598 | 596,0.0 599 | 597,0.0 600 | 598,0.0 601 | 599,0.0 602 | 600,0.0 603 | 601,0.00087108015 604 | 602,0.0017421603 605 | 603,0.0026132406 606 | 604,0.014808363 607 | 605,0.027874565 608 | 606,0.024390245 609 | 607,0.05052265 610 | 608,0.06271777 611 | 609,0.08188153 612 | 610,0.13763067 613 | 611,0.23954704 614 | 612,0.37717772 615 | 613,0.43205574 616 | 614,0.40940768 617 | 615,0.47996515 618 | 616,0.43292683 619 | 617,0.33536586 620 | 618,0.14634146 621 | 619,0.020905925 622 | 620,0.0 623 | 621,0.0 624 | 622,0.0 625 | 623,0.0 626 | 624,0.0 627 | 625,0.0 628 | 626,0.0 629 | 627,0.020905925 630 | 628,0.05226481 631 | 629,0.094076656 632 | 630,0.12543555 633 | 631,0.1010453 634 | 632,0.16898955 635 | 633,0.18292683 636 | 634,0.26567945 637 | 635,0.2682927 638 | 636,0.24564461 639 | 637,0.2543554 640 | 638,0.35017422 641 | 639,0.5243903 642 | 640,0.53048784 643 | 641,0.56010455 644 | 642,0.44163764 645 | 643,0.23344947 646 | 644,0.103658535 647 | 645,0.006968641 648 | 646,0.0 649 | 647,0.0 650 | 648,0.0 651 | 649,0.0 652 | 650,0.0 653 | 651,0.0 654 | 652,0.0 655 | 653,0.05662021 656 | 654,0.19425087 657 | 655,0.37717772 658 | 656,0.53048784 659 | 657,0.6393728 660 | 658,0.7099303 661 | 659,0.69599307 
662 | 660,0.695122 663 | 661,0.6602788 664 | 662,0.5783972 665 | 663,0.52874565 666 | 664,0.6132404 667 | 665,0.48693383 668 | 666,0.31881532 669 | 667,0.20993032 670 | 668,0.15505226 671 | 669,0.07404181 672 | 670,0.019163763 673 | 671,0.0026132406 674 | 672,0.0 675 | 673,0.0 676 | 674,0.0 677 | 675,0.0 678 | 676,0.0 679 | 677,0.0 680 | 678,0.0 681 | 679,0.0 682 | 680,0.00087108015 683 | 681,0.013937282 684 | 682,0.029616727 685 | 683,0.12108014 686 | 684,0.20383276 687 | 685,0.23083624 688 | 686,0.19163764 689 | 687,0.3998258 690 | 688,0.3022648 691 | 689,0.33101046 692 | 690,0.41202092 693 | 691,0.5418119 694 | 692,0.45731708 695 | 693,0.43031362 696 | 694,0.42334494 697 | 695,0.27090594 698 | 696,0.112369336 699 | 697,0.009581882 700 | 698,0.0 701 | 699,0.0 702 | 700,0.0 703 | 701,0.0 704 | 702,0.0 705 | 703,0.0 706 | 704,0.0 707 | 705,0.033101045 708 | 706,0.14372823 709 | 707,0.35801393 710 | 708,0.5174216 711 | 709,0.630662 712 | 710,0.7151568 713 | 711,0.75087106 714 | 712,0.7656795 715 | 713,0.74041814 716 | 714,0.71428573 717 | 715,0.65505224 718 | 716,0.6341464 719 | 717,0.70905924 720 | 718,0.5905924 721 | 719,0.5331011 722 | 720,0.42247388 723 | 721,0.2761324 724 | 722,0.123693384 725 | 723,0.007839722 726 | 724,0.0 727 | 725,0.0 728 | 726,0.0 729 | 727,0.0 730 | 728,0.0 731 | 729,0.0 732 | 730,0.0 733 | 731,0.043554008 734 | 732,0.1890244 735 | 733,0.42944252 736 | 734,0.6219512 737 | 735,0.75435543 738 | 736,0.84843206 739 | 737,0.91114986 740 | 738,0.945122 741 | 739,0.95905924 742 | 740,0.96515685 743 | 741,0.9181185 744 | 742,0.86933804 745 | 743,0.8353659 746 | 744,0.81358886 747 | 745,0.66550523 748 | 746,0.5505227 749 | 747,0.3127178 750 | 748,0.11062718 751 | 749,0.0052264812 752 | 750,0.0 753 | 751,0.0 754 | 752,0.0 755 | 753,0.0 756 | 754,0.0 757 | 755,0.0 758 | 756,0.0 759 | 757,0.029616727 760 | 758,0.15679443 761 | 759,0.29268292 762 | 760,0.5139373 763 | 761,0.61236936 764 | 762,0.71341467 765 | 763,0.79616725 766 | 764,0.8405924 767 | 765,0.8597561 768 | 766,0.8736934 769 | 767,0.85714287 770 | 768,0.8283972 771 | 769,0.7831011 772 | 770,0.6994774 773 | 771,0.5923345 774 | 772,0.43815333 775 | 773,0.22648084 776 | 774,0.07404181 777 | 775,0.004355401 778 | 776,0.0 779 | 777,0.0 780 | 778,0.0 781 | 779,0.0 782 | 780,0.0 783 | 781,0.0 784 | 782,0.0 785 | 783,0.0052264812 786 | 784,0.054006968 787 | 785,0.12282231 788 | 786,0.21341464 789 | 787,0.33623692 790 | 788,0.42944252 791 | 789,0.52003485 792 | 790,0.56184673 793 | 791,0.61411154 794 | 792,0.619338 795 | 793,0.57752615 796 | 794,0.5383275 797 | 795,0.47822303 798 | 796,0.38066202 799 | 797,0.3510453 800 | 798,0.25 801 | 799,0.105400704 802 | 800,0.040069688 803 | 801,0.0026132406 804 | 802,0.0 805 | 803,0.0 806 | 804,0.0 807 | 805,0.0 808 | 806,0.0 809 | 807,0.0 810 | 808,0.0 811 | 809,0.0026132406 812 | 810,0.016550522 813 | 811,0.04878049 814 | 812,0.0879791 815 | 813,0.12891987 816 | 814,0.1541812 817 | 815,0.25 818 | 816,0.3083624 819 | 817,0.29268292 820 | 818,0.24477352 821 | 819,0.2944251 822 | 820,0.206446 823 | 821,0.3344948 824 | 822,0.48170733 825 | 823,0.41637632 826 | 824,0.23257841 827 | 825,0.123693384 828 | 826,0.05139373 829 | 827,0.0034843206 830 | 828,0.0 831 | 829,0.0 832 | 830,0.0 833 | 831,0.0 834 | 832,0.0 835 | 833,0.0 836 | 834,0.0 837 | 835,0.014808363 838 | 836,0.11498258 839 | 837,0.2857143 840 | 838,0.4790941 841 | 839,0.6428572 842 | 840,0.77003485 843 | 841,0.8510453 844 | 842,0.9006969 845 | 843,0.91289204 846 | 844,0.9277004 847 | 845,0.9224739 848 | 846,0.8928572 849 | 
847,0.83275265 850 | 848,0.7560976 851 | 849,0.61759585 852 | 850,0.44686413 853 | 851,0.22473867 854 | 852,0.07404181 855 | 853,0.0026132406 856 | 854,0.0 857 | 855,0.0 858 | 856,0.0 859 | 857,0.0 860 | 858,0.0 861 | 859,0.0 862 | 860,0.0 863 | 861,0.0 864 | 862,0.0017421603 865 | 863,0.008710802 866 | 864,0.021777004 867 | 865,0.021777004 868 | 866,0.06097561 869 | 867,0.105400704 870 | 868,0.109756105 871 | 869,0.11324042 872 | 870,0.105400704 873 | 871,0.108013935 874 | 872,0.19163764 875 | 873,0.1933798 876 | 874,0.21254356 877 | 875,0.17682926 878 | 876,0.12543555 879 | 877,0.07752613 880 | 878,0.04790941 881 | 879,0.0017421603 882 | 880,0.0 883 | 881,0.0 884 | 882,0.0 885 | 883,0.0 886 | 884,0.0 887 | 885,0.0 888 | 886,0.0 889 | 887,0.013937282 890 | 888,0.12456446 891 | 889,0.32752612 892 | 890,0.55836236 893 | 891,0.6803136 894 | 892,0.7726481 895 | 893,0.8118467 896 | 894,0.8127178 897 | 895,0.8719512 898 | 896,0.804007 899 | 897,0.85017425 900 | 898,0.8092335 901 | 899,0.7796167 902 | 900,0.71777004 903 | 901,0.6271777 904 | 902,0.5 905 | 903,0.2682927 906 | 904,0.082752615 907 | 905,0.0026132406 908 | 906,0.0 909 | 907,0.0 910 | 908,0.0 911 | 909,0.0 912 | 910,0.0 913 | 911,0.0 914 | 912,0.0 915 | 913,0.016550522 916 | 914,0.1402439 917 | 915,0.3336237 918 | 916,0.56446 919 | 917,0.71602786 920 | 918,0.8057492 921 | 919,0.8597561 922 | 920,0.8989547 923 | 921,0.9137631 924 | 922,0.92421603 925 | 923,0.9067944 926 | 924,0.85365856 927 | 925,0.8179443 928 | 926,0.72648084 929 | 927,0.641115 930 | 928,0.5095819 931 | 929,0.26567945 932 | 930,0.099303134 933 | 931,0.0026132406 934 | 932,0.0 935 | 933,0.0 936 | 934,0.0 937 | 935,0.0 938 | 936,0.0 939 | 937,0.0 940 | 938,0.0 941 | 939,0.014808363 942 | 940,0.13414635 943 | 941,0.3292683 944 | 942,0.47212544 945 | 943,0.5714286 946 | 944,0.75435543 947 | 945,0.86062723 948 | 946,0.8527875 949 | 947,0.8057492 950 | 948,0.6576655 951 | 949,0.6942509 952 | 950,0.7865854 953 | 951,0.7552265 954 | 952,0.71341467 955 | 953,0.6019164 956 | 954,0.4703833 957 | 955,0.24564461 958 | 956,0.08449478 959 | 957,0.00087108015 960 | 958,0.0 961 | 959,0.0 962 | 960,0.0 963 | 961,0.0 964 | 962,0.0 965 | 963,0.0 966 | 964,0.0 967 | 965,0.0113240415 968 | 966,0.12195122 969 | 967,0.29181185 970 | 968,0.5087108 971 | 969,0.6759582 972 | 970,0.78048784 973 | 971,0.85017425 974 | 972,0.8928572 975 | 973,0.88850176 976 | 974,0.90243906 977 | 975,0.8980836 978 | 976,0.8815331 979 | 977,0.8283972 980 | 978,0.728223 981 | 979,0.6010453 982 | 980,0.43292683 983 | 981,0.19599304 984 | 982,0.059233453 985 | 983,0.00087108015 986 | 984,0.0 987 | 985,0.0 988 | 986,0.0 989 | 987,0.0 990 | 988,0.0 991 | 989,0.0 992 | 990,0.0 993 | 991,0.0 994 | 992,0.00087108015 995 | 993,0.0034843206 996 | 994,0.0034843206 997 | 995,0.0034843206 998 | 996,0.008710802 999 | 997,0.019163763 1000 | 998,0.054006968 1001 | 999,0.04878049 1002 | 1000,0.08623693 1003 | 1001,0.085365854 1004 | 1002,0.099303134 1005 | 1003,0.09756098 1006 | 1004,0.0749129 1007 | 1005,0.06620209 1008 | 1006,0.06097561 1009 | 1007,0.045296166 1010 | 1008,0.006968641 1011 | 1009,0.0 1012 | 1010,0.0 1013 | 1011,0.0 1014 | 1012,0.0 1015 | 1013,0.0 1016 | 1014,0.0 1017 | 1015,0.0 1018 | 1016,0.0 1019 | 1017,0.004355401 1020 | 1018,0.108013935 1021 | 1019,0.24303137 1022 | 1020,0.43466902 1023 | 1021,0.54790944 1024 | 1022,0.6254356 1025 | 1023,0.75261325 1026 | 1024,0.7883276 1027 | 1025,0.8162021 1028 | 1026,0.8379791 1029 | 1027,0.84320563 1030 | 1028,0.804007 1031 | 1029,0.78397214 1032 | 1030,0.7290941 1033 | 
1031,0.59843206 1034 | 1032,0.41463414 1035 | 1033,0.1890244 1036 | 1034,0.05662021 1037 | 1035,0.0052264812 1038 | 1036,0.0 1039 | 1037,0.0 1040 | 1038,0.0 1041 | 1039,0.0 1042 | 1040,0.0 1043 | 1041,0.0 1044 | 1042,0.0 1045 | 1043,0.00087108015 1046 | 1044,0.028745646 1047 | 1045,0.13763067 1048 | 1046,0.325784 1049 | 1047,0.48954704 1050 | 1048,0.55662024 1051 | 1049,0.7290941 1052 | 1050,0.8214286 1053 | 1051,0.84320563 1054 | 1052,0.7221255 1055 | 1053,0.78571427 1056 | 1054,0.6019164 1057 | 1055,0.61411154 1058 | 1056,0.5331011 1059 | 1057,0.41637632 1060 | 1058,0.22648084 1061 | 1059,0.0966899 1062 | 1060,0.029616727 1063 | 1061,0.00087108015 1064 | 1062,0.0 1065 | 1063,0.0 1066 | 1064,0.0 1067 | 1065,0.0 1068 | 1066,0.0 1069 | 1067,0.0 1070 | 1068,0.0 1071 | 1069,0.004355401 1072 | 1070,0.09059233 1073 | 1071,0.21341464 1074 | 1072,0.44163764 1075 | 1073,0.60278744 1076 | 1074,0.695122 1077 | 1075,0.77177703 1078 | 1076,0.8205575 1079 | 1077,0.825784 1080 | 1078,0.85191643 1081 | 1079,0.85017425 1082 | 1080,0.8249129 1083 | 1081,0.77874565 1084 | 1082,0.63850176 1085 | 1083,0.412892 1086 | 1084,0.21080141 1087 | 1085,0.08188153 1088 | 1086,0.015679443 1089 | 1087,0.0 1090 | 1088,0.0 1091 | 1089,0.0 1092 | 1090,0.0 1093 | 1091,0.0 1094 | 1092,0.0 1095 | 1093,0.0 1096 | 1094,0.0 1097 | 1095,0.0 1098 | 1096,0.00087108015 1099 | 1097,0.008710802 1100 | 1098,0.020905925 1101 | 1099,0.025261326 1102 | 1100,0.036585364 1103 | 1101,0.07404181 1104 | 1102,0.082752615 1105 | 1103,0.0705575 1106 | 1104,0.059233453 1107 | 1105,0.071428575 1108 | 1106,0.05226481 1109 | 1107,0.04181185 1110 | 1108,0.06271777 1111 | 1109,0.03745645 1112 | 1110,0.019163763 1113 | 1111,0.016550522 1114 | 1112,0.0052264812 1115 | 1113,0.0 1116 | 1114,0.0 1117 | 1115,0.0 1118 | 1116,0.0 1119 | 1117,0.0 1120 | 1118,0.0 1121 | 1119,0.0 1122 | 1120,0.0 1123 | 1121,0.0 1124 | 1122,0.054006968 1125 | 1123,0.17770036 1126 | 1124,0.35888502 1127 | 1125,0.44337982 1128 | 1126,0.54790944 1129 | 1127,0.7029617 1130 | 1128,0.7621951 1131 | 1129,0.7430314 1132 | 1130,0.6332753 1133 | 1131,0.5879791 1134 | 1132,0.5174216 1135 | 1133,0.35714287 1136 | 1134,0.2682927 1137 | 1135,0.31010452 1138 | 1136,0.3336237 1139 | 1137,0.13066202 1140 | 1138,0.040069688 1141 | 1139,0.0 1142 | 1140,0.0 1143 | 1141,0.0 1144 | 1142,0.0 1145 | 1143,0.0 1146 | 1144,0.0 1147 | 1145,0.0 1148 | 1146,0.0 1149 | 1147,0.00087108015 1150 | 1148,0.07317073 1151 | 1149,0.206446 1152 | 1150,0.42160282 1153 | 1151,0.60278744 1154 | 1152,0.716899 1155 | 1153,0.8109756 1156 | 1154,0.86062723 1157 | 1155,0.8824042 1158 | 1156,0.9050523 1159 | 1157,0.8998258 1160 | 1158,0.8736934 1161 | 1159,0.825784 1162 | 1160,0.73519164 1163 | 1161,0.6054007 1164 | 1162,0.40592334 1165 | 1163,0.20470384 1166 | 1164,0.068815336 1167 | 1165,0.00087108015 1168 | 1166,0.0 1169 | 1167,0.0 1170 | 1168,0.0 1171 | 1169,0.0 1172 | 1170,0.0 1173 | 1171,0.0 1174 | 1172,0.0 1175 | 1173,0.0 1176 | 1174,0.08623693 1177 | 1175,0.22386761 1178 | 1176,0.44773522 1179 | 1177,0.587108 1180 | 1178,0.64372826 1181 | 1179,0.68728226 1182 | 1180,0.74041814 1183 | 1181,0.74041814 1184 | 1182,0.60365856 1185 | 1183,0.50087106 1186 | 1184,0.43466902 1187 | 1185,0.45731708 1188 | 1186,0.45121953 1189 | 1187,0.2865854 1190 | 1188,0.15069686 1191 | 1189,0.08101045 1192 | 1190,0.020905925 1193 | 1191,0.0 1194 | 1192,0.0 1195 | 1193,0.0 1196 | 1194,0.0 1197 | 1195,0.0 1198 | 1196,0.0 1199 | 1197,0.0 1200 | 1198,0.0 1201 | 1199,0.0 1202 | 1200,0.0026132406 1203 | 1201,0.034843206 1204 | 1202,0.118466906 1205 
| 1203,0.1271777 1206 | 1204,0.12020906 1207 | 1205,0.15243903 1208 | 1206,0.12282231 1209 | 1207,0.09146342 1210 | 1208,0.071428575 1211 | 1209,0.06533101 1212 | 1210,0.07404181 1213 | 1211,0.071428575 1214 | 1212,0.035714287 1215 | 1213,0.05662021 1216 | 1214,0.078397214 1217 | 1215,0.033101045 1218 | 1216,0.009581882 1219 | 1217,0.0 1220 | 1218,0.0 1221 | 1219,0.0 1222 | 1220,0.0 1223 | 1221,0.0 1224 | 1222,0.0 1225 | 1223,0.0 1226 | 1224,0.0 1227 | 1225,0.0 1228 | 1226,0.0 1229 | 1227,0.0 1230 | 1228,0.0 1231 | 1229,0.0052264812 1232 | 1230,0.008710802 1233 | 1231,0.008710802 1234 | 1232,0.015679443 1235 | 1233,0.024390245 1236 | 1234,0.071428575 1237 | 1235,0.30052266 1238 | 1236,0.58536583 1239 | 1237,0.51480836 1240 | 1238,0.31445992 1241 | 1239,0.35452962 1242 | 1240,0.31358886 1243 | 1241,0.18815331 1244 | 1242,0.05226481 1245 | 1243,0.0 1246 | 1244,0.0 1247 | 1245,0.0 1248 | 1246,0.0 1249 | 1247,0.0 1250 | 1248,0.0 1251 | 1249,0.0 1252 | 1250,0.0 1253 | 1251,0.0 1254 | 1252,0.067073174 1255 | 1253,0.18292683 1256 | 1254,0.38937283 1257 | 1255,0.56533104 1258 | 1256,0.6054007 1259 | 1257,0.66637635 1260 | 1258,0.69599307 1261 | 1259,0.6228223 1262 | 1260,0.7325784 1263 | 1261,0.69599307 1264 | 1262,0.6271777 1265 | 1263,0.630662 1266 | 1264,0.5418119 1267 | 1265,0.43815333 1268 | 1266,0.2891986 1269 | 1267,0.16550523 1270 | 1268,0.054006968 1271 | 1269,0.00087108015 1272 | 1270,0.0 1273 | 1271,0.0 1274 | 1272,0.0 1275 | 1273,0.0 1276 | 1274,0.0 1277 | 1275,0.0 1278 | 1276,0.0 1279 | 1277,0.0 1280 | 1278,0.06533101 1281 | 1279,0.19076656 1282 | 1280,0.39024392 1283 | 1281,0.5879791 1284 | 1282,0.716899 1285 | 1283,0.8057492 1286 | 1284,0.8545296 1287 | 1285,0.8789199 1288 | 1286,0.880662 1289 | 1287,0.8771777 1290 | 1288,0.86236936 1291 | 1289,0.8214286 1292 | 1290,0.728223 1293 | 1291,0.59494776 1294 | 1292,0.39285713 1295 | 1293,0.19860627 1296 | 1294,0.067073174 1297 | 1295,0.00087108015 1298 | 1296,0.0 1299 | 1297,0.0 1300 | 1298,0.0 1301 | 1299,0.0 1302 | 1300,0.0 1303 | 1301,0.0 1304 | 1302,0.0 1305 | 1303,0.0 1306 | 1304,0.006097561 1307 | 1305,0.036585364 1308 | 1306,0.099303134 1309 | 1307,0.071428575 1310 | 1308,0.04181185 1311 | 1309,0.034843206 1312 | 1310,0.017421603 1313 | 1311,0.012195122 1314 | 1312,0.0052264812 1315 | 1313,0.0034843206 1316 | 1314,0.0 1317 | 1315,0.0 1318 | 1316,0.0 1319 | 1317,0.0 1320 | 1318,0.0 1321 | 1319,0.0 1322 | 1320,0.0 1323 | 1321,0.0 1324 | 1322,0.0 1325 | 1323,0.0 1326 | 1324,0.0 1327 | 1325,0.0 1328 | 1326,0.0 1329 | 1327,0.0 1330 | 1328,0.0 1331 | 1329,0.0 1332 | 1330,0.0 1333 | 1331,0.006968641 1334 | 1332,0.028745646 1335 | 1333,0.082752615 1336 | 1334,0.13675958 1337 | 1335,0.17421603 1338 | 1336,0.20296167 1339 | 1337,0.19076656 1340 | 1338,0.24564461 1341 | 1339,0.24390244 1342 | 1340,0.24128921 1343 | 1341,0.304007 1344 | 1342,0.369338 1345 | 1343,0.3170732 1346 | 1344,0.22822301 1347 | 1345,0.112369336 1348 | 1346,0.028745646 1349 | 1347,0.0 1350 | 1348,0.0 1351 | 1349,0.0 1352 | 1350,0.0 1353 | 1351,0.0 1354 | 1352,0.0 1355 | 1353,0.0 1356 | 1354,0.0 1357 | 1355,0.0 1358 | 1356,0.032229967 1359 | 1357,0.1010453 1360 | 1358,0.22125436 1361 | 1359,0.33536586 1362 | 1360,0.40069687 1363 | 1361,0.45296168 1364 | 1362,0.4965157 1365 | 1363,0.5174216 1366 | 1364,0.53745645 1367 | 1365,0.54965156 1368 | 1366,0.5574913 1369 | 1367,0.5095819 1370 | 1368,0.49128923 1371 | 1369,0.42334494 1372 | 1370,0.293554 1373 | 1371,0.12804878 1374 | 1372,0.038327526 1375 | 1373,0.0 1376 | 1374,0.0 1377 | 1375,0.0 1378 | 1376,0.0 1379 | 1377,0.0 
1380 | 1378,0.0 1381 | 1379,0.0 1382 | 1380,0.0 1383 | 1381,0.0 1384 | 1382,0.027874565 1385 | 1383,0.109756105 1386 | 1384,0.2552265 1387 | 1385,0.40243903 1388 | 1386,0.48954704 1389 | 1387,0.5627178 1390 | 1388,0.61759585 1391 | 1389,0.64634144 1392 | 1390,0.6681185 1393 | 1391,0.6759582 1394 | 1392,0.6707317 1395 | 1393,0.6498258 1396 | 1394,0.6019164 1397 | 1395,0.5174216 1398 | 1396,0.3423345 1399 | 1397,0.1672474 1400 | 1398,0.06097561 1401 | 1399,0.0 1402 | 1400,0.0 1403 | 1401,0.0 1404 | 1402,0.0 1405 | 1403,0.0 1406 | 1404,0.0 1407 | 1405,0.0 1408 | 1406,0.0 1409 | 1407,0.0 1410 | 1408,0.0 1411 | 1409,0.0 1412 | 1410,0.0 1413 | 1411,0.00087108015 1414 | 1412,0.0017421603 1415 | 1413,0.0017421603 1416 | 1414,0.00087108015 1417 | 1415,0.00087108015 1418 | 1416,0.00087108015 1419 | 1417,0.00087108015 1420 | 1418,0.00087108015 1421 | 1419,0.00087108015 1422 | 1420,0.0034843206 1423 | 1421,0.0017421603 1424 | 1422,0.0017421603 1425 | 1423,0.0017421603 1426 | 1424,0.0 1427 | 1425,0.0 1428 | 1426,0.0 1429 | 1427,0.0 1430 | 1428,0.0 1431 | 1429,0.0 1432 | 1430,0.0 1433 | 1431,0.0 1434 | 1432,0.0 1435 | 1433,0.0 1436 | 1434,0.0 1437 | 1435,0.0 1438 | 1436,0.0 1439 | 1437,0.00087108015 1440 | 1438,0.00087108015 1441 | 1439,0.0017421603 1442 | 1440,0.0034843206 1443 | 1441,0.004355401 1444 | 1442,0.009581882 1445 | 1443,0.016550522 1446 | 1444,0.025261326 1447 | 1445,0.04181185 1448 | 1446,0.033972126 1449 | 1447,0.022648083 1450 | 1448,0.013066202 1451 | 1449,0.006968641 1452 | 1450,0.0026132406 1453 | 1451,0.0 1454 | 1452,0.0 1455 | 1453,0.0 1456 | 1454,0.0 1457 | 1455,0.0 1458 | 1456,0.0 1459 | 1457,0.0 1460 | 1458,0.0 1461 | 1459,0.0 1462 | 1460,0.008710802 1463 | 1461,0.028745646 1464 | 1462,0.07317073 1465 | 1463,0.1315331 1466 | 1464,0.18554007 1467 | 1465,0.24564461 1468 | 1466,0.32752612 1469 | 1467,0.42247388 1470 | 1468,0.47386762 1471 | 1469,0.5296167 1472 | 1470,0.56707317 1473 | 1471,0.56446 1474 | 1472,0.53222996 1475 | 1473,0.4660279 1476 | 1474,0.3118467 1477 | 1475,0.15940766 1478 | 1476,0.054878052 1479 | 1477,0.0 1480 | 1478,0.0 1481 | 1479,0.0 1482 | 1480,0.0 1483 | 1481,0.0 1484 | 1482,0.0 1485 | 1483,0.0 1486 | 1484,0.0 1487 | 1485,0.0 1488 | 1486,0.025261326 1489 | 1487,0.12020906 1490 | 1488,0.24216028 1491 | 1489,0.41811848 1492 | 1490,0.52874565 1493 | 1491,0.6097561 1494 | 1492,0.66114986 1495 | 1493,0.6890244 1496 | 1494,0.7081882 1497 | 1495,0.7195122 1498 | 1496,0.70470387 1499 | 1497,0.6768293 1500 | 1498,0.6149826 1501 | 1499,0.5270035 1502 | 1500,0.3423345 1503 | 1501,0.16376306 1504 | 1502,0.05662021 1505 | 1503,0.0 1506 | 1504,0.0 1507 | 1505,0.0 1508 | 1506,0.0 1509 | 1507,0.0 1510 | 1508,0.0 1511 | 1509,0.0 1512 | 1510,0.0 1513 | 1511,0.0 1514 | 1512,0.029616727 1515 | 1513,0.13850175 1516 | 1514,0.27787456 1517 | 1515,0.47299653 1518 | 1516,0.58972126 1519 | 1517,0.67334497 1520 | 1518,0.728223 1521 | 1519,0.72299653 1522 | 1520,0.7621951 1523 | 1521,0.738676 1524 | 1522,0.6933798 1525 | 1523,0.6567944 1526 | 1524,0.5958188 1527 | 1525,0.51132405 1528 | 1526,0.3466899 1529 | 1527,0.17073171 1530 | 1528,0.05574913 1531 | 1529,0.0 1532 | 1530,0.0 1533 | 1531,0.0 1534 | 1532,0.0 1535 | 1533,0.0 1536 | 1534,0.0 1537 | 1535,0.0 1538 | 1536,0.0 1539 | 1537,0.0 1540 | 1538,0.020905925 1541 | 1539,0.12108014 1542 | 1540,0.23432057 1543 | 1541,0.46428573 1544 | 1542,0.58536583 1545 | 1543,0.6820558 1546 | 1544,0.75087106 1547 | 1545,0.80139375 1548 | 1546,0.82752615 1549 | 1547,0.8292683 1550 | 1548,0.8170732 1551 | 1549,0.77874565 1552 | 1550,0.68728226 1553 | 
1551,0.554007 1554 | 1552,0.33275262 1555 | 1553,0.15069686 1556 | 1554,0.043554008 1557 | 1555,0.00087108015 1558 | 1556,0.0 1559 | 1557,0.0 1560 | 1558,0.0 1561 | 1559,0.0 1562 | 1560,0.0 1563 | 1561,0.0 1564 | 1562,0.0 1565 | 1563,0.0 1566 | 1564,0.023519164 1567 | 1565,0.1358885 1568 | 1566,0.27090594 1569 | 1567,0.48519164 1570 | 1568,0.6271777 1571 | 1569,0.7412892 1572 | 1570,0.81533104 1573 | 1571,0.8527875 1574 | 1572,0.8667247 1575 | 1573,0.85714287 1576 | 1574,0.8318815 1577 | 1575,0.77526134 1578 | 1576,0.6707317 1579 | 1577,0.54616725 1580 | 1578,0.3519164 1581 | 1579,0.17944251 1582 | 1580,0.06271777 1583 | 1581,0.0017421603 1584 | 1582,0.0 1585 | 1583,0.0 1586 | 1584,0.0 1587 | 1585,0.0 1588 | 1586,0.0 1589 | 1587,0.0 1590 | 1588,0.0 1591 | 1589,0.0 1592 | 1590,0.025261326 1593 | 1591,0.13763067 1594 | 1592,0.2804878 1595 | 1593,0.41027874 1596 | 1594,0.61149824 1597 | 1595,0.7325784 1598 | 1596,0.7796167 1599 | 1597,0.82404184 1600 | 1598,0.8754356 1601 | 1599,0.86933804 1602 | 1600,0.8118467 1603 | 1601,0.75261325 1604 | 1602,0.59146345 1605 | 1603,0.42595822 1606 | 1604,0.24651568 1607 | 1605,0.14198606 1608 | 1606,0.05836237 1609 | 1607,0.0017421603 1610 | 1608,0.0 1611 | 1609,0.0 1612 | 1610,0.0 1613 | 1611,0.0 1614 | 1612,0.0 1615 | 1613,0.0 1616 | 1614,0.0 1617 | 1615,0.0 1618 | 1616,0.0 1619 | 1617,0.0104529625 1620 | 1618,0.034843206 1621 | 1619,0.16376306 1622 | 1620,0.38501742 1623 | 1621,0.5731708 1624 | 1622,0.6324042 1625 | 1623,0.6707317 1626 | 1624,0.6681185 1627 | 1625,0.65243906 1628 | 1626,0.61236936 1629 | 1627,0.5679442 1630 | 1628,0.43466902 1631 | 1629,0.31445992 1632 | 1630,0.21341464 1633 | 1631,0.05574913 1634 | 1632,0.0104529625 1635 | 1633,0.0 1636 | 1634,0.0 1637 | 1635,0.0 1638 | 1636,0.0 1639 | 1637,0.0 1640 | 1638,0.0 1641 | 1639,0.0 1642 | 1640,0.0 1643 | 1641,0.0 1644 | 1642,0.0 1645 | 1643,0.007839722 1646 | 1644,0.027003484 1647 | 1645,0.042682927 1648 | 1646,0.049651567 1649 | 1647,0.06794425 1650 | 1648,0.07229965 1651 | 1649,0.06968641 1652 | 1650,0.07317073 1653 | 1651,0.0705575 1654 | 1652,0.04790941 1655 | 1653,0.028745646 1656 | 1654,0.0113240415 1657 | 1655,0.0 1658 | 1656,0.0 1659 | 1657,0.0 1660 | 1658,0.0 1661 | 1659,0.0 1662 | 1660,0.0 1663 | 1661,0.0 1664 | 1662,0.0 1665 | 1663,0.0 1666 | 1664,0.0 1667 | 1665,0.0 1668 | 1666,0.0 1669 | 1667,0.0 1670 | 1668,0.0 1671 | 1669,0.0 1672 | 1670,0.0 1673 | 1671,0.0 1674 | 1672,0.0 1675 | 1673,0.00087108015 1676 | 1674,0.007839722 1677 | 1675,0.012195122 1678 | 1676,0.013066202 1679 | 1677,0.013937282 1680 | 1678,0.018292682 1681 | 1679,0.019163763 1682 | 1680,0.016550522 1683 | 1681,0.014808363 1684 | 1682,0.008710802 1685 | 1683,0.004355401 1686 | 1684,0.0017421603 1687 | 1685,0.0 1688 | 1686,0.0 1689 | 1687,0.0 1690 | 1688,0.0 1691 | 1689,0.0 1692 | 1690,0.0 1693 | 1691,0.0 1694 | 1692,0.0 1695 | 1693,0.0 1696 | 1694,0.0 1697 | 1695,0.004355401 1698 | 1696,0.027003484 1699 | 1697,0.04790941 1700 | 1698,0.08623693 1701 | 1699,0.12282231 1702 | 1700,0.1672474 1703 | 1701,0.20034844 1704 | 1702,0.20470384 1705 | 1703,0.20905924 1706 | 1704,0.15679443 1707 | 1705,0.19947736 1708 | 1706,0.1803136 1709 | 1707,0.184669 1710 | 1708,0.14198606 1711 | 1709,0.068815336 1712 | 1710,0.024390245 1713 | 1711,0.0 1714 | 1712,0.0 1715 | 1713,0.0 1716 | 1714,0.0 1717 | 1715,0.0 1718 | 1716,0.0 1719 | 1717,0.0 1720 | 1718,0.0 1721 | 1719,0.0 1722 | 1720,0.0026132406 1723 | 1721,0.028745646 1724 | 1722,0.09494773 1725 | 1723,0.162892 1726 | 1724,0.21080141 1727 | 1725,0.24912892 1728 | 1726,0.2726481 
1729 | 1727,0.2900697 1730 | 1728,0.31010452 1731 | 1729,0.3379791 1732 | 1730,0.347561 1733 | 1731,0.34581882 1734 | 1732,0.35452962 1735 | 1733,0.33710802 1736 | 1734,0.24390244 1737 | 1735,0.13240418 1738 | 1736,0.05662021 1739 | 1737,0.0 1740 | 1738,0.0 1741 | 1739,0.0 1742 | 1740,0.0 1743 | 1741,0.0 1744 | 1742,0.0 1745 | 1743,0.0 1746 | 1744,0.0 1747 | 1745,0.0 1748 | 1746,0.00087108015 1749 | 1747,0.020034844 1750 | 1748,0.05749129 1751 | 1749,0.13066202 1752 | 1750,0.24825785 1753 | 1751,0.33101046 1754 | 1752,0.3109756 1755 | 1753,0.29268292 1756 | 1754,0.2857143 1757 | 1755,0.41898954 1758 | 1756,0.40679443 1759 | 1757,0.41550523 1760 | 1758,0.42334494 1761 | 1759,0.2952962 1762 | 1760,0.15940766 1763 | 1761,0.0879791 1764 | 1762,0.033101045 1765 | 1763,0.00087108015 1766 | 1764,0.0 1767 | 1765,0.0 1768 | 1766,0.0 1769 | 1767,0.0 1770 | 1768,0.0 1771 | 1769,0.0 1772 | 1770,0.0 1773 | 1771,0.0 1774 | 1772,0.0 1775 | 1773,0.0 1776 | 1774,0.0 1777 | 1775,0.0 1778 | 1776,0.00087108015 1779 | 1777,0.007839722 1780 | 1778,0.007839722 1781 | 1779,0.0113240415 1782 | 1780,0.012195122 1783 | 1781,0.0113240415 1784 | 1782,0.007839722 1785 | 1783,0.004355401 1786 | 1784,0.0034843206 1787 | 1785,0.00087108015 1788 | 1786,0.0 1789 | 1787,0.0 1790 | 1788,0.0 1791 | 1789,0.0 1792 | 1790,0.0 1793 | 1791,0.0 1794 | 1792,0.0 1795 | 1793,0.0 1796 | 1794,0.0 1797 | 1795,0.0 1798 | 1796,0.0 1799 | 1797,0.0 1800 | 1798,0.00087108015 1801 | 1799,0.016550522 1802 | 1800,0.04181185 1803 | 1801,0.087108016 1804 | 1802,0.11498258 1805 | 1803,0.16898955 1806 | 1804,0.26567945 1807 | 1805,0.3423345 1808 | 1806,0.36672473 1809 | 1807,0.37195122 1810 | 1808,0.36672473 1811 | 1809,0.36149827 1812 | 1810,0.3423345 1813 | 1811,0.2996516 1814 | 1812,0.20731707 1815 | 1813,0.108013935 1816 | 1814,0.05226481 1817 | 1815,0.0017421603 1818 | 1816,0.0 1819 | 1817,0.0 1820 | 1818,0.0 1821 | 1819,0.0 1822 | 1820,0.0 1823 | 1821,0.0 1824 | 1822,0.0 1825 | 1823,0.0 1826 | 1824,0.0026132406 1827 | 1825,0.008710802 1828 | 1826,0.034843206 1829 | 1827,0.24128921 1830 | 1828,0.3022648 1831 | 1829,0.35017422 1832 | 1830,0.4041812 1833 | 1831,0.3519164 1834 | 1832,0.15940766 1835 | 1833,0.06010453 1836 | 1834,0.013937282 1837 | 1835,0.0052264812 1838 | 1836,0.0034843206 1839 | 1837,0.0034843206 1840 | 1838,0.00087108015 1841 | 1839,0.00087108015 1842 | 1840,0.0 1843 | 1841,0.0 1844 | 1842,0.0 1845 | 1843,0.0 1846 | 1844,0.0 1847 | 1845,0.0 1848 | 1846,0.0 1849 | 1847,0.0 1850 | 1848,0.0 1851 | 1849,0.0 1852 | 1850,0.0 1853 | 1851,0.015679443 1854 | 1852,0.04790941 1855 | 1853,0.09581882 1856 | 1854,0.1358885 1857 | 1855,0.19512196 1858 | 1856,0.2987805 1859 | 1857,0.39895472 1860 | 1858,0.43815333 1861 | 1859,0.46951222 1862 | 1860,0.48170733 1863 | 1861,0.48954704 1864 | 1862,0.5087108 1865 | 1863,0.46167248 1866 | 1864,0.34059232 1867 | 1865,0.17857143 1868 | 1866,0.08101045 1869 | 1867,0.0017421603 1870 | 1868,0.0 1871 | 1869,0.0 1872 | 1870,0.0 1873 | 1871,0.0 1874 | 1872,0.0 1875 | 1873,0.0 1876 | 1874,0.0 1877 | 1875,0.0 1878 | 1876,0.0113240415 1879 | 1877,0.0966899 1880 | 1878,0.18379791 1881 | 1879,0.3292683 1882 | 1880,0.40940768 1883 | 1881,0.47299653 1884 | 1882,0.5156795 1885 | 1883,0.543554 1886 | 1884,0.55836236 1887 | 1885,0.56184673 1888 | 1886,0.5574913 1889 | 1887,0.53571427 1890 | 1888,0.49738675 1891 | 1889,0.45121953 1892 | 1890,0.33536586 1893 | 1891,0.17508711 1894 | 1892,0.07752613 1895 | 1893,0.0034843206 1896 | 1894,0.0 1897 | 1895,0.0 1898 | 1896,0.0 1899 | 1897,0.0 1900 | 
--------------------------------------------------------------------------------
/1,sun_plot/train.py:
--------------------------------------------------------------------------------
''' The hyperparameters here are already known; only the corresponding values
need to be adjusted. The main change in this script is that the outputs are
written in a fixed format, which makes the later plotting and metric
computation easier. '''
import numpy as np
import torch
from torch import nn
import matplotlib.pyplot as plt
import pandas as pd
from torch.autograd import Variable
import math
import csv
from sklearn.preprocessing import MinMaxScaler
import os
os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'


# Define LSTM Neural Networks
class LstmRNN(nn.Module):
    """
    Parameters:
    - input_size: feature size
    - hidden_size: number of hidden units
    - output_size: number of outputs
    - num_layers: layers of LSTM to stack
    """

    def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
        super().__init__()
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # utilize the LSTM model in torch.nn
        self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected layer

    def forward(self, _x):
        x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
        s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
        x = x.view(s * b, h)
        x = self.linear1(x)
        x = x.view(s, b, -1)
        return x


if __name__ == '__main__':

    # check whether a GPU is available
    device = torch.device("cpu")
    if torch.cuda.is_available():
        device = torch.device("cuda:0")
        print('Training on GPU.')
    else:
        print('No GPU available, training on CPU.')

    # hyperparameters
    epoch = int(2000)
    lr = 0.01
    cell_size = int(20)
    num_layer = int(1)

    # load data, convert types, and drop rows containing NaN
    data = np.array(pd.read_excel('testing.xlsx')).astype('float32')
    data = data[~np.isnan(data).any(axis=1), :]

    # min-max normalization
    scaler = MinMaxScaler()
    data = scaler.fit_transform(data)
    data_x = data[:, 0:-1]
    data_y = data[:, -1]

    # train/test split
    data_len = len(data_x)
    t = np.linspace(0, data_len, data_len + 1)

    train_data_ratio = 0.8  # choose 80% of the data for training
    train_data_len = int(data_len * train_data_ratio)

    train_x = data_x[5:train_data_len]
    train_y = data_y[5:train_data_len]
    t_for_training = t[5:train_data_len]

    test_x = data_x[train_data_len:]
    test_y = data_y[train_data_len:]
    t_for_testing = t[train_data_len:]

    # ----------------- train -------------------
    INPUT_FEATURES_NUM = train_x.shape[1]
    OUTPUT_FEATURES_NUM = 1
    train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
    train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1

    # transfer data to pytorch tensors
    train_x_tensor = torch.from_numpy(train_x_tensor)
    train_y_tensor = torch.from_numpy(train_y_tensor)

    lstm_model = LstmRNN(INPUT_FEATURES_NUM, cell_size, output_size=OUTPUT_FEATURES_NUM, num_layers=num_layer)  # cell_size hidden units
    lstm_model.to(device)
    print('LSTM model:', lstm_model)
    print('model.parameters:', lstm_model.parameters)
    print('train x tensor dimension:', Variable(train_x_tensor).size())

    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)
    prev_loss = 1000
    max_epochs = epoch

    train_x_tensor = train_x_tensor.to(device)
    train_y_tensor = train_y_tensor.to(device)

    for epoch in range(max_epochs):
        output = lstm_model(train_x_tensor).to(device)
        loss = criterion(output, train_y_tensor)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if loss < prev_loss:
            torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # checkpoint the best parameters so far
            prev_loss = loss

        if loss.item() < 1e-4:
            print('Epoch [{}/{}], Loss: {:.5f}'.format(epoch + 1, max_epochs, loss.item()))
            print("The loss value is reached")
            break
        else:
            print('Epoch [{}/{}], Loss: {:.5f}'.format(epoch + 1, max_epochs, loss.item()))

    # prediction on the training dataset
    pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
    pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()

    # ----------------- test -------------------
    lstm_model = lstm_model.eval()  # switch to evaluation mode

    # prediction on the test dataset
    test_x_tensor = test_x.reshape(-1, 1, INPUT_FEATURES_NUM)
    test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to tensor
    test_x_tensor = test_x_tensor.to(device)

    pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
    pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()

    # squeeze the prediction so both tensors are 1-D; otherwise MSELoss
    # broadcasts (N, 1) against (N,) and reports a wrong value
    loss = criterion(torch.from_numpy(pred_y_for_test).squeeze(-1), torch.from_numpy(test_y))
    print("test loss:", loss.item())

    # ----------------- plot -------------------
    '''plt.figure()
    plt.plot(t_for_training, train_y, 'b', label='y_trn')
    plt.plot(t_for_training, pred_y_for_train, 'y--', label='pre_trn')'''

    # Note: change the filenames below depending on whether this run is the
    # TSA, the PSO, or the plain LSTM variant.
    df = pd.DataFrame(test_y)
    df.to_csv('regular_test.csv')
    df = pd.DataFrame(pred_y_for_test)
    df.to_csv('regular_pred.csv')
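Because the loop above checkpoints the best weights to lstm_model.pt, a later script can reuse the trained network without retraining. A minimal sketch, not a file in this repo, assuming the names from train.py (LstmRNN, INPUT_FEATURES_NUM, cell_size, num_layer, test_x_tensor) are in scope:

import torch

# Rebuild the same architecture; load_state_dict fails if the shapes differ
# from the ones used during training.
model = LstmRNN(INPUT_FEATURES_NUM, cell_size, output_size=1, num_layers=num_layer)
model.load_state_dict(torch.load('lstm_model.pt', map_location='cpu'))
model.eval()

with torch.no_grad():
    # same (seq_len, batch=1, features) layout the training code uses
    pred = model(test_x_tensor.cpu()).view(-1, 1).numpy()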
--------------------------------------------------------------------------------
/1,sun_power/PSO_result.csv: -------------------------------------------------------------------------------- 1 | 0.10130179673433304 2 | 0.10130179673433304 3 | 0.10130179673433304 4 | 0.10130179673433304 5 | 0.10130179673433304 6 | 0.10130179673433304 7 | 0.10130179673433304 8 | 0.10130179673433304 9 | 0.10130179673433304 10 | 0.10130179673433304 11 | 0.10130179673433304 12 | 0.10130179673433304 13 | 0.10130179673433304 14 | 0.10130179673433304 15 | 0.10129765421152115 16 | 0.10129765421152115 17 | 0.10129765421152115 18 | 0.10129765421152115 19 | 0.10129765421152115 20 | 0.10129765421152115 21 | 0.10129765421152115 22 | 0.10129765421152115 23 | 0.10129765421152115 24 | 0.10129765421152115 25 | 0.10077572613954544 26 | 0.10077572613954544 27 | 0.10077572613954544 28 | 0.10077572613954544 29 | 0.10077572613954544 30 | 0.10077572613954544 31 | 0.10077572613954544 32 | 0.10077572613954544 33 | 0.10077572613954544 34 | 0.10077572613954544 35 | 0.10077572613954544 36 | 0.10077572613954544 37 | 0.10077572613954544 38 | 0.10077572613954544 39 | 0.10077572613954544 40 | 0.10077572613954544 41 | 0.10077572613954544 42 | 0.10077572613954544 43 | 0.10077572613954544 44 | 0.10077572613954544 45 | 0.10077572613954544 46 | 0.10077572613954544 47 | 0.10077572613954544 48 | 0.10077572613954544 49 | 0.10077572613954544 50 | 0.10077572613954544 51 | 0.10077572613954544 52 | 0.10077572613954544 53 | 0.10077572613954544 54 | 0.10077572613954544 55 | 0.10077572613954544 56 | 0.10077572613954544 57 | 0.10077572613954544 58 | 0.10077572613954544 59 | 
0.10077572613954544 60 | 0.10077572613954544 61 | 0.10077572613954544 62 | 0.10077572613954544 63 | 0.10077572613954544 64 | 0.10077572613954544 65 | 0.10077572613954544 66 | 0.10077572613954544 67 | 0.10077572613954544 68 | 0.10077572613954544 69 | 0.10077572613954544 70 | 0.10077572613954544 71 | 0.10077572613954544 72 | 0.10077572613954544 73 | 0.10077572613954544 74 | 0.10077572613954544 75 | 0.10077572613954544 76 | 0.10077572613954544 77 | 0.10077572613954544 78 | 0.10077572613954544 79 | 0.10077572613954544 80 | 0.10077572613954544 81 | 0.10077572613954544 82 | 0.10077572613954544 83 | 0.10077572613954544 84 | 0.10077572613954544 85 | 0.10077572613954544 86 | 0.10077572613954544 87 | 0.10077572613954544 88 | 0.10077572613954544 89 | 0.10077572613954544 90 | 0.10077572613954544 91 | 0.10077572613954544 92 | 0.10077572613954544 93 | 0.10077572613954544 94 | 0.10077572613954544 95 | 0.10077572613954544 96 | 0.10077572613954544 97 | 0.10077572613954544 98 | 0.10077572613954544 99 | 0.10077572613954544 100 | 0.10077572613954544 101 | 0.10077572613954544 102 | 0.10077572613954544 103 | 0.10077572613954544 104 | 0.10077572613954544 105 | 0.10077572613954544 106 | 0.10077572613954544 107 | 0.10077572613954544 108 | 0.10077572613954544 109 | 0.10077572613954544 110 | 0.10077572613954544 111 | 0.10077572613954544 112 | 0.10077572613954544 113 | 0.10077572613954544 114 | 0.10077572613954544 115 | 0.10077572613954544 116 | 0.10077572613954544 117 | 0.10077572613954544 118 | 0.10077572613954544 119 | 0.10077572613954544 120 | 0.10077572613954544 121 | 0.10077572613954544 122 | 0.10077572613954544 123 | 0.10077572613954544 124 | 0.10077572613954544 125 | 0.10077572613954544 126 | 0.10077572613954544 127 | 0.10077572613954544 128 | 0.10077572613954544 129 | 0.10077572613954544 130 | 0.10077572613954544 131 | 0.10077572613954544 132 | 0.10077572613954544 133 | 0.10077572613954544 134 | 0.10077572613954544 135 | 0.10077572613954544 136 | 0.10077572613954544 137 | 0.10077572613954544 138 | 0.10077572613954544 139 | 0.10077572613954544 140 | 0.10077572613954544 141 | 0.10077572613954544 142 | 0.10077572613954544 143 | 0.10077572613954544 144 | 0.10077572613954544 145 | 0.10077572613954544 146 | 0.10077572613954544 147 | 0.10077572613954544 148 | 0.10077572613954544 149 | 0.10077572613954544 150 | 0.10077572613954544 151 | 0.10077572613954544 152 | 0.10077572613954544 153 | 0.10077572613954544 154 | 0.10077572613954544 155 | 0.10077572613954544 156 | 0.10077572613954544 157 | 0.10077572613954544 158 | 0.10077572613954544 159 | 0.10077572613954544 160 | 0.10077572613954544 161 | 0.10077572613954544 162 | 0.10077572613954544 163 | 0.10077572613954544 164 | 0.10077572613954544 165 | 0.10077572613954544 166 | 0.10077572613954544 167 | 0.10077572613954544 168 | 0.10077572613954544 169 | 0.10077572613954544 170 | 0.10077572613954544 171 | 0.10077572613954544 172 | 0.10077572613954544 173 | 0.10077572613954544 174 | 0.10077572613954544 175 | 0.10077572613954544 176 | 0.10077572613954544 177 | 0.10077572613954544 178 | 0.10077572613954544 179 | 0.10077572613954544 180 | 0.10077572613954544 181 | 0.10077572613954544 182 | 0.10077572613954544 183 | 0.10077572613954544 184 | 0.10077572613954544 185 | 0.10077572613954544 186 | 0.10077572613954544 187 | 0.10077572613954544 188 | 0.10077572613954544 189 | 0.10077572613954544 190 | 0.10077572613954544 191 | 0.10077572613954544 192 | 0.10077572613954544 193 | 0.10077572613954544 194 | 0.10077572613954544 195 | 0.10077572613954544 196 | 0.10077572613954544 197 | 
0.10077572613954544 198 | 0.10077572613954544 199 | 0.10077572613954544 200 | 0.10077572613954544 201 | 0.10077572613954544 202 | 0.10077572613954544 203 | 0.10077572613954544 204 | 0.10077572613954544 205 | 0.10077572613954544 206 | 0.10077572613954544 207 | 0.10077572613954544 208 | 0.10077572613954544 209 | 0.10077572613954544 210 | 0.10077572613954544 211 | 0.10077572613954544 212 | 0.10077572613954544 213 | 0.10077572613954544 214 | 0.10077572613954544 215 | 0.10077572613954544 216 | 0.10077572613954544 217 | 0.10077572613954544 218 | 0.10077572613954544 219 | 0.10077572613954544 220 | 0.10077572613954544 221 | 0.10077572613954544 222 | 0.10077572613954544 223 | 0.10077572613954544 224 | 0.10077572613954544 225 | 0.10077572613954544 226 | 0.10077572613954544 227 | 0.10077572613954544 228 | 0.10077572613954544 229 | 0.10077572613954544 230 | 0.10077572613954544 231 | 0.10077572613954544 232 | 0.10077572613954544 233 | 0.10077572613954544 234 | 0.10077572613954544 235 | 0.10077572613954544 236 | 0.10077572613954544 237 | 0.10077572613954544 238 | 0.10077572613954544 239 | 0.10077572613954544 240 | 0.10077572613954544 241 | 0.10077572613954544 242 | 0.10077572613954544 243 | 0.10077572613954544 244 | 0.10077572613954544 245 | 0.10077572613954544 246 | 0.10077572613954544 247 | 0.10077572613954544 248 | 0.10077572613954544 249 | 0.10077572613954544 250 | 0.10077572613954544 251 | 0.10077572613954544 252 | 0.10077572613954544 253 | 0.10077572613954544 254 | 0.10077572613954544 255 | 0.10077572613954544 256 | 0.10077572613954544 257 | 0.10077572613954544 258 | 0.10077572613954544 259 | 0.10077572613954544 260 | 0.10077572613954544 261 | 0.10077572613954544 262 | 0.10077572613954544 263 | 0.10077572613954544 264 | 0.10077572613954544 265 | 0.10077572613954544 266 | 0.10077572613954544 267 | 0.10077572613954544 268 | 0.10077572613954544 269 | 0.10077572613954544 270 | 0.10077572613954544 271 | 0.10077572613954544 272 | 0.10077572613954544 273 | 0.10077572613954544 274 | 0.10077572613954544 275 | 0.10077572613954544 276 | 0.10077572613954544 277 | 0.10077572613954544 278 | 0.10077572613954544 279 | 0.10077572613954544 280 | 0.10077572613954544 281 | 0.10077572613954544 282 | 0.10077572613954544 283 | 0.10077572613954544 284 | 0.10077572613954544 285 | 0.10077572613954544 286 | 0.10077572613954544 287 | 0.10077572613954544 288 | 0.10077572613954544 289 | 0.10077572613954544 290 | 0.10077572613954544 291 | 0.10077572613954544 292 | 0.10077572613954544 293 | 0.10077572613954544 294 | 0.10077572613954544 295 | 0.10077572613954544 296 | 0.10077572613954544 297 | 0.10077572613954544 298 | 0.10077572613954544 299 | 0.10077572613954544 300 | 0.10077572613954544 301 | 0.10077572613954544 302 | 0.10077572613954544 303 | 0.10077572613954544 304 | 1024.0,0.010730477828745576,18.0,3.0,0.1769728809595108 305 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 306 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 307 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 308 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 309 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 310 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 311 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 312 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 313 | 1717.0,0.014217198626897735,19.0,3.0,0.17545729875564575 314 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 315 | 
1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 316 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 317 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 318 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 319 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 320 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 321 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 322 | 1717.4286180122429,0.06892779058835607,19.456171045486414,3.475656305601099,0.17545729875564575 323 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 324 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 325 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 326 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 327 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 328 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 329 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 330 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 331 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 332 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 333 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 334 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 335 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 336 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 337 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 338 | 1024.0,0.010730477828745576,18.0,3.0,0.17493104934692383 339 | 603.0,0.024962927067874202,9.0,4.0,0.174555242061615 340 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 341 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 342 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 343 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 344 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 345 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 346 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 347 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 348 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 349 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 350 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 351 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 352 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 353 | 1052.7385977007152,0.07648252031935018,19.6296246442238,2.56583086801105,0.16015848517417908 354 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 355 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 356 | 
1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 357 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 358 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 359 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 360 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 361 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 362 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 363 | 1023.673247125894,0.08963379760659104,20.53703080527975,2.2072885850138126,0.16015848517417908 364 | 1022.3176790470409,0.09024715513829663,20.579350983848236,2.1905666647844653,0.16015848517417908 365 | 1022.3176790470409,0.09024715513829663,20.579350983848236,2.1905666647844653,0.16015848517417908 366 | 1022.3176790470409,0.09024715513829663,20.579350983848236,2.1905666647844653,0.16015848517417908 367 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 368 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 369 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 370 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 371 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 372 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 373 | 1047.2591966141115,0.07149050691196293,29.769255822722883,4.450457493378759,0.10131556540727615 374 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 375 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 376 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 377 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 378 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 379 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 380 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 381 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 382 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 383 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 384 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 385 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 386 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 387 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 388 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 389 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 390 | 
1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 391 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 392 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 393 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 394 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 395 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 396 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 397 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 398 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 399 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 400 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 401 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 402 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 403 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 404 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 405 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 406 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 407 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 408 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 409 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 410 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 411 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 412 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 413 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 414 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 415 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 416 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 417 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 418 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 419 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 420 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 421 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 422 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 423 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 424 | 
1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 425 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 426 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 427 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 428 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 429 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 430 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 431 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 432 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 433 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 434 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 435 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 436 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 437 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 438 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 439 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 440 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 441 | 1119.1409346556952,0.0687025360686426,13.068498693590008,1.5044430094560477,0.10131556540727615 442 | -------------------------------------------------------------------------------- /1,sun_power/new_begin_PSO_sun.py: -------------------------------------------------------------------------------- 1 | import scipy.io as sio 2 | import numpy as np 3 | import torch 4 | from torch import nn 5 | import matplotlib.pyplot as plt 6 | import matplotlib 7 | import pandas as pd 8 | from torch.autograd import Variable 9 | from sklearn.preprocessing import MinMaxScaler 10 | import math 11 | import csv 12 | import os 13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE' 14 | 15 | # Define LSTM Neural Networks 16 | class LstmRNN(nn.Module): 17 | """ 18 | Parameters: 19 | - input_size: feature size 20 | - hidden_size: number of hidden units 21 | - output_size: number of outputs 22 | - num_layers: layers of LSTM to stack 23 | """ 24 | 25 | def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1): 26 | super().__init__() 27 | 28 | self.lstm = nn.LSTM(input_size, hidden_size, num_layers) # utilize the LSTM model in torch.nn 29 | self.linear1 = nn.Linear(hidden_size, output_size) # fully connected output layer 30 | 31 | def forward(self, _x): 32 | x, _ = self.lstm(_x) # _x is input, size (seq_len, batch, input_size) 33 | s, b, h = x.shape # x is output, size (seq_len, batch, hidden_size) 34 | x = x.view(s * b, h) 35 | x = self.linear1(x) 36 | x = x.view(s, b, -1) 37 | return x 38 | 39 | 40 | def train_LSTM(X): 41 | 42 | device = torch.device("cuda:0") 43 | 44 | epoch = int(X[0]) 45 | lr = X[1] 46 | hidden_size = int(X[2]) 47 | num_layers = int(X[3]) 48 | 49 | # read the data & convert the dtype 50 | data = np.array(pd.read_excel('testing.xlsx')).astype('float32') 51 | data = data[~np.isnan(data).any(axis=1), :] 52 | 53 | # normalize 54 | scaler = 
MinMaxScaler() 55 | data = scaler.fit_transform(data) 56 | data_x = data[:, 0:-1] 57 | data_y = data[:, -1] 58 | 59 | # split the dataset 60 | data_len = len(data_x) 61 | t = np.linspace(0, data_len, data_len + 1) 62 | 63 | train_data_ratio = 0.8 # Choose 80% of the data for training 64 | train_data_len = int(data_len * train_data_ratio) 65 | 66 | train_x = data_x[5:train_data_len] 67 | train_y = data_y[5:train_data_len] 68 | t_for_training = t[5:train_data_len] 69 | 70 | test_x = data_x[train_data_len:] 71 | test_y = data_y[train_data_len:] 72 | t_for_testing = t[train_data_len:] 73 | 74 | # ----------------- train ------------------- 75 | INPUT_FEATURES_NUM = train_x.shape[1] 76 | OUTPUT_FEATURES_NUM = 1 77 | 78 | 79 | train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM) # set batch size to 1 80 | train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM) # set batch size to 1 81 | 82 | # transfer data to pytorch tensor 83 | train_x_tensor = torch.from_numpy(train_x_tensor) 84 | train_y_tensor = torch.from_numpy(train_y_tensor) 85 | 86 | lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM, 87 | num_layers=num_layers) # hidden size and depth come from X 88 | lstm_model.to(device) 89 | print('LSTM model:', lstm_model) 90 | print('model.parameters:', lstm_model.parameters) 91 | print('train x tensor dimension:', Variable(train_x_tensor).size()) 92 | 93 | criterion = nn.MSELoss() 94 | optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr) 95 | 96 | prev_loss = 1000 97 | max_epochs = epoch 98 | 99 | train_x_tensor = train_x_tensor.to(device) 100 | train_y_tensor = train_y_tensor.to(device) 101 | for epoch in range(max_epochs): 102 | output = lstm_model(train_x_tensor).to(device) 103 | loss = criterion(output, train_y_tensor) 104 | 105 | optimizer.zero_grad() 106 | loss.backward() 107 | optimizer.step() 108 | 109 | if loss < prev_loss: 110 | torch.save(lstm_model.state_dict(), 'lstm_model.pt') # save model parameters to files 111 | prev_loss = loss 112 | 113 | '''if loss.item() < 1e-4: 114 | print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item())) 115 | print("The loss value is reached") 116 | break 117 | else: 118 | print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))''' 119 | 120 | # prediction on training dataset 121 | pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu")) 122 | pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 123 | 124 | # ----------------- test ------------------- 125 | lstm_model = lstm_model.eval() # switch to evaluation mode 126 | 127 | # prediction on test dataset 128 | test_x_tensor = test_x.reshape(-1, 1, 129 | INPUT_FEATURES_NUM) 130 | test_x_tensor = torch.from_numpy(test_x_tensor) # convert to tensor 131 | test_x_tensor = test_x_tensor.to(device) 132 | 133 | pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu")) 134 | pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 135 | 136 | loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y)) 137 | 138 | print(X) 139 | print("test loss:", loss.item()) 140 | 141 | return loss.item() 142 | 143 | 144 | def PSO_LSTM(N, D, dmin, dmax, maxiter): 145 | c1 = 2 146 | c2 = 2 147 | w = 0.5 148 | pN = N # number of particles 149 | dim = D # search dimensions 150 | 151 | DOWN = dmin 152 | UP = dmax 153 | 154 | X = np.zeros((pN, dim)) # positions and velocities of all particles 155 | V = np.zeros((pN, dim)) 156 | pbest = np.zeros((pN, dim)) # best position seen by each particle, plus the global best 157 | gbest = np.zeros(dim) 158 | p_fit = np.zeros(pN) # best fitness seen by each particle 
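# For reference, the swarm loop below applies the standard PSO update rule
# (a minimal sketch of the same logic; r1 and r2 are fresh uniform randoms
# drawn each step):
#
#     r1, r2 = np.random.random(), np.random.random()
#     V[i] = w * V[i] + c1 * r1 * (pbest[i] - X[i]) + c2 * r2 * (gbest - X[i])
#     X[i] = X[i] + V[i]
#
# This implementation additionally freezes a particle for one step (ww = 0)
# whenever the proposed move would leave the [dmin, dmax] box in any dimension.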
159 | 160 | fit = 1 161 | for i_episode in range(maxiter): 162 | """initialization""" 163 | np.random.seed() 164 | # evaluate the fitness of the initial particles; each i_episode restarts the swarm, while gbest and fit persist across restarts 165 | print("computing the initial global best") 166 | for i in range(pN): 167 | for j in range(dim): 168 | V[i][j] = np.random.random() 169 | if j == 1: 170 | X[i][j] = DOWN[j] + (UP[j] - DOWN[j])*np.random.random() 171 | else: 172 | X[i][j] = int(DOWN[j] + (UP[j] - DOWN[j])*np.random.random()) 173 | pbest[i] = X[i] # personal best so far 174 | 175 | p_fit[i] = train_LSTM(X[i]) 176 | if p_fit[i] < fit: 177 | fit = p_fit[i] 178 | gbest = X[i] 179 | 180 | for j in range(maxiter): 181 | 182 | for i in range(pN): 183 | temp = train_LSTM(X[i]) 184 | with open('PSO_result.csv' ,'a+' ,newline='') as f: 185 | csv_write = csv.writer(f) 186 | csv_write.writerow(np.append(gbest,fit)) # logged for plotting the convergence curve later 187 | if temp < p_fit[i]: # update the personal best 188 | p_fit[i] = temp 189 | pbest[i] = X[i] 190 | if p_fit[i] < fit: # update the global best 191 | gbest = X[i] 192 | fit = p_fit[i] 193 | 194 | # update velocities and positions 195 | for i in range(pN): 196 | # integer/continuous handling is deferred here; components are moved as floats 197 | V[i] = w * V[i] + c1 * np.random.random() * (pbest[i] - X[i]) + c2 * np.random.random() *(gbest - X[i]) 198 | ww = 1 199 | for k in range(dim): 200 | if DOWN[k] < X[i][k] + V[i][k] < UP[k]: 201 | continue 202 | else: 203 | ww = 0 204 | X[i] = X[i] + V[i] * ww # ww = 0 keeps the particle in place when the move would leave the bounds 205 | return gbest, fit 206 | 207 | if __name__ == '__main__': 208 | N = 10 209 | D = 4 210 | dmin = [500, 0.00001, 5, 1] 211 | dmax = [2000, 0.1, 30, 5] 212 | maxiter = 70 213 | PSO_bestParams, PSO_minimum = PSO_LSTM(N, D, dmin, dmax, maxiter) 214 | print(PSO_bestParams, PSO_minimum) 215 | -------------------------------------------------------------------------------- /1,sun_power/new_begin_TSA_sun.py: -------------------------------------------------------------------------------- 1 | import scipy.io as sio 2 | import numpy as np 3 | import torch 4 | from torch import nn 5 | import matplotlib.pyplot as plt 6 | import matplotlib 7 | import pandas as pd 8 | from torch.autograd import Variable 9 | from sklearn.preprocessing import MinMaxScaler 10 | import math 11 | import csv 12 | import os 13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE' 14 | 15 | # Define LSTM Neural Networks 16 | class LstmRNN(nn.Module): 17 | """ 18 | Parameters: 19 | - input_size: feature size 20 | - hidden_size: number of hidden units 21 | - output_size: number of outputs 22 | - num_layers: layers of LSTM to stack 23 | """ 24 | 25 | def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1): 26 | super().__init__() 27 | 28 | self.lstm = nn.LSTM(input_size, hidden_size, num_layers) # utilize the LSTM model in torch.nn 29 | self.linear1 = nn.Linear(hidden_size, output_size) # fully connected output layer 30 | 31 | def forward(self, _x): 32 | x, _ = self.lstm(_x) # _x is input, size (seq_len, batch, input_size) 33 | s, b, h = x.shape # x is output, size (seq_len, batch, hidden_size) 34 | x = x.view(s * b, h) 35 | x = self.linear1(x) 36 | x = x.view(s, b, -1) 37 | return x 38 | 39 | 40 | def train_LSTM(X): 41 | 42 | device = torch.device("cuda:0") 43 | 44 | epoch = int(X[0]) 45 | lr = X[1] 46 | hidden_size = int(X[2]) 47 | num_layers = int(X[3]) 48 | 49 | # read the data & convert the dtype 50 | data = np.array(pd.read_excel('testing.xlsx')).astype('float32') 51 | data = data[~np.isnan(data).any(axis=1), :] 52 | 53 | # normalize 54 | scaler = MinMaxScaler() 55 | data = scaler.fit_transform(data) 56 | data_x = data[:, 0:-1] 57 | data_y = data[:, -1] 58 | 59 | # split the dataset 60 | data_len = len(data_x) 61 | t = np.linspace(0, data_len, data_len + 1) 62 | 63 | train_data_ratio = 0.8 # Choose 80% of the data for training 64 | 
train_data_len = int(data_len * train_data_ratio) 65 | 66 | train_x = data_x[5:train_data_len] 67 | train_y = data_y[5:train_data_len] 68 | t_for_training = t[5:train_data_len] 69 | 70 | test_x = data_x[train_data_len:] 71 | test_y = data_y[train_data_len:] 72 | t_for_testing = t[train_data_len:] 73 | 74 | # ----------------- train ------------------- 75 | INPUT_FEATURES_NUM = train_x.shape[1] 76 | OUTPUT_FEATURES_NUM = 1 77 | 78 | 79 | train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM) # set batch size to 1 80 | train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM) # set batch size to 1 81 | 82 | # transfer data to pytorch tensor 83 | train_x_tensor = torch.from_numpy(train_x_tensor) 84 | train_y_tensor = torch.from_numpy(train_y_tensor) 85 | 86 | lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM, 87 | num_layers=num_layers) # hidden size and depth come from X 88 | lstm_model.to(device) 89 | print('LSTM model:', lstm_model) 90 | print('model.parameters:', lstm_model.parameters) 91 | print('train x tensor dimension:', Variable(train_x_tensor).size()) 92 | 93 | criterion = nn.MSELoss() 94 | optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr) 95 | 96 | prev_loss = 1000 97 | max_epochs = epoch 98 | 99 | train_x_tensor = train_x_tensor.to(device) 100 | train_y_tensor = train_y_tensor.to(device) 101 | for epoch in range(max_epochs): 102 | output = lstm_model(train_x_tensor).to(device) 103 | loss = criterion(output, train_y_tensor) 104 | 105 | optimizer.zero_grad() 106 | loss.backward() 107 | optimizer.step() 108 | 109 | if loss < prev_loss: 110 | torch.save(lstm_model.state_dict(), 'lstm_model.pt') # save model parameters to files 111 | prev_loss = loss 112 | 113 | '''if loss.item() < 1e-4: 114 | print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item())) 115 | print("The loss value is reached") 116 | break 117 | else: 118 | print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))''' 119 | 120 | # prediction on training dataset 121 | pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu")) 122 | pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 123 | 124 | # ----------------- test ------------------- 125 | lstm_model = lstm_model.eval() # switch to evaluation mode 126 | 127 | # prediction on test dataset 128 | test_x_tensor = test_x.reshape(-1, 1, 129 | INPUT_FEATURES_NUM) 130 | test_x_tensor = torch.from_numpy(test_x_tensor) # convert to tensor 131 | test_x_tensor = test_x_tensor.to(device) 132 | 133 | pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu")) 134 | pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 135 | 136 | loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y)) 137 | 138 | print(X) 139 | print("test loss:", loss.item()) 140 | return loss.item() 141 | 142 | 143 | 144 | def TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax): 145 | low = int(N * 0.1) 146 | high = int(N * 0.25) 147 | 148 | for run in range(maxrun): 149 | np.random.seed() 150 | # maxrun controls how many independent runs are repeated 151 | trees = np.zeros((N, D)) 152 | obj_trees = np.zeros((N, 1)) # objective value of each individual 153 | 154 | for i in range(N): 155 | for j in range(D): 156 | if j == 1: 157 | trees[i, j] = dmin[j] + np.random.rand() * (dmax[j] - dmin[j]) 158 | else: 159 | trees[i, j] = int(dmin[j] + np.random.rand() * (dmax[j] - dmin[j])) 160 | obj_trees[i] = train_LSTM(trees[i, :]) 161 | FEs = N # N individuals have already been evaluated 162 | 163 | minimum = np.min(obj_trees) 164 | iter1 = 0 165 | 166 | while (FEs 
<= maxFEs): 167 | iter1 = iter1 + 1 168 | for i in range(N): 169 | # i indexes the trees 170 | ns = int(low + (high - low) * np.random.rand()) + 1 # number of seeds for this tree 171 | if ns > high: 172 | ns = high 173 | FEs = FEs + ns # count the evaluations actually spent 174 | 175 | seeds = np.zeros((ns, D)) # holds the seeds grown from tree i 176 | obj_seeds = np.zeros((ns, 1)) 177 | minimum, min_index = np.min(obj_trees), np.argmin(obj_trees) 178 | bestParams = trees[min_index, :] 179 | 180 | for j in range(ns): 181 | # j indexes the seeds 182 | komus = int(np.random.rand() * N) # random partner tree, komus != i 183 | while komus == i: 184 | komus = int(np.random.rand() * N) 185 | seeds[j, :] = trees[i, :] # each seed starts from the current tree 186 | 187 | for d in range(D): 188 | if np.random.rand() < ST: 189 | if d == 1: # the learning rate stays continuous 190 | seeds[j, d] = iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2 191 | else: # the remaining hyperparameters are integers 192 | seeds[j, d] = int(iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2) 193 | if seeds[j, d] > dmax[d]: 194 | seeds[j, d] = dmax[d] 195 | if seeds[j, d] < dmin[d]: 196 | seeds[j, d] = dmin[d] 197 | else: 198 | if d == 1: 199 | seeds[j, d] = iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2 200 | else: 201 | seeds[j, d] = int(iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2) 202 | if seeds[j, d] > dmax[d]: 203 | seeds[j, d] = dmax[d] 204 | if seeds[j, d] < dmin[d]: 205 | seeds[j, d] = dmin[d] 206 | obj_seeds[j] = train_LSTM(seeds[j, :]) 207 | 208 | mini_seeds, mini_seeds_ind = np.min(obj_seeds), np.argmin(obj_seeds) 209 | 210 | if mini_seeds < obj_trees[i]: # keep the best seed if it improves on the tree 211 | trees[i, :] = seeds[mini_seeds_ind, :] 212 | obj_trees[i] = mini_seeds 213 | 214 | min_tree, min_tree_index = np.min(obj_trees), np.argmin(obj_trees) 215 | if min_tree < minimum: 216 | minimum = min_tree 217 | bestParams = trees[min_tree_index, :] 218 | 219 | print('Iter={} .... min={} .... FES={} .... \n'.format(iter1, minimum, FEs)) 220 | with open('TSA_result.csv','a+',newline='') as f: 221 | csv_write = csv.writer(f) 222 | csv_write.writerow(np.append(bestParams,minimum)) # logged for plotting the convergence curve later 223 | print('Run={} .... 
min={} ....\n'.format(run, minimum)) 224 | 225 | return bestParams,minimum 226 | 227 | if __name__ == '__main__': 228 | iw = 1 229 | maxrun = 1 230 | N = 20 231 | D = 4 232 | ST = 0.1 233 | maxFEs = 50 234 | dmin = [500, 0.00001, 5, 1] 235 | dmax = [2000, 0.1, 30, 5] 236 | TSA_bestParams, TSA_minimum = TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax) 237 | print(TSA_bestParams, TSA_minimum) 238 | -------------------------------------------------------------------------------- /1,sun_power/testing.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/1,sun_power/testing.xlsx -------------------------------------------------------------------------------- /2,beijing PM2.5/PSO_result.csv: -------------------------------------------------------------------------------- 1 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 2 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 3 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 4 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 5 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 6 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 7 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 8 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 9 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 10 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 11 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 12 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 13 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 14 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 15 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 16 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 17 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 18 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 19 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 20 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 21 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 22 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 23 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 24 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 25 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 26 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 27 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 28 | 1110.0,0.0992902903101674,29.0,2.0,0.009001516737043858 29 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 30 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 31 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 32 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 33 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 34 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 35 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 36 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 37 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 38 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 39 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 40 | 1855.0,0.06712979623604513,14.0,4.0,0.008997845463454723 41 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 42 | 
1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 43 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 44 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 45 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 46 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 47 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 48 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 49 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 50 | 1696.8951415122312,0.08565337010609296,17.212161654724007,3.587143505671194,0.008997845463454723 51 | 1781.9342183880037,0.07569018720699874,15.484452180732001,3.8092045827038477,0.008997845463454723 52 | 1781.9342183880037,0.07569018720699874,15.484452180732001,3.8092045827038477,0.008997845463454723 53 | 1781.9342183880037,0.07569018720699874,15.484452180732001,3.8092045827038477,0.008997845463454723 54 | 1781.9342183880037,0.07569018720699874,15.484452180732001,3.8092045827038477,0.008997845463454723 55 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 56 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 57 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 58 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 59 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 60 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 61 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 62 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 63 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 64 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 65 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 66 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 67 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 68 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 69 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 70 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 71 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 72 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 73 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 74 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 75 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 76 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 77 | 
1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 78 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 79 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 80 | 1675.7850497502575,0.08751962961997134,17.62708613689983,3.5389270267066335,0.00899756420403719 81 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 82 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 83 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 84 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 85 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 86 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 87 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 88 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 89 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 90 | 1702.7581809690396,0.09523777911018805,17.33047190401231,3.856292905044963,0.00899756420403719 91 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 92 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 93 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 94 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 95 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 96 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 97 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 98 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 99 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 100 | 1698.7765764787364,0.09409847428058485,17.374256237094503,3.8094453441885108,0.00899756420403719 101 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 102 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 103 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 104 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 105 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 106 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 107 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 108 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 109 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 110 | 1657.5921323299408,0.08231386929595688,17.82714738306526,3.3248691492654325,0.00899756420403719 111 | 1664.1534133188147,0.08419132827944277,17.75499523611989,3.4020691853742773,0.00899756420403719 112 | 
1664.1534133188147,0.08419132827944277,17.75499523611989,3.4020691853742773,0.00899756420403719 113 | -------------------------------------------------------------------------------- /2,beijing PM2.5/TSA_result.csv: -------------------------------------------------------------------------------- 1 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 2 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 3 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 4 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 5 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 6 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 7 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 8 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 9 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 10 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 11 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 12 | 1766.0,0.06583637483389584,19.0,3.0,0.008997567929327488 13 | 1399.0,0.06647526627847007,5.0,4.0,0.008997565135359764 14 | 1399.0,0.06647526627847007,5.0,4.0,0.008997565135359764 15 | 1399.0,0.06647526627847007,5.0,4.0,0.008997565135359764 16 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 17 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 18 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 19 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 20 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 21 | 1864.0,0.048377156623205124,15.0,5.0,0.008997256867587566 22 | 1839.0,0.03748037436614208,26.0,4.0,0.008997916243970394 23 | 1839.0,0.03748037436614208,26.0,4.0,0.008997916243970394 24 | 1839.0,0.03748037436614208,26.0,4.0,0.008997916243970394 25 | 1839.0,0.03748037436614208,26.0,4.0,0.008997916243970394 26 | -------------------------------------------------------------------------------- /2,beijing PM2.5/lstm_model.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/2,beijing PM2.5/lstm_model.pt -------------------------------------------------------------------------------- /2,beijing PM2.5/new_begin_PSO_beijngpm2.5.py: -------------------------------------------------------------------------------- 1 | import scipy.io as sio 2 | import numpy as np 3 | import torch 4 | from torch import nn 5 | import matplotlib.pyplot as plt 6 | import matplotlib 7 | import pandas as pd 8 | from torch.autograd import Variable 9 | from sklearn.preprocessing import MinMaxScaler 10 | import math 11 | import csv 12 | import os 13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE' 14 | 15 | # Define LSTM Neural Networks 16 | class LstmRNN(nn.Module): 17 | """ 18 | Parameters: 19 | - input_size: feature size 20 | - hidden_size: number of hidden units 21 | - output_size: number of output 22 | - num_layers: layers of LSTM to stack 23 | """ 24 | 25 | def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1): 26 | super().__init__() 27 | 28 | self.lstm = nn.LSTM(input_size, hidden_size, num_layers) # utilize the LSTM model in torch.nn 29 | self.linear1 = nn.Linear(hidden_size, output_size) # 全连接层 30 | 31 | def forward(self, _x): 32 | x, _ = self.lstm(_x) # _x is input, size (seq_len, batch, input_size) 33 | s, b, h = x.shape # x is output, size (seq_len, batch, hidden_size) 34 | x = x.view(s * b, h) 35 | x = self.linear1(x) 36 | x = x.view(s, b, -1) 37 | return x 
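# Shape walk-through for LstmRNN (a minimal sketch with made-up sizes, not
# values used by this repository):
#
#     model = LstmRNN(input_size=8, hidden_size=16, output_size=1, num_layers=2)
#     dummy = torch.zeros(100, 1, 8)   # (seq_len, batch, input_size); nn.LSTM defaults to batch_first=False
#     out = model(dummy)               # lstm -> (100, 1, 16); view -> (100, 16); linear -> (100, 1); view -> (100, 1, 1)
#     print(out.shape)                 # torch.Size([100, 1, 1])
#
# This is why train_LSTM below reshapes its inputs to (-1, 1, INPUT_FEATURES_NUM),
# i.e. the whole series is treated as one sequence with batch size 1.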
38 | 39 | 40 | def train_LSTM(X): 41 | 42 | device = torch.device("cuda:0") 43 | 44 | epoch = int(X[0]) 45 | lr = X[1] 46 | hidden_size = int(X[2]) 47 | num_layers = int(X[3]) 48 | 49 | # read the data & convert the dtype 50 | data = np.array(pd.read_csv('PRSA_data_2010.1.1-2014.12.31.csv')).astype('float32') 51 | data = data[~np.isnan(data).any(axis=1), :] 52 | 53 | # normalize 54 | scaler = MinMaxScaler() 55 | data = scaler.fit_transform(data) 56 | data_x = data[:, 0:-1] 57 | data_y = data[:, -1] 58 | 59 | # split the dataset 60 | data_len = len(data_x) 61 | t = np.linspace(0, data_len, data_len + 1) 62 | 63 | train_data_ratio = 0.8 # Choose 80% of the data for training 64 | train_data_len = int(data_len * train_data_ratio) 65 | 66 | train_x = data_x[5:train_data_len] 67 | train_y = data_y[5:train_data_len] 68 | t_for_training = t[5:train_data_len] 69 | 70 | test_x = data_x[train_data_len:] 71 | test_y = data_y[train_data_len:] 72 | t_for_testing = t[train_data_len:] 73 | 74 | # ----------------- train ------------------- 75 | INPUT_FEATURES_NUM = train_x.shape[1] 76 | OUTPUT_FEATURES_NUM = 1 77 | 78 | 79 | train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM) # set batch size to 1 80 | train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM) # set batch size to 1 81 | 82 | # transfer data to pytorch tensor 83 | train_x_tensor = torch.from_numpy(train_x_tensor) 84 | train_y_tensor = torch.from_numpy(train_y_tensor) 85 | 86 | lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM, 87 | num_layers=num_layers) # hidden size and depth come from X 88 | lstm_model.to(device) 89 | print('LSTM model:', lstm_model) 90 | print('model.parameters:', lstm_model.parameters) 91 | print('train x tensor dimension:', Variable(train_x_tensor).size()) 92 | 93 | criterion = nn.MSELoss() 94 | optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr) 95 | 96 | prev_loss = 1000 97 | max_epochs = epoch 98 | 99 | train_x_tensor = train_x_tensor.to(device) 100 | train_y_tensor = train_y_tensor.to(device) 101 | for epoch in range(max_epochs): 102 | output = lstm_model(train_x_tensor).to(device) 103 | loss = criterion(output, train_y_tensor) 104 | 105 | optimizer.zero_grad() 106 | loss.backward() 107 | optimizer.step() 108 | 109 | if loss < prev_loss: 110 | torch.save(lstm_model.state_dict(), 'lstm_model.pt') # save model parameters to files 111 | prev_loss = loss 112 | 113 | '''if loss.item() < 1e-4: 114 | print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item())) 115 | print("The loss value is reached") 116 | break 117 | else: 118 | print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))''' 119 | 120 | # prediction on training dataset 121 | pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu")) 122 | pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 123 | 124 | # ----------------- test ------------------- 125 | lstm_model = lstm_model.eval() # switch to evaluation mode 126 | 127 | # prediction on test dataset 128 | test_x_tensor = test_x.reshape(-1, 1, 129 | INPUT_FEATURES_NUM) 130 | test_x_tensor = torch.from_numpy(test_x_tensor) # convert to tensor 131 | test_x_tensor = test_x_tensor.to(device) 132 | 133 | pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu")) 134 | pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 135 | 136 | loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y)) 137 | 138 | print(X) 139 | print("test loss:", loss.item()) 140 | 141 | return loss.item() 142 | 143 | 144 | 
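# Preprocessing note: scaler.fit_transform above computes the min/max over the
# full dataset before the train/test split, so test rows influence how the
# training rows are scaled. A leakage-free variant would fit on the training
# slice only (a sketch; `raw` stands for the unscaled array and is not a
# variable defined in this script):
#
#     scaler = MinMaxScaler()
#     train_scaled = scaler.fit_transform(raw[:train_data_len])
#     test_scaled = scaler.transform(raw[train_data_len:])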
def PSO_LSTM(N, D, dmin, dmax, maxiter): 145 | c1 = 2 146 | c2 = 2 147 | w = 0.5 148 | pN = N # number of particles 149 | dim = D # search dimensions 150 | 151 | DOWN = dmin 152 | UP = dmax 153 | 154 | X = np.zeros((pN, dim)) # positions and velocities of all particles 155 | V = np.zeros((pN, dim)) 156 | pbest = np.zeros((pN, dim)) # best position seen by each particle, plus the global best 157 | gbest = np.zeros(dim) 158 | p_fit = np.zeros(pN) # best fitness seen by each particle 159 | 160 | fit = 1 161 | for i_episode in range(maxiter): 162 | """initialization""" 163 | np.random.seed() 164 | # evaluate the fitness of the initial particles; each i_episode restarts the swarm, while gbest and fit persist across restarts 165 | print("computing the initial global best") 166 | for i in range(pN): 167 | for j in range(dim): 168 | V[i][j] = np.random.random() 169 | if j == 1: 170 | X[i][j] = DOWN[j] + (UP[j] - DOWN[j])*np.random.random() 171 | else: 172 | X[i][j] = int(DOWN[j] + (UP[j] - DOWN[j])*np.random.random()) 173 | pbest[i] = X[i] # personal best so far 174 | 175 | p_fit[i] = train_LSTM(X[i]) 176 | if p_fit[i] < fit: 177 | fit = p_fit[i] 178 | gbest = X[i] 179 | 180 | for j in range(maxiter): 181 | 182 | for i in range(pN): 183 | temp = train_LSTM(X[i]) 184 | with open('PSO_result.csv' ,'a+' ,newline='') as f: 185 | csv_write = csv.writer(f) 186 | csv_write.writerow(np.append(gbest,fit)) # logged for plotting the convergence curve later 187 | if temp < p_fit[i]: # update the personal best 188 | p_fit[i] = temp 189 | pbest[i] = X[i] 190 | if p_fit[i] < fit: # update the global best 191 | gbest = X[i] 192 | fit = p_fit[i] 193 | 194 | # update velocities and positions 195 | for i in range(pN): 196 | # integer/continuous handling is deferred here; components are moved as floats 197 | V[i] = w * V[i] + c1 * np.random.random() * (pbest[i] - X[i]) + c2 * np.random.random() *(gbest - X[i]) 198 | ww = 1 199 | for k in range(dim): 200 | if DOWN[k] < X[i][k] + V[i][k] < UP[k]: 201 | continue 202 | else: 203 | ww = 0 204 | X[i] = X[i] + V[i] * ww # ww = 0 keeps the particle in place when the move would leave the bounds 205 | return gbest, fit 206 | 207 | if __name__ == '__main__': 208 | N = 10 209 | D = 4 210 | dmin = [500, 0.00001, 5, 1] 211 | dmax = [2000, 0.1, 30, 5] 212 | maxiter = 70 213 | PSO_bestParams, PSO_minimum = PSO_LSTM(N, D, dmin, dmax, maxiter) 214 | print(PSO_bestParams, PSO_minimum) 215 | -------------------------------------------------------------------------------- /2,beijing PM2.5/new_begin_TSA_beijngpm2.5.py: -------------------------------------------------------------------------------- 1 | import scipy.io as sio 2 | import numpy as np 3 | import torch 4 | from torch import nn 5 | import matplotlib.pyplot as plt 6 | import matplotlib 7 | import pandas as pd 8 | from torch.autograd import Variable 9 | from sklearn.preprocessing import MinMaxScaler 10 | import math 11 | import csv 12 | import os 13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE' 14 | 15 | # Define LSTM Neural Networks 16 | class LstmRNN(nn.Module): 17 | """ 18 | Parameters: 19 | - input_size: feature size 20 | - hidden_size: number of hidden units 21 | - output_size: number of outputs 22 | - num_layers: layers of LSTM to stack 23 | """ 24 | 25 | def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1): 26 | super().__init__() 27 | 28 | self.lstm = nn.LSTM(input_size, hidden_size, num_layers) # utilize the LSTM model in torch.nn 29 | self.linear1 = nn.Linear(hidden_size, output_size) # fully connected output layer 30 | 31 | def forward(self, _x): 32 | x, _ = self.lstm(_x) # _x is input, size (seq_len, batch, input_size) 33 | s, b, h = x.shape # x is output, size (seq_len, batch, hidden_size) 34 | x = x.view(s * b, h) 35 | x = self.linear1(x) 36 | x = x.view(s, b, -1) 37 | return x 38 | 39 | 40 | def train_LSTM(X): 41 | 42 | device = torch.device("cuda:0") 43 | 44 | epoch = int(X[0]) 45 | lr = X[1] 46 | hidden_size = int(X[2]) 47 | num_layers = int(X[3]) 48 | 49 | # read the data & convert the dtype 50 | data = 
np.array(pd.read_csv('PRSA_data_2010.1.1-2014.12.31.csv')).astype('float32') 51 | data = data[~np.isnan(data).any(axis=1), :] 52 | 53 | # normalize 54 | scaler = MinMaxScaler() 55 | data = scaler.fit_transform(data) 56 | data_x = data[:, 0:-1] 57 | data_y = data[:, -1] 58 | 59 | # split the dataset 60 | data_len = len(data_x) 61 | t = np.linspace(0, data_len, data_len + 1) 62 | 63 | train_data_ratio = 0.8 # Choose 80% of the data for training 64 | train_data_len = int(data_len * train_data_ratio) 65 | 66 | train_x = data_x[5:train_data_len] 67 | train_y = data_y[5:train_data_len] 68 | t_for_training = t[5:train_data_len] 69 | 70 | test_x = data_x[train_data_len:] 71 | test_y = data_y[train_data_len:] 72 | t_for_testing = t[train_data_len:] 73 | 74 | # ----------------- train ------------------- 75 | INPUT_FEATURES_NUM = train_x.shape[1] 76 | OUTPUT_FEATURES_NUM = 1 77 | 78 | 79 | train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM) # set batch size to 1 80 | train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM) # set batch size to 1 81 | 82 | # transfer data to pytorch tensor 83 | train_x_tensor = torch.from_numpy(train_x_tensor) 84 | train_y_tensor = torch.from_numpy(train_y_tensor) 85 | 86 | lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM, 87 | num_layers=num_layers) # hidden size and depth come from X 88 | lstm_model.to(device) 89 | print('LSTM model:', lstm_model) 90 | print('model.parameters:', lstm_model.parameters) 91 | print('train x tensor dimension:', Variable(train_x_tensor).size()) 92 | 93 | criterion = nn.MSELoss() 94 | optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr) 95 | 96 | prev_loss = 1000 97 | max_epochs = epoch 98 | 99 | train_x_tensor = train_x_tensor.to(device) 100 | train_y_tensor = train_y_tensor.to(device) 101 | for epoch in range(max_epochs): 102 | output = lstm_model(train_x_tensor).to(device) 103 | loss = criterion(output, train_y_tensor) 104 | 105 | optimizer.zero_grad() 106 | loss.backward() 107 | optimizer.step() 108 | 109 | if loss < prev_loss: 110 | torch.save(lstm_model.state_dict(), 'lstm_model.pt') # save model parameters to files 111 | prev_loss = loss 112 | 113 | '''if loss.item() < 1e-4: 114 | print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item())) 115 | print("The loss value is reached") 116 | break 117 | else: 118 | print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))''' 119 | 120 | # prediction on training dataset 121 | pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu")) 122 | pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 123 | 124 | # ----------------- test ------------------- 125 | lstm_model = lstm_model.eval() # switch to evaluation mode 126 | 127 | # prediction on test dataset 128 | test_x_tensor = test_x.reshape(-1, 1, 129 | INPUT_FEATURES_NUM) 130 | test_x_tensor = torch.from_numpy(test_x_tensor) # convert to tensor 131 | test_x_tensor = test_x_tensor.to(device) 132 | 133 | pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu")) 134 | pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy() 135 | 136 | loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y)) 137 | 138 | print(X) 139 | print("test loss:", loss.item()) 140 | return loss.item() 141 | 142 | 143 | 144 | def TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax): 145 | low = int(N * 0.1) 146 | high = int(N * 0.25) 147 | 148 | for run in range(maxrun): 149 | np.random.seed() 150 | # maxrun controls how many independent runs are repeated 151 | trees = np.zeros((N, 
D)) 152 | obj_trees = np.zeros((N, 1)) # objective value of each individual 153 | 154 | for i in range(N): 155 | for j in range(D): 156 | if j == 1: 157 | trees[i, j] = dmin[j] + np.random.rand() * (dmax[j] - dmin[j]) 158 | else: 159 | trees[i, j] = int(dmin[j] + np.random.rand() * (dmax[j] - dmin[j])) 160 | obj_trees[i] = train_LSTM(trees[i, :]) 161 | FEs = N # N individuals have already been evaluated 162 | 163 | minimum = np.min(obj_trees) 164 | iter1 = 0 165 | 166 | while (FEs <= maxFEs): 167 | iter1 = iter1 + 1 168 | for i in range(N): 169 | # i indexes the trees 170 | ns = int(low + (high - low) * np.random.rand()) + 1 # number of seeds for this tree 171 | if ns > high: 172 | ns = high 173 | FEs = FEs + ns # count the evaluations actually spent 174 | 175 | seeds = np.zeros((ns, D)) # holds the seeds grown from tree i 176 | obj_seeds = np.zeros((ns, 1)) 177 | minimum, min_index = np.min(obj_trees), np.argmin(obj_trees) 178 | bestParams = trees[min_index, :] 179 | 180 | for j in range(ns): 181 | # j indexes the seeds 182 | komus = int(np.random.rand() * N) # random partner tree, komus != i 183 | while komus == i: 184 | komus = int(np.random.rand() * N) 185 | seeds[j, :] = trees[i, :] # each seed starts from the current tree 186 | 187 | for d in range(D): 188 | if np.random.rand() < ST: 189 | if d == 1: # the learning rate stays continuous 190 | seeds[j, d] = iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2 191 | else: # the remaining hyperparameters are integers 192 | seeds[j, d] = int(iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2) 193 | if seeds[j, d] > dmax[d]: 194 | seeds[j, d] = dmax[d] 195 | if seeds[j, d] < dmin[d]: 196 | seeds[j, d] = dmin[d] 197 | else: 198 | if d == 1: 199 | seeds[j, d] = iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2 200 | else: 201 | seeds[j, d] = int(iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2) 202 | if seeds[j, d] > dmax[d]: 203 | seeds[j, d] = dmax[d] 204 | if seeds[j, d] < dmin[d]: 205 | seeds[j, d] = dmin[d] 206 | obj_seeds[j] = train_LSTM(seeds[j, :]) 207 | 208 | mini_seeds, mini_seeds_ind = np.min(obj_seeds), np.argmin(obj_seeds) 209 | 210 | with open('TSA_result.csv', 'a+', newline='') as f: 211 | csv_write = csv.writer(f) 212 | csv_write.writerow(np.append(bestParams, minimum)) # logged for plotting the convergence curve later 213 | 214 | if mini_seeds < obj_trees[i]: # keep the best seed if it improves on the tree 215 | trees[i, :] = seeds[mini_seeds_ind, :] 216 | obj_trees[i] = mini_seeds 217 | 218 | min_tree, min_tree_index = np.min(obj_trees), np.argmin(obj_trees) 219 | if min_tree < minimum: 220 | minimum = min_tree 221 | bestParams = trees[min_tree_index, :] 222 | 223 | print('Iter={} .... min={} .... FES={} .... \n'.format(iter1, minimum, FEs)) 224 | with open('TSA_result.csv','a+',newline='') as f: 225 | csv_write = csv.writer(f) 226 | csv_write.writerow(np.append(bestParams,minimum)) # logged for plotting the convergence curve later 227 | print('Run={} .... 
min={} ....\n'.format(run, minimum)) 228 | 229 | return bestParams,minimum 230 | 231 | if __name__ == '__main__': 232 | iw = 1 233 | maxrun = 5 234 | N = 20 235 | D = 4 236 | ST = 0.1 237 | maxFEs = 50 238 | dmin = [500, 0.00001, 5, 1] 239 | dmax = [2000, 0.1, 30, 5] 240 | TSA_bestParams, TSA_minimum = TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax) 241 | print(TSA_bestParams, TSA_minimum) 242 | -------------------------------------------------------------------------------- /2,beijing_plot/alllll_result.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2 2 | 0,0.004094260554171883,0.0034787041232025254,0.004199702287712542 3 | 1,0.06398640913640867,0.05898054020778824,0.06480511004320988 4 | 2,0.04351054187917684,0.039811158118351286,0.04257286669296288 5 | 3,0.9184931256315859,0.7245499001242842,0.7096849337961804 6 | -------------------------------------------------------------------------------- /2,beijing_plot/calculate_result_beijing.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | df = np.array(pd.read_csv("real.csv"))[:,-1] 5 | df_PSO = np.array(pd.read_csv("PSO_pred.csv"))[:,-1] 6 | df_TSA = np.array(pd.read_csv('TSA_pred.csv'))[:,-1] 7 | df_regular = np.array(pd.read_csv('regular_pred.csv'))[:,-1] 8 | 9 | def MSE(y,yhat): 10 | return np.sum((y - yhat)**2)/len(y) 11 | 12 | def RMSE(y, yhat): 13 | return np.sqrt(MSE(y, yhat)) 14 | 15 | def MAPE(y, yhat): 16 | return np.sum(np.abs((y+1e-12 - yhat))/(y+1e-12))/len(y) 17 | 18 | def MAE(y, yhat): 19 | return np.sum(np.abs(y - yhat))/len(y) 20 | 21 | res = np.zeros((4,3)) 22 | for i in range(4): 23 | for j in range(3): 24 | if i == 0: 25 | if j == 0: 26 | res[i][j] = MSE(df,df_regular) 27 | elif j == 1: 28 | res[i][j] = MSE(df,df_PSO) 29 | elif j == 2: 30 | res[i][j] = MSE(df, df_TSA) 31 | elif i == 1: 32 | if j == 0: 33 | res[i][j] = RMSE(df,df_regular) 34 | elif j == 1: 35 | res[i][j] = RMSE(df,df_PSO) 36 | elif j == 2: 37 | res[i][j] = RMSE(df, df_TSA) 38 | elif i == 2: 39 | if j == 0: 40 | res[i][j] = MAE(df,df_regular) 41 | elif j == 1: 42 | res[i][j] = MAE(df,df_PSO) 43 | elif j == 2: 44 | res[i][j] = MAE(df, df_TSA) 45 | elif i == 3: 46 | if j == 0: 47 | res[i][j] = MAPE(df,df_regular) 48 | elif j == 1: 49 | res[i][j] = MAPE(df,df_PSO) 50 | elif j == 2: 51 | res[i][j] = MAPE(df, df_TSA) 52 | 53 | df = pd.DataFrame(res) 54 | df.to_csv('alllll_result.csv') -------------------------------------------------------------------------------- /2,beijing_plot/demoplot_beijing.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | plt.rcParams['font.sans-serif'] = ['SimHei'] # 用来正常显示中文标签 5 | plt.rcParams['axes.unicode_minus'] = False # 用来正常显示负号 6 | 7 | ## 分辨率 1500*1200 8 | plt.rcParams['figure.dpi'] = 200 9 | plt.rcParams['savefig.dpi'] = 200 10 | 11 | df = pd.read_csv("real.csv") 12 | df_PSO = pd.read_csv("PSO_pred.csv") 13 | df_TSA = pd.read_csv('TSA_pred.csv') 14 | df_regular = pd.read_csv('regular_pred.csv') 15 | 16 | fig, ax = plt.subplots(figsize =(8*np.sqrt(2)+6, 8)) # 创建图实例 17 | 18 | ax.plot(np.linspace(0,df.shape[0]+1,df.shape[0]),df.iloc[:, -1],label='real value',marker='+') 19 | ax.plot(np.linspace(0,df_regular.shape[0]+1,df_regular.shape[0]),df_regular.iloc[:,-1],label='LSTM predicted value',marker='2') 20 | 
ax.plot(np.linspace(0,df_PSO.shape[0]+1,df_PSO.shape[0]),df_PSO.iloc[:,-1],label='PSO_LSTM predicted value',marker='1') 21 | ax.plot(np.linspace(0,df_TSA.shape[0]+1,df_TSA.shape[0]),df_TSA.iloc[:,-1],label='TSA_LSTM predicted value',marker='*') 22 | 23 | 24 | ax.set_xlabel('') # set the x-axis label 25 | ax.set_ylabel('归一化之后的目标特征') # set the y-axis label 26 | ax.set_title('北京PM2.5指数') # set the figure title 27 | ax.legend() # auto-detect the legend entries and show them 28 | 29 | plt.show() # show the figure 30 | -------------------------------------------------------------------------------- /2,beijing_plot/lstm_model.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/2,beijing_plot/lstm_model.pt -------------------------------------------------------------------------------- /2,beijing_plot/train_beijing.py: -------------------------------------------------------------------------------- 1 | ''' The tuned hyperparameters are already known here; only the corresponding values below need to be set. The main change in this script is writing the outputs in a fixed format, so the later plotting and metric scripts can consume them. ''' 2 | import numpy as np 3 | import torch 4 | from torch import nn 5 | import matplotlib.pyplot as plt 6 | import pandas as pd 7 | from torch.autograd import Variable 8 | import math 9 | import csv 10 | from sklearn.preprocessing import MinMaxScaler 11 | import os 12 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE' 13 | 14 | 15 | 16 | # Define LSTM Neural Networks 17 | class LstmRNN(nn.Module): 18 | """ 19 | Parameters: 20 | - input_size: feature size 21 | - hidden_size: number of hidden units 22 | - output_size: number of outputs 23 | - num_layers: layers of LSTM to stack 24 | """ 25 | 26 | def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1): 27 | super().__init__() 28 | 29 | self.lstm = nn.LSTM(input_size, hidden_size, num_layers) # utilize the LSTM model in torch.nn 30 | self.linear1 = nn.Linear(hidden_size, output_size) # fully connected output layer 31 | 32 | def forward(self, _x): 33 | x, _ = self.lstm(_x) # _x is input, size (seq_len, batch, input_size) 34 | s, b, h = x.shape # x is output, size (seq_len, batch, hidden_size) 35 | x = x.view(s * b, h) 36 | x = self.linear1(x) 37 | x = x.view(s, b, -1) 38 | return x 39 | 40 | 41 | if __name__ == '__main__': 42 | 43 | # checking if GPU is available 44 | device = torch.device("cpu") 45 | 46 | if (torch.cuda.is_available()): 47 | device = torch.device("cuda:0") 48 | print('Training on GPU.') 49 | else: 50 | print('No GPU available, training on CPU.') 51 | # PSO-tuned result: 1110.0, 0.0992902903101674, 29.0, 2.0 52 | # set the hyperparameters & read the data 53 | epoch = int(1110) 54 | lr = 0.0992902903101674 55 | cell_size = int(29.0) 56 | num_layer = int(2.0) 57 | name = 'PSO' 58 | 59 | data = np.array(pd.read_csv('PRSA_data_2010.1.1-2014.12.31.csv')).astype('float32') 60 | data = data[~np.isnan(data).any(axis=1), :] 61 | 62 | # normalize 63 | scaler = MinMaxScaler() 64 | data = scaler.fit_transform(data) 65 | data_x = data[:,0:-1] 66 | data_y = data[:,-1] 67 | 68 | # split the dataset 69 | data_len = len(data_x) 70 | t = np.linspace(0, data_len, data_len + 1) 71 | 72 | train_data_ratio = 0.8 # Choose 80% of the data for training 73 | train_data_len = int(data_len * train_data_ratio) 74 | 75 | train_x = data_x[5:train_data_len] 76 | train_y = data_y[5:train_data_len] 77 | t_for_training = t[5:train_data_len] 78 | 79 | test_x = data_x[train_data_len:] 80 | test_y = data_y[train_data_len:] 81 | t_for_testing = t[train_data_len:] 82 | 83 | # ----------------- train ------------------- 84 | INPUT_FEATURES_NUM = train_x.shape[1] 85 | OUTPUT_FEATURES_NUM = 1 86 | train_x_tensor = 
86 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
87 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
88 | 
89 |     # transfer data to pytorch tensors
90 |     train_x_tensor = torch.from_numpy(train_x_tensor)
91 |     train_y_tensor = torch.from_numpy(train_y_tensor)
92 | 
93 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, cell_size, output_size=OUTPUT_FEATURES_NUM, num_layers=num_layer)  # build the LSTM with the tuned sizes
94 |     lstm_model.to(device)
95 |     print('LSTM model:', lstm_model)
96 |     print('model.parameters:', lstm_model.parameters)
97 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
98 | 
99 |     criterion = nn.MSELoss()
100 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)  # train with the tuned learning rate
101 |     prev_loss = 1000  # best training loss seen so far
102 |     max_epochs = epoch
103 | 
104 |     train_x_tensor = train_x_tensor.to(device)
105 |     train_y_tensor = train_y_tensor.to(device)
106 | 
107 |     for epoch in range(max_epochs):
108 |         output = lstm_model(train_x_tensor).to(device)
109 |         loss = criterion(output, train_y_tensor)
110 | 
111 |         optimizer.zero_grad()
112 |         loss.backward()
113 |         optimizer.step()
114 | 
115 |         if loss < prev_loss:
116 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
117 |             prev_loss = loss
118 | 
119 |         if loss.item() < 1e-4:
120 |             print('Epoch [{}/{}], Loss: {:.5f}'.format(epoch + 1, max_epochs, loss.item()))
121 |             print("The loss value is reached")
122 |             break
123 |         else:
124 |             print('Epoch: [{}/{}], Loss:{:.5f}'.format(epoch + 1, max_epochs, loss.item()))
125 | 
126 |     # prediction on training dataset
127 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
128 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
129 | 
130 |     # ----------------- test -------------------
131 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
132 | 
133 |     # prediction on test dataset
134 |     test_x_tensor = test_x.reshape(-1, 1, INPUT_FEATURES_NUM)
135 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
136 |     test_x_tensor = test_x_tensor.to(device)
137 | 
138 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
139 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
140 | 
141 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
142 |     print("test loss:", loss.item())
143 | 
144 |     # ----------------- plot -------------------
145 |     '''plt.figure()
146 |     plt.plot(t_for_training, train_y, 'b', label='y_trn')
147 |     plt.plot(t_for_training, pred_y_for_train, 'y--', label='pre_trn')'''
148 | 
149 | 
150 |     ''' Set name above to TSA, PSO, or regular to match the model whose predictions are being written out '''
151 |     df = pd.DataFrame(test_y)
152 |     df.to_csv(name+'_test.csv')
153 |     df = pd.DataFrame(pred_y_for_test)
154 |     df.to_csv(name+'_pred.csv')
155 | 
--------------------------------------------------------------------------------
/3,family_plot/alllll_result.csv:
--------------------------------------------------------------------------------
1 | ,0,1,2
2 | 0,0.000108252,0.006952396,8.38E-05
3 | 1,0.010404404,0.083381028,0.009155084
4 | 2,0.006453025,0.065583143,0.005860917
5 | 3,0.113814285,1.059018748,0.093030913
6 | 
--------------------------------------------------------------------------------
/3,family_plot/calculate_result_family.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | 
4 | # the last column of each CSV holds the series: real values and the three models' predictions
5 | df = np.array(pd.read_csv("real.csv"))[:,-1]
6 | df_PSO = np.array(pd.read_csv("PSO_pred.csv"))[:,-1]
7 | df_TSA = np.array(pd.read_csv('TSA_pred.csv'))[:,-1]
8 | df_regular = np.array(pd.read_csv('regular_pred.csv'))[:,-1]
9 | 
10 | def MSE(y, yhat):
11 |     return np.sum((y - yhat)**2)/len(y)
12 | 
13 | def RMSE(y, yhat):
14 |     return np.sqrt(MSE(y, yhat))
15 | 
16 | def MAPE(y, yhat):
17 |     # epsilon goes in the denominator only, so zero targets do not divide by zero
18 |     return np.sum(np.abs(y - yhat)/(y + 1e-12))/len(y)
19 | 
20 | def MAE(y, yhat):
21 |     return np.sum(np.abs(y - yhat))/len(y)
22 | 
23 | # rows: MSE, RMSE, MAE, MAPE; columns: plain LSTM, PSO-LSTM, TSA-LSTM
24 | metrics = [MSE, RMSE, MAE, MAPE]
25 | preds = [df_regular, df_PSO, df_TSA]
26 | res = np.array([[metric(df, pred) for pred in preds] for metric in metrics])
27 | 
28 | df = pd.DataFrame(res)
29 | df.to_csv('alllll_result.csv')
--------------------------------------------------------------------------------
/3,family_plot/demoplot_family.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | plt.rcParams['font.sans-serif'] = ['SimHei']  # use SimHei so CJK characters render correctly
5 | plt.rcParams['axes.unicode_minus'] = False  # render the minus sign correctly under a CJK font
6 | 
7 | ## resolution 1500*1200
8 | plt.rcParams['figure.dpi'] = 200
9 | plt.rcParams['savefig.dpi'] = 200
10 | 
11 | df = pd.read_csv("real.csv")
12 | df_PSO = pd.read_csv("PSO_pred.csv")
13 | df_TSA = pd.read_csv('TSA_pred.csv')
14 | df_regular = pd.read_csv('regular_pred.csv')
15 | 
16 | fig, ax = plt.subplots(figsize=(8*np.sqrt(2)+6, 8))  # create the figure and axes
17 | 
18 | ax.plot(np.arange(df.shape[0]), df.iloc[:, -1], label='real value', marker='+')  # x axis: sample index
19 | ax.plot(np.arange(df_regular.shape[0]), df_regular.iloc[:,-1], label='LSTM predicted value', marker='2')
20 | ax.plot(np.arange(df_PSO.shape[0]), df_PSO.iloc[:,-1], label='PSO_LSTM predicted value', marker='1')
21 | ax.plot(np.arange(df_TSA.shape[0]), df_TSA.iloc[:,-1], label='TSA_LSTM predicted value', marker='*')
22 | 
23 | 
24 | ax.set_xlabel('')  # x-axis label (left empty)
25 | ax.set_ylabel('normalized target feature')  # y-axis label
26 | ax.set_title('Household electricity consumption')  # figure title
27 | ax.legend()  # build the legend from the labelled lines
28 | 
29 | plt.show()  # display the figure
30 | 
--------------------------------------------------------------------------------
/3,family_plot/family_power.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/3,family_plot/family_power.xlsx
--------------------------------------------------------------------------------
/3,family_plot/lstm_model.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/3,family_plot/lstm_model.pt
--------------------------------------------------------------------------------
/3,family_plot/train_family.py:
--------------------------------------------------------------------------------
1 | ''' The tuned hyperparameters are already known and only need to be filled in below; the main change in this script is writing the outputs in a fixed format so the later plotting and metric scripts can consume them '''
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import pandas as pd
7 | from torch.autograd import Variable
8 | import math
9 | import csv
10 | from sklearn.preprocessing import MinMaxScaler
11 | import os
12 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE'
13 | 
14 | 
15 | 
16 | # Define LSTM Neural Networks
17 | class LstmRNN(nn.Module):
18 |     """
19 |     Parameters:
20 |     - input_size: feature size
21 |     - hidden_size: number of hidden units
22 |     - output_size: number of output features
23 |     - num_layers: number of stacked LSTM layers
24 |     """
25 | 
26 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
27 |         super().__init__()
28 | 
29 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # utilize the LSTM model in torch.nn
30 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
31 | 
32 |     def forward(self, _x):
33 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
34 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
35 |         x = x.view(s * b, h)
36 |         x = self.linear1(x)
37 |         x = x.view(s, b, -1)
38 |         return x
39 | 
40 | 
41 | if __name__ == '__main__':
42 | 
43 |     # checking if GPU is available
44 |     device = torch.device("cpu")
45 | 
46 |     if torch.cuda.is_available():
47 |         device = torch.device("cuda:0")
48 |         print('Training on GPU.')
49 |     else:
50 |         print('No GPU available, training on CPU.')
51 |     # stray optimizer log row kept for reference (epochs, lr, hidden units, layers, loss): 1896.80, 0.01977, 24.59, 3.63, 0.04309; the plain-LSTM baseline below uses its own settings
52 |     # set the hyperparameters, then load the data & convert types
53 |     epoch = 2000
54 |     lr = 0.01
55 |     cell_size = 20
56 |     num_layer = 1
57 |     name = 'regular'  # prefix for the output CSVs: 'PSO', 'TSA', or 'regular'
58 | 
59 |     data = np.array(pd.read_excel('family_power.xlsx')).astype('float32')
60 |     data = data[~np.isnan(data).any(axis=1), :]  # drop rows with missing values
61 | 
62 |     # min-max normalisation
63 |     scaler = MinMaxScaler()
64 |     data = scaler.fit_transform(data)
65 |     data_x = data[:,0:-1]
66 |     data_y = data[:,-1]
67 | 
68 |     # train/test split
69 |     data_len = len(data_x)
70 |     t = np.linspace(0, data_len, data_len + 1)
71 | 
72 |     train_data_ratio = 0.8  # Choose 80% of the data for training
73 |     train_data_len = int(data_len * train_data_ratio)
74 | 
75 |     train_x = data_x[5:train_data_len]  # skip the first 5 samples
76 |     train_y = data_y[5:train_data_len]
77 |     t_for_training = t[5:train_data_len]
78 | 
79 |     test_x = data_x[train_data_len:]
80 |     test_y = data_y[train_data_len:]
81 |     t_for_testing = t[train_data_len:]
82 | 
83 |     # ----------------- train -------------------
84 |     INPUT_FEATURES_NUM = train_x.shape[1]
85 |     OUTPUT_FEATURES_NUM = 1
86 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
87 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
88 | 
89 |     # transfer data to pytorch tensors
90 |     train_x_tensor = torch.from_numpy(train_x_tensor)
91 |     train_y_tensor = torch.from_numpy(train_y_tensor)
92 | 
93 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, cell_size, output_size=OUTPUT_FEATURES_NUM, num_layers=num_layer)  # build the LSTM with the chosen sizes
94 |     lstm_model.to(device)
95 |     print('LSTM model:', lstm_model)
96 |     print('model.parameters:', lstm_model.parameters)
97 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
98 | 
99 |     criterion = nn.MSELoss()
100 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)  # train with the configured learning rate
101 |     prev_loss = 1000  # best training loss seen so far
102 |     max_epochs = epoch
103 | 
104 |     train_x_tensor = train_x_tensor.to(device)
105 |     train_y_tensor = train_y_tensor.to(device)
106 | 
107 |     for epoch in range(max_epochs):
108 |         output = lstm_model(train_x_tensor).to(device)
109 |         loss = criterion(output, train_y_tensor)
110 | 
111 |         optimizer.zero_grad()
112 |         loss.backward()
113 |         optimizer.step()
114 | 
115 |         if loss < prev_loss:
116 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
117 |             prev_loss = loss
118 | 
119 |         if loss.item() < 1e-7:
120 |             print('Epoch [{}/{}], Loss: {:.5f}'.format(epoch + 1, max_epochs, loss.item()))
121 |             print("The loss value is reached")
122 |             break
123 |         else:
124 |             print('Epoch: [{}/{}], Loss:{:.5f}'.format(epoch + 1, max_epochs, loss.item()))
125 | 
126 |     # prediction on training dataset
127 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
128 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
129 | 
130 |     # ----------------- test -------------------
131 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
132 | 
133 |     # prediction on test dataset
134 |     test_x_tensor = test_x.reshape(-1, 1, INPUT_FEATURES_NUM)
135 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
136 |     test_x_tensor = test_x_tensor.to(device)
137 | 
138 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
139 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
140 | 
141 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
142 |     print("test loss:", loss.item())
143 | 
144 |     # ----------------- plot -------------------
145 |     '''plt.figure()
146 |     plt.plot(t_for_training, train_y, 'b', label='y_trn')
147 |     plt.plot(t_for_training, pred_y_for_train, 'y--', label='pre_trn')'''
148 | 
149 | 
150 |     ''' Set name above to TSA, PSO, or regular to match the model whose predictions are being written out '''
151 |     df = pd.DataFrame(test_y)
152 |     df.to_csv(name+'_test.csv')
153 |     df = pd.DataFrame(pred_y_for_test)
154 |     df.to_csv(name+'_pred.csv')
155 | 
--------------------------------------------------------------------------------
/3,family_power/PSO_result.csv:
--------------------------------------------------------------------------------
1 | 1353.0,0.07666353964720257,27.0,4.0,0.06295545399188995
2 | 1353.0,0.07666353964720257,27.0,4.0,0.06295545399188995
3 | 1353.0,0.07666353964720257,27.0,4.0,0.06295545399188995
4 | 1353.0,0.07666353964720257,27.0,4.0,0.06295545399188995
5 | 1353.0,0.07666353964720257,27.0,4.0,0.06295545399188995
6 | 1633.0,0.07350565324291064,29.0,2.0,0.062680684030056
7 | 1633.0,0.07350565324291064,29.0,2.0,0.062680684030056
8 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
9 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
10 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
11 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
12 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
13 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
14 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
15 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
16 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
17 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
18 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
19 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
20 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
21 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
22 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
23 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
24 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
25 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727
26 | 
1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727 27 | 1353.0,0.07666353964720257,27.0,4.0,0.0613149031996727 28 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 29 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 30 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 31 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 32 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 33 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 34 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 35 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 36 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 37 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 38 | 1353.0,0.07666353964720257,27.0,4.0,0.044906288385391235 39 | 1238.577925577911,0.09968687988943253,27.38710490112834,3.5087865104145686,0.044113870710134506 40 | 1238.577925577911,0.09968687988943253,27.38710490112834,3.5087865104145686,0.044113870710134506 41 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 42 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 43 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 44 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 45 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 46 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 47 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 48 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 49 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 50 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 51 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 52 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 53 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 54 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 55 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 56 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 57 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 58 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.044113870710134506 59 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.03702947869896889 60 | 1418.039913397343,0.08995404222304282,26.918175749798223,4.286594804970856,0.03702947869896889 61 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 62 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 63 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 64 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 65 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 66 | 
1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 67 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 68 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 69 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 70 | 1299.1658478046365,0.09640098955275637,27.22879035610491,3.7713813308023525,0.03702947869896889 71 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 72 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 73 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 74 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 75 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 76 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 77 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 78 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 79 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 80 | 1324.3403239108382,0.09503569157375151,27.163010154631053,3.8804903213157877,0.03702947869896889 81 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 82 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 83 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 84 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 85 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 86 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 87 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 88 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 89 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 90 | 1389.4274604398665,0.09150579345045332,26.99293928856122,4.162585238887597,0.03702947869896889 91 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 92 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 93 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 94 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 95 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 96 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 97 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 98 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 99 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 100 | 1454.016857483397,0.08800288944257403,26.824169001836093,4.442522897949945,0.03702947869896889 101 | 
1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 102 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 103 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 104 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 105 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 106 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 107 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 108 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 109 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 110 | 1462.9112414245874,0.08752051656743765,26.80092822553675,4.481072151128768,0.03702947869896889 111 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 112 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 113 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 114 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 115 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 116 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 117 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 118 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 119 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 120 | 1405.7546910322474,0.09062031186837452,26.95027669199531,4.23334927943007,0.03702947869896889 121 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 122 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 123 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 124 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 125 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 126 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 127 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 128 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 129 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 130 | 1379.5785985452967,0.09203993093094855,27.01867408903849,4.119899171678515,0.03702947869896889 131 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 132 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 133 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 134 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 135 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 136 | 
1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 137 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 138 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 139 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 140 | 1390.7224509511084,0.09143556168394622,26.98955551459997,4.168197872366443,0.03702947869896889 141 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 142 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 143 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 144 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 145 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 146 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 147 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 148 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 149 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 150 | 1449.1535190330276,0.0882666449276731,26.836876769090168,4.421444646048836,0.03702947869896889 151 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 152 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 153 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 154 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 155 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 156 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 157 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 158 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 159 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 160 | 1430.939241326296,0.08925446753169176,26.884470166950567,4.342501932598553,0.03702947869896889 161 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 162 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 163 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 164 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 165 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 166 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 167 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 168 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 169 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 170 | 1407.6715750424119,0.09051635268948144,26.94526792772714,4.241657268776028,0.03702947869896889 171 | 
1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 172 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 173 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 174 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 175 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 176 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 177 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 178 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 179 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 180 | 1407.8394243035234,0.0905072496497198,26.944829342305677,4.242384746216315,0.03702947869896889 181 | 1418.3444142264761,0.0899375281011653,26.91738009765417,4.287914545565935,0.03702947869896889 182 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 183 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 184 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 185 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 186 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 187 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 188 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 189 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 190 | 1421.44016134074,0.08787751753004554,26.800965722122125,4.28922363160884,0.03360109403729439 191 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 192 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 193 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 194 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 195 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 196 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 197 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 198 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 199 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 200 | 1423.6230196699296,0.08754459316507522,26.783038662548837,4.297316416531301,0.03360109403729439 201 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 202 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 203 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 204 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 205 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 206 | 
1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 207 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 208 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 209 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 210 | 1422.1564518779123,0.08776827059952627,26.79508307597878,4.291879225506903,0.03360109403729439 211 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 212 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 213 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 214 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 215 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 216 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 217 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 218 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 219 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 220 | 1420.667318961568,0.0879953896131154,26.80731280896093,4.286358375962164,0.03360109403729439 221 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 222 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 223 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 224 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 225 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 226 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 227 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 228 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 229 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 230 | 1421.2326388988697,0.08790916835971818,26.802670032075937,4.288454257588464,0.03360109403729439 231 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 232 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 233 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 234 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 235 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 236 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 237 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 238 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 239 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 240 | 1421.8224123934904,0.08781921750787074,26.79782642659251,4.290640798959981,0.03360109403729439 241 | 
1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 242 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 243 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 244 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 245 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 246 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 247 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 248 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 249 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 250 | 1421.9112952969983,0.0878056612986512,26.797096462084543,4.290970325714766,0.03360109403729439 251 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 252 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 253 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 254 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 255 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 256 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 257 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 258 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 259 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 260 | 1421.4309267581586,0.08787892596663553,26.801041562551095,4.28918939508067,0.03360109403729439 261 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 262 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 263 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 264 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 265 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 266 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 267 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 268 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 269 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 270 | 1421.2011667220097,0.08791396842134165,26.802928502166242,4.288337576833253,0.03360109403729439 271 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 272 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 273 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 274 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 275 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 276 | 
1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 277 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 278 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 279 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 280 | 1421.1773035196597,0.08791760798025496,26.803124482383748,4.288249105785179,0.03360109403729439 281 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 282 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 283 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 284 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 285 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 286 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 287 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 288 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 289 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 290 | 1421.2180127462864,0.08791139910575227,26.802790151602334,4.288400032215467,0.03360109403729439 291 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 292 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 293 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 294 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 295 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 296 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 297 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 298 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 299 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 300 | 1421.4674360617457,0.08787335765441406,26.80074172428325,4.289324750608624,0.03360109403729439 301 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 302 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 303 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 304 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 305 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 306 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 307 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 308 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 309 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 310 | 1421.5650601218404,0.08785846826476461,26.799939971683134,4.2896866845514,0.03360109403729439 311 | 
1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 312 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 313 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 314 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 315 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 316 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 317 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 318 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 319 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 320 | 1421.5493831051951,0.08786085928608807,26.800068721592172,4.289628563178044,0.03360109403729439 321 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 322 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 323 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 324 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 325 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 326 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 327 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 328 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 329 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 330 | 1421.3678749631902,0.0878885424765663,26.801559385109478,4.288955635239808,0.03360109403729439 331 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 332 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 333 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 334 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 335 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 336 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 337 | 1421.345503976726,0.08789195444625889,26.801743110274643,4.2888726964711585,0.03360109403729439 338 | -------------------------------------------------------------------------------- /3,family_power/TSA_result.csv: -------------------------------------------------------------------------------- 1 | 1986.0,0.07840994980331037,22.0,4.0,nan 2 | -------------------------------------------------------------------------------- /3,family_power/family_power.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/3,family_power/family_power.xlsx -------------------------------------------------------------------------------- /3,family_power/lstm_model.pt: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/3,family_power/lstm_model.pt
--------------------------------------------------------------------------------
/3,family_power/new_begin_PSO_family_power.py:
--------------------------------------------------------------------------------
1 | import scipy.io as sio
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import matplotlib
7 | import pandas as pd
8 | from torch.autograd import Variable
9 | from sklearn.preprocessing import MinMaxScaler
10 | import math
11 | import csv
12 | import os
13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE'
14 | 
15 | # Define LSTM Neural Networks
16 | class LstmRNN(nn.Module):
17 |     """
18 |     Parameters:
19 |     - input_size: feature size
20 |     - hidden_size: number of hidden units
21 |     - output_size: number of output features
22 |     - num_layers: number of stacked LSTM layers
23 |     """
24 | 
25 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
26 |         super().__init__()
27 | 
28 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # utilize the LSTM model in torch.nn
29 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
30 | 
31 |     def forward(self, _x):
32 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
33 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
34 |         x = x.view(s * b, h)
35 |         x = self.linear1(x)
36 |         x = x.view(s, b, -1)
37 |         return x
38 | 
39 | 
40 | def train_LSTM(X):
41 |     # fitness function: train an LSTM with the candidate hyperparameters X and return the test loss
42 |     device = torch.device("cuda:0")  # assumes a CUDA GPU is available
43 | 
44 |     epoch = int(X[0])
45 |     lr = X[1]
46 |     hidden_size = int(X[2])
47 |     num_layers = int(X[3])
48 | 
49 |     # load data & convert types
50 |     data = np.array(pd.read_excel('family_power.xlsx')).astype('float32')
51 |     data = data[~np.isnan(data).any(axis=1), :]
52 | 
53 |     # min-max normalisation
54 |     scaler = MinMaxScaler()
55 |     data = scaler.fit_transform(data)
56 |     data_x = data[:, 0:-1]
57 |     data_y = data[:, -1]
58 | 
59 |     # train/test split
60 |     data_len = len(data_x)
61 |     t = np.linspace(0, data_len, data_len + 1)
62 | 
63 |     train_data_ratio = 0.8  # Choose 80% of the data for training
64 |     train_data_len = int(data_len * train_data_ratio)
65 | 
66 |     train_x = data_x[5:train_data_len]
67 |     train_y = data_y[5:train_data_len]
68 |     t_for_training = t[5:train_data_len]
69 | 
70 |     test_x = data_x[train_data_len:]
71 |     test_y = data_y[train_data_len:]
72 |     t_for_testing = t[train_data_len:]
73 | 
74 |     # ----------------- train -------------------
75 |     INPUT_FEATURES_NUM = train_x.shape[1]
76 |     OUTPUT_FEATURES_NUM = 1
77 | 
78 | 
79 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
80 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
81 | 
82 |     # transfer data to pytorch tensors
83 |     train_x_tensor = torch.from_numpy(train_x_tensor)
84 |     train_y_tensor = torch.from_numpy(train_y_tensor)
85 | 
86 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM,
87 |                          num_layers=num_layers)  # hidden_size units taken from the candidate X
88 |     lstm_model.to(device)
89 |     print('LSTM model:', lstm_model)
90 |     print('model.parameters:', lstm_model.parameters)
91 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
92 | 
93 |     criterion = nn.MSELoss()
94 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)
95 | 
96 |     prev_loss = 1000
97 |     max_epochs = epoch
98 | 
99 |     train_x_tensor = train_x_tensor.to(device)
100 |     train_y_tensor = train_y_tensor.to(device)
101 |     for epoch in range(max_epochs):
102 |         output = lstm_model(train_x_tensor).to(device)
103 |         loss = criterion(output, train_y_tensor)
104 | 
105 |         optimizer.zero_grad()
106 |         loss.backward()
107 |         optimizer.step()
108 | 
109 |         if loss < prev_loss:
110 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
111 |             prev_loss = loss
112 | 
113 |         '''if loss.item() < 1e-4:
114 |             print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item()))
115 |             print("The loss value is reached")
116 |             break
117 |         else:
118 |             print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))'''
119 | 
120 |     # prediction on training dataset
121 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
122 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
123 | 
124 |     # ----------------- test -------------------
125 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
126 | 
127 |     # prediction on test dataset
128 |     test_x_tensor = test_x.reshape(-1, 1,
129 |                                    INPUT_FEATURES_NUM)
130 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
131 |     test_x_tensor = test_x_tensor.to(device)
132 | 
133 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
134 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
135 | 
136 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
137 | 
138 |     print(X)
139 |     print("test loss:", loss.item())
140 | 
141 |     return loss.item()
142 | 
143 | 
144 | def PSO_LSTM(N, D, dmin, dmax, maxiter):
145 |     c1 = 2
146 |     c2 = 2
147 |     w = 0.5
148 |     pN = N  # number of particles
149 |     dim = D  # search-space dimension
150 | 
151 |     DOWN = dmin
152 |     UP = dmax
153 | 
154 |     X = np.zeros((pN, dim))  # particle positions
155 |     V = np.zeros((pN, dim))  # particle velocities
156 |     pbest = np.zeros((pN, dim))  # best position visited by each particle
157 |     gbest = np.zeros(dim)  # best position visited by the whole swarm
158 |     p_fit = np.zeros(pN)  # best fitness seen by each particle
159 | 
160 |     fit = 1  # global best fitness (test losses here are below 1)
161 |     for i_episode in range(maxiter):
162 |         """initialisation: each episode re-seeds the swarm while fit and gbest persist"""
163 |         np.random.seed()
164 |         # evaluate the initial particles
165 |         print("computing the initial global best")
166 |         for i in range(pN):
167 |             for j in range(dim):
168 |                 V[i][j] = np.random.random()
169 |                 if j == 1:  # dimension 1 (learning rate) is continuous
170 |                     X[i][j] = DOWN[j] + (UP[j] - DOWN[j])*np.random.random()
171 |                 else:  # the other dimensions are integer-valued
172 |                     X[i][j] = int(DOWN[j] + (UP[j] - DOWN[j])*np.random.random())
173 |             pbest[i] = X[i]  # personal best
174 | 
175 |             p_fit[i] = train_LSTM(X[i])
176 |             if p_fit[i] < fit:
177 |                 fit = p_fit[i]
178 |                 gbest = X[i]
179 | 
180 |         for j in range(maxiter):
181 | 
182 |             for i in range(pN):
183 |                 temp = train_LSTM(X[i])
184 |                 with open('PSO_result.csv', 'a+', newline='') as f:
185 |                     csv_write = csv.writer(f)
186 |                     csv_write.writerow(np.append(gbest, fit))  # logged for plotting the convergence curve later
187 |                 if temp < p_fit[i]:  # update the personal best
188 |                     p_fit[i] = temp
189 |                     pbest[i] = X[i]
190 |                     if p_fit[i] < fit:  # update the global best
191 |                         gbest = X[i]
192 |                         fit = p_fit[i]
193 | 
194 |             # update velocities and positions
195 |             for i in range(pN):
196 |                 # fractional positions are fine here; integer-valued dimensions are rounded inside train_LSTM
197 |                 V[i] = w * V[i] + c1 * np.random.random() * (pbest[i] - X[i]) + c2 * np.random.random() * (gbest - X[i])
198 |                 ww = 1
199 |                 for k in range(dim):
200 |                     if DOWN[k] < X[i][k] + V[i][k] < UP[k]:
201 |                         continue
202 |                     else:
203 |                         ww = 0  # freeze the whole move if any dimension would leave the bounds
204 |                 X[i] = X[i] + V[i] * ww
205 |     return gbest, fit
206 | 
207 | if __name__ == '__main__':
208 |     N = 10
209 |     D = 4
210 |     dmin = [500, 0.00001, 5, 1]
211 |     dmax = [2000, 0.1, 30, 5]
212 |     maxiter = 70
213 |     PSO_bestParams, PSO_minimum = PSO_LSTM(N, D, dmin, dmax, maxiter)
214 |     print(PSO_bestParams, PSO_minimum)
215 | 
--------------------------------------------------------------------------------
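The PSO driver above is expensive to experiment with because every fitness evaluation trains an LSTM. Below is a minimal, self-contained sketch of the same update rule (inertia w, cognitive term c1, social term c2) with a toy sphere objective standing in for train_LSTM. The names pso and sphere and all hyperparameter values are illustrative, not part of the repository, and it clips out-of-bounds positions instead of freezing the move as the script above does:

```python
import numpy as np

def sphere(x):
    # toy objective standing in for train_LSTM: minimum 0 at the origin
    return float(np.sum(x ** 2))

def pso(obj, n_particles=10, dim=4, lo=-5.0, hi=5.0, iters=50,
        w=0.5, c1=2.0, c2=2.0):
    rng = np.random.default_rng(0)
    X = rng.uniform(lo, hi, (n_particles, dim))   # positions
    V = rng.uniform(-1.0, 1.0, (n_particles, dim))  # velocities
    pbest = X.copy()                              # personal best positions
    p_fit = np.array([obj(x) for x in X])         # personal best fitness
    g = int(np.argmin(p_fit))
    gbest, g_fit = pbest[g].copy(), p_fit[g]      # global best
    for _ in range(iters):
        r1, r2 = rng.random((2, n_particles, dim))
        V = w * V + c1 * r1 * (pbest - X) + c2 * r2 * (gbest - X)
        X = np.clip(X + V, lo, hi)                # keep particles in bounds
        fit = np.array([obj(x) for x in X])
        better = fit < p_fit                      # particles that improved
        pbest[better], p_fit[better] = X[better], fit[better]
        g = int(np.argmin(p_fit))
        if p_fit[g] < g_fit:
            gbest, g_fit = pbest[g].copy(), p_fit[g]
    return gbest, g_fit

print(pso(sphere))
```

On the sphere function the swarm collapses toward the origin within a few dozen iterations; swapping sphere for a train-and-evaluate routine such as train_LSTM recovers the scheme used in new_begin_PSO_family_power.py.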
/3,family_power/new_begin_TSA_family_power.py:
--------------------------------------------------------------------------------
1 | import scipy.io as sio
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import matplotlib
7 | import pandas as pd
8 | from torch.autograd import Variable
9 | from sklearn.preprocessing import MinMaxScaler
10 | import math
11 | import csv
12 | import os
13 | os.environ['KMP_DUPLICATE_LIB_OK']='TRUE'
14 | 
15 | # Define LSTM Neural Networks
16 | class LstmRNN(nn.Module):
17 |     """
18 |     Parameters:
19 |     - input_size: feature size
20 |     - hidden_size: number of hidden units
21 |     - output_size: number of output features
22 |     - num_layers: number of stacked LSTM layers
23 |     """
24 | 
25 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
26 |         super().__init__()
27 | 
28 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # utilize the LSTM model in torch.nn
29 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
30 | 
31 |     def forward(self, _x):
32 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
33 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
34 |         x = x.view(s * b, h)
35 |         x = self.linear1(x)
36 |         x = x.view(s, b, -1)
37 |         return x
38 | 
39 | 
40 | def train_LSTM(X):
41 |     # fitness function: train an LSTM with the candidate hyperparameters X and return the test loss
42 |     device = torch.device("cuda:0")  # assumes a CUDA GPU is available
43 | 
44 |     epoch = int(X[0])
45 |     lr = X[1]
46 |     hidden_size = int(X[2])
47 |     num_layers = int(X[3])
48 | 
49 |     # load data & convert types
50 |     data = np.array(pd.read_excel('family_power.xlsx'))
51 |     data = data.astype('float32')
52 |     data = data[~np.isnan(data).any(axis=1), :]
53 | 
54 |     # min-max normalisation
55 |     scaler = MinMaxScaler()
56 |     data = scaler.fit_transform(data)
57 |     data_x = data[:, 0:-1]
58 |     data_y = data[:, -1]
59 | 
60 |     # train/test split
61 |     data_len = len(data_x)
62 |     t = np.linspace(0, data_len, data_len + 1)
63 | 
64 |     train_data_ratio = 0.8  # Choose 80% of the data for training
65 |     train_data_len = int(data_len * train_data_ratio)
66 | 
67 |     train_x = data_x[5:train_data_len]
68 |     train_y = data_y[5:train_data_len]
69 |     t_for_training = t[5:train_data_len]
70 | 
71 |     test_x = data_x[train_data_len:]
72 |     test_y = data_y[train_data_len:]
73 |     t_for_testing = t[train_data_len:]
74 | 
75 |     # ----------------- train -------------------
76 |     INPUT_FEATURES_NUM = train_x.shape[1]
77 |     OUTPUT_FEATURES_NUM = 1
78 | 
79 | 
80 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
81 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
82 | 
83 |     # transfer data to pytorch tensors
84 |     train_x_tensor = torch.from_numpy(train_x_tensor)
85 |     train_y_tensor = torch.from_numpy(train_y_tensor)
86 | 
87 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM,
88 |                          num_layers=num_layers)  # hidden_size units taken from the candidate X
89 |     lstm_model.to(device)
90 |     print('LSTM model:', lstm_model)
91 |     print('model.parameters:', lstm_model.parameters)
92 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
93 | 
94 |     criterion = nn.MSELoss()
95 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)
96 | 
97 |     prev_loss = 1000
98 |     max_epochs = epoch
99 | 
100 |     train_x_tensor = train_x_tensor.to(device)
101 |     train_y_tensor = train_y_tensor.to(device)
102 |     for epoch in range(max_epochs):
103 |         output = lstm_model(train_x_tensor).to(device)
104 |         loss = criterion(output, train_y_tensor)
105 | 
106 |         optimizer.zero_grad()
107 |         loss.backward()
108 |         optimizer.step()
109 | 
110 |         if loss < prev_loss:
111 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
112 |             prev_loss = loss
113 | 
114 |         '''if loss.item() < 1e-4:
115 |             print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item()))
116 |             print("The loss value is reached")
117 |             break
118 |         else:
119 |             print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))'''
120 | 
121 |     # prediction on training dataset
122 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
123 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
124 | 
125 |     # ----------------- test -------------------
126 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
127 | 
128 |     # prediction on test dataset
129 |     test_x_tensor = test_x.reshape(-1, 1,
130 |                                    INPUT_FEATURES_NUM)
131 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
132 |     test_x_tensor = test_x_tensor.to(device)
133 | 
134 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
135 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
136 | 
137 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
138 | 
139 |     print(X)
140 |     print("test loss:", loss.item())
141 |     return loss.item()
142 | 
143 | 
144 | 
145 | def TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax):
146 |     low = int(N * 0.1)  # lower bound on the number of seeds per tree
147 |     high = int(N * 0.25)  # upper bound on the number of seeds per tree
148 | 
149 |     for run in range(maxrun):
150 |         np.random.seed()
151 |         # one independent repetition of the search
152 |         trees = np.zeros((N, D))
153 |         obj_trees = np.zeros((N, 1))  # objective value of each tree
154 | 
155 |         for i in range(N):
156 |             for j in range(D):
157 |                 if j == 1:  # dimension 1 (learning rate) is continuous
158 |                     trees[i, j] = dmin[j] + np.random.rand() * (dmax[j] - dmin[j])
159 |                 else:  # the other dimensions are integer-valued
160 |                     trees[i, j] = int(dmin[j] + np.random.rand() * (dmax[j] - dmin[j]))
161 |             obj_trees[i] = train_LSTM(trees[i, :])
162 |         FEs = N  # N evaluations already spent on the initial population
163 | 
164 |         minimum = np.min(obj_trees)
165 |         iter1 = 0
166 | 
167 |         while (FEs <= maxFEs):
168 |             iter1 = iter1 + 1
169 |             for i in range(N):
170 |                 # i indexes the trees
171 |                 ns = int(low + (high - low) * np.random.rand()) + 1  # number of seeds for this tree
172 |                 if ns > high:
173 |                     ns = high
174 |                 FEs = FEs + ns
175 | 
176 |                 seeds = np.zeros((ns, D))  # the seeds generated for the current tree
177 |                 obj_seeds = np.zeros((ns, 1))
178 |                 minimum, min_index = np.min(obj_trees), np.argmin(obj_trees)
179 |                 bestParams = trees[min_index, :]
180 | 
181 |                 for j in range(ns):
182 |                     # j indexes the seeds
183 |                     komus = int(np.random.rand() * N)  # pick a random partner tree different from i
184 |                     while komus == i:
185 |                         komus = int(np.random.rand() * N)
186 |                     seeds[j, :] = trees[i, :]  # start the seed from the current tree
187 | 
188 |                     for d in range(D):
189 |                         if np.random.rand() < ST:  # with probability ST, search around the best tree
190 |                             if d == 1:
191 |                                 seeds[j, d] = iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2
192 |                             else:
193 |                                 seeds[j, d] = int(iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2)
194 |                             if seeds[j, d] > dmax[d]:
195 |                                 seeds[j, d] = dmax[d]
196 |                             if seeds[j, d] < dmin[d]:
197 |                                 seeds[j, d] = dmin[d]
198 |                         else:  # otherwise search around the current tree
199 |                             if d == 1:
200 |                                 seeds[j, d] = iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2
201 |                             else:
202 |                                 seeds[j, d] = int(iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2)
203 |                             if seeds[j, d] > dmax[d]:
204 |                                 seeds[j, d] = dmax[d]
205 |                             if seeds[j, d] < dmin[d]:
206 |                                 seeds[j, d] = dmin[d]
207 |                     obj_seeds[j] = train_LSTM(seeds[j, :])
208 | 
209 |                 mini_seeds, mini_seeds_ind = np.min(obj_seeds), np.argmin(obj_seeds)
210 | 
211 |                 if mini_seeds < obj_trees[i]:
212 |                     trees[i, :] = seeds[mini_seeds_ind, :]
213 | obj_trees[i] = mini_seeds 214 | 215 | min_tree, min_tree_index = np.min(obj_trees), np.argmin(obj_trees) 216 | if min_tree < minimum: 217 | minimum = min_tree 218 | bestParams = trees[min_tree_index, :] 219 | 220 | print('Iter={} .... min={} .... FES={} .... \n'.format(iter1, minimum, FEs)) 221 | with open('TSA_result.csv','a+',newline='') as f: 222 | csv_write = csv.writer(f) 223 | csv_write.writerow(np.append(bestParams,minimum))#用于以后画收敛曲线 224 | print('Run={} .... min={} ....\n'.format(run, minimum)) 225 | 226 | return bestParams,minimum 227 | 228 | if __name__ == '__main__': 229 | iw = 1 230 | maxrun = 1 231 | N = 20 232 | D = 4 233 | ST = 0.1 234 | maxFEs = 50 235 | dmin = [500, 0.00001, 5, 1] 236 | dmax = [2000, 0.1, 30, 5] 237 | TSA_bestParams, TSA_minimum = TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax) 238 | print(TSA_bestParams, TSA_minimum) 239 | -------------------------------------------------------------------------------- /4,airplot/AirQualityUCI.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/4,airplot/AirQualityUCI.xlsx -------------------------------------------------------------------------------- /4,airplot/alllll_result.csv: -------------------------------------------------------------------------------- 1 | ,0,1,2 2 | 0,0.04318543226485264,0.04358064825495645,0.039943404754106224 3 | 1,0.20781104942916928,0.20875978601003703,0.19985846180261227 4 | 2,0.08090498072115385,0.08702495868376067,0.07784469455662393 5 | 3,42636609716.87188,40838290144.23076,40710944684.81946 6 | -------------------------------------------------------------------------------- /4,airplot/calculate_result_air.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | df = np.array(pd.read_csv("real.csv"))[:,-1] 5 | df_PSO = np.array(pd.read_csv("PSO_pred.csv"))[:,-1] 6 | df_TSA = np.array(pd.read_csv('TSA_pred.csv'))[:,-1] 7 | df_regular = np.array(pd.read_csv('regular_pred.csv'))[:,-1] 8 | 9 | def MSE(y,yhat): 10 | return np.sum((y - yhat)**2)/len(y) 11 | 12 | def RMSE(y, yhat): 13 | return np.sqrt(MSE(y, yhat)) 14 | 15 | def MAPE(y, yhat): 16 | return np.sum(np.abs((y+1e-12 - yhat))/(y+1e-12))/len(y) 17 | 18 | def MAE(y, yhat): 19 | return np.sum(np.abs(y - yhat))/len(y) 20 | 21 | res = np.zeros((4,3)) 22 | for i in range(4): 23 | for j in range(3): 24 | if i == 0: 25 | if j == 0: 26 | res[i][j] = MSE(df,df_regular) 27 | elif j == 1: 28 | res[i][j] = MSE(df,df_PSO) 29 | elif j == 2: 30 | res[i][j] = MSE(df, df_TSA) 31 | elif i == 1: 32 | if j == 0: 33 | res[i][j] = RMSE(df,df_regular) 34 | elif j == 1: 35 | res[i][j] = RMSE(df,df_PSO) 36 | elif j == 2: 37 | res[i][j] = RMSE(df, df_TSA) 38 | elif i == 2: 39 | if j == 0: 40 | res[i][j] = MAE(df,df_regular) 41 | elif j == 1: 42 | res[i][j] = MAE(df,df_PSO) 43 | elif j == 2: 44 | res[i][j] = MAE(df, df_TSA) 45 | elif i == 3: 46 | if j == 0: 47 | res[i][j] = MAPE(df,df_regular) 48 | elif j == 1: 49 | res[i][j] = MAPE(df,df_PSO) 50 | elif j == 2: 51 | res[i][j] = MAPE(df, df_TSA) 52 | 53 | df = pd.DataFrame(res) 54 | df.to_csv('alllll_result.csv') -------------------------------------------------------------------------------- /4,airplot/demoplot_air.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | 
4 | plt.rcParams['font.sans-serif'] = ['SimHei']  # a font that can also render CJK labels
5 | plt.rcParams['axes.unicode_minus'] = False    # render the minus sign correctly with that font
6 |
7 | # output resolution
8 | plt.rcParams['figure.dpi'] = 200
9 | plt.rcParams['savefig.dpi'] = 200
10 |
11 | df = pd.read_csv("real.csv")
12 | df_PSO = pd.read_csv("PSO_pred.csv")
13 | df_TSA = pd.read_csv('TSA_pred.csv')
14 | df_regular = pd.read_csv('regular_pred.csv')
15 |
16 | fig, ax = plt.subplots(figsize=(8 * np.sqrt(2) + 6, 8))  # create the figure
17 |
18 | ax.plot(np.linspace(0, df.shape[0] + 1, df.shape[0]), df.iloc[:, -1], label='real value', marker='+')
19 | ax.plot(np.linspace(0, df_regular.shape[0] + 1, df_regular.shape[0]), df_regular.iloc[:, -1], label='LSTM predicted value', marker='2')
20 | ax.plot(np.linspace(0, df_PSO.shape[0] + 1, df_PSO.shape[0]), df_PSO.iloc[:, -1], label='PSO_LSTM predicted value', marker='1')
21 | ax.plot(np.linspace(0, df_TSA.shape[0] + 1, df_TSA.shape[0]), df_TSA.iloc[:, -1], label='TSA_LSTM predicted value', marker='*')
22 |
23 |
24 | ax.set_xlabel('')  # x-axis label (left empty)
25 | ax.set_ylabel('normalized target feature')  # y-axis label
26 | ax.set_title('Air quality prediction')  # figure title
27 | ax.legend()  # show the legend
28 |
29 | plt.show()  # display the figure
30 |
--------------------------------------------------------------------------------
/4,airplot/lstm_model.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/4,airplot/lstm_model.pt
--------------------------------------------------------------------------------
/4,airplot/train_air.py:
--------------------------------------------------------------------------------
1 | ''' The hyperparameters are already known here, so just set them below. The main change in this script is writing the outputs in a fixed format, to make the later plotting and metric calculations easier. '''
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import pandas as pd
7 | from torch.autograd import Variable
8 | import math
9 | import csv
10 | from sklearn.preprocessing import MinMaxScaler
11 | import os
12 | os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'
13 |
14 |
15 |
16 | # Define LSTM Neural Networks
17 | class LstmRNN(nn.Module):
18 |     """
19 |     Parameters:
20 |     - input_size: feature size
21 |     - hidden_size: number of hidden units
22 |     - output_size: number of outputs
23 |     - num_layers: layers of LSTM to stack
24 |     """
25 |
26 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
27 |         super().__init__()
28 |
29 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # the LSTM module from torch.nn
30 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
31 |
32 |     def forward(self, _x):
33 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
34 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
35 |         x = x.view(s * b, h)
36 |         x = self.linear1(x)
37 |         x = x.view(s, b, -1)
38 |         return x
39 |
40 |
41 | if __name__ == '__main__':
42 |
43 |     # checking if GPU is available
44 |     device = torch.device("cpu")
45 |
46 |     if torch.cuda.is_available():
47 |         device = torch.device("cuda:0")
48 |         print('Training on GPU.')
49 |     else:
50 |         print('No GPU available, training on CPU.')
51 |     # best PSO solution (epochs, lr, cell size, num_layers, achieved loss):
52 |     # 1896.8003600599313, 0.0197719249657055, 24.593179318527724, 3.632221396562308, 0.043090518563985825
53 |     epoch = 1896
54 |     lr = 0.0197719249657055
55 |     cell_size = int(24.593179318527724)
56 |     num_layer = int(3.632221396562308)
57 |     name = 'PSO'  # tag for the output files: 'PSO', 'TSA', or 'regular'
58 |     # load the data and convert types
59 |     data = np.array(pd.read_excel('AirQualityUCI.xlsx')).astype('float32')
60 |     data = data[~np.isnan(data).any(axis=1), :]  # drop rows with missing values
61 |
62 |     # normalization
63 |     scaler = MinMaxScaler()
64 |     data = scaler.fit_transform(data)
65 |     data_x = data[:, 0:-1]
66 |     data_y = data[:, -1]
67 |
68 |     # train/test split
69 |     data_len = len(data_x)
70 |     t = np.linspace(0, data_len, data_len + 1)
71 |
72 |     train_data_ratio = 0.8  # choose 80% of the data for training
73 |     train_data_len = int(data_len * train_data_ratio)
74 |
75 |     train_x = data_x[5:train_data_len]
76 |     train_y = data_y[5:train_data_len]
77 |     t_for_training = t[5:train_data_len]
78 |
79 |     test_x = data_x[train_data_len:]
80 |     test_y = data_y[train_data_len:]
81 |     t_for_testing = t[train_data_len:]
82 |
83 |     # ----------------- train -------------------
84 |     INPUT_FEATURES_NUM = train_x.shape[1]
85 |     OUTPUT_FEATURES_NUM = 1
86 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
87 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
88 |
89 |     # transfer data to pytorch tensors
90 |     train_x_tensor = torch.from_numpy(train_x_tensor)
91 |     train_y_tensor = torch.from_numpy(train_y_tensor)
92 |
93 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, cell_size, output_size=OUTPUT_FEATURES_NUM, num_layers=num_layer)  # tuned number of hidden units and layers
94 |     lstm_model.to(device)
95 |     print('LSTM model:', lstm_model)
96 |     print('model.parameters:', lstm_model.parameters)
97 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
98 |
99 |     criterion = nn.MSELoss()
100 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)  # use the tuned learning rate
101 |     prev_loss = 1000
102 |     max_epochs = epoch
103 |
104 |     train_x_tensor = train_x_tensor.to(device)
105 |     train_y_tensor = train_y_tensor.to(device)
106 |
107 |     for epoch in range(max_epochs):
108 |         output = lstm_model(train_x_tensor).to(device)
109 |         loss = criterion(output, train_y_tensor)
110 |
111 |         optimizer.zero_grad()
112 |         loss.backward()
113 |         optimizer.step()
114 |
115 |         if loss < prev_loss:
116 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
117 |             prev_loss = loss
118 |
119 |         if loss.item() < 1e-4:
120 |             print('Epoch [{}/{}], Loss: {:.5f}'.format(epoch + 1, max_epochs, loss.item()))
121 |             print("The target loss value is reached")
122 |             break
123 |         else:
124 |             print('Epoch: [{}/{}], Loss:{:.5f}'.format(epoch + 1, max_epochs, loss.item()))
125 |
126 |     # prediction on the training dataset
127 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
128 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
129 |
130 |     # ----------------- test -------------------
131 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
132 |
133 |     # prediction on the test dataset
134 |     test_x_tensor = test_x.reshape(-1, 1, INPUT_FEATURES_NUM)
135 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
136 |     test_x_tensor = test_x_tensor.to(device)
137 |
138 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
139 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
140 |
141 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
142 |     print("test loss:", loss.item())
143 |
144 |     # ----------------- plot -------------------
145 |     '''plt.figure()
146 |     plt.plot(t_for_training, train_y, 'b', label='y_trn')
147 |     plt.plot(t_for_training, pred_y_for_train, 'y--', label='pre_trn')'''
148 |
149 |
150 |     ''' NOTE: keep `name` above in step with the model being evaluated: TSA, PSO, or the plain LSTM ('regular'). '''
151 |     df = pd.DataFrame(test_y)
152 |     df.to_csv(name + '_test.csv')
153 |     df = pd.DataFrame(pred_y_for_test)
154 |     df.to_csv(name + '_pred.csv')
155 |
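Note on usage: train_air.py above hard-codes a single tuned configuration and tags its outputs through `name`, while calculate_result_air.py and demoplot_air.py expect three prediction files (regular_pred.csv, PSO_pred.csv, TSA_pred.csv). A minimal driver sketch for producing all three in one pass follows; `train_and_predict` is a hypothetical wrapper around the train/test code of train_air.py (it does not exist in this repository), the 'PSO' row repeats the values hard-coded above, the 'TSA' row is the first row of 4,airquality/TSA_result.csv, and the 'regular' row is purely illustrative.

import pandas as pd

# train_and_predict is an assumed refactoring of train_air.py into a function
# that trains one LSTM and returns (test_y, pred_y_for_test); it is not part
# of this repository.
from train_air import train_and_predict  # hypothetical import

BEST = {
    'PSO': (1896, 0.0197719249657055, 24, 3),   # hard-coded in train_air.py above
    'TSA': (1239, 0.08974823109065953, 19, 2),  # first row of 4,airquality/TSA_result.csv
    'regular': (1000, 0.01, 20, 2),             # illustrative untuned baseline
}

for name, (epochs, lr, cell_size, num_layers) in BEST.items():
    test_y, pred_y = train_and_predict(epochs, lr, cell_size, num_layers)
    pd.DataFrame(test_y).to_csv(name + '_test.csv')  # ground truth, written once per tag
    pd.DataFrame(pred_y).to_csv(name + '_pred.csv')  # predictions read by the plot/metric scripts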
--------------------------------------------------------------------------------
/4,airquality/AirQualityUCI.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/4,airquality/AirQualityUCI.xlsx
--------------------------------------------------------------------------------
/4,airquality/TSA_result.csv:
--------------------------------------------------------------------------------
1 | 1239.0,0.08974823109065953,19.0,2.0,0.04326799511909485
2 | 1673.0,0.09840223305902018,26.0,2.0,0.04407365247607231
3 | 1673.0,0.09840223305902018,26.0,2.0,0.04407365247607231
4 | 1322.0,0.04636153099835993,8.0,4.0,nan
5 | 1322.0,0.04636153099835993,8.0,4.0,nan
--------------------------------------------------------------------------------
/4,airquality/lstm_model.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WY-Lan/TSA-LSTM-and-PSO-LSTM/9404305d3227bab5e872392242cacb830ccf528d/4,airquality/lstm_model.pt
--------------------------------------------------------------------------------
/4,airquality/new_begin_PSO_AirQuality.py:
--------------------------------------------------------------------------------
1 | import scipy.io as sio
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import matplotlib
7 | import pandas as pd
8 | from torch.autograd import Variable
9 | from sklearn.preprocessing import MinMaxScaler
10 | import math
11 | import csv
12 | import os
13 | os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'
14 |
15 | # Define LSTM Neural Networks
16 | class LstmRNN(nn.Module):
17 |     """
18 |     Parameters:
19 |     - input_size: feature size
20 |     - hidden_size: number of hidden units
21 |     - output_size: number of outputs
22 |     - num_layers: layers of LSTM to stack
23 |     """
24 |
25 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
26 |         super().__init__()
27 |
28 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # the LSTM module from torch.nn
29 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
30 |
31 |     def forward(self, _x):
32 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
33 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
34 |         x = x.view(s * b, h)
35 |         x = self.linear1(x)
36 |         x = x.view(s, b, -1)
37 |         return x
38 |
39 |
40 | def train_LSTM(X):
41 |     # X encodes one candidate solution: [epochs, learning rate, hidden units, layers]
42 |     device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")  # fall back to CPU when no GPU is present
43 |
44 |     epoch = int(X[0])
45 |     lr = X[1]
46 |     hidden_size = int(X[2])
47 |     num_layers = int(X[3])
48 |
49 |     # load the data and convert types
50 |     data = np.array(pd.read_excel('AirQualityUCI.xlsx')).astype('float32')
51 |     data = data[~np.isnan(data).any(axis=1), :]  # drop rows with missing values
52 |
53 |     # normalization
54 |     scaler = MinMaxScaler()
55 |     data = scaler.fit_transform(data)
56 |     data_x = data[:, 0:-1]
57 |     data_y = data[:, -1]
58 |
59 |     # train/test split
60 |     data_len = len(data_x)
61 |     t = np.linspace(0, data_len, data_len + 1)
62 |
63 |     train_data_ratio = 0.8  # choose 80% of the data for training
64 |     train_data_len = int(data_len * train_data_ratio)
65 |
66 |     train_x = data_x[5:train_data_len]
67 |     train_y = data_y[5:train_data_len]
68 |     t_for_training = t[5:train_data_len]
69 |
70 |     test_x = data_x[train_data_len:]
71 |     test_y = data_y[train_data_len:]
72 |     t_for_testing = t[train_data_len:]
73 |
74 |     # ----------------- train -------------------
75 |     INPUT_FEATURES_NUM = train_x.shape[1]
76 |     OUTPUT_FEATURES_NUM = 1
77 |
78 |
79 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
80 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
81 |
82 |     # transfer data to pytorch tensors
83 |     train_x_tensor = torch.from_numpy(train_x_tensor)
84 |     train_y_tensor = torch.from_numpy(train_y_tensor)
85 |
86 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM,
87 |                          num_layers=num_layers)
88 |     lstm_model.to(device)
89 |     print('LSTM model:', lstm_model)
90 |     print('model.parameters:', lstm_model.parameters)
91 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
92 |
93 |     criterion = nn.MSELoss()
94 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)
95 |
96 |     prev_loss = 1000
97 |     max_epochs = epoch
98 |
99 |     train_x_tensor = train_x_tensor.to(device)
100 |     train_y_tensor = train_y_tensor.to(device)
101 |     for epoch in range(max_epochs):
102 |         output = lstm_model(train_x_tensor).to(device)
103 |         loss = criterion(output, train_y_tensor)
104 |
105 |         optimizer.zero_grad()
106 |         loss.backward()
107 |         optimizer.step()
108 |
109 |         if loss < prev_loss:
110 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
111 |             prev_loss = loss
112 |
113 |         '''if loss.item() < 1e-4:
114 |             print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item()))
115 |             print("The loss value is reached")
116 |             break
117 |         else:
118 |             print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))'''
119 |
120 |     # prediction on the training dataset
121 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
122 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
123 |
124 |     # ----------------- test -------------------
125 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
126 |
127 |     # prediction on the test dataset
128 |     test_x_tensor = test_x.reshape(-1, 1,
129 |                                    INPUT_FEATURES_NUM)
130 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
131 |     test_x_tensor = test_x_tensor.to(device)
132 |
133 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
134 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
135 |
136 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
137 |
138 |     print(X)
139 |     print("test loss:", loss.item())
140 |
141 |     return loss.item()  # the test loss is the objective PSO minimizes
142 |
143 |
144 | def PSO_LSTM(N, D, dmin, dmax, maxiter):
145 |     c1 = 2   # cognitive coefficient
146 |     c2 = 2   # social coefficient
147 |     w = 0.5  # inertia weight
148 |     pN = N   # number of particles
149 |     dim = D  # search dimensionality
150 |
151 |     DOWN = dmin
152 |     UP = dmax
153 |
154 |     X = np.zeros((pN, dim))  # positions of all particles
155 |     V = np.zeros((pN, dim))  # velocities of all particles
156 |     pbest = np.zeros((pN, dim))  # best position each particle has visited
157 |     gbest = np.zeros(dim)  # global best position
158 |     p_fit = np.zeros(pN)  # best fitness of each particle so far
159 |
160 |     fit = 1  # global best fitness; losses on the normalized data stay below 1
161 |     for i_episode in range(maxiter):
162 |         # initialization: each episode restarts the swarm, while gbest and fit carry over
163 |         np.random.seed()
164 |         # fitness of the initial particles
165 |         print("computing the initial global best")
166 |         for i in range(pN):
167 |             for j in range(dim):
168 |                 V[i][j] = np.random.random()
169 |                 if j == 1:  # dimension 1 (the learning rate) is continuous
170 |                     X[i][j] = DOWN[j] + (UP[j] - DOWN[j]) * np.random.random()
171 |                 else:       # all other dimensions are integer-valued
172 |                     X[i][j] = int(DOWN[j] + (UP[j] - DOWN[j]) * np.random.random())
173 |             pbest[i] = X[i]  # personal best
174 |
175 |             p_fit[i] = train_LSTM(X[i])
176 |             if p_fit[i] < fit:
177 |                 fit = p_fit[i]
178 |                 gbest = X[i]
179 |
180 |         for j in range(maxiter):
181 |
182 |             for i in range(pN):
183 |                 temp = train_LSTM(X[i])
184 |                 with open('PSO_result.csv', 'a+', newline='') as f:
185 |                     csv_write = csv.writer(f)
186 |                     csv_write.writerow(np.append(gbest, fit))  # logged for plotting the convergence curve later
187 |                 if temp < p_fit[i]:  # update the personal best
188 |                     p_fit[i] = temp
189 |                     pbest[i] = X[i]
190 |                     if p_fit[i] < fit:  # update the global best
191 |                         gbest = X[i]
192 |                         fit = p_fit[i]
193 |
194 |             # update the positions
195 |             for i in range(pN):
196 |                 # integer-valued dimensions are not rounded here; train_LSTM() casts them when decoding
197 |                 V[i] = w * V[i] + c1 * np.random.random() * (pbest[i] - X[i]) + c2 * np.random.random() * (gbest - X[i])
198 |                 ww = 1
199 |                 for k in range(dim):
200 |                     if DOWN[k] < X[i][k] + V[i][k] < UP[k]:
201 |                         continue
202 |                     else:
203 |                         ww = 0  # cancel the whole move if any dimension would leave the bounds
204 |                 X[i] = X[i] + V[i] * ww
205 |     return gbest, fit
206 |
207 | if __name__ == '__main__':
208 |     N = 10
209 |     D = 4
210 |     dmin = [500, 0.00001, 5, 1]  # lower bounds: epochs, lr, hidden units, layers
211 |     dmax = [2000, 0.1, 30, 5]    # upper bounds
212 |     maxiter = 70
213 |     PSO_bestParams, PSO_minimum = PSO_LSTM(N, D, dmin, dmax, maxiter)
214 |     print(PSO_bestParams, PSO_minimum)
215 |
--------------------------------------------------------------------------------
/4,airquality/new_begin_TSA_AirQuality.py:
--------------------------------------------------------------------------------
1 | import scipy.io as sio
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | import matplotlib.pyplot as plt
6 | import matplotlib
7 | import pandas as pd
8 | from torch.autograd import Variable
9 | from sklearn.preprocessing import MinMaxScaler
10 | import math
11 | import csv
12 | import os
13 | os.environ['KMP_DUPLICATE_LIB_OK'] = 'TRUE'
14 |
15 | # Define LSTM Neural Networks
16 | class LstmRNN(nn.Module):
17 |     """
18 |     Parameters:
19 |     - input_size: feature size
20 |     - hidden_size: number of hidden units
21 |     - output_size: number of outputs
22 |     - num_layers: layers of LSTM to stack
23 |     """
24 |
25 |     def __init__(self, input_size, hidden_size=1, output_size=1, num_layers=1):
26 |         super().__init__()
27 |
28 |         self.lstm = nn.LSTM(input_size, hidden_size, num_layers)  # the LSTM module from torch.nn
29 |         self.linear1 = nn.Linear(hidden_size, output_size)  # fully connected output layer
30 |
31 |     def forward(self, _x):
32 |         x, _ = self.lstm(_x)  # _x is input, size (seq_len, batch, input_size)
33 |         s, b, h = x.shape  # x is output, size (seq_len, batch, hidden_size)
34 |         x = x.view(s * b, h)
35 |         x = self.linear1(x)
36 |         x = x.view(s, b, -1)
37 |         return x
38 |
39 |
40 | def train_LSTM(X):
41 |     # X encodes one candidate solution: [epochs, learning rate, hidden units, layers]
42 |     device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")  # fall back to CPU when no GPU is present
43 |
44 |     epoch = int(X[0])
45 |     lr = X[1]
46 |     hidden_size = int(X[2])
47 |     num_layers = int(X[3])
48 |
49 |     # load the data and convert types
50 |     data = np.array(pd.read_excel('AirQualityUCI.xlsx'))
51 |     data = data.astype('float32')
52 |     data = data[~np.isnan(data).any(axis=1), :]  # drop rows with missing values
53 |
54 |     # normalization
55 |     scaler = MinMaxScaler()
56 |     data = scaler.fit_transform(data)
57 |     data_x = data[:, 0:-1]
58 |     data_y = data[:, -1]
59 |
60 |     # train/test split
61 |     data_len = len(data_x)
62 |     t = np.linspace(0, data_len, data_len + 1)
63 |
64 |     train_data_ratio = 0.8  # choose 80% of the data for training
65 |     train_data_len = int(data_len * train_data_ratio)
66 |
67 |     train_x = data_x[5:train_data_len]
68 |     train_y = data_y[5:train_data_len]
69 |     t_for_training = t[5:train_data_len]
70 |
71 |     test_x = data_x[train_data_len:]
72 |     test_y = data_y[train_data_len:]
73 |     t_for_testing = t[train_data_len:]
74 |
75 |     # ----------------- train -------------------
76 |     INPUT_FEATURES_NUM = train_x.shape[1]
77 |     OUTPUT_FEATURES_NUM = 1
78 |
79 |
80 |     train_x_tensor = train_x.reshape(-1, 1, INPUT_FEATURES_NUM)  # set batch size to 1
81 |     train_y_tensor = train_y.reshape(-1, 1, OUTPUT_FEATURES_NUM)  # set batch size to 1
82 |
83 |     # transfer data to pytorch tensors
84 |     train_x_tensor = torch.from_numpy(train_x_tensor)
85 |     train_y_tensor = torch.from_numpy(train_y_tensor)
86 |
87 |     lstm_model = LstmRNN(INPUT_FEATURES_NUM, hidden_size, output_size=OUTPUT_FEATURES_NUM,
88 |                          num_layers=num_layers)
89 |     lstm_model.to(device)
90 |     print('LSTM model:', lstm_model)
91 |     print('model.parameters:', lstm_model.parameters)
92 |     print('train x tensor dimension:', Variable(train_x_tensor).size())
93 |
94 |     criterion = nn.MSELoss()
95 |     optimizer = torch.optim.Adam(lstm_model.parameters(), lr=lr)
96 |
97 |     prev_loss = 1000
98 |     max_epochs = epoch
99 |
100 |     train_x_tensor = train_x_tensor.to(device)
101 |     train_y_tensor = train_y_tensor.to(device)
102 |     for epoch in range(max_epochs):
103 |         output = lstm_model(train_x_tensor).to(device)
104 |         loss = criterion(output, train_y_tensor)
105 |
106 |         optimizer.zero_grad()
107 |         loss.backward()
108 |         optimizer.step()
109 |
110 |         if loss < prev_loss:
111 |             torch.save(lstm_model.state_dict(), 'lstm_model.pt')  # save model parameters to file
112 |             prev_loss = loss
113 |
114 |         '''if loss.item() < 1e-4:
115 |             print('Epoch [{}/{}], Loss: {:.10f}'.format(epoch + 1, max_epochs, loss.item()))
116 |             print("The loss value is reached")
117 |             break
118 |         else:
119 |             print('Epoch: [{}/{}], Loss:{:.10f}'.format(epoch + 1, max_epochs, loss.item()))'''
120 |
121 |     # prediction on the training dataset
122 |     pred_y_for_train = lstm_model(train_x_tensor).to(torch.device("cpu"))
123 |     pred_y_for_train = pred_y_for_train.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
124 |
125 |     # ----------------- test -------------------
126 |     lstm_model = lstm_model.eval()  # switch to evaluation mode
127 |
128 |     # prediction on the test dataset
129 |     test_x_tensor = test_x.reshape(-1, 1,
130 |                                    INPUT_FEATURES_NUM)
131 |     test_x_tensor = torch.from_numpy(test_x_tensor)  # convert to a tensor
132 |     test_x_tensor = test_x_tensor.to(device)
133 |
134 |     pred_y_for_test = lstm_model(test_x_tensor).to(torch.device("cpu"))
135 |     pred_y_for_test = pred_y_for_test.view(-1, OUTPUT_FEATURES_NUM).data.numpy()
136 |
137 |     loss = criterion(torch.from_numpy(pred_y_for_test), torch.from_numpy(test_y))
138 |
139 |     print(X)
140 |     print("test loss:", loss.item())
141 |     return loss.item()  # the test loss is the objective TSA minimizes
142 |
143 |
144 |
145 | def TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax):
146 |     low = int(N * 0.1)    # minimum number of seeds per tree
147 |     high = int(N * 0.25)  # maximum number of seeds per tree
148 |
149 |     for run in range(maxrun):
150 |         np.random.seed()
151 |         # one independent run of the search
152 |         trees = np.zeros((N, D))
153 |         obj_trees = np.zeros((N, 1))  # objective value of each tree
154 |
155 |         for i in range(N):
156 |             for j in range(D):
157 |                 if j == 1:  # dimension 1 (the learning rate) is continuous
158 |                     trees[i, j] = dmin[j] + np.random.rand() * (dmax[j] - dmin[j])
159 |                 else:       # all other dimensions are integer-valued
160 |                     trees[i, j] = int(dmin[j] + np.random.rand() * (dmax[j] - dmin[j]))
161 |             obj_trees[i] = train_LSTM(trees[i, :])
162 |         FEs = N  # N function evaluations were spent on initialization
163 |
164 |         minimum = np.min(obj_trees)
165 |         iter1 = 0
166 |
167 |         while FEs <= maxFEs:
168 |             iter1 = iter1 + 1
169 |             for i in range(N):
170 |                 # i indexes trees
171 |                 ns = int(low + (high - low) * np.random.rand()) + 1  # number of seeds for this tree
172 |                 if ns > high:
173 |                     ns = high  # clamp before counting, so the FEs budget stays accurate
174 |                 FEs = FEs + ns
175 |
176 |                 seeds = np.zeros((ns, D))  # the seeds generated for this tree
177 |                 obj_seeds = np.zeros((ns, 1))
178 |                 minimum, min_index = np.min(obj_trees), np.argmin(obj_trees)
179 |                 bestParams = trees[min_index, :]
180 |
181 |                 for j in range(ns):
182 |                     # j indexes seeds
183 |                     komus = np.random.randint(N)  # index of a random partner tree
184 |                     while komus == i:  # the partner must differ from the current tree
185 |                         komus = np.random.randint(N)
186 |                     seeds[j, :] = trees[i, :]  # each seed starts as a copy of the current tree
187 |
188 |                     for d in range(D):
189 |                         if np.random.rand() < ST:  # with probability ST, move toward the best tree
190 |                             if d == 1:
191 |                                 seeds[j, d] = iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2
192 |                             else:
193 |                                 seeds[j, d] = int(iw * trees[i, d] + (bestParams[d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2)
194 |                             if seeds[j, d] > dmax[d]:
195 |                                 seeds[j, d] = dmax[d]
196 |                             if seeds[j, d] < dmin[d]:
197 |                                 seeds[j, d] = dmin[d]
198 |                         else:  # otherwise, move relative to the random partner tree
199 |                             if d == 1:
200 |                                 seeds[j, d] = iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2
201 |                             else:
202 |                                 seeds[j, d] = int(iw * trees[i, d] + (trees[i, d] - trees[komus, d]) * (np.random.rand() - 0.5) * 2)
203 |                             if seeds[j, d] > dmax[d]:
204 |                                 seeds[j, d] = dmax[d]
205 |                             if seeds[j, d] < dmin[d]:
206 |                                 seeds[j, d] = dmin[d]
207 |                     obj_seeds[j] = train_LSTM(seeds[j, :])
208 |
209 |                 mini_seeds, mini_seeds_ind = np.min(obj_seeds), np.argmin(obj_seeds)
210 |
211 |                 if mini_seeds < obj_trees[i]:  # keep the best seed if it improves on the tree
212 |                     trees[i, :] = seeds[mini_seeds_ind, :]
213 |                     obj_trees[i] = mini_seeds
214 |
215 |             min_tree, min_tree_index = np.min(obj_trees), np.argmin(obj_trees)
216 |             if min_tree < minimum:
217 |                 minimum = min_tree
218 |                 bestParams = trees[min_tree_index, :]
219 |
220 |             print('Iter={} .... min={} .... FES={} .... \n'.format(iter1, minimum, FEs))
221 |             with open('TSA_result.csv', 'a+', newline='') as f:
222 |                 csv_write = csv.writer(f)
223 |                 csv_write.writerow(np.append(bestParams, minimum))  # logged for plotting the convergence curve later
224 |         print('Run={} .... min={} ....\n'.format(run, minimum))
225 |
226 |     return bestParams, minimum
227 |
228 | if __name__ == '__main__':
229 |     iw = 1        # weight on the current tree in the seed update
230 |     maxrun = 3
231 |     N = 20        # number of trees
232 |     D = 4         # search dimensionality
233 |     ST = 0.1      # search tendency
234 |     maxFEs = 100  # budget of function evaluations
235 |     dmin = [500, 0.00001, 5, 1]  # lower bounds: epochs, lr, hidden units, layers
236 |     dmax = [2000, 0.1, 30, 5]    # upper bounds
237 |     TSA_bestParams, TSA_minimum = TSA_LSTM(iw, maxrun, N, D, ST, maxFEs, dmin, dmax)
238 |     print(TSA_bestParams, TSA_minimum)
239 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TSA-LSTM-and-PSO-LSTM
2 | The tree-seed algorithm (TSA) and particle swarm optimization (PSO) are used to search for LSTM hyperparameters.
3 | The tuned hyperparameters are the cell size (number of hidden units), the number of stacked layers, the learning rate, and the number of training epochs.
--------------------------------------------------------------------------------
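For orientation, both search scripts share the same candidate encoding: a vector X of four values bounded by the dmin/dmax lists in their `__main__` blocks, where index 1 (the learning rate) is continuous and the remaining entries are truncated to integers inside train_LSTM(). A minimal sketch of that decoding step follows; the `decode` helper itself is illustrative and not part of the scripts.

def decode(X):
    """Map one raw search vector onto LSTM hyperparameters, mirroring train_LSTM()."""
    return {
        'epochs': int(X[0]),       # searched in [500, 2000]
        'lr': X[1],                # the only continuous dimension, in [1e-5, 0.1]
        'hidden_size': int(X[2]),  # cell size, searched in [5, 30]
        'num_layers': int(X[3]),   # stacked LSTM layers, searched in [1, 5]
    }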