├── Example use of framework with GP type surrogate models.ipynb
├── Example use of framework with RF type surrogate models.ipynb
├── LICENSE
├── Manifold Visualization.ipynb
├── Performance Visualization.ipynb
├── README.md
├── TopCount_collection_GP_Matern52_LCB2.npy
├── datasets
│   ├── AgNP_dataset.csv
│   ├── AutoAM_dataset.csv
│   ├── Crossed barrel_dataset.csv
│   ├── P3HT_dataset.csv
│   └── Perovskite_dataset.csv
└── requirements.txt
/Example use of framework with GP type surrogate models.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import math\n",
12 | "import matplotlib\n",
13 | "import matplotlib.pyplot as plt\n",
14 | "import pandas as pd\n",
15 | "import seaborn as sns\n",
16 | "import GPyOpt\n",
17 | "import GPy\n",
18 | "import os\n",
19 | "import matplotlib as mpl\n",
20 | "import matplotlib.tri as tri\n",
21 | "import ternary\n",
22 | "import pickle\n",
23 | "import datetime\n",
24 | "from collections import Counter\n",
25 | "import matplotlib.ticker as ticker\n",
26 | "from sklearn import preprocessing\n",
27 | "import pyDOE\n",
28 | "import random\n",
29 | "from scipy.stats import norm\n",
30 | "import time\n",
31 | "from sklearn.ensemble import RandomForestRegressor\n",
32 | "import copy"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "# Load materials dataset"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 2,
45 | "metadata": {},
46 | "outputs": [
47 | {
48 | "data": {
49 | "text/html": [
50 | "
\n",
51 | "\n",
64 | "
\n",
65 | " \n",
66 | " \n",
67 | " | \n",
68 | " n | \n",
69 | " theta | \n",
70 | " r | \n",
71 | " t | \n",
72 | " toughness | \n",
73 | "
\n",
74 | " \n",
75 | " \n",
76 | " \n",
77 | " 0 | \n",
78 | " 6 | \n",
79 | " 0 | \n",
80 | " 1.5 | \n",
81 | " 0.70 | \n",
82 | " 1.144667 | \n",
83 | "
\n",
84 | " \n",
85 | " 1 | \n",
86 | " 6 | \n",
87 | " 0 | \n",
88 | " 1.5 | \n",
89 | " 1.05 | \n",
90 | " 1.607561 | \n",
91 | "
\n",
92 | " \n",
93 | " 2 | \n",
94 | " 6 | \n",
95 | " 0 | \n",
96 | " 1.5 | \n",
97 | " 1.40 | \n",
98 | " 1.144338 | \n",
99 | "
\n",
100 | " \n",
101 | " 3 | \n",
102 | " 6 | \n",
103 | " 0 | \n",
104 | " 1.7 | \n",
105 | " 0.70 | \n",
106 | " 3.642738 | \n",
107 | "
\n",
108 | " \n",
109 | " 4 | \n",
110 | " 6 | \n",
111 | " 0 | \n",
112 | " 1.7 | \n",
113 | " 1.05 | \n",
114 | " 3.748405 | \n",
115 | "
\n",
116 | " \n",
117 | " ... | \n",
118 | " ... | \n",
119 | " ... | \n",
120 | " ... | \n",
121 | " ... | \n",
122 | " ... | \n",
123 | "
\n",
124 | " \n",
125 | " 1795 | \n",
126 | " 12 | \n",
127 | " 200 | \n",
128 | " 2.3 | \n",
129 | " 1.05 | \n",
130 | " 1.358975 | \n",
131 | "
\n",
132 | " \n",
133 | " 1796 | \n",
134 | " 12 | \n",
135 | " 200 | \n",
136 | " 2.3 | \n",
137 | " 1.40 | \n",
138 | " 3.196306 | \n",
139 | "
\n",
140 | " \n",
141 | " 1797 | \n",
142 | " 12 | \n",
143 | " 200 | \n",
144 | " 2.5 | \n",
145 | " 0.70 | \n",
146 | " 36.104187 | \n",
147 | "
\n",
148 | " \n",
149 | " 1798 | \n",
150 | " 12 | \n",
151 | " 200 | \n",
152 | " 2.5 | \n",
153 | " 1.05 | \n",
154 | " 1.313487 | \n",
155 | "
\n",
156 | " \n",
157 | " 1799 | \n",
158 | " 12 | \n",
159 | " 200 | \n",
160 | " 2.5 | \n",
161 | " 1.40 | \n",
162 | " 1.069728 | \n",
163 | "
\n",
164 | " \n",
165 | "
\n",
166 | "
1800 rows × 5 columns
\n",
167 | "
"
168 | ],
169 | "text/plain": [
170 | " n theta r t toughness\n",
171 | "0 6 0 1.5 0.70 1.144667\n",
172 | "1 6 0 1.5 1.05 1.607561\n",
173 | "2 6 0 1.5 1.40 1.144338\n",
174 | "3 6 0 1.7 0.70 3.642738\n",
175 | "4 6 0 1.7 1.05 3.748405\n",
176 | "... .. ... ... ... ...\n",
177 | "1795 12 200 2.3 1.05 1.358975\n",
178 | "1796 12 200 2.3 1.40 3.196306\n",
179 | "1797 12 200 2.5 0.70 36.104187\n",
180 | "1798 12 200 2.5 1.05 1.313487\n",
181 | "1799 12 200 2.5 1.40 1.069728\n",
182 | "\n",
183 | "[1800 rows x 5 columns]"
184 | ]
185 | },
186 | "execution_count": 2,
187 | "metadata": {},
188 | "output_type": "execute_result"
189 | }
190 | ],
191 | "source": [
192 | "# go to directory where datasets reside\n",
193 | "# load a dataset\n",
194 | "# dataset names = ['Crossed barrel', 'Perovskite', 'AgNP', 'P3HT', 'AutoAM']\n",
195 | "dataset_name = 'Crossed barrel'\n",
196 | "raw_dataset = pd.read_csv(dataset_name + '_dataset.csv')\n",
197 | "raw_dataset"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 3,
203 | "metadata": {},
204 | "outputs": [
205 | {
206 | "data": {
207 | "text/plain": [
208 | "['n', 'theta', 'r', 't']"
209 | ]
210 | },
211 | "execution_count": 3,
212 | "metadata": {},
213 | "output_type": "execute_result"
214 | }
215 | ],
216 | "source": [
217 | "feature_name = list(raw_dataset.columns)[:-1]\n",
218 | "feature_name"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": 4,
224 | "metadata": {},
225 | "outputs": [
226 | {
227 | "data": {
228 | "text/plain": [
229 | "'toughness'"
230 | ]
231 | },
232 | "execution_count": 4,
233 | "metadata": {},
234 | "output_type": "execute_result"
235 | }
236 | ],
237 | "source": [
238 | "objective_name = list(raw_dataset.columns)[-1]\n",
239 | "objective_name"
240 | ]
241 | },
242 | {
243 | "cell_type": "markdown",
244 | "metadata": {},
245 | "source": [
246 | "# Formulate optimization as global minimization"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 5,
252 | "metadata": {},
253 | "outputs": [
254 | {
255 | "data": {
256 | "text/html": [
257 | "\n",
258 | "\n",
271 | "
\n",
272 | " \n",
273 | " \n",
274 | " | \n",
275 | " n | \n",
276 | " theta | \n",
277 | " r | \n",
278 | " t | \n",
279 | " toughness | \n",
280 | "
\n",
281 | " \n",
282 | " \n",
283 | " \n",
284 | " 0 | \n",
285 | " 6 | \n",
286 | " 0 | \n",
287 | " 1.5 | \n",
288 | " 0.70 | \n",
289 | " -1.144667 | \n",
290 | "
\n",
291 | " \n",
292 | " 1 | \n",
293 | " 6 | \n",
294 | " 0 | \n",
295 | " 1.5 | \n",
296 | " 1.05 | \n",
297 | " -1.607561 | \n",
298 | "
\n",
299 | " \n",
300 | " 2 | \n",
301 | " 6 | \n",
302 | " 0 | \n",
303 | " 1.5 | \n",
304 | " 1.40 | \n",
305 | " -1.144338 | \n",
306 | "
\n",
307 | " \n",
308 | " 3 | \n",
309 | " 6 | \n",
310 | " 0 | \n",
311 | " 1.7 | \n",
312 | " 0.70 | \n",
313 | " -3.642738 | \n",
314 | "
\n",
315 | " \n",
316 | " 4 | \n",
317 | " 6 | \n",
318 | " 0 | \n",
319 | " 1.7 | \n",
320 | " 1.05 | \n",
321 | " -3.748405 | \n",
322 | "
\n",
323 | " \n",
324 | " ... | \n",
325 | " ... | \n",
326 | " ... | \n",
327 | " ... | \n",
328 | " ... | \n",
329 | " ... | \n",
330 | "
\n",
331 | " \n",
332 | " 1795 | \n",
333 | " 12 | \n",
334 | " 200 | \n",
335 | " 2.3 | \n",
336 | " 1.05 | \n",
337 | " -1.358975 | \n",
338 | "
\n",
339 | " \n",
340 | " 1796 | \n",
341 | " 12 | \n",
342 | " 200 | \n",
343 | " 2.3 | \n",
344 | " 1.40 | \n",
345 | " -3.196306 | \n",
346 | "
\n",
347 | " \n",
348 | " 1797 | \n",
349 | " 12 | \n",
350 | " 200 | \n",
351 | " 2.5 | \n",
352 | " 0.70 | \n",
353 | " -36.104187 | \n",
354 | "
\n",
355 | " \n",
356 | " 1798 | \n",
357 | " 12 | \n",
358 | " 200 | \n",
359 | " 2.5 | \n",
360 | " 1.05 | \n",
361 | " -1.313487 | \n",
362 | "
\n",
363 | " \n",
364 | " 1799 | \n",
365 | " 12 | \n",
366 | " 200 | \n",
367 | " 2.5 | \n",
368 | " 1.40 | \n",
369 | " -1.069728 | \n",
370 | "
\n",
371 | " \n",
372 | "
\n",
373 | "
1800 rows × 5 columns
\n",
374 | "
"
375 | ],
376 | "text/plain": [
377 | " n theta r t toughness\n",
378 | "0 6 0 1.5 0.70 -1.144667\n",
379 | "1 6 0 1.5 1.05 -1.607561\n",
380 | "2 6 0 1.5 1.40 -1.144338\n",
381 | "3 6 0 1.7 0.70 -3.642738\n",
382 | "4 6 0 1.7 1.05 -3.748405\n",
383 | "... .. ... ... ... ...\n",
384 | "1795 12 200 2.3 1.05 -1.358975\n",
385 | "1796 12 200 2.3 1.40 -3.196306\n",
386 | "1797 12 200 2.5 0.70 -36.104187\n",
387 | "1798 12 200 2.5 1.05 -1.313487\n",
388 | "1799 12 200 2.5 1.40 -1.069728\n",
389 | "\n",
390 | "[1800 rows x 5 columns]"
391 | ]
392 | },
393 | "execution_count": 5,
394 | "metadata": {},
395 | "output_type": "execute_result"
396 | }
397 | ],
398 | "source": [
399 | "# for P3HT/CNT, Crossed barrel, AutoAM, their original goals were to maximize objective value.\n",
400 | "# here, we add negative sign to all of its objective values here \n",
401 | "# because default BO in the framework below aims for global minimization\n",
402 | "# only P3HT/CNT, Crossed barrel, AutoAM need this line; Perovskite and AgNP do not need this line.\n",
403 | "ds = copy.deepcopy(raw_dataset) \n",
404 | "ds[objective_name] = -raw_dataset[objective_name].values\n",
405 | "ds"
406 | ]
407 | },
408 | {
409 | "cell_type": "markdown",
410 | "metadata": {},
411 | "source": [
412 | "# Process dataset for pool-based active learning"
413 | ]
414 | },
415 | {
416 | "cell_type": "code",
417 | "execution_count": 6,
418 | "metadata": {},
419 | "outputs": [
420 | {
421 | "data": {
422 | "text/html": [
423 | "\n",
424 | "\n",
437 | "
\n",
438 | " \n",
439 | " \n",
440 | " | \n",
441 | " n | \n",
442 | " theta | \n",
443 | " r | \n",
444 | " t | \n",
445 | " toughness | \n",
446 | "
\n",
447 | " \n",
448 | " \n",
449 | " \n",
450 | " 0 | \n",
451 | " 6 | \n",
452 | " 0 | \n",
453 | " 1.5 | \n",
454 | " 0.70 | \n",
455 | " -1.135453 | \n",
456 | "
\n",
457 | " \n",
458 | " 1 | \n",
459 | " 6 | \n",
460 | " 0 | \n",
461 | " 1.5 | \n",
462 | " 1.05 | \n",
463 | " -1.406492 | \n",
464 | "
\n",
465 | " \n",
466 | " 2 | \n",
467 | " 6 | \n",
468 | " 0 | \n",
469 | " 1.5 | \n",
470 | " 1.40 | \n",
471 | " -1.343498 | \n",
472 | "
\n",
473 | " \n",
474 | " 3 | \n",
475 | " 6 | \n",
476 | " 0 | \n",
477 | " 1.7 | \n",
478 | " 0.70 | \n",
479 | " -3.102525 | \n",
480 | "
\n",
481 | " \n",
482 | " 4 | \n",
483 | " 6 | \n",
484 | " 0 | \n",
485 | " 1.7 | \n",
486 | " 1.05 | \n",
487 | " -3.196597 | \n",
488 | "
\n",
489 | " \n",
490 | " ... | \n",
491 | " ... | \n",
492 | " ... | \n",
493 | " ... | \n",
494 | " ... | \n",
495 | " ... | \n",
496 | "
\n",
497 | " \n",
498 | " 595 | \n",
499 | " 12 | \n",
500 | " 200 | \n",
501 | " 2.3 | \n",
502 | " 1.05 | \n",
503 | " -4.775444 | \n",
504 | "
\n",
505 | " \n",
506 | " 596 | \n",
507 | " 12 | \n",
508 | " 200 | \n",
509 | " 2.3 | \n",
510 | " 1.40 | \n",
511 | " -1.997221 | \n",
512 | "
\n",
513 | " \n",
514 | " 597 | \n",
515 | " 12 | \n",
516 | " 200 | \n",
517 | " 2.5 | \n",
518 | " 0.70 | \n",
519 | " -24.956734 | \n",
520 | "
\n",
521 | " \n",
522 | " 598 | \n",
523 | " 12 | \n",
524 | " 200 | \n",
525 | " 2.5 | \n",
526 | " 1.05 | \n",
527 | " -1.360121 | \n",
528 | "
\n",
529 | " \n",
530 | " 599 | \n",
531 | " 12 | \n",
532 | " 200 | \n",
533 | " 2.5 | \n",
534 | " 1.40 | \n",
535 | " -1.337742 | \n",
536 | "
\n",
537 | " \n",
538 | "
\n",
539 | "
600 rows × 5 columns
\n",
540 | "
"
541 | ],
542 | "text/plain": [
543 | " n theta r t toughness\n",
544 | "0 6 0 1.5 0.70 -1.135453\n",
545 | "1 6 0 1.5 1.05 -1.406492\n",
546 | "2 6 0 1.5 1.40 -1.343498\n",
547 | "3 6 0 1.7 0.70 -3.102525\n",
548 | "4 6 0 1.7 1.05 -3.196597\n",
549 | ".. .. ... ... ... ...\n",
550 | "595 12 200 2.3 1.05 -4.775444\n",
551 | "596 12 200 2.3 1.40 -1.997221\n",
552 | "597 12 200 2.5 0.70 -24.956734\n",
553 | "598 12 200 2.5 1.05 -1.360121\n",
554 | "599 12 200 2.5 1.40 -1.337742\n",
555 | "\n",
556 | "[600 rows x 5 columns]"
557 | ]
558 | },
559 | "execution_count": 6,
560 | "metadata": {},
561 | "output_type": "execute_result"
562 | }
563 | ],
564 | "source": [
565 | "# for some datasets, each input feature x could have been evaluated more than once.\n",
566 | "# to perform pool-based active learning, we need to group the data by unique input feature x value. \n",
567 | "# for each unique x in design space, we only keep the average of all evaluations there as its objective value\n",
568 | "ds_grouped = ds.groupby(feature_name)[objective_name].agg(lambda x: x.unique().mean())\n",
569 | "ds_grouped = (ds_grouped.to_frame()).reset_index()\n",
570 | "ds_grouped"
571 | ]
572 | },
573 | {
574 | "cell_type": "code",
575 | "execution_count": 7,
576 | "metadata": {},
577 | "outputs": [
578 | {
579 | "name": "stdout",
580 | "output_type": "stream",
581 | "text": [
582 | "600\n"
583 | ]
584 | }
585 | ],
586 | "source": [
587 | "# these are the input feature x and objective value y used in framework\n",
588 | "X_feature = ds_grouped[feature_name].values\n",
589 | "\n",
590 | "y = np.array(ds_grouped[objective_name].values)\n",
591 | "\n",
592 | "assert len(ds_grouped) == len(X_feature) == len(y)\n",
593 | "\n",
594 | "# total number of data in set\n",
595 | "N = len(ds_grouped)\n",
596 | "print(N)"
597 | ]
598 | },
599 | {
600 | "cell_type": "markdown",
601 | "metadata": {},
602 | "source": [
603 | "# Benchmarking Framework parameters"
604 | ]
605 | },
606 | {
607 | "cell_type": "code",
608 | "execution_count": 9,
609 | "metadata": {},
610 | "outputs": [],
611 | "source": [
612 | "# here are some parameters of the framework, feel free to modify for your own purposes\n",
613 | "\n",
614 | "# number of ensembles. in the paper n_ensemble = 50.\n",
615 | "n_ensemble = 50\n",
616 | "# number of initial experiments\n",
617 | "n_initial = 2\n",
618 | "# number of top candidates, currently using top 5% of total dataset size\n",
619 | "n_top = int(math.ceil(len(y) * 0.05))\n",
620 | "# the top candidates and their indicies\n",
621 | "top_indices = list(ds_grouped.sort_values(objective_name).head(n_top).index)\n",
622 | "\n",
623 | "# random seeds used to distinguish between different ensembles\n",
624 | "# there are 300 of them, but only first n_ensemble are used\n",
625 | "seed_list = [4295, 8508, 326, 3135, 1549, 2528, 1274, 6545, 5971, 6269, 2422, 4287, 9320, 4932, 951, 4304, 1745, 5956, 7620, 4545, 6003, 9885, 5548, 9477, 30, 8992, 7559, 5034, 9071, 6437, 3389, 9816, 8617, 3712, 3626, 1660, 3309, 2427, 9872, 938, 5156, 7409, 7672, 3411, 3559, 9966, 7331, 8273, 8484, 5127, 2260, 6054, 5205, 311, 6056, 9456, 928, 6424, 7438, 8701, 8634, 4002, 6634, 8102, 8503, 1540, 9254, 7972, 7737, 3410, 4052, 8640, 9659, 8093, 7076, 7268, 2046, 7492, 3103, 3034, 7874, 5438, 4297, 291, 5436, 9021, 3711, 7837, 9188, 2036, 8013, 6188, 3734, 187, 1438, 1061, 674, 777, 7231, 7096, 3360, 4278, 5817, 5514, 3442, 6805, 6750, 8548, 9751, 3526, 9969, 8979, 1526, 1551, 2058, 6325, 1237, 5917, 5821, 9946, 5049, 654, 7750, 5149, 3545, 9165, 2837, 5621, 6501, 595, 3181, 1747, 4405, 4480, 4282, 9262, 6219, 3960, 4999, 1495, 6007, 9642, 3902, 3133, 1085, 3278, 1104, 5939, 7153, 971, 8733, 3785, 9056, 2020, 7249, 5021, 3384, 8740, 4593, 7869, 9941, 8813, 3688, 8139, 6436, 3742, 5503, 1587, 4766, 9846, 9117, 7001, 4853, 9346, 4927, 8480, 5298, 4753, 1151, 9768, 5405, 6196, 5721, 3419, 8090, 8166, 7834, 1480, 1150, 9002, 1134, 2237, 3995, 2029, 5336, 7050, 6857, 8794, 1754, 1184, 3558, 658, 6804, 8750, 5088, 1136, 626, 8462, 5203, 3196, 979, 7419, 1162, 5451, 6492, 1562, 8145, 8937, 8764, 4174, 7639, 8902, 7003, 765, 1554, 6135, 1689, 9530, 1398, 2273, 7925, 5948, 1036, 868, 4617, 1203, 7680, 7, 93, 3128, 5694, 6979, 7136, 8084, 5770, 9301, 1599, 737, 7018, 3774, 9843, 2296, 2287, 9875, 2349, 2469, 8941, 4973, 3798, 54, 2938, 4665, 3942, 3951, 9400, 3094, 2248, 3376, 1926, 5180, 1773, 3681, 1808, 350, 6669, 826, 539, 5313, 6193, 5752, 9370, 2782, 8399, 4881, 3166, 4906, 5829, 4827, 29, 6899, 9012, 6986, 4175, 1035, 8320, 7802, 3777, 6340, 7798, 7705]"
626 | ]
627 | },
628 | {
629 | "cell_type": "markdown",
630 | "metadata": {},
631 | "source": [
632 | "# GP's surrogate models"
633 | ]
634 | },
635 | {
636 | "cell_type": "code",
637 | "execution_count": 10,
638 | "metadata": {},
639 | "outputs": [],
640 | "source": [
641 | "def GP_pred(X, GP_model):\n",
642 | " X = X.reshape([1,X_feature.shape[1]])\n",
643 | " \n",
644 | " mean, var = GP_model.predict(X)[0][0][0], GP_model.predict(X)[1][0][0]\n",
645 | " return mean, np.sqrt(var)\n",
646 | " \n",
647 | "\n",
648 | "# expected improvement\n",
649 | "def EI(X, GP_model, y_best):\n",
650 | " xi = 0\n",
651 | "# can also use 0.01\n",
652 | " \n",
653 | " mean, std = GP_pred(X, GP_model)\n",
654 | "\n",
655 | " z = (y_best - mean - xi)/std\n",
656 | " return (y_best - mean - xi) * norm.cdf(z) + std * norm.pdf(z)\n",
657 | "\n",
658 | "# lower confidence bound\n",
659 | "def LCB(X, GP_model, ratio):\n",
660 | " \n",
661 | " mean, std = GP_pred(X, GP_model)\n",
662 | " \n",
663 | " return - mean + ratio * std\n",
664 | "\n",
665 | "# probability of improvement\n",
666 | "def PI(X, GP_model, y_best):\n",
667 | " xi = 0\n",
668 | "# can also use 0.01\n",
669 | " mean, std = GP_pred(X, GP_model)\n",
670 | " \n",
671 | " z = (y_best - mean - xi)/std\n",
672 | " return norm.cdf(z)\n",
673 | "\n"
674 | ]
675 | },
676 | {
677 | "cell_type": "markdown",
678 | "metadata": {},
679 | "source": [
680 | "# isotropic and anisotropic kernels for GP"
681 | ]
682 | },
683 | {
684 | "cell_type": "code",
685 | "execution_count": 11,
686 | "metadata": {},
687 | "outputs": [],
688 | "source": [
689 | "# if use isotropic kernels, ARD_ = False\n",
690 | "# if use anisotropic kernels, ARD_ = True\n",
691 | "\n",
692 | "ARD_ = False\n",
693 | "\n",
694 | "Bias_kernel = GPy.kern.Bias(X_feature.shape[1], variance=1.)\n",
695 | "\n",
696 | "Matern52_kernel = GPy.kern.Matern52(X_feature.shape[1], variance=1., ARD=ARD_) + Bias_kernel\n",
697 | "Matern32_kernel = GPy.kern.Matern32(X_feature.shape[1], variance=1., ARD=ARD_) + Bias_kernel\n",
698 | "Matern12_kernel = GPy.kern.Exponential(X_feature.shape[1], variance=1., ARD=ARD_) + Bias_kernel\n",
699 | "RBF_kernel = GPy.kern.RBF(X_feature.shape[1], variance=1., ARD=ARD_) + Bias_kernel\n",
700 | "MLP_kernel = GPy.kern.MLP(X_feature.shape[1], variance=1., ARD=ARD_) + Bias_kernel"
701 | ]
702 | },
703 | {
704 | "cell_type": "markdown",
705 | "metadata": {},
706 | "source": [
707 | "# Pool-based active learning framework"
708 | ]
709 | },
710 | {
711 | "cell_type": "code",
712 | "execution_count": 75,
713 | "metadata": {
714 | "scrolled": true
715 | },
716 | "outputs": [
717 | {
718 | "name": "stdout",
719 | "output_type": "stream",
720 | "text": [
721 | "initializing seed = 0\n",
722 | "Finished seed\n"
723 | ]
724 | }
725 | ],
726 | "source": [
727 | "# framework\n",
728 | "\n",
729 | "\n",
730 | "# good practice to keep check of time used\n",
731 | "start_time = time.time()\n",
732 | "\n",
733 | "# these will carry results along optimization sequence from all n_ensemble runs\n",
734 | "index_collection = []\n",
735 | "X_collection = []\n",
736 | "y_collection = []\n",
737 | "TopCount_collection = []\n",
738 | "\n",
739 | "\n",
740 | "\n",
741 | "for s in seed_list:\n",
742 | " \n",
743 | " if len(index_collection) == n_ensemble:\n",
744 | " break\n",
745 | " \n",
746 | " print('initializing seed = ' +str(seed_list.index(s)))\n",
747 | " random.seed(s)\n",
748 | " \n",
749 | " indices = list(np.arange(N))\n",
750 | "# index_learn is the pool of candidates to be examined\n",
751 | " index_learn = indices.copy()\n",
752 | "# index_ is the list of candidates we have already observed\n",
753 | "# adding in the initial experiments\n",
754 | " index_ = random.sample(index_learn, n_initial)\n",
755 | " \n",
756 | "# list to store all observed good candidates' input feature X\n",
757 | " X_ = []\n",
758 | "# list to store all observed good candidates' objective value y\n",
759 | " y_ = []\n",
760 | "# number of top candidates found so far\n",
761 | " c = 0\n",
762 | "# list of cumulative number of top candidates found at each learning cycle\n",
763 | " TopCount_ = []\n",
764 | "# add the first n_initial experiments to collection\n",
765 | " for i in index_:\n",
766 | " X_.append(X_feature[i])\n",
767 | " y_.append(y[i])\n",
768 | " if i in top_indices:\n",
769 | " c += 1\n",
770 | " TopCount_.append(c)\n",
771 | " index_learn.remove(i)\n",
772 | " \n",
773 | "\n",
774 | "# for each of the the rest of (N - n_initial) learning cycles\n",
775 | "# this for loop ends when all candidates in pool are observed \n",
776 | " for i in np.arange(len(index_learn)):\n",
777 | " \n",
778 | " y_best = np.min(y_)\n",
779 | " \n",
780 | " s_scaler = preprocessing.StandardScaler()\n",
781 | " X_train = s_scaler.fit_transform(X_)\n",
782 | " y_train = s_scaler.fit_transform([[i] for i in y_])\n",
783 | " \n",
784 | " try:\n",
785 | "# #TODO: select kernel for GP surrogate model\n",
786 | " GP_learn = GPy.models.GPRegression(X = X_train, \n",
787 | " Y = y_train, \n",
788 | " kernel= Matern52_kernel,\n",
789 | " noise_var = 0.01\n",
790 | " )\n",
791 | "\n",
792 | " GP_learn.optimize_restarts(num_restarts=10,\n",
793 | " parallel = True,\n",
794 | " robust = True,\n",
795 | " optimizer = 'bfgs',\n",
796 | " max_iters=100,\n",
797 | " verbose = False)\n",
798 | " except:\n",
799 | " break\n",
800 | " \n",
801 | "# by evaluating acquisition function values at candidates remaining in pool\n",
802 | "# we choose candidate with larger acquisition function value to be observed next \n",
803 | " next_index = None\n",
804 | " max_ac = -10**10\n",
805 | " for j in index_learn:\n",
806 | " X_j = X_feature[j]\n",
807 | " y_j = y[j]\n",
808 | "# #TODO: select Acquisiton Function for BO\n",
809 | " ac_value = LCB(X_j, GP_learn, 2)\n",
810 | " \n",
811 | " if max_ac <= ac_value:\n",
812 | " max_ac = ac_value\n",
813 | " next_index = j\n",
814 | " \n",
815 | " \n",
816 | " \n",
817 | " X_.append(X_feature[next_index])\n",
818 | " y_.append(y[next_index])\n",
819 | " \n",
820 | " \n",
821 | " if next_index in top_indices:\n",
822 | " c += 1\n",
823 | " \n",
824 | " TopCount_.append(c)\n",
825 | " \n",
826 | " index_learn.remove(next_index)\n",
827 | " index_.append(next_index) \n",
828 | "\n",
829 | " assert len(index_) == N\n",
830 | " \n",
831 | " index_collection.append(index_)\n",
832 | " X_collection.append(X_)\n",
833 | " y_collection.append(y_)\n",
834 | " TopCount_collection.append(TopCount_)\n",
835 | " \n",
836 | " \n",
837 | " print('Finished seed')\n",
838 | " \n",
839 | "total_time = time.time() - start_time\n",
840 | "\n",
841 | "master = np.array([index_collection, X_collection, y_collection, TopCount_collection, total_time])\n",
842 | "# #TODO: name output file\n",
843 | "np.save('test_run', master)\n",
844 | " "
845 | ]
846 | },
847 | {
848 | "cell_type": "code",
849 | "execution_count": null,
850 | "metadata": {},
851 | "outputs": [],
852 | "source": []
853 | },
854 | {
855 | "cell_type": "code",
856 | "execution_count": null,
857 | "metadata": {},
858 | "outputs": [],
859 | "source": []
860 | },
861 | {
862 | "cell_type": "code",
863 | "execution_count": null,
864 | "metadata": {},
865 | "outputs": [],
866 | "source": []
867 | }
868 | ],
869 | "metadata": {
870 | "kernelspec": {
871 | "display_name": "Python 3",
872 | "language": "python",
873 | "name": "python3"
874 | },
875 | "language_info": {
876 | "codemirror_mode": {
877 | "name": "ipython",
878 | "version": 3
879 | },
880 | "file_extension": ".py",
881 | "mimetype": "text/x-python",
882 | "name": "python",
883 | "nbconvert_exporter": "python",
884 | "pygments_lexer": "ipython3",
885 | "version": "3.7.6"
886 | }
887 | },
888 | "nbformat": 4,
889 | "nbformat_minor": 4
890 | }
891 |
--------------------------------------------------------------------------------
/Example use of framework with RF type surrogate models.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import math\n",
12 | "import matplotlib\n",
13 | "import matplotlib.pyplot as plt\n",
14 | "import pandas as pd\n",
15 | "import seaborn as sns\n",
16 | "import GPyOpt\n",
17 | "import GPy\n",
18 | "import os\n",
19 | "import matplotlib as mpl\n",
20 | "import matplotlib.tri as tri\n",
21 | "import ternary\n",
22 | "import pickle\n",
23 | "import datetime\n",
24 | "from collections import Counter\n",
25 | "import matplotlib.ticker as ticker\n",
26 | "from sklearn import preprocessing\n",
27 | "import pyDOE\n",
28 | "import random\n",
29 | "from scipy.stats import norm\n",
30 | "import time\n",
31 | "from sklearn.ensemble import RandomForestRegressor\n",
32 | "import copy"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "# Load materials dataset"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 2,
45 | "metadata": {},
46 | "outputs": [
47 | {
48 | "data": {
49 | "text/html": [
50 | "\n",
51 | "\n",
64 | "
\n",
65 | " \n",
66 | " \n",
67 | " | \n",
68 | " n | \n",
69 | " theta | \n",
70 | " r | \n",
71 | " t | \n",
72 | " toughness | \n",
73 | "
\n",
74 | " \n",
75 | " \n",
76 | " \n",
77 | " 0 | \n",
78 | " 6 | \n",
79 | " 0 | \n",
80 | " 1.5 | \n",
81 | " 0.70 | \n",
82 | " 1.144667 | \n",
83 | "
\n",
84 | " \n",
85 | " 1 | \n",
86 | " 6 | \n",
87 | " 0 | \n",
88 | " 1.5 | \n",
89 | " 1.05 | \n",
90 | " 1.607561 | \n",
91 | "
\n",
92 | " \n",
93 | " 2 | \n",
94 | " 6 | \n",
95 | " 0 | \n",
96 | " 1.5 | \n",
97 | " 1.40 | \n",
98 | " 1.144338 | \n",
99 | "
\n",
100 | " \n",
101 | " 3 | \n",
102 | " 6 | \n",
103 | " 0 | \n",
104 | " 1.7 | \n",
105 | " 0.70 | \n",
106 | " 3.642738 | \n",
107 | "
\n",
108 | " \n",
109 | " 4 | \n",
110 | " 6 | \n",
111 | " 0 | \n",
112 | " 1.7 | \n",
113 | " 1.05 | \n",
114 | " 3.748405 | \n",
115 | "
\n",
116 | " \n",
117 | " ... | \n",
118 | " ... | \n",
119 | " ... | \n",
120 | " ... | \n",
121 | " ... | \n",
122 | " ... | \n",
123 | "
\n",
124 | " \n",
125 | " 1795 | \n",
126 | " 12 | \n",
127 | " 200 | \n",
128 | " 2.3 | \n",
129 | " 1.05 | \n",
130 | " 1.358975 | \n",
131 | "
\n",
132 | " \n",
133 | " 1796 | \n",
134 | " 12 | \n",
135 | " 200 | \n",
136 | " 2.3 | \n",
137 | " 1.40 | \n",
138 | " 3.196306 | \n",
139 | "
\n",
140 | " \n",
141 | " 1797 | \n",
142 | " 12 | \n",
143 | " 200 | \n",
144 | " 2.5 | \n",
145 | " 0.70 | \n",
146 | " 36.104187 | \n",
147 | "
\n",
148 | " \n",
149 | " 1798 | \n",
150 | " 12 | \n",
151 | " 200 | \n",
152 | " 2.5 | \n",
153 | " 1.05 | \n",
154 | " 1.313487 | \n",
155 | "
\n",
156 | " \n",
157 | " 1799 | \n",
158 | " 12 | \n",
159 | " 200 | \n",
160 | " 2.5 | \n",
161 | " 1.40 | \n",
162 | " 1.069728 | \n",
163 | "
\n",
164 | " \n",
165 | "
\n",
166 | "
1800 rows × 5 columns
\n",
167 | "
"
168 | ],
169 | "text/plain": [
170 | " n theta r t toughness\n",
171 | "0 6 0 1.5 0.70 1.144667\n",
172 | "1 6 0 1.5 1.05 1.607561\n",
173 | "2 6 0 1.5 1.40 1.144338\n",
174 | "3 6 0 1.7 0.70 3.642738\n",
175 | "4 6 0 1.7 1.05 3.748405\n",
176 | "... .. ... ... ... ...\n",
177 | "1795 12 200 2.3 1.05 1.358975\n",
178 | "1796 12 200 2.3 1.40 3.196306\n",
179 | "1797 12 200 2.5 0.70 36.104187\n",
180 | "1798 12 200 2.5 1.05 1.313487\n",
181 | "1799 12 200 2.5 1.40 1.069728\n",
182 | "\n",
183 | "[1800 rows x 5 columns]"
184 | ]
185 | },
186 | "execution_count": 2,
187 | "metadata": {},
188 | "output_type": "execute_result"
189 | }
190 | ],
191 | "source": [
192 | "# go to directory where datasets reside\n",
193 | "# load a dataset\n",
194 | "# dataset names = ['Crossed barrel', 'Perovskite', 'AgNP', 'P3HT', 'AutoAM']\n",
195 | "dataset_name = 'Crossed barrel'\n",
196 | "raw_dataset = pd.read_csv(dataset_name + '_dataset.csv')\n",
197 | "raw_dataset"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 3,
203 | "metadata": {},
204 | "outputs": [
205 | {
206 | "data": {
207 | "text/plain": [
208 | "['n', 'theta', 'r', 't']"
209 | ]
210 | },
211 | "execution_count": 3,
212 | "metadata": {},
213 | "output_type": "execute_result"
214 | }
215 | ],
216 | "source": [
217 | "feature_name = list(raw_dataset.columns)[:-1]\n",
218 | "feature_name"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": 4,
224 | "metadata": {},
225 | "outputs": [
226 | {
227 | "data": {
228 | "text/plain": [
229 | "'toughness'"
230 | ]
231 | },
232 | "execution_count": 4,
233 | "metadata": {},
234 | "output_type": "execute_result"
235 | }
236 | ],
237 | "source": [
238 | "objective_name = list(raw_dataset.columns)[-1]\n",
239 | "objective_name"
240 | ]
241 | },
242 | {
243 | "cell_type": "markdown",
244 | "metadata": {},
245 | "source": [
246 | "# Formulate optimization as global minimization"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 5,
252 | "metadata": {},
253 | "outputs": [
254 | {
255 | "data": {
256 | "text/html": [
257 | "\n",
258 | "\n",
271 | "
\n",
272 | " \n",
273 | " \n",
274 | " | \n",
275 | " n | \n",
276 | " theta | \n",
277 | " r | \n",
278 | " t | \n",
279 | " toughness | \n",
280 | "
\n",
281 | " \n",
282 | " \n",
283 | " \n",
284 | " 0 | \n",
285 | " 6 | \n",
286 | " 0 | \n",
287 | " 1.5 | \n",
288 | " 0.70 | \n",
289 | " -1.144667 | \n",
290 | "
\n",
291 | " \n",
292 | " 1 | \n",
293 | " 6 | \n",
294 | " 0 | \n",
295 | " 1.5 | \n",
296 | " 1.05 | \n",
297 | " -1.607561 | \n",
298 | "
\n",
299 | " \n",
300 | " 2 | \n",
301 | " 6 | \n",
302 | " 0 | \n",
303 | " 1.5 | \n",
304 | " 1.40 | \n",
305 | " -1.144338 | \n",
306 | "
\n",
307 | " \n",
308 | " 3 | \n",
309 | " 6 | \n",
310 | " 0 | \n",
311 | " 1.7 | \n",
312 | " 0.70 | \n",
313 | " -3.642738 | \n",
314 | "
\n",
315 | " \n",
316 | " 4 | \n",
317 | " 6 | \n",
318 | " 0 | \n",
319 | " 1.7 | \n",
320 | " 1.05 | \n",
321 | " -3.748405 | \n",
322 | "
\n",
323 | " \n",
324 | " ... | \n",
325 | " ... | \n",
326 | " ... | \n",
327 | " ... | \n",
328 | " ... | \n",
329 | " ... | \n",
330 | "
\n",
331 | " \n",
332 | " 1795 | \n",
333 | " 12 | \n",
334 | " 200 | \n",
335 | " 2.3 | \n",
336 | " 1.05 | \n",
337 | " -1.358975 | \n",
338 | "
\n",
339 | " \n",
340 | " 1796 | \n",
341 | " 12 | \n",
342 | " 200 | \n",
343 | " 2.3 | \n",
344 | " 1.40 | \n",
345 | " -3.196306 | \n",
346 | "
\n",
347 | " \n",
348 | " 1797 | \n",
349 | " 12 | \n",
350 | " 200 | \n",
351 | " 2.5 | \n",
352 | " 0.70 | \n",
353 | " -36.104187 | \n",
354 | "
\n",
355 | " \n",
356 | " 1798 | \n",
357 | " 12 | \n",
358 | " 200 | \n",
359 | " 2.5 | \n",
360 | " 1.05 | \n",
361 | " -1.313487 | \n",
362 | "
\n",
363 | " \n",
364 | " 1799 | \n",
365 | " 12 | \n",
366 | " 200 | \n",
367 | " 2.5 | \n",
368 | " 1.40 | \n",
369 | " -1.069728 | \n",
370 | "
\n",
371 | " \n",
372 | "
\n",
373 | "
1800 rows × 5 columns
\n",
374 | "
"
375 | ],
376 | "text/plain": [
377 | " n theta r t toughness\n",
378 | "0 6 0 1.5 0.70 -1.144667\n",
379 | "1 6 0 1.5 1.05 -1.607561\n",
380 | "2 6 0 1.5 1.40 -1.144338\n",
381 | "3 6 0 1.7 0.70 -3.642738\n",
382 | "4 6 0 1.7 1.05 -3.748405\n",
383 | "... .. ... ... ... ...\n",
384 | "1795 12 200 2.3 1.05 -1.358975\n",
385 | "1796 12 200 2.3 1.40 -3.196306\n",
386 | "1797 12 200 2.5 0.70 -36.104187\n",
387 | "1798 12 200 2.5 1.05 -1.313487\n",
388 | "1799 12 200 2.5 1.40 -1.069728\n",
389 | "\n",
390 | "[1800 rows x 5 columns]"
391 | ]
392 | },
393 | "execution_count": 5,
394 | "metadata": {},
395 | "output_type": "execute_result"
396 | }
397 | ],
398 | "source": [
399 | "# for P3HT/CNT, Crossed barrel, AutoAM, their original goals were to maximize objective value.\n",
400 | "# here, we add negative sign to all of its objective values here \n",
401 | "# because default BO in the framework below aims for global minimization\n",
402 | "# only P3HT/CNT, Crossed barrel, AutoAM need this line; Perovskite and AgNP do not need this line.\n",
403 | "ds = copy.deepcopy(raw_dataset) \n",
404 | "ds[objective_name] = -raw_dataset[objective_name].values\n",
405 | "ds"
406 | ]
407 | },
408 | {
409 | "cell_type": "markdown",
410 | "metadata": {},
411 | "source": [
412 | "# Process dataset for pool-based active learning"
413 | ]
414 | },
415 | {
416 | "cell_type": "code",
417 | "execution_count": 6,
418 | "metadata": {},
419 | "outputs": [
420 | {
421 | "data": {
422 | "text/html": [
423 | "\n",
424 | "\n",
437 | "
\n",
438 | " \n",
439 | " \n",
440 | " | \n",
441 | " n | \n",
442 | " theta | \n",
443 | " r | \n",
444 | " t | \n",
445 | " toughness | \n",
446 | "
\n",
447 | " \n",
448 | " \n",
449 | " \n",
450 | " 0 | \n",
451 | " 6 | \n",
452 | " 0 | \n",
453 | " 1.5 | \n",
454 | " 0.70 | \n",
455 | " -1.135453 | \n",
456 | "
\n",
457 | " \n",
458 | " 1 | \n",
459 | " 6 | \n",
460 | " 0 | \n",
461 | " 1.5 | \n",
462 | " 1.05 | \n",
463 | " -1.406492 | \n",
464 | "
\n",
465 | " \n",
466 | " 2 | \n",
467 | " 6 | \n",
468 | " 0 | \n",
469 | " 1.5 | \n",
470 | " 1.40 | \n",
471 | " -1.343498 | \n",
472 | "
\n",
473 | " \n",
474 | " 3 | \n",
475 | " 6 | \n",
476 | " 0 | \n",
477 | " 1.7 | \n",
478 | " 0.70 | \n",
479 | " -3.102525 | \n",
480 | "
\n",
481 | " \n",
482 | " 4 | \n",
483 | " 6 | \n",
484 | " 0 | \n",
485 | " 1.7 | \n",
486 | " 1.05 | \n",
487 | " -3.196597 | \n",
488 | "
\n",
489 | " \n",
490 | " ... | \n",
491 | " ... | \n",
492 | " ... | \n",
493 | " ... | \n",
494 | " ... | \n",
495 | " ... | \n",
496 | "
\n",
497 | " \n",
498 | " 595 | \n",
499 | " 12 | \n",
500 | " 200 | \n",
501 | " 2.3 | \n",
502 | " 1.05 | \n",
503 | " -4.775444 | \n",
504 | "
\n",
505 | " \n",
506 | " 596 | \n",
507 | " 12 | \n",
508 | " 200 | \n",
509 | " 2.3 | \n",
510 | " 1.40 | \n",
511 | " -1.997221 | \n",
512 | "
\n",
513 | " \n",
514 | " 597 | \n",
515 | " 12 | \n",
516 | " 200 | \n",
517 | " 2.5 | \n",
518 | " 0.70 | \n",
519 | " -24.956734 | \n",
520 | "
\n",
521 | " \n",
522 | " 598 | \n",
523 | " 12 | \n",
524 | " 200 | \n",
525 | " 2.5 | \n",
526 | " 1.05 | \n",
527 | " -1.360121 | \n",
528 | "
\n",
529 | " \n",
530 | " 599 | \n",
531 | " 12 | \n",
532 | " 200 | \n",
533 | " 2.5 | \n",
534 | " 1.40 | \n",
535 | " -1.337742 | \n",
536 | "
\n",
537 | " \n",
538 | "
\n",
539 | "
600 rows × 5 columns
\n",
540 | "
"
541 | ],
542 | "text/plain": [
543 | " n theta r t toughness\n",
544 | "0 6 0 1.5 0.70 -1.135453\n",
545 | "1 6 0 1.5 1.05 -1.406492\n",
546 | "2 6 0 1.5 1.40 -1.343498\n",
547 | "3 6 0 1.7 0.70 -3.102525\n",
548 | "4 6 0 1.7 1.05 -3.196597\n",
549 | ".. .. ... ... ... ...\n",
550 | "595 12 200 2.3 1.05 -4.775444\n",
551 | "596 12 200 2.3 1.40 -1.997221\n",
552 | "597 12 200 2.5 0.70 -24.956734\n",
553 | "598 12 200 2.5 1.05 -1.360121\n",
554 | "599 12 200 2.5 1.40 -1.337742\n",
555 | "\n",
556 | "[600 rows x 5 columns]"
557 | ]
558 | },
559 | "execution_count": 6,
560 | "metadata": {},
561 | "output_type": "execute_result"
562 | }
563 | ],
564 | "source": [
565 | "# for some datasets, each input feature x could have been evaluated more than once.\n",
566 | "# to perform pool-based active learning, we need to group the data by unique input feature x value. \n",
567 | "# for each unique x in design space, we only keep the average of all evaluations there as its objective value\n",
568 | "ds_grouped = ds.groupby(feature_name)[objective_name].agg(lambda x: x.unique().mean())\n",
569 | "ds_grouped = (ds_grouped.to_frame()).reset_index()\n",
570 | "ds_grouped"
571 | ]
572 | },
573 | {
574 | "cell_type": "code",
575 | "execution_count": 85,
576 | "metadata": {},
577 | "outputs": [
578 | {
579 | "name": "stdout",
580 | "output_type": "stream",
581 | "text": [
582 | "600\n"
583 | ]
584 | }
585 | ],
586 | "source": [
587 | "# these are the input feature x and objective value y used in framework\n",
588 | "X_feature = ds_grouped[feature_name].values\n",
589 | "\n",
590 | "y = np.array(ds_grouped[objective_name].values)\n",
591 | "\n",
592 | "assert len(ds_grouped) == len(X_feature) == len(y)\n",
593 | "\n",
594 | "# total number of data in set\n",
595 | "N = len(ds_grouped)\n",
596 | "print(N)"
597 | ]
598 | },
599 | {
600 | "cell_type": "markdown",
601 | "metadata": {},
602 | "source": [
603 | "# Benchmarking Framework parameters"
604 | ]
605 | },
606 | {
607 | "cell_type": "code",
608 | "execution_count": 86,
609 | "metadata": {},
610 | "outputs": [],
611 | "source": [
612 | "# here are some parameters of the framework, feel free to modify for your own purposes\n",
613 | "\n",
614 | "# number of ensembles. in the paper n_ensemble = 50.\n",
615 | "n_ensemble = 50\n",
616 | "# number of initial experiments\n",
617 | "n_initial = 2\n",
618 | "# number of top candidates, currently using top 5% of total dataset size\n",
619 | "n_top = int(math.ceil(len(y) * 0.05))\n",
620 | "# the top candidates and their indicies\n",
621 | "top_indices = list(ds_grouped.sort_values(objective_name).head(n_top).index)\n",
622 | "\n",
623 | "# random seeds used to distinguish between different ensembles\n",
624 | "# there are 300 of them, but only first n_ensemble are used\n",
625 | "seed_list = [4295, 8508, 326, 3135, 1549, 2528, 1274, 6545, 5971, 6269, 2422, 4287, 9320, 4932, 951, 4304, 1745, 5956, 7620, 4545, 6003, 9885, 5548, 9477, 30, 8992, 7559, 5034, 9071, 6437, 3389, 9816, 8617, 3712, 3626, 1660, 3309, 2427, 9872, 938, 5156, 7409, 7672, 3411, 3559, 9966, 7331, 8273, 8484, 5127, 2260, 6054, 5205, 311, 6056, 9456, 928, 6424, 7438, 8701, 8634, 4002, 6634, 8102, 8503, 1540, 9254, 7972, 7737, 3410, 4052, 8640, 9659, 8093, 7076, 7268, 2046, 7492, 3103, 3034, 7874, 5438, 4297, 291, 5436, 9021, 3711, 7837, 9188, 2036, 8013, 6188, 3734, 187, 1438, 1061, 674, 777, 7231, 7096, 3360, 4278, 5817, 5514, 3442, 6805, 6750, 8548, 9751, 3526, 9969, 8979, 1526, 1551, 2058, 6325, 1237, 5917, 5821, 9946, 5049, 654, 7750, 5149, 3545, 9165, 2837, 5621, 6501, 595, 3181, 1747, 4405, 4480, 4282, 9262, 6219, 3960, 4999, 1495, 6007, 9642, 3902, 3133, 1085, 3278, 1104, 5939, 7153, 971, 8733, 3785, 9056, 2020, 7249, 5021, 3384, 8740, 4593, 7869, 9941, 8813, 3688, 8139, 6436, 3742, 5503, 1587, 4766, 9846, 9117, 7001, 4853, 9346, 4927, 8480, 5298, 4753, 1151, 9768, 5405, 6196, 5721, 3419, 8090, 8166, 7834, 1480, 1150, 9002, 1134, 2237, 3995, 2029, 5336, 7050, 6857, 8794, 1754, 1184, 3558, 658, 6804, 8750, 5088, 1136, 626, 8462, 5203, 3196, 979, 7419, 1162, 5451, 6492, 1562, 8145, 8937, 8764, 4174, 7639, 8902, 7003, 765, 1554, 6135, 1689, 9530, 1398, 2273, 7925, 5948, 1036, 868, 4617, 1203, 7680, 7, 93, 3128, 5694, 6979, 7136, 8084, 5770, 9301, 1599, 737, 7018, 3774, 9843, 2296, 2287, 9875, 2349, 2469, 8941, 4973, 3798, 54, 2938, 4665, 3942, 3951, 9400, 3094, 2248, 3376, 1926, 5180, 1773, 3681, 1808, 350, 6669, 826, 539, 5313, 6193, 5752, 9370, 2782, 8399, 4881, 3166, 4906, 5829, 4827, 29, 6899, 9012, 6986, 4175, 1035, 8320, 7802, 3777, 6340, 7798, 7705]"
626 | ]
627 | },
628 | {
629 | "cell_type": "markdown",
630 | "metadata": {},
631 | "source": [
632 | "# RF's surrogate models"
633 | ]
634 | },
635 | {
636 | "cell_type": "code",
637 | "execution_count": 88,
638 | "metadata": {},
639 | "outputs": [],
640 | "source": [
641 | "# number of decision trees in Random Forest (RF) model\n",
642 | "n_est = 100"
643 | ]
644 | },
645 | {
646 | "cell_type": "code",
647 | "execution_count": 89,
648 | "metadata": {},
649 | "outputs": [],
650 | "source": [
651 | "def RF_pred(X, RF_model):\n",
652 | " tree_predictions = []\n",
653 | " for j in np.arange(n_est):\n",
654 | " tree_predictions.append((RF_model.estimators_[j].predict(np.array([X]))).tolist())\n",
655 | " mean = np.mean(np.array(tree_predictions), axis=0)[0]\n",
656 | "\n",
657 | " \n",
658 | " std = np.std(np.array(tree_predictions), axis=0)[0]\n",
659 | " return mean, std\n",
660 | "\n",
661 | "\n",
662 | "def EI(X, RF_model, y_best):\n",
663 | "\n",
664 | " mean, std = RF_pred(X, RF_model)\n",
665 | " \n",
666 | " z = (y_best - mean)/std\n",
667 | " return (y_best - mean) * norm.cdf(z) + std * norm.pdf(z)\n",
668 | "\n",
669 | "def LCB(X, RF_model, ratio):\n",
670 | " \n",
671 | " mean, std = RF_pred(X, RF_model)\n",
672 | "\n",
673 | " return - mean + ratio * std\n",
674 | "\n",
675 | "def PI(X, RF_model, y_best):\n",
676 | " \n",
677 | " mean, std = RF_pred(X, RF_model)\n",
678 | " \n",
679 | " z = (y_best - mean)/std\n",
680 | " return norm.cdf(z)\n",
681 | "\n"
682 | ]
683 | },
684 | {
685 | "cell_type": "markdown",
686 | "metadata": {},
687 | "source": [
688 | "# Pool-based active learning framework"
689 | ]
690 | },
691 | {
692 | "cell_type": "code",
693 | "execution_count": 54,
694 | "metadata": {
695 | "scrolled": true
696 | },
697 | "outputs": [
698 | {
699 | "name": "stdout",
700 | "output_type": "stream",
701 | "text": [
702 | "initializing seed = 0\n",
703 | "Finished seed\n"
704 | ]
705 | }
706 | ],
707 | "source": [
708 | "# framework\n",
709 | "\n",
710 | "\n",
711 | "# good practice to keep check of time used\n",
712 | "start_time = time.time()\n",
713 | "\n",
714 | "# these will carry results along optimization sequence from all n_ensemble runs\n",
715 | "index_collection = []\n",
716 | "X_collection = []\n",
717 | "y_collection = []\n",
718 | "TopCount_collection = []\n",
719 | "\n",
720 | "\n",
721 | "\n",
722 | "for s in seed_list:\n",
723 | " \n",
724 | " if len(index_collection) == n_ensemble:\n",
725 | " break\n",
726 | " \n",
727 | " print('initializing seed = ' +str(seed_list.index(s)))\n",
728 | " random.seed(s)\n",
729 | " \n",
730 | " indices = list(np.arange(N))\n",
731 | "# index_learn is the pool of candidates to be examined\n",
732 | " index_learn = indices.copy()\n",
733 | "# index_ is the list of candidates we have already observed\n",
734 | "# adding in the initial experiments\n",
735 | " index_ = random.sample(index_learn, n_initial)\n",
736 | " \n",
737 | "# list to store all observed good candidates' input feature X\n",
738 | " X_ = []\n",
739 | "# list to store all observed good candidates' objective value y\n",
740 | " y_ = []\n",
741 | "# number of top candidates found so far\n",
742 | " c = 0\n",
743 | "# list of cumulative number of top candidates found at each learning cycle\n",
744 | " TopCount_ = []\n",
745 | "# add the first n_initial experiments to collection\n",
746 | " for i in index_:\n",
747 | " X_.append(X_feature[i])\n",
748 | " y_.append(y[i])\n",
749 | " if i in top_indices:\n",
750 | " c += 1\n",
751 | " TopCount_.append(c)\n",
752 | " index_learn.remove(i)\n",
753 | " \n",
754 | " \n",
755 | "# for each of the the rest of (N - n_initial) learning cycles\n",
756 | "# this for loop ends when all candidates in pool are observed \n",
757 | " for i in np.arange(len(index_learn)):\n",
758 | " \n",
759 | " y_best = np.min(y_)\n",
760 | " \n",
761 | " s_scaler = preprocessing.StandardScaler()\n",
762 | " X_train = s_scaler.fit_transform(X_)\n",
763 | " y_train = s_scaler.fit_transform([[i] for i in y_])\n",
764 | " \n",
765 | " \n",
766 | " RF_model = RandomForestRegressor(n_estimators= n_est, n_jobs= -1)\n",
767 | " RF_model.fit(X_train, y_train)\n",
768 | " \n",
769 | " \n",
770 | "# by evaluating acquisition function values at candidates remaining in pool\n",
771 | "# we choose candidate with larger acquisition function value to be observed next \n",
772 | " next_index = None\n",
773 | " max_ac = -10**10\n",
774 | " for j in index_learn:\n",
775 | " X_j = X_feature[j]\n",
776 | " y_j = y[j]\n",
777 | "# #TODO: select Acquisiton Function for BO\n",
778 | " ac_value = LCB(X_j, RF_model, 10)\n",
779 | " \n",
780 | " if max_ac <= ac_value:\n",
781 | " max_ac = ac_value\n",
782 | " next_index = j\n",
783 | " \n",
784 | " \n",
785 | " \n",
786 | " X_.append(X_feature[next_index])\n",
787 | " y_.append(y[next_index])\n",
788 | " \n",
789 | " \n",
790 | " if next_index in top_indices:\n",
791 | " c += 1\n",
792 | " \n",
793 | " TopCount_.append(c)\n",
794 | " \n",
795 | " index_learn.remove(next_index)\n",
796 | " index_.append(next_index) \n",
797 | "\n",
798 | " \n",
799 | " assert len(index_) == N\n",
800 | " \n",
801 | " index_collection.append(index_)\n",
802 | " X_collection.append(X_)\n",
803 | " y_collection.append(y_)\n",
804 | " TopCount_collection.append(TopCount_)\n",
805 | " \n",
806 | " \n",
807 | " print('Finished seed')\n",
808 | " \n",
809 | "total_time = time.time() - start_time\n",
810 | "\n",
811 | "master = np.array([index_collection, X_collection, y_collection, TopCount_collection, total_time])\n",
812 | "# #TODO: name output file\n",
813 | "np.save('test_run', master)\n",
814 | " "
815 | ]
816 | },
817 | {
818 | "cell_type": "code",
819 | "execution_count": null,
820 | "metadata": {},
821 | "outputs": [],
822 | "source": []
823 | },
824 | {
825 | "cell_type": "code",
826 | "execution_count": null,
827 | "metadata": {},
828 | "outputs": [],
829 | "source": [
830 | "\n"
831 | ]
832 | },
833 | {
834 | "cell_type": "code",
835 | "execution_count": null,
836 | "metadata": {},
837 | "outputs": [],
838 | "source": []
839 | }
840 | ],
841 | "metadata": {
842 | "kernelspec": {
843 | "display_name": "Python 3",
844 | "language": "python",
845 | "name": "python3"
846 | },
847 | "language_info": {
848 | "codemirror_mode": {
849 | "name": "ipython",
850 | "version": 3
851 | },
852 | "file_extension": ".py",
853 | "mimetype": "text/x-python",
854 | "name": "python",
855 | "nbconvert_exporter": "python",
856 | "pygments_lexer": "ipython3",
857 | "version": "3.7.6"
858 | }
859 | },
860 | "nbformat": 4,
861 | "nbformat_minor": 4
862 | }
863 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 MIT PVLab
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Benchmarking
2 |
3 | Project Name: Benchmarking the Performance of Bayesian Optimization across Multiple Experimental Materials Science Domains
4 |
5 | ## Authors
6 | || |
7 | | ------------- | ------------------------------ |
8 | | **AUTHORS** | Harry Qiaohao Liang |
9 | | **VERSION** | 2.0 / Aug, 2021 |
10 | | **EMAILS** | hqliang@mit.edu |
11 | || |
12 |
13 |
14 | Abstract:
In the field of machine learning (ML) for materials optimization, active learning algorithms such as Bayesian Optimization (BO) have been leveraged to guide autonomous and high-throughput experimentation systems. However, very few studies have evaluated the efficiency of BO as a general optimization algorithm across a broad range of experimental materials science domains. In this work, we evaluate the performance of BO algorithms with a collection of surrogate model and acquisition function pairs across five diverse experimental materials systems, namely carbon nanotube polymer blends, silver nanoparticles, lead-halide perovskites, as well as additively manufactured polymer structures and shapes. By defining acceleration and enhancement metrics for general materials optimization objectives, we find that Gaussian Process (GP) with anisotropic kernels (automatic relevance determination, ARD) and Random Forests (RF) have comparable performance as surrogate models in BO, and both outperform the commonly used GP with isotropic kernels. While GP with anisotropic kernels has been shown to be more robust as a surrogate model across most design spaces, RF is a close alternative and warrants more consideration because it is free of distribution assumptions, has lower time complexity, and requires less effort in initial hyperparameter selection. We also raise awareness about the benefits of using GP with anisotropic kernels over GP with isotropic kernels in future materials optimization campaigns.
16 |
17 | GitHub Repo: https://github.com/PV-Lab/Benchmarking
18 |
19 | Collaborators: Aldair Gongora, Danny Zekun Ren, Armi Tiihonen, etc.
20 |
21 | Status: Published in npj Computational Materials (2021).
22 | See PDF at: https://rdcu.be/cByoD
23 |
24 |
25 | ## Attribution
This work is released under the MIT License (see the LICENSE file). Please acknowledge use of this work with the appropriate citation to the repository and the research article.
27 |
28 | ## Citation
29 |
30 | Liang, Q., Gongora, A.E., Ren, Z. et al. Benchmarking the performance of Bayesian optimization across multiple experimental materials science domains. npj Comput Mater 7, 188 (2021). https://doi.org/10.1038/s41524-021-00656-9
31 |
32 | ## Usage
33 |
Run `Example use of framework with GP type surrogate models.ipynb` or `Example use of framework with RF type surrogate models.ipynb` with any of the given datasets to benchmark the performance of a selected BO algorithm using the pool-based active learning framework.

Run `Manifold Visualization.ipynb` with any of the given datasets to visualize its design space.

Run `Performance Visualization.ipynb` with the given demo benchmarking results, or with any benchmarking results produced by running the framework locally.
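
The example notebooks save each benchmarking run with `np.save` as a single object array `[index_collection, X_collection, y_collection, TopCount_collection, total_time]` (written to `test_run.npy` by default). As a rough, standalone illustration of what `Performance Visualization.ipynb` covers in full, the following minimal sketch (not part of the repository) loads such a file and plots the ensemble-averaged fraction of top candidates found per learning cycle:

```python
import numpy as np
import matplotlib.pyplot as plt

# the run is stored as a NumPy object array, so allow_pickle is required
master = np.load('test_run.npy', allow_pickle=True)
index_collection, X_collection, y_collection, TopCount_collection, total_time = master

# TopCount_collection holds, for each ensemble run, the cumulative number of
# top-5% candidates found after each learning cycle (one entry per observation)
top_counts = np.array([np.asarray(tc, dtype=float) for tc in TopCount_collection])
n_top = top_counts[:, -1].max()  # each completed run eventually finds all top candidates

mean_fraction = top_counts.mean(axis=0) / n_top  # ensemble-averaged learning curve
plt.plot(np.arange(1, top_counts.shape[1] + 1), mean_fraction)
plt.xlabel('number of experiments')
plt.ylabel('mean fraction of top candidates found')
plt.show()
```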
37 |
38 | The package contains the following module and scripts:
39 |
40 | | Module | Description |
41 | | ------------- | ------------------------------ |
42 | | `Example use of framework with GP type surrogate models.ipynb` | Script for running framework |
43 | | `Example use of framework with RF type surrogate models.ipynb` | Script for running framework |
44 | | `Manifold Visualization.ipynb` | Script for visualizing materials dataset design spaces |
45 | | `Performance Visualization.ipynb` | Script for visualizing performance |
46 | | `requirements.txt` | Required packages |
47 |
48 |
49 |
50 |
51 | ## Datasets
**For reuse of the code and materials datasets in this repo, please cite both this study and the authors below, who shared their datasets.**
53 |
54 | Materials datasets used to benchmark BO performance in this repository are provided by:
55 |
56 | (1) Crossed barrel dataset
57 |
58 | @article{gongora2020bayesian,
59 | title={A Bayesian experimental autonomous researcher for mechanical design},
60 | author={Gongora, Aldair E and Xu, Bowen and Perry, Wyatt and Okoye, Chika and Riley, Patrick and Reyes, Kristofer G and Morgan, Elise F and Brown, Keith A},
61 | journal={Science advances},
62 | volume={6},
63 | number={15},
64 | pages={eaaz1708},
65 | year={2020},
66 | publisher={American Association for the Advancement of Science}
67 | }
68 |
69 | link: https://advances.sciencemag.org/content/6/15/eaaz1708
70 |
71 | (2) Perovskite dataset
72 |
73 | @article{sun2021data,
74 | title={A data fusion approach to optimize compositional stability of halide perovskites},
75 | author={Sun, Shijing and Tiihonen, Armi and Oviedo, Felipe and Liu, Zhe and Thapa, Janak and Zhao, Yicheng and Hartono, Noor Titan P and Goyal, Anuj and Heumueller, Thomas and Batali, Clio and others},
76 | journal={Matter},
77 | volume={4},
78 | number={4},
79 | pages={1305--1322},
80 | year={2021},
81 | publisher={Elsevier}
82 | }
83 |
84 | link: https://www.sciencedirect.com/science/article/pii/S2590238521000084
85 |
86 | (3) AutoAM dataset
87 |
88 | @article{deneault2021toward,
89 | title={Toward autonomous additive manufacturing: Bayesian optimization on a 3D printer},
90 | author={Deneault, James R and Chang, Jorge and Myung, Jay and Hooper, Daylond and Armstrong, Andrew and Pitt, Mark and Maruyama, Benji},
91 | journal={MRS Bulletin},
92 | pages={1--10},
93 | year={2021},
94 | publisher={Springer}
95 | }
96 |
97 | link: https://link.springer.com/article/10.1557/s43577-021-00051-1
98 |
99 | (4) P3HT/CNT dataset
100 |
101 | @article{bash2021multi,
102 | title={Multi-Fidelity High-Throughput Optimization of Electrical Conductivity in P3HT-CNT Composites},
103 | author={Bash, Daniil and Cai, Yongqiang and Chellappan, Vijila and Wong, Swee Liang and Yang, Xu and Kumar, Pawan and Tan, Jin Da and Abutaha, Anas and Cheng, Jayce JW and Lim, Yee-Fun and others},
104 | journal={Advanced Functional Materials},
105 | pages={2102606},
106 | year={2021},
107 | publisher={Wiley Online Library}
108 | }
109 |
110 | link: https://onlinelibrary.wiley.com/doi/abs/10.1002/adfm.202102606
111 |
112 | (5) AgNP dataset
113 |
114 | @article{mekki2021two,
115 | title={Two-step machine learning enables optimized nanoparticle synthesis},
116 | author={Mekki-Berrada, Flore and Ren, Zekun and Huang, Tan and Wong, Wai Kuan and Zheng, Fang and Xie, Jiaxun and Tian, Isaac Parker Siyu and Jayavelu, Senthilnath and Mahfoud, Zackaria and Bash, Daniil and others},
117 | journal={npj Computational Materials},
118 | volume={7},
119 | number={1},
120 | pages={1--10},
121 | year={2021},
122 | publisher={Nature Publishing Group}
123 | }
124 |
125 | link: https://www.nature.com/articles/s41524-021-00520-w
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
--------------------------------------------------------------------------------
/TopCount_collection_GP_Matern52_LCB2.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PV-Lab/Benchmarking/7585c517ad88e676c42c6bf24a8ad278e01ddb21/TopCount_collection_GP_Matern52_LCB2.npy
--------------------------------------------------------------------------------
/datasets/AutoAM_dataset.csv:
--------------------------------------------------------------------------------
1 | Prime Delay,Print Speed,X Offset Correction,Y Offset Correction,Score
2 | 0,1,0,0,0.339554
3 | 2.5,3,0.1,0.1,0
4 | 5,5,-0.1,-0.1,0.218576
5 | 0,0.999931323,5.54E-06,2.52E-05,0.368919
6 | 0,5.070132864,0.225150933,1,0
7 | 0,0.477297834,0.04098728,0.191860936,0.142527
8 | 0.037130035,0.893927306,-0.035154338,0.04794658,0.420117
9 | 1.320466175,0.876729199,-1,0.365728896,0
10 | 0,8.30943536,-1,0.051005166,0
11 | 4.154109762,0.700993538,-1,0.029316768,0
12 | 0,1.350343868,-1,0.081764165,0
13 | 0.159317802,0.797724337,-0.200398219,0.029134653,0.699507
14 | 0,0.1,-0.263441407,-1,0.444846
15 | 0,6.325758975,-0.222747125,1,0.276438
16 | 0,0.1,-0.153953164,-0.633831044,0.521209
17 | 0,0.1,-0.383306816,-0.025953018,0.529015
18 | 0,8.017935076,-0.247280484,-0.215020933,0.368797
19 | 3.135016374,0.1,-0.286119834,0.242018176,0.231671
20 | 5,10,1,-1,0
21 | 0,0.1,1,1,0
22 | 0,0.1,-0.562307821,-1,0.55386
23 | 3.836204454,0.1,-0.515315555,-1,0.606331
24 | 3.981643012,9.515406916,-0.578467663,-1,0.590437
25 | 5,1.77011634,-0.734004069,-1,0.397736
26 | 5,8.563211249,-0.357116099,-1,0.665694
27 | 5,0.1,-0.225013889,-1,0.544579
28 | 5,10,-0.03037432,-1,0.394429
29 | 0,10,-0.459455051,-1,0.246653
30 | 5,6.463819981,-0.471807741,-0.56396988,0.528122
31 | 0,0.1,0.549188119,-1,0
32 | 3.270200002,4.423433841,-0.374321393,-1,0.669569
33 | 5,4.545758033,-0.454060731,-1,0.638409
34 | 5,10,1,1,0
35 | 5,10,-1,-1,0
36 | 5,10,-0.631321514,1,0
37 | 2.216058263,0.1,-0.351728777,-0.628832171,0.557771
38 | 2.568442889,4.128176512,-0.555356944,-1,0.553297
39 | 3.485758382,5.921041073,-0.220503744,-1,0.619629
40 | 0,0.1,-0.227008754,-0.203180401,0.506045
41 | 5,10,-0.392564219,-1,0.641716
42 | 5,5.100814558,-0.323019596,-1,0.64553
43 | 3.580209894,7.858465599,-0.379159846,-1,0.658964
44 | 5,0.1,-0.42326762,-1,0.47378
45 | 5,7.421528607,-0.46970392,-1,0.625649
46 | 3.704389712,10,-0.316697247,-1,0.644678
47 | 2.666170048,3.404372752,-0.323335094,-1,0.655095
48 | 3.793363821,6.321410667,-0.391116557,-1,0.645276
49 | 0,10,0.069650601,-1,0.225159
50 | 5,7.361323026,-0.290247848,-1,0.581536
51 | 2.138411123,0.1,-0.46469562,-1,0.427846
52 | 3.555925442,6.567725414,-0.409295679,-1,0.687847
53 | 5,10,-0.453750649,-1,0.649364
54 | 0,0.1,-0.399771583,1,0.23795
55 | 1.723971629,4.127795611,-0.317563966,-0.557834728,0.673479
56 | 0,3.166857813,-0.31510984,-0.347626202,0.691636
57 | 1.452060715,4.755510342,-0.276418111,-1,0.6771
58 | 0,3.355609427,-0.266558489,-0.441226868,0.696129
59 | 0,10,1,-1,0
60 | 0.991507301,3.374281222,-0.258724474,-0.400535608,0.65301
61 | 0,3.403912672,-0.39608221,-0.569370559,0.681477
62 | 5,0.1,1,-1,0.085345
63 | 0,4.074459554,-0.320553798,-1,0.456972
64 | 1.078526093,3.809288033,-0.406115714,-0.299587017,0.624609
65 | 0,3.179026659,-0.299133297,-0.07961219,0.724231
66 | 0,3.19738197,-0.293036821,-0.057746921,0.725884
67 | 0,3.17750185,-0.27157315,-0.063868507,0.746997
68 | 0,3.222519665,-0.244475416,-0.074529296,0.757861
69 | 0,3.296372494,-0.223819617,-0.092455882,0.73516
70 | 0,3.276032393,-0.294808743,0.083334502,0.727446
71 | 0,3.271360864,-0.215097596,-0.106963778,0.775094
72 | 0,3.313607155,-0.213103103,0.024677417,0.735191
73 | 2.559151299,2.747723276,-0.123465174,-1,0.558344
74 | 0,3.295163374,-0.201049723,-0.188737994,0.760199
75 | 0,3.321199668,-0.195937105,-0.187030775,0.741053
76 | 0,3.04339056,-0.221664458,0.085762614,0.733492
77 | 0.615535018,3.123837379,-0.23008989,-0.096321438,0.65512
78 | 5,0.1,0.196547731,-1,0.05735
79 | 0,0.1,-0.837284029,-1,0.13848
80 | 0,3.410189478,-0.182024517,-0.226603174,0.595148
81 | 0,3.383658905,-0.317137057,0.00792076,0.68768
82 | 0,2.590313222,-0.273631503,-0.128822762,0.797176
83 | 0,2.521505976,-0.278407762,-0.147367367,0.825486
84 | 0,2.429511424,-0.286077685,-0.171964491,0.839722
85 | 0,2.325883942,-0.294651346,-0.195062378,0.832971
86 | 0,2.221500067,-0.301345289,-0.211940111,0.857797
87 | 0,2.019113004,-0.310327554,-0.236066774,0.87748
88 | 0,1.827157601,-0.348561106,-0.27565965,0.904846
89 | 0,1.80847724,-0.412629308,-0.253730723,0.899917
90 | 0,1.880113634,-0.44461981,-0.111643532,0.820774
91 | 0,1.673653336,-0.372916521,-0.463324746,0.932613
92 | 0,1.663150494,-0.389967799,-0.557219105,0.902128
93 | 0,4.334235765,1,-1,0
94 | 0,1.690003046,-0.414789879,-0.56390241,0.869142
95 | 0,1.595660205,-0.32064382,-0.430844373,0.88388
96 | 0,10,0.26780482,1,0
97 | 0.742365236,1.714609821,-0.380317157,-0.416497064,0.829756
98 | 0,1.585934347,-0.337142578,-0.372810658,0.92212
99 | 0,1.568030435,-0.335315758,-0.365804076,0.901296
100 | 0,1.55882615,-0.349834168,-0.328645365,0.936549
101 | 5,3.837488349,1,1,0
--------------------------------------------------------------------------------
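A minimal sketch of loading the AutoAM table above and splitting the four printing parameters from the 'Score' objective column; the "datasets/" path assumes the repository layout shown in the file tree:

# Sketch: read the AutoAM dataset and separate inputs from the objective.
import pandas as pd

df = pd.read_csv("datasets/AutoAM_dataset.csv")
X = df.drop(columns=["Score"])   # Prime Delay, Print Speed, X/Y Offset Correction
y = df["Score"]                  # print-quality score (objective column)
print(X.shape)
print(y.describe())
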
/datasets/Crossed barrel_dataset.csv:
--------------------------------------------------------------------------------
1 | n,theta,r,t,toughness
2 | 6,0,1.5,0.7,1.14466667
3 | 6,0,1.5,1.05,1.60756105
4 | 6,0,1.5,1.4,1.144337785
5 | 6,0,1.7,0.7,3.64273758
6 | 6,0,1.7,1.05,3.748405035
7 | 6,0,1.7,1.4,2.661723075
8 | 6,0,1.9,0.7,5.62889179
9 | 6,0,1.9,1.05,6.545940435
10 | 6,0,1.9,1.4,5.13711498
11 | 6,0,2.1,0.7,6.113001335
12 | 6,0,2.1,1.05,7.63448638
13 | 6,0,2.1,1.4,2.06551135
14 | 6,0,2.3,0.7,22.63629481
15 | 6,0,2.3,1.05,11.98794863
16 | 6,0,2.3,1.4,2.18745377
17 | 6,0,2.5,0.7,27.27353061
18 | 6,0,2.5,1.05,27.71493771
19 | 6,0,2.5,1.4,1.724510655
20 | 6,25,1.6,0.7,1.321202445
21 | 6,25,1.6,1.05,1.901228995
22 | 6,25,1.6,1.4,1.464165225
23 | 6,25,1.8,0.7,2.77622403
24 | 6,25,1.8,1.05,2.725128695
25 | 6,25,1.8,1.4,3.181724645
26 | 6,25,2,0.7,4.561826505
27 | 6,25,2,1.05,4.80682388
28 | 6,25,2,1.4,5.21743359
29 | 6,25,2.2,0.7,5.9481448
30 | 6,25,2.2,1.05,10.57081004
31 | 6,25,2.2,1.4,7.91761853
32 | 6,25,2.4,0.7,9.40534553
33 | 6,25,2.4,1.05,25.05585347
34 | 6,25,2.4,1.4,2.401968915
35 | 6,50,1.5,0.7,0.911408525
36 | 6,50,1.5,1.05,0.80713156
37 | 6,50,1.5,1.4,0.600585055
38 | 6,50,1.7,0.7,1.280064085
39 | 6,50,1.7,1.05,1.856575375
40 | 6,50,1.7,1.4,1.22887476
41 | 6,50,1.9,0.7,1.670555225
42 | 6,50,1.9,1.05,1.733336745
43 | 6,50,1.9,1.4,3.17129197
44 | 6,50,2.1,0.7,16.00978343
45 | 6,50,2.1,1.05,4.159978755
46 | 6,50,2.1,1.4,4.324624925
47 | 6,50,2.3,0.7,3.208368345
48 | 6,50,2.3,1.05,8.36155203
49 | 6,50,2.3,1.4,10.26593447
50 | 6,50,2.5,0.7,6.389414035
51 | 6,50,2.5,1.05,23.65177359
52 | 6,50,2.5,1.4,8.263866695
53 | 6,75,1.6,0.7,0.56218211
54 | 6,75,1.6,1.05,0.62222879
55 | 6,75,1.6,1.4,1.129298705
56 | 6,75,1.8,0.7,0.49796538
57 | 6,75,1.8,1.05,0.955209465
58 | 6,75,1.8,1.4,1.307267365
59 | 6,75,2,0.7,0.70437207
60 | 6,75,2,1.05,1.844288285
61 | 6,75,2,1.4,14.5440842
62 | 6,75,2.2,0.7,2.28417177
63 | 6,75,2.2,1.05,13.167347
64 | 6,75,2.2,1.4,1.245206375
65 | 6,75,2.4,0.7,2.414646575
66 | 6,75,2.4,1.05,4.68270893
67 | 6,75,2.4,1.4,17.76211779
68 | 6,100,1.5,0.7,1.761419435
69 | 6,100,1.5,1.05,2.038989845
70 | 6,100,1.5,1.4,1.74145084
71 | 6,100,1.7,0.7,2.42913609
72 | 6,100,1.7,1.05,2.639451635
73 | 6,100,1.7,1.4,1.83067924
74 | 6,100,1.9,0.7,2.248745235
75 | 6,100,1.9,1.05,1.874484205
76 | 6,100,1.9,1.4,13.84331533
77 | 6,100,2.1,0.7,16.77800679
78 | 6,100,2.1,1.05,14.0173637
79 | 6,100,2.1,1.4,13.71482183
80 | 6,100,2.3,0.7,19.72634797
81 | 6,100,2.3,1.05,18.3145061
82 | 6,100,2.3,1.4,17.49456493
83 | 6,100,2.5,0.7,20.8723812
84 | 6,100,2.5,1.05,19.19430635
85 | 6,100,2.5,1.4,23.2304953
86 | 6,125,1.6,0.7,1.4166949
87 | 6,125,1.6,1.05,1.25473141
88 | 6,125,1.6,1.4,1.077183595
89 | 6,125,1.8,0.7,0.92180845
90 | 6,125,1.8,1.05,19.82673045
91 | 6,125,1.8,1.4,13.35430999
92 | 6,125,2,0.7,0.93520637
93 | 6,125,2,1.05,17.48321147
94 | 6,125,2,1.4,18.91401758
95 | 6,125,2.2,0.7,16.74357002
96 | 6,125,2.2,1.05,14.11770208
97 | 6,125,2.2,1.4,20.73650342
98 | 6,125,2.4,0.7,24.20558826
99 | 6,125,2.4,1.05,18.50428299
100 | 6,125,2.4,1.4,11.64011261
101 | 6,150,1.5,0.7,0.9721676
102 | 6,150,1.5,1.05,0.73339565
103 | 6,150,1.5,1.4,1.241250395
104 | 6,150,1.7,0.7,13.11436394
105 | 6,150,1.7,1.05,1.247425855
106 | 6,150,1.7,1.4,16.19996728
107 | 6,150,1.9,0.7,1.27661952
108 | 6,150,1.9,1.05,15.55778051
109 | 6,150,1.9,1.4,19.00199987
110 | 6,150,2.1,0.7,16.41042719
111 | 6,150,2.1,1.05,18.51288508
112 | 6,150,2.1,1.4,17.18467533
113 | 6,150,2.3,0.7,18.49939449
114 | 6,150,2.3,1.05,19.91431001
115 | 6,150,2.3,1.4,18.03172451
116 | 6,150,2.5,0.7,20.95858163
117 | 6,150,2.5,1.05,18.51341047
118 | 6,150,2.5,1.4,20.43322464
119 | 6,175,1.6,0.7,14.9117021
120 | 6,175,1.6,1.05,20.1909625
121 | 6,175,1.6,1.4,16.86847969
122 | 6,175,1.8,0.7,16.25303968
123 | 6,175,1.8,1.05,18.12488509
124 | 6,175,1.8,1.4,20.35152813
125 | 6,175,2,0.7,18.03436356
126 | 6,175,2,1.05,17.5264161
127 | 6,175,2,1.4,18.38228342
128 | 6,175,2.2,0.7,21.11426736
129 | 6,175,2.2,1.05,19.70686035
130 | 6,175,2.2,1.4,13.48899982
131 | 6,175,2.4,0.7,22.93310771
132 | 6,175,2.4,1.05,16.38889589
133 | 6,175,2.4,1.4,14.4047196
134 | 6,200,1.5,0.7,14.96221898
135 | 6,200,1.5,1.05,14.65657727
136 | 6,200,1.5,1.4,20.22036666
137 | 6,200,1.7,0.7,16.78959272
138 | 6,200,1.7,1.05,25.21025957
139 | 6,200,1.7,1.4,19.40045977
140 | 6,200,1.9,0.7,17.83614353
141 | 6,200,1.9,1.05,24.39245006
142 | 6,200,1.9,1.4,23.56283371
143 | 6,200,2.1,0.7,21.86809238
144 | 6,200,2.1,1.05,20.21042247
145 | 6,200,2.1,1.4,20.31956724
146 | 6,200,2.3,0.7,24.84958171
147 | 6,200,2.3,1.05,21.80051696
148 | 6,200,2.3,1.4,19.66597793
149 | 6,200,2.5,0.7,25.99898896
150 | 6,200,2.5,1.05,22.06231878
151 | 6,200,2.5,1.4,21.03835901
152 | 8,0,1.5,0.7,2.166798125
153 | 8,0,1.5,1.05,1.994880795
154 | 8,0,1.5,1.4,1.991507755
155 | 8,0,1.7,0.7,2.413471505
156 | 8,0,1.7,1.05,4.139476235
157 | 8,0,1.7,1.4,2.899414305
158 | 8,0,1.9,0.7,5.52009985
159 | 8,0,1.9,1.05,7.354356745
160 | 8,0,1.9,1.4,5.33195838
161 | 8,0,2.1,0.7,19.99789002
162 | 8,0,2.1,1.05,8.488290225
163 | 8,0,2.1,1.4,1.915515405
164 | 8,0,2.3,0.7,25.3582639
165 | 8,0,2.3,1.05,2.185516945
166 | 8,0,2.3,1.4,1.437887965
167 | 8,0,2.5,0.7,31.09783404
168 | 8,0,2.5,1.05,1.890725635
169 | 8,0,2.5,1.4,1.379164485
170 | 8,25,1.6,0.7,6.022568495
171 | 8,25,1.6,1.05,5.899901415
172 | 8,25,1.6,1.4,5.25760901
173 | 8,25,1.8,0.7,5.487119255
174 | 8,25,1.8,1.05,9.138851555
175 | 8,25,1.8,1.4,12.00985826
176 | 8,25,2,0.7,9.76006525
177 | 8,25,2,1.05,10.70192285
178 | 8,25,2,1.4,14.18665702
179 | 8,25,2.2,0.7,13.1504635
180 | 8,25,2.2,1.05,16.83781416
181 | 8,25,2.2,1.4,19.7309444
182 | 8,25,2.4,0.7,22.05845141
183 | 8,25,2.4,1.05,27.7911449
184 | 8,25,2.4,1.4,1.651555005
185 | 8,50,1.5,0.7,0.606925755
186 | 8,50,1.5,1.05,0.967633045
187 | 8,50,1.5,1.4,1.01771189
188 | 8,50,1.7,0.7,1.83865145
189 | 8,50,1.7,1.05,1.946333395
190 | 8,50,1.7,1.4,1.256027065
191 | 8,50,1.9,0.7,2.446088645
192 | 8,50,1.9,1.05,1.75677089
193 | 8,50,1.9,1.4,2.89525274
194 | 8,50,2.1,0.7,4.16988717
195 | 8,50,2.1,1.05,3.47753655
196 | 8,50,2.1,1.4,4.749716225
197 | 8,50,2.3,0.7,6.054145875
198 | 8,50,2.3,1.05,5.279049785
199 | 8,50,2.3,1.4,9.993393195
200 | 8,50,2.5,0.7,7.144120635
201 | 8,50,2.5,1.05,23.23553062
202 | 8,50,2.5,1.4,1.789061775
203 | 8,75,1.6,0.7,2.301961535
204 | 8,75,1.6,1.05,2.517171365
205 | 8,75,1.6,1.4,1.897846855
206 | 8,75,1.8,0.7,3.146094015
207 | 8,75,1.8,1.05,12.57974175
208 | 8,75,1.8,1.4,4.87327289
209 | 8,75,2,0.7,5.749459945
210 | 8,75,2,1.05,3.877424105
211 | 8,75,2,1.4,12.78541704
212 | 8,75,2.2,0.7,20.21204998
213 | 8,75,2.2,1.05,20.51460132
214 | 8,75,2.2,1.4,17.76513275
215 | 8,75,2.4,0.7,24.44423375
216 | 8,75,2.4,1.05,26.61614424
217 | 8,75,2.4,1.4,32.61966052
218 | 8,100,1.5,0.7,0.758866325
219 | 8,100,1.5,1.05,1.47033628
220 | 8,100,1.5,1.4,1.371727985
221 | 8,100,1.7,0.7,1.66019497
222 | 8,100,1.7,1.05,1.94218581
223 | 8,100,1.7,1.4,2.526298485
224 | 8,100,1.9,0.7,18.46355991
225 | 8,100,1.9,1.05,19.95083256
226 | 8,100,1.9,1.4,15.28451124
227 | 8,100,2.1,0.7,24.44324883
228 | 8,100,2.1,1.05,26.5761516
229 | 8,100,2.1,1.4,15.81249026
230 | 8,100,2.3,0.7,26.55366339
231 | 8,100,2.3,1.05,31.76165247
232 | 8,100,2.3,1.4,24.51326645
233 | 8,100,2.5,0.7,33.05713503
234 | 8,100,2.5,1.05,39.22260624
235 | 8,100,2.5,1.4,33.98515161
236 | 8,125,1.6,0.7,1.46008469
237 | 8,125,1.6,1.05,16.95513861
238 | 8,125,1.6,1.4,16.54564365
239 | 8,125,1.8,0.7,16.49016369
240 | 8,125,1.8,1.05,17.97152167
241 | 8,125,1.8,1.4,17.14974079
242 | 8,125,2,0.7,20.02394696
243 | 8,125,2,1.05,21.33621444
244 | 8,125,2,1.4,16.41212001
245 | 8,125,2.2,0.7,21.7261251
246 | 8,125,2.2,1.05,24.28895599
247 | 8,125,2.2,1.4,19.99643012
248 | 8,125,2.4,0.7,26.74357163
249 | 8,125,2.4,1.05,23.40047254
250 | 8,125,2.4,1.4,22.66353705
251 | 8,150,1.5,0.7,15.76503932
252 | 8,150,1.5,1.05,14.71129282
253 | 8,150,1.5,1.4,16.81053048
254 | 8,150,1.7,0.7,18.38869488
255 | 8,150,1.7,1.05,16.23362792
256 | 8,150,1.7,1.4,25.59678803
257 | 8,150,1.9,0.7,25.06795204
258 | 8,150,1.9,1.05,26.46985809
259 | 8,150,1.9,1.4,21.43531816
260 | 8,150,2.1,0.7,23.84790793
261 | 8,150,2.1,1.05,25.75089201
262 | 8,150,2.1,1.4,29.20332858
263 | 8,150,2.3,0.7,30.2897043
264 | 8,150,2.3,1.05,27.00954622
265 | 8,150,2.3,1.4,26.35897031
266 | 8,150,2.5,0.7,33.28026619
267 | 8,150,2.5,1.05,27.0706717
268 | 8,150,2.5,1.4,22.86899577
269 | 8,175,1.6,0.7,14.02868913
270 | 8,175,1.6,1.05,20.02470101
271 | 8,175,1.6,1.4,18.21306351
272 | 8,175,1.8,0.7,17.80463232
273 | 8,175,1.8,1.05,22.26115859
274 | 8,175,1.8,1.4,17.92242904
275 | 8,175,2,0.7,20.52712839
276 | 8,175,2,1.05,26.43476973
277 | 8,175,2,1.4,30.62884765
278 | 8,175,2.2,0.7,23.87569613
279 | 8,175,2.2,1.05,27.13847058
280 | 8,175,2.2,1.4,21.95018747
281 | 8,175,2.4,0.7,24.93067951
282 | 8,175,2.4,1.05,26.48945395
283 | 8,175,2.4,1.4,19.63858649
284 | 8,200,1.5,0.7,18.25283987
285 | 8,200,1.5,1.05,13.14853798
286 | 8,200,1.5,1.4,16.80890649
287 | 8,200,1.7,0.7,21.76204542
288 | 8,200,1.7,1.05,19.95473314
289 | 8,200,1.7,1.4,28.41190008
290 | 8,200,1.9,0.7,23.99916942
291 | 8,200,1.9,1.05,22.51447703
292 | 8,200,1.9,1.4,22.81980479
293 | 8,200,2.1,0.7,26.42427769
294 | 8,200,2.1,1.05,23.00800265
295 | 8,200,2.1,1.4,20.86879417
296 | 8,200,2.3,0.7,29.49732455
297 | 8,200,2.3,1.05,23.05055265
298 | 8,200,2.3,1.4,19.78231941
299 | 8,200,2.5,0.7,31.08251055
300 | 8,200,2.5,1.05,21.36694488
301 | 8,200,2.5,1.4,2.45140065
302 | 10,0,1.5,0.7,2.37024162
303 | 10,0,1.5,1.05,2.465344685
304 | 10,0,1.5,1.4,1.902319725
305 | 10,0,1.7,0.7,15.43857988
306 | 10,0,1.7,1.05,5.526309155
307 | 10,0,1.7,1.4,1.8278817
308 | 10,0,1.9,0.7,20.12095256
309 | 10,0,1.9,1.05,1.909063605
310 | 10,0,1.9,1.4,1.835869325
311 | 10,0,2.1,0.7,10.91442134
312 | 10,0,2.1,1.05,1.472520425
313 | 10,0,2.1,1.4,1.497575625
314 | 10,0,2.3,0.7,37.2159395
315 | 10,0,2.3,1.05,1.447227795
316 | 10,0,2.3,1.4,1.27589006
317 | 10,0,2.5,0.7,1.8790985
318 | 10,0,2.5,1.05,1.43131911
319 | 10,0,2.5,1.4,1.320330185
320 | 10,25,1.6,0.7,3.697126945
321 | 10,25,1.6,1.05,6.04913388
322 | 10,25,1.6,1.4,4.04881531
323 | 10,25,1.8,0.7,6.947829155
324 | 10,25,1.8,1.05,7.81723548
325 | 10,25,1.8,1.4,8.20748261
326 | 10,25,2,0.7,11.28420063
327 | 10,25,2,1.05,13.41803126
328 | 10,25,2,1.4,1.814660625
329 | 10,25,2.2,0.7,28.61980338
330 | 10,25,2.2,1.05,16.47586264
331 | 10,25,2.2,1.4,1.701154805
332 | 10,25,2.4,0.7,40.1337563
333 | 10,25,2.4,1.05,39.37679974
334 | 10,25,2.4,1.4,1.47408317
335 | 10,50,1.5,0.7,1.260503925
336 | 10,50,1.5,1.05,2.05605895
337 | 10,50,1.5,1.4,1.974935335
338 | 10,50,1.7,0.7,2.454350585
339 | 10,50,1.7,1.05,3.56727412
340 | 10,50,1.7,1.4,2.616819025
341 | 10,50,1.9,0.7,4.691029555
342 | 10,50,1.9,1.05,4.199239655
343 | 10,50,1.9,1.4,6.88366734
344 | 10,50,2.1,0.7,18.14836186
345 | 10,50,2.1,1.05,6.48318302
346 | 10,50,2.1,1.4,1.938137185
347 | 10,50,2.3,0.7,21.43247518
348 | 10,50,2.3,1.05,28.25571177
349 | 10,50,2.3,1.4,1.68978614
350 | 10,50,2.5,0.7,35.44502725
351 | 10,50,2.5,1.05,1.77016859
352 | 10,50,2.5,1.4,1.362320765
353 | 10,75,1.6,0.7,4.15126126
354 | 10,75,1.6,1.05,3.87517276
355 | 10,75,1.6,1.4,14.52642432
356 | 10,75,1.8,0.7,14.98008309
357 | 10,75,1.8,1.05,14.8352227
358 | 10,75,1.8,1.4,16.80488933
359 | 10,75,2,0.7,21.10158681
360 | 10,75,2,1.05,21.2147152
361 | 10,75,2,1.4,23.21411872
362 | 10,75,2.2,0.7,22.43277946
363 | 10,75,2.2,1.05,29.38565798
364 | 10,75,2.2,1.4,30.7444312
365 | 10,75,2.4,0.7,26.05758587
366 | 10,75,2.4,1.05,36.75305072
367 | 10,75,2.4,1.4,2.00176375
368 | 10,100,1.5,0.7,17.06507599
369 | 10,100,1.5,1.05,18.94962245
370 | 10,100,1.5,1.4,2.53324977
371 | 10,100,1.7,0.7,17.69646839
372 | 10,100,1.7,1.05,21.07823388
373 | 10,100,1.7,1.4,17.29897621
374 | 10,100,1.9,0.7,21.8432857
375 | 10,100,1.9,1.05,26.46207417
376 | 10,100,1.9,1.4,22.38864355
377 | 10,100,2.1,0.7,29.71010653
378 | 10,100,2.1,1.05,36.53765208
379 | 10,100,2.1,1.4,26.70796294
380 | 10,100,2.3,0.7,29.27685436
381 | 10,100,2.3,1.05,31.96874948
382 | 10,100,2.3,1.4,35.05213832
383 | 10,100,2.5,0.7,30.76561245
384 | 10,100,2.5,1.05,25.82152161
385 | 10,100,2.5,1.4,1.886956525
386 | 10,125,1.6,0.7,11.89656028
387 | 10,125,1.6,1.05,14.16437352
388 | 10,125,1.6,1.4,12.73031194
389 | 10,125,1.8,0.7,18.74037221
390 | 10,125,1.8,1.05,20.30153969
391 | 10,125,1.8,1.4,17.24241632
392 | 10,125,2,0.7,27.547945
393 | 10,125,2,1.05,30.90801587
394 | 10,125,2,1.4,32.79220122
395 | 10,125,2.2,0.7,28.65176546
396 | 10,125,2.2,1.05,30.12516931
397 | 10,125,2.2,1.4,40.38833175
398 | 10,125,2.4,0.7,33.29229334
399 | 10,125,2.4,1.05,28.87825012
400 | 10,125,2.4,1.4,26.01214699
401 | 10,150,1.5,0.7,5.07192638
402 | 10,150,1.5,1.05,13.95909786
403 | 10,150,1.5,1.4,13.76132647
404 | 10,150,1.7,0.7,23.06744641
405 | 10,150,1.7,1.05,28.86552639
406 | 10,150,1.7,1.4,17.14295674
407 | 10,150,1.9,0.7,29.77538936
408 | 10,150,1.9,1.05,30.79088801
409 | 10,150,1.9,1.4,37.60315188
410 | 10,150,2.1,0.7,29.89276263
411 | 10,150,2.1,1.05,28.74855898
412 | 10,150,2.1,1.4,25.49029507
413 | 10,150,2.3,0.7,33.5952381
414 | 10,150,2.3,1.05,21.73779973
415 | 10,150,2.3,1.4,18.34570169
416 | 10,150,2.5,0.7,31.77714475
417 | 10,150,2.5,1.05,2.180528875
418 | 10,150,2.5,1.4,19.60171103
419 | 10,175,1.6,0.7,22.24623455
420 | 10,175,1.6,1.05,25.81931546
421 | 10,175,1.6,1.4,23.0666609
422 | 10,175,1.8,0.7,24.879369
423 | 10,175,1.8,1.05,28.75976827
424 | 10,175,1.8,1.4,39.76854173
425 | 10,175,2,0.7,24.75957759
426 | 10,175,2,1.05,23.18499775
427 | 10,175,2,1.4,20.94242387
428 | 10,175,2.2,0.7,26.75827735
429 | 10,175,2.2,1.05,21.54047674
430 | 10,175,2.2,1.4,14.86050651
431 | 10,175,2.4,0.7,28.79159634
432 | 10,175,2.4,1.05,16.63769652
433 | 10,175,2.4,1.4,1.94975736
434 | 10,200,1.5,0.7,26.57207203
435 | 10,200,1.5,1.05,30.13790411
436 | 10,200,1.5,1.4,32.46480467
437 | 10,200,1.7,0.7,25.74684893
438 | 10,200,1.7,1.05,23.47800227
439 | 10,200,1.7,1.4,48.06433862
440 | 10,200,1.9,0.7,26.99685194
441 | 10,200,1.9,1.05,21.68353378
442 | 10,200,1.9,1.4,20.33871995
443 | 10,200,2.1,0.7,30.02929235
444 | 10,200,2.1,1.05,20.59749653
445 | 10,200,2.1,1.4,17.54910594
446 | 10,200,2.3,0.7,33.18631766
447 | 10,200,2.3,1.05,19.73466771
448 | 10,200,2.3,1.4,1.543837075
449 | 10,200,2.5,0.7,32.53028312
450 | 10,200,2.5,1.05,14.83241934
451 | 10,200,2.5,1.4,2.119129425
452 | 12,0,1.5,0.7,2.710077215
453 | 12,0,1.5,1.05,3.382610125
454 | 12,0,1.5,1.4,1.879645105
455 | 12,0,1.7,0.7,4.848634005
456 | 12,0,1.7,1.05,2.042897735
457 | 12,0,1.7,1.4,1.614795695
458 | 12,0,1.9,0.7,5.787094095
459 | 12,0,1.9,1.05,1.45487696
460 | 12,0,1.9,1.4,1.37488409
461 | 12,0,2.1,0.7,2.53720665
462 | 12,0,2.1,1.05,1.54752002
463 | 12,0,2.1,1.4,1.268136685
464 | 12,0,2.3,0.7,1.596147205
465 | 12,0,2.3,1.05,1.311584905
466 | 12,0,2.3,1.4,1.538662875
467 | 12,0,2.5,0.7,1.600953855
468 | 12,0,2.5,1.05,1.140192515
469 | 12,0,2.5,1.4,1.094330115
470 | 12,25,1.6,0.7,3.283366465
471 | 12,25,1.6,1.05,3.947534815
472 | 12,25,1.6,1.4,4.05098771
473 | 12,25,1.8,0.7,21.23499458
474 | 12,25,1.8,1.05,6.942361015
475 | 12,25,1.8,1.4,1.79896845
476 | 12,25,2,0.7,29.74137126
477 | 12,25,2,1.05,19.94002746
478 | 12,25,2,1.4,1.476944155
479 | 12,25,2.2,0.7,46.13158384
480 | 12,25,2.2,1.05,1.484047405
481 | 12,25,2.2,1.4,1.277472645
482 | 12,25,2.4,0.7,50.29350783
483 | 12,25,2.4,1.05,1.366536405
484 | 12,25,2.4,1.4,1.221183055
485 | 12,50,1.5,0.7,3.0627593
486 | 12,50,1.5,1.05,2.44075017
487 | 12,50,1.5,1.4,3.972113785
488 | 12,50,1.7,0.7,3.64365483
489 | 12,50,1.7,1.05,13.44766539
490 | 12,50,1.7,1.4,4.728461135
491 | 12,50,1.9,0.7,20.98779674
492 | 12,50,1.9,1.05,22.76612895
493 | 12,50,1.9,1.4,6.8981237
494 | 12,50,2.1,0.7,30.82322728
495 | 12,50,2.1,1.05,27.90628977
496 | 12,50,2.1,1.4,1.4467511
497 | 12,50,2.3,0.7,33.47814198
498 | 12,50,2.3,1.05,2.69682508
499 | 12,50,2.3,1.4,1.35477584
500 | 12,50,2.5,0.7,35.85667806
501 | 12,50,2.5,1.05,1.36576364
502 | 12,50,2.5,1.4,1.24842055
503 | 12,75,1.6,0.7,16.22990609
504 | 12,75,1.6,1.05,17.18445948
505 | 12,75,1.6,1.4,12.94250759
506 | 12,75,1.8,0.7,18.88295428
507 | 12,75,1.8,1.05,22.02190688
508 | 12,75,1.8,1.4,16.51580642
509 | 12,75,2,0.7,21.58189617
510 | 12,75,2,1.05,32.22906249
511 | 12,75,2,1.4,2.467981245
512 | 12,75,2.2,0.7,35.79146501
513 | 12,75,2.2,1.05,40.86827958
514 | 12,75,2.2,1.4,1.49421229
515 | 12,75,2.4,0.7,43.14528141
516 | 12,75,2.4,1.05,46.92091161
517 | 12,75,2.4,1.4,1.48225321
518 | 12,100,1.5,0.7,14.69444426
519 | 12,100,1.5,1.05,11.49054846
520 | 12,100,1.5,1.4,10.61172675
521 | 12,100,1.7,0.7,18.05369099
522 | 12,100,1.7,1.05,19.5094086
523 | 12,100,1.7,1.4,16.65803788
524 | 12,100,1.9,0.7,30.06304252
525 | 12,100,1.9,1.05,32.71835599
526 | 12,100,1.9,1.4,20.96172303
527 | 12,100,2.1,0.7,40.47378346
528 | 12,100,2.1,1.05,42.67655245
529 | 12,100,2.1,1.4,2.02024529
530 | 12,100,2.3,0.7,40.29737022
531 | 12,100,2.3,1.05,34.28081129
532 | 12,100,2.3,1.4,2.70949508
533 | 12,100,2.5,0.7,40.6362777
534 | 12,100,2.5,1.05,1.86482408
535 | 12,100,2.5,1.4,1.36719433
536 | 12,125,1.6,0.7,15.60811198
537 | 12,125,1.6,1.05,20.18824631
538 | 12,125,1.6,1.4,16.34644991
539 | 12,125,1.8,0.7,31.44584563
540 | 12,125,1.8,1.05,32.27993772
541 | 12,125,1.8,1.4,28.35393028
542 | 12,125,2,0.7,32.82589566
543 | 12,125,2,1.05,36.33068406
544 | 12,125,2,1.4,41.07478159
545 | 12,125,2.2,0.7,34.05089677
546 | 12,125,2.2,1.05,25.4602824
547 | 12,125,2.2,1.4,1.754954565
548 | 12,125,2.4,0.7,35.46253183
549 | 12,125,2.4,1.05,24.91308985
550 | 12,125,2.4,1.4,1.78455199
551 | 12,150,1.5,0.7,20.09582272
552 | 12,150,1.5,1.05,24.73057766
553 | 12,150,1.5,1.4,21.53784942
554 | 12,150,1.7,0.7,29.63537291
555 | 12,150,1.7,1.05,35.66341558
556 | 12,150,1.7,1.4,34.86198284
557 | 12,150,1.9,0.7,30.62289498
558 | 12,150,1.9,1.05,29.48954875
559 | 12,150,1.9,1.4,49.25078791
560 | 12,150,2.1,0.7,30.30987612
561 | 12,150,2.1,1.05,22.4073812
562 | 12,150,2.1,1.4,1.83697897
563 | 12,150,2.3,0.7,32.24994845
564 | 12,150,2.3,1.05,1.85980549
565 | 12,150,2.3,1.4,2.899684285
566 | 12,150,2.5,0.7,32.54542221
567 | 12,150,2.5,1.05,1.556686305
568 | 12,150,2.5,1.4,1.67768615
569 | 12,175,1.6,0.7,29.88577368
570 | 12,175,1.6,1.05,34.00182953
571 | 12,175,1.6,1.4,43.47975901
572 | 12,175,1.8,0.7,28.4906872
573 | 12,175,1.8,1.05,23.88152443
574 | 12,175,1.8,1.4,17.40448202
575 | 12,175,2,0.7,29.69113837
576 | 12,175,2,1.05,20.3410298
577 | 12,175,2,1.4,1.767964575
578 | 12,175,2.2,0.7,32.58077756
579 | 12,175,2.2,1.05,1.811359785
580 | 12,175,2.2,1.4,1.856395865
581 | 12,175,2.4,0.7,34.33571139
582 | 12,175,2.4,1.05,2.732696605
583 | 12,175,2.4,1.4,1.257684545
584 | 12,200,1.5,0.7,41.03767786
585 | 12,200,1.5,1.05,36.26267399
586 | 12,200,1.5,1.4,41.56020183
587 | 12,200,1.7,0.7,29.52654878
588 | 12,200,1.7,1.05,22.87250266
589 | 12,200,1.7,1.4,22.98706561
590 | 12,200,1.9,0.7,29.24994097
591 | 12,200,1.9,1.05,15.20724601
592 | 12,200,1.9,1.4,1.7438969
593 | 12,200,2.1,0.7,32.12768047
594 | 12,200,2.1,1.05,1.857889805
595 | 12,200,2.1,1.4,1.8860728
596 | 12,200,2.3,0.7,41.87429944
597 | 12,200,2.3,1.05,1.331659865
598 | 12,200,2.3,1.4,1.578600555
599 | 12,200,2.5,0.7,2.029597225
600 | 12,200,2.5,1.05,1.240041765
601 | 12,200,2.5,1.4,1.05594173
602 | 6,0,1.5,0.7,1.276972545
603 | 6,0,1.5,1.05,1.40763682
604 | 6,0,1.5,1.4,1.22198518
605 | 6,0,1.7,0.7,2.57058053
606 | 6,0,1.7,1.05,2.907952235
607 | 6,0,1.7,1.4,2.61173793
608 | 6,0,1.9,0.7,3.623287735
609 | 6,0,1.9,1.05,4.861726245
610 | 6,0,1.9,1.4,4.685755775
611 | 6,0,2.1,0.7,17.24322518
612 | 6,0,2.1,1.05,9.14252845
613 | 6,0,2.1,1.4,7.22210682
614 | 6,0,2.3,0.7,21.77185393
615 | 6,0,2.3,1.05,23.58202299
616 | 6,0,2.3,1.4,1.90549201
617 | 6,0,2.5,0.7,24.97148829
618 | 6,0,2.5,1.05,30.27901031
619 | 6,0,2.5,1.4,1.921319615
620 | 6,25,1.6,0.7,1.45386602
621 | 6,25,1.6,1.05,1.67985751
622 | 6,25,1.6,1.4,2.359961675
623 | 6,25,1.8,0.7,2.30229922
624 | 6,25,1.8,1.05,3.318807365
625 | 6,25,1.8,1.4,2.73639597
626 | 6,25,2,0.7,4.76432071
627 | 6,25,2,1.05,4.69724476
628 | 6,25,2,1.4,5.22735781
629 | 6,25,2.2,0.7,7.4399274
630 | 6,25,2.2,1.05,11.05566429
631 | 6,25,2.2,1.4,8.14951516
632 | 6,25,2.4,0.7,14.67360446
633 | 6,25,2.4,1.05,14.64715217
634 | 6,25,2.4,1.4,24.38383386
635 | 6,50,1.5,0.7,0.581434425
636 | 6,50,1.5,1.05,0.780913535
637 | 6,50,1.5,1.4,0.724918585
638 | 6,50,1.7,0.7,1.019549845
639 | 6,50,1.7,1.05,1.279980775
640 | 6,50,1.7,1.4,0.890335145
641 | 6,50,1.9,0.7,2.180233015
642 | 6,50,1.9,1.05,2.433453355
643 | 6,50,1.9,1.4,1.64671733
644 | 6,50,2.1,0.7,4.20246107
645 | 6,50,2.1,1.05,4.046561015
646 | 6,50,2.1,1.4,4.59804905
647 | 6,50,2.3,0.7,4.73968986
648 | 6,50,2.3,1.05,4.08230125
649 | 6,50,2.3,1.4,17.49258419
650 | 6,50,2.5,0.7,8.01391438
651 | 6,50,2.5,1.05,22.65485255
652 | 6,50,2.5,1.4,10.29284302
653 | 6,75,1.6,0.7,0.316275755
654 | 6,75,1.6,1.05,1.00228558
655 | 6,75,1.6,1.4,1.259409195
656 | 6,75,1.8,0.7,0.85857833
657 | 6,75,1.8,1.05,0.368262385
658 | 6,75,1.8,1.4,2.12653032
659 | 6,75,2,0.7,1.450135035
660 | 6,75,2,1.05,1.93924519
661 | 6,75,2,1.4,2.47596343
662 | 6,75,2.2,0.7,2.43745487
663 | 6,75,2.2,1.05,3.764273615
664 | 6,75,2.2,1.4,17.23894601
665 | 6,75,2.4,0.7,2.74980348
666 | 6,75,2.4,1.05,6.161012775
667 | 6,75,2.4,1.4,16.50240024
668 | 6,100,1.5,0.7,1.043984375
669 | 6,100,1.5,1.05,0.929859555
670 | 6,100,1.5,1.4,1.667230665
671 | 6,100,1.7,0.7,1.86839673
672 | 6,100,1.7,1.05,1.58847147
673 | 6,100,1.7,1.4,2.522915755
674 | 6,100,1.9,0.7,1.657600975
675 | 6,100,1.9,1.05,2.571962555
676 | 6,100,1.9,1.4,13.56843395
677 | 6,100,2.1,0.7,1.67980929
678 | 6,100,2.1,1.05,13.92808819
679 | 6,100,2.1,1.4,15.59499058
680 | 6,100,2.3,0.7,16.99064027
681 | 6,100,2.3,1.05,16.70905174
682 | 6,100,2.3,1.4,14.64180982
683 | 6,100,2.5,0.7,20.82913473
684 | 6,100,2.5,1.05,19.53450837
685 | 6,100,2.5,1.4,21.9617933
686 | 6,125,1.6,0.7,1.58774253
687 | 6,125,1.6,1.05,1.96536155
688 | 6,125,1.6,1.4,1.07172349
689 | 6,125,1.8,0.7,13.29645001
690 | 6,125,1.8,1.05,14.75259793
691 | 6,125,1.8,1.4,2.021798275
692 | 6,125,2,0.7,1.64178562
693 | 6,125,2,1.05,17.50446985
694 | 6,125,2,1.4,15.76786807
695 | 6,125,2.2,0.7,20.49097717
696 | 6,125,2.2,1.05,18.51707968
697 | 6,125,2.2,1.4,24.61952226
698 | 6,125,2.4,0.7,22.96424488
699 | 6,125,2.4,1.05,18.76875767
700 | 6,125,2.4,1.4,21.97821605
701 | 6,150,1.5,0.7,0.735884685
702 | 6,150,1.5,1.05,0.937172305
703 | 6,150,1.5,1.4,17.35790708
704 | 6,150,1.7,0.7,1.01669435
705 | 6,150,1.7,1.05,14.79832094
706 | 6,150,1.7,1.4,19.47943298
707 | 6,150,1.9,0.7,0.537630855
708 | 6,150,1.9,1.05,16.20815219
709 | 6,150,1.9,1.4,17.5141618
710 | 6,150,2.1,0.7,14.95439099
711 | 6,150,2.1,1.05,17.20666344
712 | 6,150,2.1,1.4,14.66813147
713 | 6,150,2.3,0.7,14.02922575
714 | 6,150,2.3,1.05,18.46880396
715 | 6,150,2.3,1.4,14.86324414
716 | 6,150,2.5,0.7,20.19911768
717 | 6,150,2.5,1.05,18.11758006
718 | 6,150,2.5,1.4,17.82099566
719 | 6,175,1.6,0.7,14.39788533
720 | 6,175,1.6,1.05,23.82705059
721 | 6,175,1.6,1.4,4.48566731
722 | 6,175,1.8,0.7,16.17696459
723 | 6,175,1.8,1.05,17.10172416
724 | 6,175,1.8,1.4,22.86955112
725 | 6,175,2,0.7,17.10096512
726 | 6,175,2,1.05,17.30324628
727 | 6,175,2,1.4,16.31177215
728 | 6,175,2.2,0.7,19.72507122
729 | 6,175,2.2,1.05,19.31625969
730 | 6,175,2.2,1.4,12.40292786
731 | 6,175,2.4,0.7,20.67206613
732 | 6,175,2.4,1.05,18.6453617
733 | 6,175,2.4,1.4,14.50308112
734 | 6,200,1.5,0.7,13.58270496
735 | 6,200,1.5,1.05,16.66536236
736 | 6,200,1.5,1.4,17.34488347
737 | 6,200,1.7,0.7,16.03773862
738 | 6,200,1.7,1.05,21.38700344
739 | 6,200,1.7,1.4,8.339999845
740 | 6,200,1.9,0.7,17.48027602
741 | 6,200,1.9,1.05,19.64039634
742 | 6,200,1.9,1.4,24.93052827
743 | 6,200,2.1,0.7,20.23693665
744 | 6,200,2.1,1.05,19.22200277
745 | 6,200,2.1,1.4,26.0766163
746 | 6,200,2.3,0.7,20.90731185
747 | 6,200,2.3,1.05,20.57744419
748 | 6,200,2.3,1.4,18.54291755
749 | 6,200,2.5,0.7,25.09903317
750 | 6,200,2.5,1.05,23.11410977
751 | 6,200,2.5,1.4,22.99321612
752 | 8,0,1.5,0.7,1.9446332
753 | 8,0,1.5,1.05,1.96282795
754 | 8,0,1.5,1.4,2.143893625
755 | 8,0,1.7,0.7,3.472810065
756 | 8,0,1.7,1.05,3.065404435
757 | 8,0,1.7,1.4,3.79572566
758 | 8,0,1.9,0.7,13.95284194
759 | 8,0,1.9,1.05,8.580193305
760 | 8,0,1.9,1.4,8.15384786
761 | 8,0,2.1,0.7,19.00737704
762 | 8,0,2.1,1.05,20.68608874
763 | 8,0,2.1,1.4,1.62434588
764 | 8,0,2.3,0.7,21.89507745
765 | 8,0,2.3,1.05,1.757085215
766 | 8,0,2.3,1.4,1.424017165
767 | 8,0,2.5,0.7,24.46751917
768 | 8,0,2.5,1.05,1.540877455
769 | 8,0,2.5,1.4,1.341181375
770 | 8,25,1.6,0.7,5.38905503
771 | 8,25,1.6,1.05,5.90264023
772 | 8,25,1.6,1.4,6.89180886
773 | 8,25,1.8,0.7,7.618696405
774 | 8,25,1.8,1.05,9.41505298
775 | 8,25,1.8,1.4,11.90886521
776 | 8,25,2,0.7,11.78357118
777 | 8,25,2,1.05,15.01512353
778 | 8,25,2,1.4,14.75295729
779 | 8,25,2.2,0.7,17.04183149
780 | 8,25,2.2,1.05,28.66445513
781 | 8,25,2.2,1.4,33.7412304
782 | 8,25,2.4,0.7,21.58190069
783 | 8,25,2.4,1.05,36.84660789
784 | 8,25,2.4,1.4,1.95455938
785 | 8,50,1.5,0.7,0.92954519
786 | 8,50,1.5,1.05,0.89535553
787 | 8,50,1.5,1.4,1.045444615
788 | 8,50,1.7,0.7,1.215050635
789 | 8,50,1.7,1.05,2.046947545
790 | 8,50,1.7,1.4,2.29423402
791 | 8,50,1.9,0.7,2.313059155
792 | 8,50,1.9,1.05,3.07566036
793 | 8,50,1.9,1.4,2.679152695
794 | 8,50,2.1,0.7,4.166636315
795 | 8,50,2.1,1.05,6.51946287
796 | 8,50,2.1,1.4,4.755243245
797 | 8,50,2.3,0.7,6.201702715
798 | 8,50,2.3,1.05,6.793946615
799 | 8,50,2.3,1.4,9.51737332
800 | 8,50,2.5,0.7,10.71189652
801 | 8,50,2.5,1.05,23.17921309
802 | 8,50,2.5,1.4,1.798908325
803 | 8,75,1.6,0.7,2.23340016
804 | 8,75,1.6,1.05,2.47774116
805 | 8,75,1.6,1.4,2.787914895
806 | 8,75,1.8,0.7,2.421117005
807 | 8,75,1.8,1.05,3.688520175
808 | 8,75,1.8,1.4,4.7896747
809 | 8,75,2,0.7,6.35860395
810 | 8,75,2,1.05,12.73013662
811 | 8,75,2,1.4,21.27402599
812 | 8,75,2.2,0.7,22.07383703
813 | 8,75,2.2,1.05,21.3394019
814 | 8,75,2.2,1.4,27.6013942
815 | 8,75,2.4,0.7,25.10776541
816 | 8,75,2.4,1.05,25.70566987
817 | 8,75,2.4,1.4,31.37445519
818 | 8,100,1.5,0.7,1.019029305
819 | 8,100,1.5,1.05,1.58020915
820 | 8,100,1.5,1.4,14.01092007
821 | 8,100,1.7,0.7,1.51086271
822 | 8,100,1.7,1.05,15.57963557
823 | 8,100,1.7,1.4,4.074202785
824 | 8,100,1.9,0.7,17.47108114
825 | 8,100,1.9,1.05,21.43939518
826 | 8,100,1.9,1.4,13.46396952
827 | 8,100,2.1,0.7,22.6591346
828 | 8,100,2.1,1.05,25.09494934
829 | 8,100,2.1,1.4,23.42045223
830 | 8,100,2.3,0.7,22.02141722
831 | 8,100,2.3,1.05,33.8393769
832 | 8,100,2.3,1.4,27.58580287
833 | 8,100,2.5,0.7,28.09455674
834 | 8,100,2.5,1.05,25.92875706
835 | 8,100,2.5,1.4,38.61747719
836 | 8,125,1.6,0.7,1.608277095
837 | 8,125,1.6,1.05,1.88662548
838 | 8,125,1.6,1.4,1.96556581
839 | 8,125,1.8,0.7,18.90941071
840 | 8,125,1.8,1.05,18.66237893
841 | 8,125,1.8,1.4,16.2365994
842 | 8,125,2,0.7,19.83811912
843 | 8,125,2,1.05,19.35939402
844 | 8,125,2,1.4,16.98806743
845 | 8,125,2.2,0.7,23.72583282
846 | 8,125,2.2,1.05,23.69980874
847 | 8,125,2.2,1.4,20.10351489
848 | 8,125,2.4,0.7,27.62149392
849 | 8,125,2.4,1.05,23.11138859
850 | 8,125,2.4,1.4,22.00852282
851 | 8,150,1.5,0.7,14.14813405
852 | 8,150,1.5,1.05,15.97590274
853 | 8,150,1.5,1.4,14.02973265
854 | 8,150,1.7,0.7,17.66117801
855 | 8,150,1.7,1.05,17.9145985
856 | 8,150,1.7,1.4,24.64793279
857 | 8,150,1.9,0.7,21.19465207
858 | 8,150,1.9,1.05,25.43435805
859 | 8,150,1.9,1.4,22.91095971
860 | 8,150,2.1,0.7,24.56941835
861 | 8,150,2.1,1.05,29.67098499
862 | 8,150,2.1,1.4,31.45434378
863 | 8,150,2.3,0.7,29.14769093
864 | 8,150,2.3,1.05,24.9799802
865 | 8,150,2.3,1.4,22.79001114
866 | 8,150,2.5,0.7,30.2694805
867 | 8,150,2.5,1.05,26.02590333
868 | 8,150,2.5,1.4,22.09771915
869 | 8,175,1.6,0.7,14.86447757
870 | 8,175,1.6,1.05,15.34889878
871 | 8,175,1.6,1.4,17.15394636
872 | 8,175,1.8,0.7,19.22644436
873 | 8,175,1.8,1.05,22.3220811
874 | 8,175,1.8,1.4,21.84879449
875 | 8,175,2,0.7,20.54196691
876 | 8,175,2,1.05,26.72247017
877 | 8,175,2,1.4,23.21122474
878 | 8,175,2.2,0.7,23.2372288
879 | 8,175,2.2,1.05,26.01821757
880 | 8,175,2.2,1.4,19.7140267
881 | 8,175,2.4,0.7,25.64329154
882 | 8,175,2.4,1.05,26.50821912
883 | 8,175,2.4,1.4,19.13097189
884 | 8,200,1.5,0.7,19.46653587
885 | 8,200,1.5,1.05,17.70714249
886 | 8,200,1.5,1.4,20.48806195
887 | 8,200,1.7,0.7,22.59783182
888 | 8,200,1.7,1.05,17.83604297
889 | 8,200,1.7,1.4,26.83157947
890 | 8,200,1.9,0.7,21.81119228
891 | 8,200,1.9,1.05,22.75433722
892 | 8,200,1.9,1.4,37.84739581
893 | 8,200,2.1,0.7,24.42026279
894 | 8,200,2.1,1.05,22.7586222
895 | 8,200,2.1,1.4,20.4701461
896 | 8,200,2.3,0.7,28.6733806
897 | 8,200,2.3,1.05,21.90408288
898 | 8,200,2.3,1.4,19.88240753
899 | 8,200,2.5,0.7,30.67888973
900 | 8,200,2.5,1.05,20.91814908
901 | 8,200,2.5,1.4,18.91718213
902 | 10,0,1.5,0.7,2.68874185
903 | 10,0,1.5,1.05,3.23580922
904 | 10,0,1.5,1.4,2.32434983
905 | 10,0,1.7,0.7,5.065268035
906 | 10,0,1.7,1.05,2.000912255
907 | 10,0,1.7,1.4,2.13757049
908 | 10,0,1.9,0.7,18.42932606
909 | 10,0,1.9,1.05,1.76896569
910 | 10,0,1.9,1.4,2.00238821
911 | 10,0,2.1,0.7,25.38932774
912 | 10,0,2.1,1.05,2.0328948
913 | 10,0,2.1,1.4,1.49585066
914 | 10,0,2.3,0.7,2.030986995
915 | 10,0,2.3,1.05,1.57286967
916 | 10,0,2.3,1.4,1.239645215
917 | 10,0,2.5,0.7,32.49767929
918 | 10,0,2.5,1.05,1.264971665
919 | 10,0,2.5,1.4,1.258602205
920 | 10,25,1.6,0.7,3.56665632
921 | 10,25,1.6,1.05,4.98293583
922 | 10,25,1.6,1.4,5.78953103
923 | 10,25,1.8,0.7,7.640590115
924 | 10,25,1.8,1.05,8.75533431
925 | 10,25,1.8,1.4,7.42388148
926 | 10,25,2,0.7,9.690962805
927 | 10,25,2,1.05,15.07411102
928 | 10,25,2,1.4,14.00846764
929 | 10,25,2.2,0.7,26.9895793
930 | 10,25,2.2,1.05,29.47288901
931 | 10,25,2.2,1.4,27.82813159
932 | 10,25,2.4,0.7,40.29677286
933 | 10,25,2.4,1.05,36.86394767
934 | 10,25,2.4,1.4,1.48491208
935 | 10,50,1.5,0.7,1.666033325
936 | 10,50,1.5,1.05,1.660144715
937 | 10,50,1.5,1.4,1.77223072
938 | 10,50,1.7,0.7,2.326829455
939 | 10,50,1.7,1.05,2.93331449
940 | 10,50,1.7,1.4,2.91813294
941 | 10,50,1.9,0.7,4.012846955
942 | 10,50,1.9,1.05,6.327796025
943 | 10,50,1.9,1.4,4.465437585
944 | 10,50,2.1,0.7,7.8446652
945 | 10,50,2.1,1.05,18.12854522
946 | 10,50,2.1,1.4,9.52971548
947 | 10,50,2.3,0.7,25.46623344
948 | 10,50,2.3,1.05,21.92936174
949 | 10,50,2.3,1.4,1.722331425
950 | 10,50,2.5,0.7,29.85900814
951 | 10,50,2.5,1.05,1.90834218
952 | 10,50,2.5,1.4,1.37573093
953 | 10,75,1.6,0.7,2.801982435
954 | 10,75,1.6,1.05,3.65339209
955 | 10,75,1.6,1.4,4.31146103
956 | 10,75,1.8,0.7,14.47847228
957 | 10,75,1.8,1.05,13.93280176
958 | 10,75,1.8,1.4,17.12357983
959 | 10,75,2,0.7,19.8059104
960 | 10,75,2,1.05,20.7149298
961 | 10,75,2,1.4,22.5752033
962 | 10,75,2.2,0.7,19.28107221
963 | 10,75,2.2,1.05,28.25179018
964 | 10,75,2.2,1.4,29.73223526
965 | 10,75,2.4,0.7,25.08406986
966 | 10,75,2.4,1.05,35.97420333
967 | 10,75,2.4,1.4,30.4392378
968 | 10,100,1.5,0.7,2.171097245
969 | 10,100,1.5,1.05,2.7742289
970 | 10,100,1.5,1.4,15.19785445
971 | 10,100,1.7,0.7,16.76034382
972 | 10,100,1.7,1.05,20.03022914
973 | 10,100,1.7,1.4,15.07295208
974 | 10,100,1.9,0.7,21.81975966
975 | 10,100,1.9,1.05,20.94379548
976 | 10,100,1.9,1.4,23.9046485
977 | 10,100,2.1,0.7,30.52418574
978 | 10,100,2.1,1.05,33.63718715
979 | 10,100,2.1,1.4,24.92273536
980 | 10,100,2.3,0.7,33.96098349
981 | 10,100,2.3,1.05,27.17386938
982 | 10,100,2.3,1.4,2.3203321
983 | 10,100,2.5,0.7,29.37255341
984 | 10,100,2.5,1.05,29.32923957
985 | 10,100,2.5,1.4,1.91606742
986 | 10,125,1.6,0.7,11.40262133
987 | 10,125,1.6,1.05,13.71296025
988 | 10,125,1.6,1.4,12.35039804
989 | 10,125,1.8,0.7,18.51946331
990 | 10,125,1.8,1.05,18.00891086
991 | 10,125,1.8,1.4,15.23491936
992 | 10,125,2,0.7,28.77338471
993 | 10,125,2,1.05,27.54808572
994 | 10,125,2,1.4,30.95862624
995 | 10,125,2.2,0.7,31.19857197
996 | 10,125,2.2,1.05,29.24878965
997 | 10,125,2.2,1.4,39.43245728
998 | 10,125,2.4,0.7,32.36148769
999 | 10,125,2.4,1.05,28.29307961
1000 | 10,125,2.4,1.4,25.60873108
1001 | 10,150,1.5,0.7,14.93511849
1002 | 10,150,1.5,1.05,12.49228115
1003 | 10,150,1.5,1.4,8.683937425
1004 | 10,150,1.7,0.7,19.55310702
1005 | 10,150,1.7,1.05,13.64818818
1006 | 10,150,1.7,1.4,13.60503087
1007 | 10,150,1.9,0.7,26.96239796
1008 | 10,150,1.9,1.05,30.80929899
1009 | 10,150,1.9,1.4,36.36662791
1010 | 10,150,2.1,0.7,26.78225577
1011 | 10,150,2.1,1.05,28.73448449
1012 | 10,150,2.1,1.4,22.4649818
1013 | 10,150,2.3,0.7,31.86511689
1014 | 10,150,2.3,1.05,28.47801169
1015 | 10,150,2.3,1.4,19.52456477
1016 | 10,150,2.5,0.7,29.73290818
1017 | 10,150,2.5,1.05,20.76501874
1018 | 10,150,2.5,1.4,17.71773594
1019 | 10,175,1.6,0.7,19.03429115
1020 | 10,175,1.6,1.05,18.94115364
1021 | 10,175,1.6,1.4,22.66918814
1022 | 10,175,1.8,0.7,24.55388137
1023 | 10,175,1.8,1.05,27.76586321
1024 | 10,175,1.8,1.4,24.01394741
1025 | 10,175,2,0.7,24.69360456
1026 | 10,175,2,1.05,25.18533206
1027 | 10,175,2,1.4,21.50663856
1028 | 10,175,2.2,0.7,26.37707803
1029 | 10,175,2.2,1.05,20.34512214
1030 | 10,175,2.2,1.4,17.46738938
1031 | 10,175,2.4,0.7,28.4968647
1032 | 10,175,2.4,1.05,23.20004315
1033 | 10,175,2.4,1.4,1.8880219
1034 | 10,200,1.5,0.7,25.09012174
1035 | 10,200,1.5,1.05,30.34804809
1036 | 10,200,1.5,1.4,29.60248144
1037 | 10,200,1.7,0.7,24.87442237
1038 | 10,200,1.7,1.05,24.17798621
1039 | 10,200,1.7,1.4,47.68481323
1040 | 10,200,1.9,0.7,26.53648934
1041 | 10,200,1.9,1.05,20.68534785
1042 | 10,200,1.9,1.4,20.81778208
1043 | 10,200,2.1,0.7,28.88119233
1044 | 10,200,2.1,1.05,19.93064439
1045 | 10,200,2.1,1.4,15.51392334
1046 | 10,200,2.3,0.7,31.56649514
1047 | 10,200,2.3,1.05,18.30526454
1048 | 10,200,2.3,1.4,11.63060955
1049 | 10,200,2.5,0.7,34.57712647
1050 | 10,200,2.5,1.05,23.0289247
1051 | 10,200,2.5,1.4,1.861004645
1052 | 12,0,1.5,0.7,2.217989635
1053 | 12,0,1.5,1.05,2.57400154
1054 | 12,0,1.5,1.4,2.22740741
1055 | 12,0,1.7,0.7,2.798259365
1056 | 12,0,1.7,1.05,1.95356353
1057 | 12,0,1.7,1.4,1.71814353
1058 | 12,0,1.9,0.7,6.996048425
1059 | 12,0,1.9,1.05,1.638178305
1060 | 12,0,1.9,1.4,1.502285125
1061 | 12,0,2.1,0.7,8.205917015
1062 | 12,0,2.1,1.05,1.403615165
1063 | 12,0,2.1,1.4,1.28903732
1064 | 12,0,2.3,0.7,1.80559346
1065 | 12,0,2.3,1.05,1.353783275
1066 | 12,0,2.3,1.4,1.158633795
1067 | 12,0,2.5,0.7,1.51381865
1068 | 12,0,2.5,1.05,1.28789654
1069 | 12,0,2.5,1.4,1.18235387
1070 | 12,25,1.6,0.7,2.81568514
1071 | 12,25,1.6,1.05,2.39229897
1072 | 12,25,1.6,1.4,2.66575283
1073 | 12,25,1.8,0.7,10.96776871
1074 | 12,25,1.8,1.05,4.806190225
1075 | 12,25,1.8,1.4,3.765164815
1076 | 12,25,2,0.7,8.574036075
1077 | 12,25,2,1.05,9.426500365
1078 | 12,25,2,1.4,1.81290671
1079 | 12,25,2.2,0.7,33.4553499
1080 | 12,25,2.2,1.05,1.462283295
1081 | 12,25,2.2,1.4,1.90966783
1082 | 12,25,2.4,0.7,32.15487672
1083 | 12,25,2.4,1.05,1.6005787
1084 | 12,25,2.4,1.4,1.22288414
1085 | 12,50,1.5,0.7,3.05250575
1086 | 12,50,1.5,1.05,1.391692875
1087 | 12,50,1.5,1.4,1.58716356
1088 | 12,50,1.7,0.7,3.26030678
1089 | 12,50,1.7,1.05,6.01849178
1090 | 12,50,1.7,1.4,3.17396336
1091 | 12,50,1.9,0.7,5.90276535
1092 | 12,50,1.9,1.05,5.72380911
1093 | 12,50,1.9,1.4,15.62069471
1094 | 12,50,2.1,0.7,9.29026404
1095 | 12,50,2.1,1.05,10.60761289
1096 | 12,50,2.1,1.4,22.63830231
1097 | 12,50,2.3,0.7,17.4591797
1098 | 12,50,2.3,1.05,18.90567656
1099 | 12,50,2.3,1.4,1.74107211
1100 | 12,50,2.5,0.7,33.0137716
1101 | 12,50,2.5,1.05,2.0703271
1102 | 12,50,2.5,1.4,1.302218465
1103 | 12,75,1.6,0.7,2.43280957
1104 | 12,75,1.6,1.05,15.31898925
1105 | 12,75,1.6,1.4,2.38991713
1106 | 12,75,1.8,0.7,16.34781985
1107 | 12,75,1.8,1.05,5.086568105
1108 | 12,75,1.8,1.4,17.76506143
1109 | 12,75,2,0.7,17.19897909
1110 | 12,75,2,1.05,32.17184556
1111 | 12,75,2,1.4,17.65870961
1112 | 12,75,2.2,0.7,19.73401949
1113 | 12,75,2.2,1.05,27.71886967
1114 | 12,75,2.2,1.4,2.75804036
1115 | 12,75,2.4,0.7,45.48213759
1116 | 12,75,2.4,1.05,40.6079649
1117 | 12,75,2.4,1.4,1.60176982
1118 | 12,100,1.5,0.7,8.892414655
1119 | 12,100,1.5,1.05,8.88408156
1120 | 12,100,1.5,1.4,9.853825685
1121 | 12,100,1.7,0.7,11.29506672
1122 | 12,100,1.7,1.05,13.35318585
1123 | 12,100,1.7,1.4,12.72629473
1124 | 12,100,1.9,0.7,18.6692565
1125 | 12,100,1.9,1.05,20.39089823
1126 | 12,100,1.9,1.4,21.70844968
1127 | 12,100,2.1,0.7,26.18295304
1128 | 12,100,2.1,1.05,29.36967361
1129 | 12,100,2.1,1.4,39.31829259
1130 | 12,100,2.3,0.7,34.90977982
1131 | 12,100,2.3,1.05,40.4790107
1132 | 12,100,2.3,1.4,1.843159445
1133 | 12,100,2.5,0.7,37.66667776
1134 | 12,100,2.5,1.05,37.06981261
1135 | 12,100,2.5,1.4,1.70377423
1136 | 12,125,1.6,0.7,13.71977862
1137 | 12,125,1.6,1.05,14.92099437
1138 | 12,125,1.6,1.4,12.07112258
1139 | 12,125,1.8,0.7,29.34213915
1140 | 12,125,1.8,1.05,22.95483198
1141 | 12,125,1.8,1.4,21.35987436
1142 | 12,125,2,0.7,33.82786567
1143 | 12,125,2,1.05,37.20733246
1144 | 12,125,2,1.4,38.36510421
1145 | 12,125,2.2,0.7,32.78833207
1146 | 12,125,2.2,1.05,33.16625308
1147 | 12,125,2.2,1.4,23.75170673
1148 | 12,125,2.4,0.7,35.76095873
1149 | 12,125,2.4,1.05,24.95509839
1150 | 12,125,2.4,1.4,2.37460567
1151 | 12,150,1.5,0.7,20.43303825
1152 | 12,150,1.5,1.05,18.19801116
1153 | 12,150,1.5,1.4,16.36990424
1154 | 12,150,1.7,0.7,27.56463097
1155 | 12,150,1.7,1.05,28.86626493
1156 | 12,150,1.7,1.4,21.9534511
1157 | 12,150,1.9,0.7,27.43718607
1158 | 12,150,1.9,1.05,43.81693055
1159 | 12,150,1.9,1.4,41.89631514
1160 | 12,150,2.1,0.7,30.01685387
1161 | 12,150,2.1,1.05,26.98313765
1162 | 12,150,2.1,1.4,15.82714667
1163 | 12,150,2.3,0.7,30.81267184
1164 | 12,150,2.3,1.05,20.01687814
1165 | 12,150,2.3,1.4,2.280350995
1166 | 12,150,2.5,0.7,33.55520108
1167 | 12,150,2.5,1.05,15.7435385
1168 | 12,150,2.5,1.4,1.573343915
1169 | 12,175,1.6,0.7,24.70855653
1170 | 12,175,1.6,1.05,28.30756993
1171 | 12,175,1.6,1.4,38.70371517
1172 | 12,175,1.8,0.7,30.44194607
1173 | 12,175,1.8,1.05,46.28138168
1174 | 12,175,1.8,1.4,46.04167205
1175 | 12,175,2,0.7,30.24429189
1176 | 12,175,2,1.05,20.9950382
1177 | 12,175,2,1.4,14.31680707
1178 | 12,175,2.2,0.7,31.34408471
1179 | 12,175,2.2,1.05,17.33874133
1180 | 12,175,2.2,1.4,1.629345795
1181 | 12,175,2.4,0.7,36.03900898
1182 | 12,175,2.4,1.05,10.59665846
1183 | 12,175,2.4,1.4,2.74636211
1184 | 12,200,1.5,0.7,32.69642057
1185 | 12,200,1.5,1.05,32.67447181
1186 | 12,200,1.5,1.4,39.71127057
1187 | 12,200,1.7,0.7,30.81942151
1188 | 12,200,1.7,1.05,20.98843555
1189 | 12,200,1.7,1.4,51.54260273
1190 | 12,200,1.9,0.7,29.9102573
1191 | 12,200,1.9,1.05,17.01436909
1192 | 12,200,1.9,1.4,8.28854607
1193 | 12,200,2.1,0.7,33.25790557
1194 | 12,200,2.1,1.05,11.36837164
1195 | 12,200,2.1,1.4,1.570382565
1196 | 12,200,2.3,0.7,34.8912318
1197 | 12,200,2.3,1.05,11.63569601
1198 | 12,200,2.3,1.4,1.216757475
1199 | 12,200,2.5,0.7,36.73641685
1200 | 12,200,2.5,1.05,1.52683429
1201 | 12,200,2.5,1.4,1.887555225
1202 | 6,0,1.5,0.7,0.984718805
1203 | 6,0,1.5,1.05,1.204278305
1204 | 6,0,1.5,1.4,1.66416987
1205 | 6,0,1.7,0.7,3.09425671
1206 | 6,0,1.7,1.05,2.933434265
1207 | 6,0,1.7,1.4,2.328659075
1208 | 6,0,1.9,0.7,3.576849065
1209 | 6,0,1.9,1.05,2.966058095
1210 | 6,0,1.9,1.4,6.652394795
1211 | 6,0,2.1,0.7,17.42102897
1212 | 6,0,2.1,1.05,6.771103375
1213 | 6,0,2.1,1.4,7.735849185
1214 | 6,0,2.3,0.7,5.017847775
1215 | 6,0,2.3,1.05,32.23955734
1216 | 6,0,2.3,1.4,2.02903116
1217 | 6,0,2.5,0.7,30.26843881
1218 | 6,0,2.5,1.05,11.31234055
1219 | 6,0,2.5,1.4,36.25217858
1220 | 6,25,1.6,0.7,2.09663152
1221 | 6,25,1.6,1.05,1.83385039
1222 | 6,25,1.6,1.4,1.696675845
1223 | 6,25,1.8,0.7,1.504651125
1224 | 6,25,1.8,1.05,4.24364418
1225 | 6,25,1.8,1.4,2.45040998
1226 | 6,25,2,0.7,3.88626849
1227 | 6,25,2,1.05,2.940354025
1228 | 6,25,2,1.4,6.44899864
1229 | 6,25,2.2,0.7,12.36841071
1230 | 6,25,2.2,1.05,7.9235667
1231 | 6,25,2.2,1.4,4.150184395
1232 | 6,25,2.4,0.7,18.73687729
1233 | 6,25,2.4,1.05,11.42302692
1234 | 6,25,2.4,1.4,2.25998876
1235 | 6,50,1.5,0.7,0.503617945
1236 | 6,50,1.5,1.05,0.77442139
1237 | 6,50,1.5,1.4,0.615132955
1238 | 6,50,1.7,0.7,0.857033345
1239 | 6,50,1.7,1.05,1.78397391
1240 | 6,50,1.7,1.4,1.47492584
1241 | 6,50,1.9,0.7,1.31915937
1242 | 6,50,1.9,1.05,1.898873585
1243 | 6,50,1.9,1.4,2.720035715
1244 | 6,50,2.1,0.7,3.17734853
1245 | 6,50,2.1,1.05,2.592669905
1246 | 6,50,2.1,1.4,3.74722125
1247 | 6,50,2.3,0.7,6.059289655
1248 | 6,50,2.3,1.05,8.212267235
1249 | 6,50,2.3,1.4,5.749600085
1250 | 6,50,2.5,0.7,6.19458219
1251 | 6,50,2.5,1.05,24.14959018
1252 | 6,50,2.5,1.4,21.12649344
1253 | 6,75,1.6,0.7,0.421248255
1254 | 6,75,1.6,1.05,0.74635778
1255 | 6,75,1.6,1.4,1.690187665
1256 | 6,75,1.8,0.7,0.874340925
1257 | 6,75,1.8,1.05,0.737498875
1258 | 6,75,1.8,1.4,0.764132035
1259 | 6,75,2,0.7,1.16090386
1260 | 6,75,2,1.05,2.33812806
1261 | 6,75,2,1.4,4.505327385
1262 | 6,75,2.2,0.7,3.738256045
1263 | 6,75,2.2,1.05,4.35728777
1264 | 6,75,2.2,1.4,11.99178691
1265 | 6,75,2.4,0.7,3.55753965
1266 | 6,75,2.4,1.05,19.44473814
1267 | 6,75,2.4,1.4,18.85629444
1268 | 6,100,1.5,0.7,1.297602915
1269 | 6,100,1.5,1.05,1.766962485
1270 | 6,100,1.5,1.4,1.55379982
1271 | 6,100,1.7,0.7,2.458336485
1272 | 6,100,1.7,1.05,2.608886545
1273 | 6,100,1.7,1.4,9.27425569
1274 | 6,100,1.9,0.7,1.230991015
1275 | 6,100,1.9,1.05,12.41231281
1276 | 6,100,1.9,1.4,13.77882286
1277 | 6,100,2.1,0.7,13.02767408
1278 | 6,100,2.1,1.05,14.89652844
1279 | 6,100,2.1,1.4,17.55796323
1280 | 6,100,2.3,0.7,17.90994853
1281 | 6,100,2.3,1.05,14.43700693
1282 | 6,100,2.3,1.4,16.11451264
1283 | 6,100,2.5,0.7,22.08445189
1284 | 6,100,2.5,1.05,20.04377174
1285 | 6,100,2.5,1.4,19.59464177
1286 | 6,125,1.6,0.7,1.085871695
1287 | 6,125,1.6,1.05,13.05591221
1288 | 6,125,1.6,1.4,13.9894676
1289 | 6,125,1.8,0.7,0.71230218
1290 | 6,125,1.8,1.05,13.82702867
1291 | 6,125,1.8,1.4,13.68365648
1292 | 6,125,2,0.7,1.39624755
1293 | 6,125,2,1.05,18.96923443
1294 | 6,125,2,1.4,16.97832826
1295 | 6,125,2.2,0.7,20.71585045
1296 | 6,125,2.2,1.05,18.72782356
1297 | 6,125,2.2,1.4,19.21279802
1298 | 6,125,2.4,0.7,21.46815411
1299 | 6,125,2.4,1.05,17.71837793
1300 | 6,125,2.4,1.4,20.38441493
1301 | 6,150,1.5,0.7,0.720294875
1302 | 6,150,1.5,1.05,0.91101892
1303 | 6,150,1.5,1.4,18.74742944
1304 | 6,150,1.7,0.7,0.88429715
1305 | 6,150,1.7,1.05,15.54963513
1306 | 6,150,1.7,1.4,19.01081895
1307 | 6,150,1.9,0.7,0.862449755
1308 | 6,150,1.9,1.05,15.59957872
1309 | 6,150,1.9,1.4,16.0481957
1310 | 6,150,2.1,0.7,14.10560919
1311 | 6,150,2.1,1.05,16.99550773
1312 | 6,150,2.1,1.4,16.09093879
1313 | 6,150,2.3,0.7,17.37752817
1314 | 6,150,2.3,1.05,17.35029167
1315 | 6,150,2.3,1.4,15.50390773
1316 | 6,150,2.5,0.7,17.94230795
1317 | 6,150,2.5,1.05,20.4220484
1318 | 6,150,2.5,1.4,16.39896448
1319 | 6,175,1.6,0.7,15.46033752
1320 | 6,175,1.6,1.05,19.34627319
1321 | 6,175,1.6,1.4,18.26674629
1322 | 6,175,1.8,0.7,15.6975907
1323 | 6,175,1.8,1.05,17.38876143
1324 | 6,175,1.8,1.4,28.19387442
1325 | 6,175,2,0.7,18.57465277
1326 | 6,175,2,1.05,17.13837747
1327 | 6,175,2,1.4,20.57800408
1328 | 6,175,2.2,0.7,19.21437316
1329 | 6,175,2.2,1.05,17.07134424
1330 | 6,175,2.2,1.4,16.37818041
1331 | 6,175,2.4,0.7,22.75940737
1332 | 6,175,2.4,1.05,17.88724908
1333 | 6,175,2.4,1.4,18.2698049
1334 | 6,200,1.5,0.7,15.7409393
1335 | 6,200,1.5,1.05,15.63951464
1336 | 6,200,1.5,1.4,20.37627403
1337 | 6,200,1.7,0.7,16.27981063
1338 | 6,200,1.7,1.05,26.36648259
1339 | 6,200,1.7,1.4,22.27744967
1340 | 6,200,1.9,0.7,13.79168408
1341 | 6,200,1.9,1.05,19.49920548
1342 | 6,200,1.9,1.4,22.75615377
1343 | 6,200,2.1,0.7,20.23517058
1344 | 6,200,2.1,1.05,20.51055968
1345 | 6,200,2.1,1.4,7.20144772
1346 | 6,200,2.3,0.7,23.40856053
1347 | 6,200,2.3,1.05,21.32481878
1348 | 6,200,2.3,1.4,24.85532039
1349 | 6,200,2.5,0.7,25.13038982
1350 | 6,200,2.5,1.05,22.80520974
1351 | 6,200,2.5,1.4,18.87405486
1352 | 8,0,1.5,0.7,1.79906466
1353 | 8,0,1.5,1.05,1.584290705
1354 | 8,0,1.5,1.4,1.660902335
1355 | 8,0,1.7,0.7,3.66459729
1356 | 8,0,1.7,1.05,2.09191768
1357 | 8,0,1.7,1.4,4.383226565
1358 | 8,0,1.9,0.7,5.202961875
1359 | 8,0,1.9,1.05,7.4669773
1360 | 8,0,1.9,1.4,4.02889168
1361 | 8,0,2.1,0.7,20.42346871
1362 | 8,0,2.1,1.05,19.61008402
1363 | 8,0,2.1,1.4,1.70359334
1364 | 8,0,2.3,0.7,4.988631565
1365 | 8,0,2.3,1.05,1.61534906
1366 | 8,0,2.3,1.4,1.9665795
1367 | 8,0,2.5,0.7,35.57965442
1368 | 8,0,2.5,1.05,2.239210555
1369 | 8,0,2.5,1.4,1.310974495
1370 | 8,25,1.6,0.7,3.92030164
1371 | 8,25,1.6,1.05,6.31753358
1372 | 8,25,1.6,1.4,2.70571583
1373 | 8,25,1.8,0.7,9.68552392
1374 | 8,25,1.8,1.05,6.70508105
1375 | 8,25,1.8,1.4,10.12589473
1376 | 8,25,2,0.7,6.35420248
1377 | 8,25,2,1.05,13.3530951
1378 | 8,25,2,1.4,10.95345267
1379 | 8,25,2.2,0.7,10.01264335
1380 | 8,25,2.2,1.05,29.72036515
1381 | 8,25,2.2,1.4,18.59106074
1382 | 8,25,2.4,0.7,13.43877417
1383 | 8,25,2.4,1.05,36.1300039
1384 | 8,25,2.4,1.4,34.9592874
1385 | 8,50,1.5,0.7,0.580645385
1386 | 8,50,1.5,1.05,0.2835024
1387 | 8,50,1.5,1.4,0.610162855
1388 | 8,50,1.7,0.7,1.068200195
1389 | 8,50,1.7,1.05,0.530468075
1390 | 8,50,1.7,1.4,2.57808866
1391 | 8,50,1.9,0.7,1.7688226
1392 | 8,50,1.9,1.05,0.99036637
1393 | 8,50,1.9,1.4,4.16800253
1394 | 8,50,2.1,0.7,2.552720965
1395 | 8,50,2.1,1.05,6.37566581
1396 | 8,50,2.1,1.4,4.02354012
1397 | 8,50,2.3,0.7,3.08157811
1398 | 8,50,2.3,1.05,19.8553271
1399 | 8,50,2.3,1.4,10.78723802
1400 | 8,50,2.5,0.7,4.478213675
1401 | 8,50,2.5,1.05,21.22377998
1402 | 8,50,2.5,1.4,2.090443795
1403 | 8,75,1.6,0.7,0.997970035
1404 | 8,75,1.6,1.05,0.5029841
1405 | 8,75,1.6,1.4,1.202600565
1406 | 8,75,1.8,0.7,1.923480915
1407 | 8,75,1.8,1.05,13.76810579
1408 | 8,75,1.8,1.4,0.98540034
1409 | 8,75,2,0.7,17.16436162
1410 | 8,75,2,1.05,1.427589615
1411 | 8,75,2,1.4,4.00071953
1412 | 8,75,2.2,0.7,17.61099159
1413 | 8,75,2.2,1.05,4.977815625
1414 | 8,75,2.2,1.4,28.39669297
1415 | 8,75,2.4,0.7,4.023762455
1416 | 8,75,2.4,1.05,21.54221829
1417 | 8,75,2.4,1.4,16.98524062
1418 | 8,100,1.5,0.7,1.752622255
1419 | 8,100,1.5,1.05,0.98552759
1420 | 8,100,1.5,1.4,0.381889405
1421 | 8,100,1.7,0.7,1.401898735
1422 | 8,100,1.7,1.05,16.41218992
1423 | 8,100,1.7,1.4,0.58207294
1424 | 8,100,1.9,0.7,15.761628
1425 | 8,100,1.9,1.05,21.01485876
1426 | 8,100,1.9,1.4,1.09751429
1427 | 8,100,2.1,0.7,18.09078329
1428 | 8,100,2.1,1.05,27.92352784
1429 | 8,100,2.1,1.4,2.174995185
1430 | 8,100,2.3,0.7,20.89136195
1431 | 8,100,2.3,1.05,30.80100966
1432 | 8,100,2.3,1.4,15.90063296
1433 | 8,100,2.5,0.7,3.691875515
1434 | 8,100,2.5,1.05,29.25635883
1435 | 8,100,2.5,1.4,37.2935421
1436 | 8,125,1.6,0.7,0.555669225
1437 | 8,125,1.6,1.05,1.54022802
1438 | 8,125,1.6,1.4,1.772851885
1439 | 8,125,1.8,0.7,17.40060132
1440 | 8,125,1.8,1.05,1.15879333
1441 | 8,125,1.8,1.4,1.74030975
1442 | 8,125,2,0.7,1.26034766
1443 | 8,125,2,1.05,16.96168832
1444 | 8,125,2,1.4,17.09009911
1445 | 8,125,2.2,0.7,21.53762106
1446 | 8,125,2.2,1.05,23.23672095
1447 | 8,125,2.2,1.4,17.31953869
1448 | 8,125,2.4,0.7,22.84953928
1449 | 8,125,2.4,1.05,24.81064122
1450 | 8,125,2.4,1.4,24.46094024
1451 | 8,150,1.5,0.7,15.37844109
1452 | 8,150,1.5,1.05,16.36829634
1453 | 8,150,1.5,1.4,1.90937273
1454 | 8,150,1.7,0.7,13.29449274
1455 | 8,150,1.7,1.05,18.11751563
1456 | 8,150,1.7,1.4,24.27369623
1457 | 8,150,1.9,0.7,21.12950211
1458 | 8,150,1.9,1.05,2.87239936
1459 | 8,150,1.9,1.4,18.21817377
1460 | 8,150,2.1,0.7,23.11471414
1461 | 8,150,2.1,1.05,25.97304787
1462 | 8,150,2.1,1.4,11.0882812
1463 | 8,150,2.3,0.7,25.69999832
1464 | 8,150,2.3,1.05,24.84117849
1465 | 8,150,2.3,1.4,21.82711212
1466 | 8,150,2.5,0.7,20.32642785
1467 | 8,150,2.5,1.05,28.80428434
1468 | 8,150,2.5,1.4,23.14973202
1469 | 8,175,1.6,0.7,9.44977179
1470 | 8,175,1.6,1.05,13.35499867
1471 | 8,175,1.6,1.4,12.54898462
1472 | 8,175,1.8,0.7,9.934021545
1473 | 8,175,1.8,1.05,22.54772138
1474 | 8,175,1.8,1.4,19.39221531
1475 | 8,175,2,0.7,19.87380413
1476 | 8,175,2,1.05,30.09047112
1477 | 8,175,2,1.4,35.5636307
1478 | 8,175,2.2,0.7,21.70413423
1479 | 8,175,2.2,1.05,26.32360299
1480 | 8,175,2.2,1.4,21.14495698
1481 | 8,175,2.4,0.7,25.05996553
1482 | 8,175,2.4,1.05,26.33490468
1483 | 8,175,2.4,1.4,26.17297952
1484 | 8,200,1.5,0.7,19.75280611
1485 | 8,200,1.5,1.05,13.41499684
1486 | 8,200,1.5,1.4,15.70872909
1487 | 8,200,1.7,0.7,21.33334446
1488 | 8,200,1.7,1.05,13.53689275
1489 | 8,200,1.7,1.4,12.99023395
1490 | 8,200,1.9,0.7,21.89667604
1491 | 8,200,1.9,1.05,22.53879111
1492 | 8,200,1.9,1.4,21.90320458
1493 | 8,200,2.1,0.7,25.87711604
1494 | 8,200,2.1,1.05,22.34457401
1495 | 8,200,2.1,1.4,20.82277737
1496 | 8,200,2.3,0.7,25.5377981
1497 | 8,200,2.3,1.05,23.16218558
1498 | 8,200,2.3,1.4,21.21698051
1499 | 8,200,2.5,0.7,31.19337179
1500 | 8,200,2.5,1.05,23.35022337
1501 | 8,200,2.5,1.4,22.21883225
1502 | 10,0,1.5,0.7,2.484941625
1503 | 10,0,1.5,1.05,1.995446115
1504 | 10,0,1.5,1.4,1.59246101
1505 | 10,0,1.7,0.7,3.93400999
1506 | 10,0,1.7,1.05,4.202121115
1507 | 10,0,1.7,1.4,2.09316675
1508 | 10,0,1.9,0.7,6.404926035
1509 | 10,0,1.9,1.05,2.301721065
1510 | 10,0,1.9,1.4,2.207163845
1511 | 10,0,2.1,0.7,19.13454141
1512 | 10,0,2.1,1.05,1.59126824
1513 | 10,0,2.1,1.4,1.47175539
1514 | 10,0,2.3,0.7,8.552942635
1515 | 10,0,2.3,1.05,1.48189301
1516 | 10,0,2.3,1.4,1.22052988
1517 | 10,0,2.5,0.7,2.17018344
1518 | 10,0,2.5,1.05,1.78869854
1519 | 10,0,2.5,1.4,1.23106499
1520 | 10,25,1.6,0.7,3.236290435
1521 | 10,25,1.6,1.05,4.48764365
1522 | 10,25,1.6,1.4,5.52352907
1523 | 10,25,1.8,0.7,4.301986835
1524 | 10,25,1.8,1.05,7.23375999
1525 | 10,25,1.8,1.4,6.765780615
1526 | 10,25,2,0.7,9.176301185
1527 | 10,25,2,1.05,11.53404773
1528 | 10,25,2,1.4,8.83892428
1529 | 10,25,2.2,0.7,23.76283092
1530 | 10,25,2.2,1.05,8.484391765
1531 | 10,25,2.2,1.4,1.75988664
1532 | 10,25,2.4,0.7,31.26961212
1533 | 10,25,2.4,1.05,32.82540015
1534 | 10,25,2.4,1.4,12.38719482
1535 | 10,50,1.5,0.7,0.39829036
1536 | 10,50,1.5,1.05,1.309078795
1537 | 10,50,1.5,1.4,1.787142455
1538 | 10,50,1.7,0.7,2.04446714
1539 | 10,50,1.7,1.05,0.80325155
1540 | 10,50,1.7,1.4,2.6504243
1541 | 10,50,1.9,0.7,4.3821171
1542 | 10,50,1.9,1.05,1.884693735
1543 | 10,50,1.9,1.4,5.6230742
1544 | 10,50,2.1,0.7,8.18131518
1545 | 10,50,2.1,1.05,18.25382211
1546 | 10,50,2.1,1.4,8.43650171
1547 | 10,50,2.3,0.7,18.80038505
1548 | 10,50,2.3,1.05,23.0185344
1549 | 10,50,2.3,1.4,18.32694408
1550 | 10,50,2.5,0.7,17.63498946
1551 | 10,50,2.5,1.05,8.159024855
1552 | 10,50,2.5,1.4,1.883642545
1553 | 10,75,1.6,0.7,2.55179376
1554 | 10,75,1.6,1.05,3.982026465
1555 | 10,75,1.6,1.4,0.91748268
1556 | 10,75,1.8,0.7,3.71797517
1557 | 10,75,1.8,1.05,13.02729787
1558 | 10,75,1.8,1.4,15.57363845
1559 | 10,75,2,0.7,7.923393555
1560 | 10,75,2,1.05,19.17797565
1561 | 10,75,2,1.4,21.90600339
1562 | 10,75,2.2,0.7,22.52605105
1563 | 10,75,2.2,1.05,25.9285469
1564 | 10,75,2.2,1.4,30.17208591
1565 | 10,75,2.4,0.7,18.19590629
1566 | 10,75,2.4,1.05,31.50348043
1567 | 10,75,2.4,1.4,33.23522319
1568 | 10,100,1.5,0.7,2.03703113
1569 | 10,100,1.5,1.05,16.70525583
1570 | 10,100,1.5,1.4,14.96158266
1571 | 10,100,1.7,0.7,16.76906101
1572 | 10,100,1.7,1.05,19.32535098
1573 | 10,100,1.7,1.4,18.86877373
1574 | 10,100,1.9,0.7,20.10515772
1575 | 10,100,1.9,1.05,22.11283985
1576 | 10,100,1.9,1.4,22.68859384
1577 | 10,100,2.1,0.7,23.38482757
1578 | 10,100,2.1,1.05,26.33086888
1579 | 10,100,2.1,1.4,23.24464829
1580 | 10,100,2.3,0.7,27.22173275
1581 | 10,100,2.3,1.05,31.89632291
1582 | 10,100,2.3,1.4,2.769482045
1583 | 10,100,2.5,0.7,32.82694581
1584 | 10,100,2.5,1.05,29.88330736
1585 | 10,100,2.5,1.4,2.558668295
1586 | 10,125,1.6,0.7,11.46484409
1587 | 10,125,1.6,1.05,14.31538651
1588 | 10,125,1.6,1.4,12.51399667
1589 | 10,125,1.8,0.7,18.52669987
1590 | 10,125,1.8,1.05,18.84357803
1591 | 10,125,1.8,1.4,16.15506253
1592 | 10,125,2,0.7,22.17191514
1593 | 10,125,2,1.05,25.99219371
1594 | 10,125,2,1.4,28.12012429
1595 | 10,125,2.2,0.7,31.32995625
1596 | 10,125,2.2,1.05,31.12102655
1597 | 10,125,2.2,1.4,36.94337918
1598 | 10,125,2.4,0.7,32.85563771
1599 | 10,125,2.4,1.05,27.64733324
1600 | 10,125,2.4,1.4,24.46426331
1601 | 10,150,1.5,0.7,17.50682348
1602 | 10,150,1.5,1.05,11.86753321
1603 | 10,150,1.5,1.4,9.39198196
1604 | 10,150,1.7,0.7,22.64905122
1605 | 10,150,1.7,1.05,27.76133619
1606 | 10,150,1.7,1.4,30.75323761
1607 | 10,150,1.9,0.7,25.18258923
1608 | 10,150,1.9,1.05,32.25932257
1609 | 10,150,1.9,1.4,36.26241233
1610 | 10,150,2.1,0.7,26.25669384
1611 | 10,150,2.1,1.05,27.06739447
1612 | 10,150,2.1,1.4,21.90865338
1613 | 10,150,2.3,0.7,31.35316374
1614 | 10,150,2.3,1.05,26.99388042
1615 | 10,150,2.3,1.4,18.95177967
1616 | 10,150,2.5,0.7,34.61506059
1617 | 10,150,2.5,1.05,25.0872446
1618 | 10,150,2.5,1.4,2.04505332
1619 | 10,175,1.6,0.7,23.03021044
1620 | 10,175,1.6,1.05,26.78930236
1621 | 10,175,1.6,1.4,24.08115478
1622 | 10,175,1.8,0.7,24.72800972
1623 | 10,175,1.8,1.05,32.54962845
1624 | 10,175,1.8,1.4,41.38951944
1625 | 10,175,2,0.7,24.41042384
1626 | 10,175,2,1.05,24.18133668
1627 | 10,175,2,1.4,34.30511905
1628 | 10,175,2.2,0.7,26.09665248
1629 | 10,175,2.2,1.05,22.71365938
1630 | 10,175,2.2,1.4,24.91481805
1631 | 10,175,2.4,0.7,28.60261435
1632 | 10,175,2.4,1.05,21.88111405
1633 | 10,175,2.4,1.4,16.87573066
1634 | 10,200,1.5,0.7,24.87143932
1635 | 10,200,1.5,1.05,23.6502444
1636 | 10,200,1.5,1.4,30.34241852
1637 | 10,200,1.7,0.7,22.84413083
1638 | 10,200,1.7,1.05,25.15946955
1639 | 10,200,1.7,1.4,23.55279285
1640 | 10,200,1.9,0.7,24.82666369
1641 | 10,200,1.9,1.05,20.82860256
1642 | 10,200,1.9,1.4,19.70498165
1643 | 10,200,2.1,0.7,27.94340729
1644 | 10,200,2.1,1.05,19.37243051
1645 | 10,200,2.1,1.4,16.46835299
1646 | 10,200,2.3,0.7,32.16715351
1647 | 10,200,2.3,1.05,18.66841468
1648 | 10,200,2.3,1.4,19.93479405
1649 | 10,200,2.5,0.7,36.31708483
1650 | 10,200,2.5,1.05,2.874153325
1651 | 10,200,2.5,1.4,2.48323922
1652 | 12,0,1.5,0.7,2.817184975
1653 | 12,0,1.5,1.05,2.8202735
1654 | 12,0,1.5,1.4,1.94721929
1655 | 12,0,1.7,0.7,2.837285865
1656 | 12,0,1.7,1.05,1.65020764
1657 | 12,0,1.7,1.4,1.79740427
1658 | 12,0,1.9,0.7,7.80562971
1659 | 12,0,1.9,1.05,1.83451679
1660 | 12,0,1.9,1.4,1.573231545
1661 | 12,0,2.1,0.7,2.102801175
1662 | 12,0,2.1,1.05,8.535636925
1663 | 12,0,2.1,1.4,1.41095575
1664 | 12,0,2.3,0.7,21.10118362
1665 | 12,0,2.3,1.05,1.371921215
1666 | 12,0,2.3,1.4,1.305498525
1667 | 12,0,2.5,0.7,32.83765951
1668 | 12,0,2.5,1.05,1.307638845
1669 | 12,0,2.5,1.4,1.138089705
1670 | 12,25,1.6,0.7,1.904929865
1671 | 12,25,1.6,1.05,3.52213153
1672 | 12,25,1.6,1.4,1.38790277
1673 | 12,25,1.8,0.7,5.997701515
1674 | 12,25,1.8,1.05,4.88818575
1675 | 12,25,1.8,1.4,7.573931395
1676 | 12,25,2,0.7,12.52197107
1677 | 12,25,2,1.05,11.66128167
1678 | 12,25,2,1.4,13.06205261
1679 | 12,25,2.2,0.7,37.68724288
1680 | 12,25,2.2,1.05,33.68153487
1681 | 12,25,2.2,1.4,1.891449785
1682 | 12,25,2.4,0.7,50.83130521
1683 | 12,25,2.4,1.05,10.10799107
1684 | 12,25,2.4,1.4,1.300664185
1685 | 12,50,1.5,0.7,1.16333293
1686 | 12,50,1.5,1.05,3.115892155
1687 | 12,50,1.5,1.4,2.65670081
1688 | 12,50,1.7,0.7,4.555843875
1689 | 12,50,1.7,1.05,2.579712
1690 | 12,50,1.7,1.4,4.76844995
1691 | 12,50,1.9,0.7,9.916262775
1692 | 12,50,1.9,1.05,16.58984522
1693 | 12,50,1.9,1.4,8.952703645
1694 | 12,50,2.1,0.7,8.43442134
1695 | 12,50,2.1,1.05,23.66746768
1696 | 12,50,2.1,1.4,1.57108567
1697 | 12,50,2.3,0.7,11.1407421
1698 | 12,50,2.3,1.05,37.08072058
1699 | 12,50,2.3,1.4,25.79438494
1700 | 12,50,2.5,0.7,35.87166052
1701 | 12,50,2.5,1.05,1.44766338
1702 | 12,50,2.5,1.4,1.492311205
1703 | 12,75,1.6,0.7,2.117595435
1704 | 12,75,1.6,1.05,3.363703575
1705 | 12,75,1.6,1.4,14.92657584
1706 | 12,75,1.8,0.7,4.903521785
1707 | 12,75,1.8,1.05,16.55792988
1708 | 12,75,1.8,1.4,14.25681865
1709 | 12,75,2,0.7,24.55175934
1710 | 12,75,2,1.05,25.44355022
1711 | 12,75,2,1.4,15.99058804
1712 | 12,75,2.2,0.7,33.17701842
1713 | 12,75,2.2,1.05,36.28859878
1714 | 12,75,2.2,1.4,1.790871605
1715 | 12,75,2.4,0.7,41.35446994
1716 | 12,75,2.4,1.05,47.30570731
1717 | 12,75,2.4,1.4,2.155238865
1718 | 12,100,1.5,0.7,9.29353788
1719 | 12,100,1.5,1.05,12.11958914
1720 | 12,100,1.5,1.4,11.8038592
1721 | 12,100,1.7,0.7,19.0194704
1722 | 12,100,1.7,1.05,17.53314818
1723 | 12,100,1.7,1.4,14.66705747
1724 | 12,100,1.9,0.7,18.74793491
1725 | 12,100,1.9,1.05,27.97689242
1726 | 12,100,1.9,1.4,26.2044042
1727 | 12,100,2.1,0.7,37.41047273
1728 | 12,100,2.1,1.05,37.35179195
1729 | 12,100,2.1,1.4,24.43435657
1730 | 12,100,2.3,0.7,38.84055602
1731 | 12,100,2.3,1.05,39.8601446
1732 | 12,100,2.3,1.4,34.17278767
1733 | 12,100,2.5,0.7,39.42386505
1734 | 12,100,2.5,1.05,30.72339312
1735 | 12,100,2.5,1.4,1.581840945
1736 | 12,125,1.6,0.7,11.04265774
1737 | 12,125,1.6,1.05,30.20966198
1738 | 12,125,1.6,1.4,12.90936504
1739 | 12,125,1.8,0.7,19.76653614
1740 | 12,125,1.8,1.05,32.28743284
1741 | 12,125,1.8,1.4,31.66129959
1742 | 12,125,2,0.7,31.47662584
1743 | 12,125,2,1.05,36.25284256
1744 | 12,125,2,1.4,44.04477933
1745 | 12,125,2.2,0.7,32.43359398
1746 | 12,125,2.2,1.05,29.59553908
1747 | 12,125,2.2,1.4,23.16476721
1748 | 12,125,2.4,0.7,37.54951294
1749 | 12,125,2.4,1.05,1.878474045
1750 | 12,125,2.4,1.4,22.13553289
1751 | 12,150,1.5,0.7,18.33169252
1752 | 12,150,1.5,1.05,21.98816719
1753 | 12,150,1.5,1.4,17.58409244
1754 | 12,150,1.7,0.7,28.19673461
1755 | 12,150,1.7,1.05,32.67804959
1756 | 12,150,1.7,1.4,25.15656951
1757 | 12,150,1.9,0.7,27.97876193
1758 | 12,150,1.9,1.05,27.66069837
1759 | 12,150,1.9,1.4,48.98711188
1760 | 12,150,2.1,0.7,28.81046509
1761 | 12,150,2.1,1.05,22.31453663
1762 | 12,150,2.1,1.4,2.12203775
1763 | 12,150,2.3,0.7,33.72884771
1764 | 12,150,2.3,1.05,20.08719368
1765 | 12,150,2.3,1.4,18.99845952
1766 | 12,150,2.5,0.7,32.94164965
1767 | 12,150,2.5,1.05,20.7900743
1768 | 12,150,2.5,1.4,1.22973419
1769 | 12,175,1.6,0.7,30.49768339
1770 | 12,175,1.6,1.05,39.07880007
1771 | 12,175,1.6,1.4,37.85625565
1772 | 12,175,1.8,0.7,30.55014033
1773 | 12,175,1.8,1.05,21.71498025
1774 | 12,175,1.8,1.4,48.00469508
1775 | 12,175,2,0.7,29.49511193
1776 | 12,175,2,1.05,23.87025342
1777 | 12,175,2,1.4,1.72323757
1778 | 12,175,2.2,0.7,31.41051358
1779 | 12,175,2.2,1.05,16.71189843
1780 | 12,175,2.2,1.4,14.47445507
1781 | 12,175,2.4,0.7,34.65747253
1782 | 12,175,2.4,1.05,14.92602464
1783 | 12,175,2.4,1.4,1.1411741
1784 | 12,200,1.5,0.7,25.91147249
1785 | 12,200,1.5,1.05,36.65199221
1786 | 12,200,1.5,1.4,43.44795774
1787 | 12,200,1.7,0.7,29.4756594
1788 | 12,200,1.7,1.05,19.96334537
1789 | 12,200,1.7,1.4,20.06653041
1790 | 12,200,1.9,0.7,30.4019694
1791 | 12,200,1.9,1.05,17.529669
1792 | 12,200,1.9,1.4,2.84463563
1793 | 12,200,2.1,0.7,32.08265752
1794 | 12,200,2.1,1.05,14.89627256
1795 | 12,200,2.1,1.4,1.731124215
1796 | 12,200,2.3,0.7,32.63826111
1797 | 12,200,2.3,1.05,1.35897464
1798 | 12,200,2.3,1.4,3.196306215
1799 | 12,200,2.5,0.7,36.1041867
1800 | 12,200,2.5,1.05,1.31348703
1801 | 12,200,2.5,1.4,1.069728395
--------------------------------------------------------------------------------
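A minimal sketch of how the Crossed barrel data above could be read into input/objective arrays with pandas (assuming the CSV header names the columns n, theta, r, t and toughness, matching the dataframe preview in the example notebook, and that the file sits at datasets/Crossed barrel_dataset.csv as in the tree above):

    import pandas as pd

    # Load the Crossed barrel dataset and split it into the four design
    # variables and the measured toughness objective.
    df = pd.read_csv('datasets/Crossed barrel_dataset.csv')
    X = df[['n', 'theta', 'r', 't']].values   # design points
    y = df[['toughness']].values              # objective values
    print(X.shape, y.shape)
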
/datasets/P3HT_dataset.csv:
--------------------------------------------------------------------------------
1 | P3HT content (%),D1 content (%),D2 content (%),D6 content (%),D8 content (%),Conductivity (measured) (S/cm)
2 | 45,0,55,0,0,12.77
3 | 75,0,25,0,0,13.19
4 | 30,0,70,0,0,14.78
5 | 30,0,70,0,0,16.34
6 | 45,0,55,0,0,16.94
7 | 90,0,0,10,0,24.01
8 | 95,0,0,0,5,26.19
9 | 60,0,0,40,0,33.44
10 | 30,0,0,70,0,37.09
11 | 45,0,0,55,0,45.53
12 | 15,0,0,85,0,45.97
13 | 55,0,0,0,45,140.7
14 | 90,10,0,0,0,179.53
15 | 25,0,0,0,75,215.37
16 | 90,10,0,0,0,218.67
17 | 70,30,0,0,0,411.82
18 | 40,60,0,0,0,490.06
19 | 60,40,0,0,0,559.01
20 | 60,40,0,0,0,617.92
21 | 40,60,0,0,0,634.35
22 | 40,60,0,0,0,688.74
23 | 40,60,0,0,0,725.7933279
24 | 50,50,0,0,0,766.85
25 | 40,60,0,0,0,788.8326479
26 | 50,50,0,0,0,800.58
27 | 40,60,0,0,0,804.11
28 | 40,60,0,0,0,839.28
29 | 40,60,0,0,0,852.33
30 | 88.25,0.83,8.23,1.57,1.21,5.09
31 | 85.48,0.32,0.15,4.03,9.96,6.19
32 | 94.81,0.3,0.28,0.08,4.53,6.44
33 | 73.99,0.5,25.18,0.28,0.04,7.2
34 | 74.19,0.07,24.76,0.17,0.8,7.58
35 | 93.87,0.24,0.56,0.24,5.11,7.72
36 | 44.35,0.18,54.47,0.18,0.81,8.39
37 | 93.87,0.24,0.56,0.24,5.11,10.32
38 | 84.86,3.42,0.96,0.28,10.41,11.15
39 | 59.82,0.23,0.5,38.89,0.55,11.44
40 | 54.48,0.65,0.53,0.46,43.89,12.19
41 | 85.08,7.75,3.5,2.24,1.36,18.25
42 | 44.01,0.09,0.9,54.71,0.3,19.06
43 | 44.01,0.09,0.9,54.71,0.3,19.3
44 | 74.31,0.39,0.1,25.18,0.04,49.98
45 | 91.13,1.99,1.95,2.24,2.69,6.25
46 | 90.31,0.18,0.62,8.25,0.65,7.09
47 | 89.8,3.01,1.59,4.36,1.23,7.84
48 | 90.3,0.2,7.89,0.79,0.89,7.85
49 | 84.55,1.93,1.4,3.23,8.85,9.21
50 | 92.28,2.19,2.61,1.66,1.24,10.37
51 | 84.55,1.93,1.4,3.23,8.85,10.58
52 | 18.67,0.27,47.09,24.03,9.99,14.08
53 | 16.15,0.46,25.38,51.42,6.55,15.85
54 | 88.45,3.5,1.89,1.96,4.21,16.66
55 | 89.55,5.72,0.83,1.8,2.1,16.69
56 | 18.74,0.17,18.41,36.13,26.58,17.89
57 | 16.15,0.46,25.38,51.42,6.55,17.96
58 | 20.57,0.18,35.6,39.41,4.16,18.43
59 | 90.48,4.82,2.61,0.55,1.57,28.51
60 | 88.5,7.91,0.96,1.95,0.64,31.24
61 | 19.84,1.06,28.7,33.62,16.75,34.35
62 | 87.89,2,6.26,2.42,1.33,63.53
63 | 19.13,0.4,46.86,25.41,8.25,6.92
64 | 74.88,0.08,2.02,22.01,1.03,7.25
65 | 19.13,0.4,46.86,25.41,8.25,10.53
66 | 88.62,0.3,0.34,9.64,1.11,11.38
67 | 83.17,1.72,3.54,2.43,9.07,11.49
68 | 83.17,1.72,3.54,2.43,9.07,12.36
69 | 83.55,0.58,3.09,7.01,5.74,13.46
70 | 92.37,0.45,1.84,0.51,4.85,13.99
71 | 17.71,0.98,25.34,50.3,5.68,14.78
72 | 43.11,0.53,53.45,0.51,2.36,14.99
73 | 84.57,0.9,0.69,5.84,7.98,16.13
74 | 82.28,2.43,2.97,3.56,8.72,16.51
75 | 88.62,0.3,0.34,9.64,1.11,17.57
76 | 86.41,2.25,0.04,1.87,9.45,26.55
77 | 42.44,1.11,1.49,54.04,0.95,26.57
78 | 85.38,1.66,1.45,1.1,10.48,28.39
79 | 84.01,2.49,0.67,4.71,8.11,32.04
80 | 84.09,1.85,0.65,2.36,11.14,39.11
81 | 86.6,1.1,0.28,2.42,9.54,52.77
82 | 80.26,0.53,12.03,5.47,1.78,2.44
83 | 96.27,0.27,2.02,0.85,0.51,6.31
84 | 80.26,0.53,12.03,5.47,1.78,6.68
85 | 18.73,1.28,47.46,24.64,7.94,6.73
86 | 93.11,0.03,0.63,3.78,2.5,7.08
87 | 96.27,0.27,2.02,0.85,0.51,7.67
88 | 19.15,1.59,48.4,23.33,7.57,7.96
89 | 84.23,1.06,1.13,4.34,9.15,9.62
90 | 94.58,0.03,1.15,1.38,2.91,9.71
91 | 84.23,1.06,1.13,4.34,9.15,11.38
92 | 82.98,8.04,4.79,2.58,1.51,11.47
93 | 19.52,1.24,46.21,25.86,7.23,13.25
94 | 91.03,0.46,1.93,6.59,0.04,13.55
95 | 44.55,0.5,51.55,1.1,2.31,17.35
96 | 16.88,1.25,24.96,49.58,7.32,20.03
97 | 84.6,7.56,0.04,7.83,0.06,29.81
98 | 19.23,0.34,45.36,26.03,9.07,45.34
99 | 81.71,4.33,5.44,8.58,0.04,86.01
100 | 69.1,30.83,0.04,0.04,0.04,233.89
101 | 58.55,37.36,1.92,0.87,1.27,374.29
102 | 48.97,50.34,0.04,0.1,0.56,433.78
103 | 49.4,49.48,0.54,0.38,0.21,484.36
104 | 58.76,39.81,0.75,0.15,0.53,545.78
105 | 49.51,49.24,0.62,0.12,0.56,562.06
106 | 50.64,48.29,0.23,0.44,0.41,580.44
107 | 49.51,49.24,0.62,0.12,0.56,607.72
108 | 48.72,49.78,0.45,0.83,0.23,618.72
109 | 49.72,48.06,1.11,0.89,0.23,625.33
110 | 50.64,48.29,0.23,0.44,0.41,659.36
111 | 47.81,49.29,0.55,1.46,0.89,750.9
112 | 40.29,58.82,0.29,0.41,0.19,770.35
113 | 46.92,50.3,1.53,0.04,1.23,838.31
114 | 40.67,59.26,0.04,0.04,0.07,904.29
115 | 48.62,50.35,0.49,0.53,0.04,226.22
116 | 44.3,54.68,0.04,0.29,0.78,291.41
117 | 47.49,50.27,1.47,0.65,0.16,325.56
118 | 44.3,54.68,0.04,0.29,0.78,452.8
119 | 42.33,51.83,2.75,2.87,0.32,475.46
120 | 42.33,51.83,2.75,2.87,0.32,529.11
121 | 42.33,51.83,2.75,2.87,0.32,535.99
122 | 47.05,49.93,0.7,1.63,0.73,540.19
123 | 56.62,37.68,3.06,1.13,1.53,540.19
124 | 44.75,52.92,0.21,1.32,0.84,543.75
125 | 44.3,54.68,0.04,0.29,0.78,552.26
126 | 53.99,44.58,0.04,0.54,0.89,554.41
127 | 49.58,45.42,0.89,2.25,1.93,608.87
128 | 44.3,54.68,0.04,0.29,0.78,660
129 | 44.3,54.68,0.04,0.29,0.78,1089.12
130 | 42.33,51.83,2.75,2.87,0.32,1243.67
131 | 55.32,37.04,1.9,2.32,3.44,192.74
132 | 55.36,38.38,1.13,4.15,1.04,203.82
133 | 53.66,38.51,1.57,4.75,1.58,207.38
134 | 53.13,37.59,4.69,3.52,1.05,212.66
135 | 77.73,20.64,1.58,0.04,0.04,215.93
136 | 42.02,47.62,4.59,3.34,2.38,278.51
137 | 42.3,45.07,4.49,5.21,3.02,281.68
138 | 54.14,40.57,0.04,2.68,2.61,293.6
139 | 41.23,49.06,3.41,6.17,0.14,293.93
140 | 58.62,39.89,0.04,1.32,0.14,302.25
141 | 48.33,50.13,1.32,0.22,0.04,336.24
142 | 46.12,48.4,2.73,0.86,1.91,259.11
143 | 45.41,48.51,2.1,2.64,1.38,279.69
144 | 46.05,49.9,2.22,0.77,1.09,280.38
145 | 51.73,32.12,3.92,2.29,9.96,283.86
146 | 45.41,48.51,2.1,2.64,1.38,286.73
147 | 46.05,49.9,2.22,0.77,1.09,291.83
148 | 45.58,49.52,2.59,0.49,1.89,312.65
149 | 48.68,39.72,10.97,0.58,0.04,326.54
150 | 45.21,49.39,3.59,1.45,0.41,328.21
151 | 47.13,46.95,4.28,1.08,0.57,328.45
152 | 47.2,49.49,2.01,0.31,1.01,348.88
153 | 67.1,30.15,0.72,0.04,1.95,36.88
154 | 38.9,0.03,0.21,2.95,57.95,44.1
155 | 60.1,38.39,0.55,0.11,0.86,47.22
156 | 68.11,28.69,0.89,0.97,1.35,63.81
157 | 50.74,41.05,1.65,4.69,1.87,92.38
158 | 44.8,37.4,6.81,5.83,5.18,116.5
159 | 44.8,37.4,6.81,5.83,5.18,118.34
160 | 50.74,41.05,1.65,4.69,1.87,142.23
161 | 50.35,40.6,5.37,2.44,1.26,180.26
162 | 50.35,40.6,5.37,2.44,1.26,189.12
163 | 51.81,39.1,5.15,0.78,3.14,214.21
164 | 39.05,41.85,5.75,4.04,9.4,217.94
165 | 39.05,41.85,5.75,4.04,9.4,220.57
166 | 50.65,44.59,4.06,0.04,0.69,220.66
167 | 50.7,37.88,4.05,3.37,4.05,233.43
168 | 44.89,47.03,3.81,1.16,3.1,249.23
169 | 44.89,47.03,3.81,1.16,3.1,250.28
170 | 47.09,35.56,5.79,5.47,6.17,331.17
171 | 46.44,49.23,1.66,1.7,1.06,385.2
172 | 37.69,43.16,4.14,6.22,8.8,405.26
173 | 39.9,45.9,3.34,1.76,9.14,419.51
174 | 45.8,47.25,3.24,2.7,1.1,542.3
175 | 41.64,0.6,0.59,54.7,2.48,2.740952014
176 | 60.34,26.07,6.5,2.31,4.87,41.89833748
177 | 66.03,30.01,2.61,0.24,1.08,52.47346448
178 | 56.59,31.34,5.46,0.93,5.73,76.51244504
179 | 61.93,30.52,7.05,0.2,0.36,83.28491341
180 | 43.49,46.89,4.18,5.1,0.37,111.8949727
181 | 45.6,46.5,1.67,3.96,2.26,135.2652427
182 | 43.09,47.85,3.14,3.3,2.67,151.3841761
183 | 43.09,47.85,3.14,3.3,2.67,155.3457121
184 | 41.41,52.85,1.88,2.52,1.42,160.6199195
185 | 41.6,51.43,2.1,4.91,0.04,162.7067112
186 | 43.8,45.29,7.89,2.43,0.55,200.7545831
187 | 36.88,41.6,2.67,1.85,16.99,311.2965408
188 | 46.57,48.91,3.08,1.11,0.36,318.1919743
189 | 47.26,44.46,7.32,0.69,0.23,322.6997586
190 | 45.64,48.86,2.64,1.78,1.09,339.0080085
191 | 36.88,41.6,2.67,1.85,16.99,345.3800939
192 | 39.61,48.07,7.51,1.93,2.94,390.2707837
193 | 37.29,43.2,2.77,1.81,14.94,398.5758253
194 | 38.33,43.6,0.1,3.63,14.3,424.1973941
195 | 43.61,49.29,2.88,4.22,0.04,733.7295218
196 | 43.61,49.29,2.88,4.22,0.04,745.278685
197 | 70.14,27.18,1.45,0.27,0.97,179.12
198 | 44.57,48.47,2.93,1.6,2.4,191.45
199 | 44.57,48.47,2.93,1.6,2.4,192.98
200 | 71.64,28.16,0.04,0.11,0.05,195.33
201 | 44.57,48.47,2.93,1.6,2.4,201.63
202 | 68.94,26.79,0.69,1.03,2.57,207.58
203 | 71.64,28.16,0.04,0.11,0.05,213.5
204 | 41.9,48.61,3.78,1.87,3.83,214.93
205 | 34.13,42.19,8.79,13.22,1.69,226.25
206 | 43.13,49.3,3.16,1.72,2.74,240.44
207 | 34.01,38.9,0.73,12.68,13.65,248.62
208 | 34.01,38.9,0.73,12.68,13.65,268.25
209 | 44.61,47.53,2.05,3.17,2.58,289.72
210 | 43.83,36.37,0.99,2.44,16.4,297.95
211 | 43.17,34.01,3.89,2.04,16.96,325.42
212 | 46.69,37.39,0.04,1.86,14.05,356.76
213 | 34.96,44.32,1.8,14.01,4.96,382.45
214 | 45.15,42.25,7.82,1.98,2.83,395.74
215 | 40.02,41.49,0.08,13.95,4.56,406.47
216 | 46.86,43.74,5.08,0.78,3.61,451.64
217 | 46.86,43.74,5.08,0.78,3.61,457.13
218 | 41.63,45.99,8.17,0.04,4.24,696.39
219 | 53.4,33.9,1.18,11.34,0.25,271.56
220 | 59.98,32.34,5.8,1.71,0.06,537.77
221 | 46.51,49.51,2.06,1.06,0.89,545.18
222 | 40.67,59.26,0.04,0.04,0.07,546.79
223 | 55.34,38.12,6.27,0.01,0.26,561.86
224 | 50,50,0,0,0,602.48
225 | 38.38,48.28,0.04,0.57,12.71,636.48
226 | 38.92,49.28,0.09,0.95,10.67,637.46
227 | 55.34,38.12,6.27,0.01,0.26,654.34
228 | 40.67,59.26,0.04,0.04,0.07,658.86
229 | 50,50,0,0,0,695.67
230 | 38.29,47.37,0.95,0.6,12.87,697.4
231 | 44.3,54.68,0.04,0.29,0.78,731.11
232 | 38.92,49.28,0.09,0.95,10.67,738.1
233 | 44.3,54.68,0.04,0.29,0.78,772.94
234 | 40,60,0,0,0,824.47
--------------------------------------------------------------------------------
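A similar minimal sketch for the P3HT dataset above, using the column names from its header row. The five composition columns are assumed to describe blend fractions that should sum to roughly 100 %, which can be checked directly; the measured conductivity is the objective column:

    import pandas as pd

    comp_cols = ['P3HT content (%)', 'D1 content (%)', 'D2 content (%)',
                 'D6 content (%)', 'D8 content (%)']
    df = pd.read_csv('datasets/P3HT_dataset.csv')
    X = df[comp_cols].values
    y = df['Conductivity (measured) (S/cm)'].values

    # Sanity check: worst deviation of any row's composition from 100 %.
    print('max |sum - 100| =', (df[comp_cols].sum(axis=1) - 100).abs().max())
    print('conductivity range: %.2f - %.2f S/cm' % (y.min(), y.max()))
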
/datasets/Perovskite_dataset.csv:
--------------------------------------------------------------------------------
1 | CsPbI,FAPbI,MAPbI,Instability index
2 | 0,1,0,480185
3 | 0,1,0,505657
4 | 0.25,0.75,0,144074
5 | 0.5,0.5,0,239852
6 | 0.5,0.5,0,416657
7 | 0.75,0.25,0,253423
8 | 0.75,0.25,0,273687
9 | 1,0,0,144556
10 | 1,0,0,351227
11 | 0,0.75,0.25,914156
12 | 0,0.75,0.25,981850
13 | 0.25,0.5,0.25,284710
14 | 0.25,0.5,0.25,602576
15 | 0.5,0.25,0.25,714735
16 | 0.5,0.25,0.25,925193
17 | 0.75,0,0.25,102012
18 | 0.75,0,0.25,128971
19 | 0,0.5,0.5,1103450
20 | 0,0.5,0.5,1155542
21 | 0.25,0.25,0.5,446747
22 | 0.25,0.25,0.5,533060
23 | 0.5,0,0.5,1167261
24 | 0.5,0,0.5,1182236
25 | 0,0.25,0.75,1281399
26 | 0,0.25,0.75,1300295
27 | 0.25,0,0.75,876981
28 | 0,0,1,1267615
29 | 0,0,1,1274993
30 | 0.16,0.84,0,57484
31 | 0.16,0.84,0,286689
32 | 0.35,0.65,0,209698
33 | 0.35,0.65,0,316345
34 | 0.37,0.63,0,207474
35 | 0.39,0.61,0,132665
36 | 0.63,0.37,0,287734
37 | 0.63,0.37,0,302631
38 | 0.3,0.69,0.01,190636
39 | 0.3,0.69,0.01,342685
40 | 0.87,0.12,0.01,224858
41 | 0.13,0.85,0.02,98773
42 | 0.34,0.64,0.02,343647
43 | 0.28,0.65,0.07,370439
44 | 0.91,0.01,0.08,482218
45 | 0.8,0.11,0.09,228347
46 | 0.23,0.66,0.11,299002
47 | 0.89,0,0.11,481313
48 | 0.89,0,0.11,482674
49 | 0.34,0.54,0.12,416405
50 | 0.88,0,0.12,404845
51 | 0.78,0.09,0.13,234879
52 | 0.83,0.03,0.14,199330
53 | 0.84,0,0.16,293210
54 | 0.84,0,0.16,456484
55 | 0.83,0,0.17,374938
56 | 0.81,0,0.19,376516
57 | 0.81,0,0.19,463506
58 | 0.09,0.91,0,133495
59 | 0.1,0.9,0,237318
60 | 0.19,0.81,0,221696
61 | 0.22,0.78,0,123157
62 | 0.23,0.77,0,38893
63 | 0.26,0.74,0,449878
64 | 0.32,0.68,0,40621
65 | 0.43,0.57,0,300244
66 | 0.57,0.43,0,269180
67 | 0.96,0.04,0,349707
68 | 0.16,0.83,0.01,284939
69 | 0.17,0.82,0.01,227835
70 | 0.37,0.62,0.01,180293
71 | 0.41,0.58,0.01,252077
72 | 0.7,0.29,0.01,286563
73 | 0.14,0.84,0.02,87667
74 | 0.09,0.87,0.04,339668
75 | 0.25,0.71,0.04,96638
76 | 0.18,0.76,0.06,140574
77 | 0.13,0.79,0.08,59256
78 | 0.06,0.85,0.09,272660
79 | 0.66,0.24,0.1,476197
80 | 0.19,0.7,0.11,127394
81 | 0.68,0.13,0.19,548909
82 | 0.71,0.06,0.23,438903
83 | 0.71,0.01,0.28,489801
84 | 0.26,0.37,0.36,252642
85 | 0,0,1,1497320
86 | 0.13,0.87,0,24723
87 | 0.18,0.82,0,27122
88 | 0.2,0.8,0,91681
89 | 0.25,0.75,0,183180
90 | 0.34,0.66,0,132641
91 | 0.38,0.62,0,120257
92 | 0.53,0.47,0,318600
93 | 0.59,0.41,0,225170
94 | 0.69,0.31,0,261756
95 | 0.76,0.24,0,211852
96 | 0.1,0.89,0.01,136004
97 | 0.27,0.72,0.01,163985
98 | 0.32,0.67,0.01,383479
99 | 0.44,0.55,0.01,206695
100 | 0.18,0.78,0.04,147202
101 | 0.09,0.85,0.06,192477
102 | 0.22,0.72,0.06,175885
103 | 0.12,0.81,0.07,239057
104 | 0.16,0.75,0.09,129483
105 | 0.12,0.77,0.11,145224
106 | 0.2,0.69,0.11,208028
107 | 0.16,0.7,0.14,141683
108 | 0.3,0.39,0.32,437082
109 | 0.31,0.36,0.33,701140
110 | 0.28,0.37,0.35,719564
111 | 0.24,0.37,0.39,452558
112 | 0.31,0.3,0.39,794499
113 | 0.27,0.34,0.4,375241
114 | 0.13,0.87,0,23707
115 | 0.13,0.87,0,59211
116 | 0.13,0.87,0,80413
117 | 0.13,0.87,0,98439
118 | 0.17,0.8,0.03,26197
119 | 0.17,0.8,0.03,38370
120 | 0.17,0.8,0.03,107494
121 | 0.17,0.8,0.03,119938
122 | 0.25,0.7,0.06,91655
123 | 0.25,0.7,0.06,107908
124 | 0.25,0.7,0.06,168856
125 | 0.25,0.7,0.06,292981
126 | 0.13,0.79,0.08,55524
127 | 0.13,0.79,0.08,88158
128 | 0.13,0.79,0.08,93928
129 | 0.13,0.79,0.08,184086
130 | 0.05,0.7885,0.1615,258154
131 | 0.05,0.7885,0.1615,316310
132 | 0.05,0.7885,0.1615,319618
133 | 0.05,0.7885,0.1615,422140
134 | 0.26,0.38,0.36,503950
135 | 0.26,0.38,0.36,541686
136 | 0.26,0.38,0.36,593083
137 | 0,0,1,1375402
138 | 0,0,1,1413800
139 | 0,0,1,1583406
140 | 0,0,1,1755056
--------------------------------------------------------------------------------
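In the Perovskite dataset above, many Cs/FA/MA compositions appear several times with different measured instability indices. A minimal sketch that averages the repeats per composition and, assuming a lower instability index corresponds to a more stable film, lists the best-performing compositions first:

    import pandas as pd

    df = pd.read_csv('datasets/Perovskite_dataset.csv')

    # Average repeated measurements for each (CsPbI, FAPbI, MAPbI) composition
    # and sort ascending, so the lowest (assumed most stable) come first.
    mean_ii = (df.groupby(['CsPbI', 'FAPbI', 'MAPbI'])['Instability index']
                 .mean()
                 .sort_values())
    print(mean_ii.head(10))
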
/requirements.txt:
--------------------------------------------------------------------------------
1 |
2 | scikit-learn
3 | numpy
4 | matplotlib
5 | GPy
6 | pyDOE
7 | pandas
8 | scipy
9 | # additional packages imported by the example notebooks
10 | torch
11 | GPyOpt
12 | seaborn
13 | python-ternary
14 |
--------------------------------------------------------------------------------
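The dependencies listed above can be installed with pip, for example inside a fresh virtual environment:

    pip install -r requirements.txt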