├── image.jpg
├── DataSet
│   ├── ATT_4G_pedestrian.txt
│   ├── 4G_pedestrian_TMobile.txt
│   ├── ATT_4G_bus.txt
│   ├── s21_average_sweep.txt
│   ├── s31_average_sweep.txt
│   ├── s41_average_sweep.txt
│   └── 4G_bus_TMobile.txt
├── Code
│   ├── fileprocessor.py
│   ├── preprocessor.py
│   ├── calculateError.py
│   ├── seq2seq_unguided_LSTM.py
│   ├── seq2seq_unguided_GRU.py
│   ├── seq2seq_guided_LSTM.py
│   └── seq2seq_curriculum_LSTM.py
└── Readme.md
/image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dherath/DeepLearning_for_Wireless_Signal_Strength_Prediction/HEAD/image.jpg
--------------------------------------------------------------------------------
/Code/fileprocessor.py:
--------------------------------------------------------------------------------
1 |
2 | def writetofile(filename,Y):
3 | f = open(filename,"w")
4 | for data in Y:
5 | temp_string = ""
6 | for i in range(len(data)-1):
7 | temp_string += str(data[i]) + " "
8 | temp_string += str(data[-1]) +"\n"
9 | f.write(temp_string)
10 | f.close()
11 | return
12 |
13 | def readfromfile(filename):
14 | Y = []
15 | f = open(filename,"r")
16 | for line in f:
17 | data = line.split()
18 | Y.append(data)
19 | f.close()
20 | return Y
21 |
22 | def writeErrResult(filename,Err):
23 | f = open(filename,"w")
24 | for data in Err:
25 | f.write(str(data)+"\n")
26 | f.close()
27 | return
--------------------------------------------------------------------------------
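A minimal usage sketch for the helpers above (the file names here are illustrative only, not files the scripts create): `writetofile` writes one space-separated row per sequence, `readfromfile` returns each row back as a list of strings, and `writeErrResult` writes one error value per line.

```python
# Illustrative round trip through fileprocessor.py; file names are examples only.
from fileprocessor import writetofile, readfromfile, writeErrResult

predictions = [[-105.2, -104.8, -104.1], [-106.0, -105.7, -105.3]]  # two predicted sequences
writetofile("predictions_example.txt", predictions)   # one space-separated row per sequence
rows = readfromfile("predictions_example.txt")        # note: values come back as strings
print(rows[0])                                        # ['-105.2', '-104.8', '-104.1']

writeErrResult("rmse_example.txt", [1.8, 2.1, 2.4])   # one error value per line
```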
/Code/preprocessor.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | #-----------------------------------------
4 | # helper functions for getData()
5 | #-----------------------------------------
6 |
7 | def openFile(filename):
8 | data = []
9 | with open(filename) as f:
10 | for line in f:
11 | words = line.split()
12 | data.append(words[0])
13 | return data
14 |
15 |
16 | def sampleData(dataset,x_length,y_length):
17 | x_data_limit = len(dataset) - (x_length+y_length)
18 | X = []
19 | Y = []
20 | for i in range(x_data_limit):
21 | # for the inputs
22 | temp_x = []
23 | for j in range(x_length):
24 | temp_x.append(dataset[i+j])
25 | X.append(temp_x)
26 | # for the outputs
27 | temp_y = []
28 | for j in range(y_length):
29 | temp_y.append(dataset[i+x_length+j])
30 | Y.append(temp_y)
31 | return X,Y
32 |
33 |
34 |
35 | #-----------------------------------------
36 | # main method to obtain data
37 | #-----------------------------------------
38 |
39 | # obtains the datasets -> used for the RNN model
40 | # filename : the string name for the file
41 | # x_length : length of the input (timesteps in the past)
42 | # y_length : length of the output (timesteps into the future)
43 | # percentage : the percentage of data to use for training and testing
44 |
45 | def getData(filename,x_length,y_length,percentage):
46 | data = openFile(filename) # open the file and get data
47 |
48 | #-- separate training and testing --------
49 | train_size = int(percentage*len(data))
50 |
51 | train_data = data[1:train_size]
52 | test_data = data[train_size+1:-1]
53 |
54 | X_Train,Y_Train = sampleData(train_data,x_length,y_length)
55 | X_Test,Y_Test = sampleData(test_data,x_length,y_length)
56 |
57 | return X_Train,Y_Train,X_Test,Y_Test
58 |
--------------------------------------------------------------------------------
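A short sketch of how `getData` is typically called, mirroring the window sizes used in the training scripts (the dataset path and working directory are assumptions for illustration): each input sample is a window of `x_length` consecutive readings and its target is the `y_length` readings that immediately follow, produced by sliding the window one step at a time over the train and test splits.

```python
# Example call with the same window sizes the seq2seq scripts use
# (path assumes the script is run from inside the Code directory).
from preprocessor import getData

X_train, Y_train, X_test, Y_test = getData("../DataSet/ATT_4G_bus.txt",
                                           x_length=20, y_length=10, percentage=0.8)

# Each X_train[i] holds 20 consecutive past readings (as strings) and
# Y_train[i] holds the 10 readings that immediately follow them.
print(len(X_train), len(X_train[0]), len(Y_train[0]))  # e.g. <num_windows> 20 10
```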
/Code/calculateError.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #--------------------------------------------
4 | # helper functions
5 | #--------------------------------------------
6 |
7 | # combining the data into placeholders
8 | def getPlaceholders(org_data,comp_data):
9 | y1 = [] # correct values
10 | y2 = [] # computed values
11 | for i in range(len(org_data[0])):
12 | temp1 = []
13 | temp2 = []
14 | for j in range(len(org_data)):
15 | temp1.append(org_data[j][i])
16 | temp2.append(comp_data[j][i])
17 | y1.append(temp1)
18 | y2.append(temp2)
19 | return y1,y2
20 |
21 | #--------------------------------------------
22 | # error calculation
23 | #--------------------------------------------
24 |
25 | # MSE : Mean Squared Error
26 | def MSE(org_data,comp_data):
27 | try:
28 | if len(org_data) != len(comp_data):
29 | raise ValueError("length of original Y and computed Y does not match")
30 | y_org_sample, y_calc_sample = getPlaceholders(org_data,comp_data)
31 | mse = []
32 | for n in range(len(y_org_sample)):
33 | y_org = y_org_sample[n]
34 | y_calc = y_calc_sample[n]
35 | sum_value = 0
36 | for i in range(len(y_org)):
37 | diff = float(float(y_org[i])-float(y_calc[i]))
38 | sqrd_diff = diff ** 2
39 | sum_value += sqrd_diff
40 | mse.append(float(sum_value/len(y_org)))
41 | return mse
42 | except ValueError as err:
43 | print "Error: ",err
44 |
45 | # RMSE : Root Mean Squared Error
46 | def RMSE(org_data,comp_data):
47 | mse = MSE(org_data,comp_data)
48 | rmse = []
49 | for data in mse:
50 | rmse.append(float(data ** 0.5))
51 | return rmse
52 |
53 | # MAE : Mean Absolute Error
54 | def MAE(org_data,comp_data):
55 | try:
56 | if len(org_data) != len(comp_data):
57 | raise ValueError("length of original Y and computed Y does not match")
58 | y_org_sample, y_calc_sample = getPlaceholders(org_data,comp_data)
59 | mae = []
60 | for n in range(len(y_org_sample)):
61 | y_org = y_org_sample[n]
62 | y_calc = y_calc_sample[n]
63 | sum_value = 0
64 | for i in range(len(y_org)):
65 | diff = abs(float(y_org[i])-float(y_calc[i]))
66 | sum_value += diff
67 | mae.append(float(sum_value/len(y_org)))
68 | return mae
69 | except ValueError as err:
70 | print "Error: ",err
71 |
--------------------------------------------------------------------------------
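A small worked example of the error helpers with made-up numbers: the errors are reported per prediction step, i.e. one value for each position in the output horizon, averaged over all samples.

```python
# Toy example: two test samples, each predicted 3 steps ahead.
from calculateError import MSE, RMSE, MAE

y_true = [[-100, -101, -102], [-98, -99, -100]]
y_pred = [[-101, -101, -104], [-98, -100, -101]]

print(MSE(y_true, y_pred))   # [0.5, 0.5, 2.5]  -> one value per prediction step
print(RMSE(y_true, y_pred))  # element-wise square root of the MSE values
print(MAE(y_true, y_pred))   # [0.5, 0.5, 1.5]
```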
/Readme.md:
--------------------------------------------------------------------------------
1 | ### A Deep Learning Model for Wireless Channel Quality Prediction ###
2 |
3 | The code here contains a sequence-to-sequence LSTM/GRU-based deep learning model for wireless signal strength prediction. The model can be trained under three training paradigms: guided, unguided, and curriculum training. Please refer to the [paper](http://www.dinalherath.com/papers/2019ICC.pdf) for algorithm details. Note that an extended version of the work with more comparisons is also available in this [paper](http://www.dinalherath.com/papers/2019tvt.pdf).
4 |
5 |
6 |
7 |
8 |
9 | #### Deep learning (LSTM and GRU) models:
10 |
11 | - seq2seq_curriculum_LSTM.py => LSTM using curriculum training (30%)
12 | - seq2seq_guided_LSTM.py => LSTM using guided training
13 | - seq2seq_unguided_LSTM.py => LSTM using unguided training
14 | - seq2seq_unguided_GRU.py => GRU using unguided training
15 | - calculateError.py, fileprocessor.py, preprocessor.py => contain helper functions
16 |
17 | #### Data description:
18 |
19 | All datasets are cleaned. References to the raw data are given under each network type.
20 |
21 | **4G LTE RSRP Measurements:**
22 | (Data Collected by the authors)
23 | - 4G_bus_TMobile.txt
24 | - 4G_pedestrian_TMobile.txt
25 | - ATT_4G_bus.txt
26 | - ATT_4G_pedestrian.txt
27 |
28 | **Industrial Network Measurements:**
29 | (Raw data can be found at this [link](https://crawdad.org/init/factory/20160613/factory1-channel-gain))
30 | - s21_average_sweep.txt (antennas separated by a distance of 3.1m)
31 | - s31_average_sweep.txt (antennas separated by a distance of 10.0m)
32 | - s41_average_sweep.txt (antennas separated by a distance of 20.4m)
33 |
34 | **WiFi RSSI Measurements:**
35 | (First two datasets collected by the authors)
36 | - wifi_1s_sample.txt (sampling rate of 1 second)
37 | - wifi_2s_sample.txt (sampling rate of 2 seconds)
38 |
39 | **Zigbee Measurements:**
40 | (Raw data can be found at this [link](https://crawdad.org/due/packet-delivery/20150401))
41 | (Power level 31 considered)
42 | - wsn_p31_d10_sample.txt (sensor nodes communicating with each other over a fixed distance of 10m)
43 | - wsn_p31_d15_sample.txt (sensor nodes communicating with each other over a fixed distance of 15m)
44 |
45 | ```
46 | @inproceedings{herath2019deep,
47 | title={A deep learning model for wireless channel quality prediction},
48 | author={Herath, J Dinal and Seetharam, Anand and Ramesh, Arti},
49 | booktitle={ICC 2019-2019 IEEE International Conference on Communications (ICC)},
50 | pages={1--6},
51 | year={2019},
52 | organization={IEEE}
53 | }
54 | ```
55 |
56 | ```
57 | @article{kulkarni2019deepchannel,
58 | title={Deepchannel: Wireless channel quality prediction using deep learning},
59 | author={Kulkarni, Adita and Seetharam, Anand and Ramesh, Arti and Herath, J Dinal},
60 | journal={IEEE Transactions on Vehicular Technology},
61 | volume={69},
62 | number={1},
63 | pages={443--456},
64 | year={2019},
65 | publisher={IEEE}
66 | }
67 | ```
68 |
--------------------------------------------------------------------------------
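The difference between the training paradigms comes down to what is fed into the decoder at each step. Below is a hedged, framework-agnostic sketch of that idea; the `decoder_step` function is a stand-in for one LSTM/GRU decoder step and is not an API from this repository, and the curriculum branch shows one common interpretation (guide a fraction of steps, e.g. 30%) rather than the exact schedule used here, which is described in the paper.

```python
import random

def decode(last_observed, targets, decoder_step, state, mode="unguided", guided_fraction=0.3):
    """Sketch of decoder input feeding for guided / unguided / curriculum training.

    decoder_step(prev_input, state) -> (prediction, new_state) is a placeholder
    for a single LSTM/GRU decoder step; it is not part of this repository's code.
    """
    outputs = []
    prev = last_observed  # first decoder input is the last value of the input sequence
    for t in range(len(targets)):
        out, state = decoder_step(prev, state)
        outputs.append(out)
        if mode == "guided":
            prev = targets[t]                    # teacher forcing: feed the true value
        elif mode == "curriculum":
            prev = targets[t] if random.random() < guided_fraction else out
        else:                                    # "unguided": feed back the prediction
            prev = out
    return outputs
```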
/DataSet/ATT_4G_pedestrian.txt:
--------------------------------------------------------------------------------
1 | -112
2 | -111
3 | -111
4 | -112
5 | -112
6 | -112
7 | -112
8 | -113
9 | -113
10 | -117
11 | -117
12 | -115
13 | -115
14 | -115
15 | -115
16 | -114
17 | -114
18 | -114
19 | -114
20 | -114
21 | -114
22 | -114
23 | -114
24 | -113
25 | -113
26 | -113
27 | -113
28 | -113
29 | -113
30 | -113
31 | -113
32 | -113
33 | -113
34 | -115
35 | -115
36 | -120
37 | -120
38 | -121
39 | -121
40 | -129
41 | -129
42 | -120
43 | -120
44 | -118
45 | -118
46 | -115
47 | -115
48 | -109
49 | -109
50 | -116
51 | -116
52 | -113
53 | -113
54 | -115
55 | -115
56 | -110
57 | -110
58 | -107
59 | -107
60 | -113
61 | -113
62 | -112
63 | -112
64 | -111
65 | -111
66 | -111
67 | -111
68 | -112
69 | -112
70 | -111
71 | -111
72 | -111
73 | -111
74 | -110
75 | -110
76 | -111
77 | -111
78 | -111
79 | -111
80 | -111
81 | -111
82 | -110
83 | -110
84 | -110
85 | -110
86 | -113
87 | -113
88 | -111
89 | -111
90 | -111
91 | -111
92 | -111
93 | -111
94 | -111
95 | -111
96 | -111
97 | -111
98 | -110
99 | -110
100 | -111
101 | -111
102 | -110
103 | -110
104 | -113
105 | -113
106 | -113
107 | -113
108 | -113
109 | -113
110 | -113
111 | -113
112 | -112
113 | -112
114 | -112
115 | -112
116 | -111
117 | -111
118 | -111
119 | -111
120 | -111
121 | -111
122 | -111
123 | -111
124 | -110
125 | -110
126 | -111
127 | -111
128 | -111
129 | -111
130 | -111
131 | -111
132 | -119
133 | -119
134 | -120
135 | -120
136 | -119
137 | -119
138 | -116
139 | -116
140 | -115
141 | -115
142 | -115
143 | -115
144 | -115
145 | -115
146 | -115
147 | -115
148 | -115
149 | -115
150 | -115
151 | -115
152 | -114
153 | -114
154 | -115
155 | -115
156 | -115
157 | -115
158 | -116
159 | -116
160 | -114
161 | -114
162 | -114
163 | -114
164 | -115
165 | -115
166 | -117
167 | -117
168 | -114
169 | -114
170 | -114
171 | -114
172 | -115
173 | -115
174 | -115
175 | -115
176 | -115
177 | -115
178 | -114
179 | -114
180 | -114
181 | -114
182 | -115
183 | -115
184 | -114
185 | -114
186 | -115
187 | -115
188 | -115
189 | -115
190 | -114
191 | -114
192 | -115
193 | -115
194 | -115
195 | -115
196 | -115
197 | -115
198 | -115
199 | -115
200 | -115
201 | -111
202 | -111
203 | -113
204 | -113
205 | -110
206 | -110
207 | -109
208 | -109
209 | -109
210 | -101
211 | -101
212 | -111
213 | -111
214 | -110
215 | -110
216 | -105
217 | -105
218 | -107
219 | -107
220 | -114
221 | -114
222 | -119
223 | -119
224 | -114
225 | -114
226 | -118
227 | -118
228 | -118
229 | -118
230 | -119
231 | -119
232 | -121
233 | -118
234 | -118
235 | -118
236 | -118
237 | -118
238 | -117
239 | -117
240 | -119
241 | -119
242 | -119
243 | -118
244 | -118
245 | -118
246 | -117
247 | -117
248 | -117
249 | -115
250 | -115
251 | -116
252 | -116
253 | -112
254 | -112
255 | -105
256 | -105
257 | -103
258 | -103
259 | -103
260 | -101
261 | -101
262 | -101
263 | -101
264 | -100
265 | -100
266 | -100
267 | -100
268 | -107
269 | -107
270 | -105
271 | -105
272 | -105
273 | -101
274 | -101
275 | -101
276 | -101
277 | -105
278 | -105
279 | -105
280 | -105
281 | -105
282 | -105
283 | -105
284 | -105
285 | -103
286 | -103
287 | -103
288 | -103
289 | -103
290 | -103
291 | -103
292 | -103
293 | -104
294 | -104
295 | -104
296 | -104
297 | -104
298 | -104
299 | -104
300 | -104
301 | -104
302 | -95
303 | -95
304 | -95
305 | -95
306 | -105
307 | -105
308 | -105
309 | -105
310 | -94
311 | -94
312 | -94
313 | -100
314 | -100
315 | -100
316 | -100
317 | -103
318 | -103
319 | -103
320 | -103
321 | -103
322 | -97
323 | -97
324 | -97
325 | -99
326 | -99
327 | -101
328 | -101
329 | -101
330 | -101
331 | -99
332 | -99
333 | -99
334 | -99
335 | -99
336 | -98
337 | -98
338 | -102
339 | -102
340 | -102
341 | -102
342 | -100
343 | -100
344 | -100
345 | -100
346 | -99
347 | -99
348 | -99
349 | -99
350 | -98
351 | -98
352 | -98
353 | -101
354 | -101
355 | -101
356 | -99
357 | -99
358 | -99
359 | -102
360 | -102
361 | -102
362 | -102
363 | -102
364 | -102
365 | -102
366 | -100
367 | -100
368 | -100
369 | -98
370 | -98
371 | -98
372 | -98
373 | -96
374 | -96
375 | -96
376 | -91
377 | -91
378 | -91
379 | -91
380 | -93
381 | -93
382 | -93
383 | -93
384 | -94
385 | -94
386 | -94
387 | -95
388 | -95
389 | -95
390 | -95
391 | -95
392 | -95
393 | -95
394 | -95
395 | -96
396 | -96
397 | -96
398 | -96
399 | -92
400 | -92
401 | -92
402 | -92
403 | -94
404 | -94
405 | -94
406 | -94
407 | -93
408 | -93
409 | -93
410 | -93
411 | -93
412 | -91
413 | -91
414 | -91
415 | -91
416 | -91
417 | -91
418 | -91
419 | -99
420 | -99
421 | -99
422 | -99
423 | -99
424 | -99
425 | -99
426 | -99
427 | -99
428 | -96
429 | -96
430 | -96
431 | -96
432 | -96
433 | -96
434 | -96
435 | -96
436 | -96
437 | -95
438 | -95
439 | -95
440 | -95
441 | -95
442 | -99
443 | -99
444 | -99
445 | -99
446 | -99
447 | -99
448 | -99
449 | -96
450 | -96
451 | -96
452 | -96
453 | -96
454 | -96
455 | -96
456 | -96
457 | -96
458 | -96
459 | -96
460 | -89
461 | -89
462 | -89
463 | -96
464 | -96
465 | -96
466 | -96
467 | -95
468 | -95
469 | -95
470 | -95
471 | -94
472 | -94
473 | -94
474 | -94
475 | -87
476 | -87
477 | -87
478 | -85
479 | -85
480 | -85
481 | -89
482 | -89
483 | -89
484 | -89
485 | -95
486 | -95
487 | -95
488 | -95
489 | -95
490 | -92
491 | -92
492 | -88
493 | -88
494 | -88
495 | -88
496 | -82
497 | -82
498 | -82
499 | -82
500 | -86
501 | -86
502 | -86
503 | -86
504 | -86
505 | -83
506 | -83
507 | -83
508 | -79
509 | -79
510 | -79
511 | -81
512 | -81
513 | -81
514 | -81
515 | -81
516 | -90
517 | -90
518 | -90
519 | -90
520 | -80
521 | -80
522 | -80
523 | -96
524 | -96
525 | -96
526 | -97
527 | -97
528 | -97
529 | -97
530 | -97
531 | -93
532 | -93
533 | -93
534 | -93
535 | -87
536 | -87
537 | -87
538 | -87
539 | -81
540 | -81
541 | -81
542 | -78
543 | -78
544 | -85
545 | -85
546 | -85
547 | -81
548 | -81
549 | -81
550 | -81
551 | -84
552 | -84
553 | -84
554 | -84
555 | -84
556 | -87
557 | -87
558 | -87
559 | -88
560 | -88
561 | -88
562 | -84
563 | -84
564 | -84
565 | -84
566 | -85
567 | -85
568 | -85
569 | -85
570 | -87
571 | -87
572 | -87
573 | -90
574 | -90
575 | -90
576 | -91
577 | -91
578 | -91
579 | -91
580 | -91
581 | -91
582 | -91
583 | -91
584 | -91
585 | -91
586 | -91
587 | -91
588 | -90
589 | -90
590 | -90
591 | -89
592 | -89
593 | -89
594 | -89
595 | -74
596 | -74
597 | -74
598 | -74
599 | -86
600 | -86
601 | -86
602 | -86
603 | -92
604 | -92
605 | -92
606 | -93
607 | -93
608 | -93
609 | -93
610 | -90
611 | -90
612 | -90
613 | -90
614 | -88
615 | -88
616 | -88
617 | -88
618 | -84
619 | -84
620 | -84
621 | -84
622 | -84
623 | -78
624 | -78
625 | -78
626 | -78
627 | -87
628 | -87
629 | -87
630 | -92
631 | -92
632 | -92
633 | -81
634 | -81
635 | -81
636 | -81
637 | -86
638 | -86
639 | -86
640 | -86
641 | -88
642 | -88
643 | -88
644 | -88
645 | -87
646 | -87
647 | -87
648 | -87
649 | -87
650 | -87
651 | -87
652 | -87
653 | -88
654 | -88
655 | -88
656 | -88
657 | -88
658 | -84
659 | -84
660 | -84
661 | -86
662 | -86
663 | -86
664 | -95
665 | -95
666 | -95
667 | -91
668 | -91
669 | -91
670 | -91
671 | -91
672 | -89
673 | -89
674 | -89
675 | -89
676 | -90
677 | -90
678 | -93
679 | -93
680 | -93
681 | -89
682 | -89
683 | -89
684 | -89
685 | -93
686 | -93
687 | -93
688 | -93
689 | -96
690 | -96
691 | -96
692 | -96
693 | -96
694 | -90
695 | -90
696 | -90
697 | -90
698 | -87
699 | -87
700 | -87
701 | -87
702 | -92
703 | -92
704 | -92
705 | -92
706 | -92
707 | -93
708 | -93
709 | -93
710 | -93
711 | -88
712 | -88
713 | -88
714 | -97
715 | -97
716 | -97
717 | -97
718 | -99
719 | -99
720 | -99
721 | -99
722 | -99
723 | -91
724 | -91
725 | -91
726 | -91
727 | -79
728 | -79
729 | -79
730 | -79
731 | -88
732 | -88
733 | -86
734 | -86
735 | -86
736 | -86
737 | -88
738 | -88
739 | -88
740 | -88
741 | -86
742 | -86
743 | -86
744 | -86
745 | -90
746 | -90
747 | -88
748 | -88
749 | -88
750 | -86
751 | -86
752 | -86
753 | -86
754 | -86
755 | -88
756 | -88
757 | -88
758 | -88
759 | -85
760 | -85
761 | -85
762 | -90
763 | -90
764 | -85
765 | -85
766 | -85
767 | -85
768 | -89
769 | -89
770 | -89
771 | -89
772 | -86
773 | -86
774 | -86
775 | -86
776 | -94
777 | -94
778 | -94
779 | -94
780 | -94
781 | -94
782 | -85
783 | -85
784 | -85
785 | -85
786 | -88
787 | -88
788 | -88
789 | -88
790 | -88
791 | -91
792 | -91
793 | -91
794 | -90
795 | -90
796 | -93
797 | -93
798 | -93
799 | -93
800 | -93
801 | -93
802 | -93
803 | -93
804 | -90
805 | -90
806 | -90
807 | -88
808 | -88
809 | -88
810 | -83
811 | -83
812 | -83
813 | -83
814 | -88
815 | -88
816 | -89
817 | -89
818 | -89
819 | -85
820 | -85
821 | -86
822 | -86
823 | -86
824 | -86
825 | -86
826 | -82
827 | -82
828 | -86
829 | -86
830 | -84
831 | -84
832 | -85
833 | -85
834 | -83
835 | -83
836 | -88
837 | -88
838 | -87
839 | -87
840 | -83
841 | -83
842 | -83
843 | -95
844 | -95
845 | -96
846 | -96
847 | -99
848 | -99
849 | -99
850 | -95
851 | -95
852 | -95
853 | -95
854 | -96
855 | -96
856 | -93
857 | -93
858 | -93
859 | -93
860 | -93
861 | -93
862 | -92
863 | -92
864 | -92
865 | -92
866 | -94
867 | -94
868 | -96
869 | -96
870 | -96
871 | -96
872 | -96
873 | -96
874 | -96
875 | -97
876 | -97
877 | -95
878 | -95
879 | -95
880 | -95
881 | -95
882 | -95
883 | -95
884 | -95
885 | -95
886 | -91
887 | -91
888 | -91
889 | -91
890 | -97
891 | -97
892 | -97
893 | -97
894 | -95
895 | -95
896 | -95
897 | -95
898 | -97
899 | -97
900 | -96
901 | -96
902 | -96
903 | -98
904 | -98
905 | -98
906 | -97
907 | -97
908 | -97
909 | -97
910 | -97
911 | -93
912 | -93
913 | -91
914 | -91
915 | -91
916 | -97
917 | -97
918 | -97
919 | -97
920 | -96
921 | -96
922 | -96
923 | -96
924 | -96
925 | -93
926 | -93
927 | -93
928 | -86
929 | -86
930 | -86
931 | -89
932 | -89
933 | -89
934 | -89
935 | -89
936 | -90
937 | -90
938 | -90
939 | -90
940 | -94
941 | -94
942 | -94
943 | -98
944 | -98
945 | -98
946 | -98
947 |
--------------------------------------------------------------------------------
/DataSet/4G_pedestrian_TMobile.txt:
--------------------------------------------------------------------------------
1 | -105
2 | -105
3 | -105
4 | -104
5 | -104
6 | -104
7 | -104
8 | -107
9 | -107
10 | -110
11 | -110
12 | -104
13 | -104
14 | -104
15 | -104
16 | -108
17 | -108
18 | -103
19 | -103
20 | -107
21 | -107
22 | -108
23 | -108
24 | -108
25 | -108
26 | -106
27 | -106
28 | -99
29 | -99
30 | -101
31 | -101
32 | -97
33 | -97
34 | -99
35 | -99
36 | -99
37 | -99
38 | -95
39 | -95
40 | -95
41 | -95
42 | -100
43 | -100
44 | -101
45 | -101
46 | -98
47 | -98
48 | -100
49 | -100
50 | -97
51 | -97
52 | -101
53 | -101
54 | -99
55 | -99
56 | -98
57 | -98
58 | -102
59 | -102
60 | -99
61 | -99
62 | -90
63 | -90
64 | -78
65 | -78
66 | -85
67 | -85
68 | -81
69 | -81
70 | -79
71 | -79
72 | -86
73 | -86
74 | -85
75 | -85
76 | -92
77 | -92
78 | -90
79 | -90
80 | -88
81 | -88
82 | -83
83 | -83
84 | -81
85 | -81
86 | -77
87 | -77
88 | -78
89 | -78
90 | -83
91 | -83
92 | -81
93 | -81
94 | -80
95 | -80
96 | -84
97 | -84
98 | -86
99 | -86
100 | -75
101 | -75
102 | -80
103 | -80
104 | -77
105 | -77
106 | -81
107 | -81
108 | -81
109 | -81
110 | -79
111 | -79
112 | -84
113 | -84
114 | -91
115 | -91
116 | -88
117 | -88
118 | -86
119 | -86
120 | -87
121 | -87
122 | -91
123 | -91
124 | -84
125 | -84
126 | -86
127 | -86
128 | -94
129 | -94
130 | -83
131 | -83
132 | -85
133 | -85
134 | -95
135 | -95
136 | -94
137 | -94
138 | -84
139 | -84
140 | -89
141 | -89
142 | -93
143 | -93
144 | -91
145 | -91
146 | -91
147 | -91
148 | -83
149 | -83
150 | -95
151 | -95
152 | -79
153 | -79
154 | -89
155 | -89
156 | -90
157 | -90
158 | -80
159 | -80
160 | -90
161 | -90
162 | -87
163 | -87
164 | -95
165 | -95
166 | -80
167 | -80
168 | -82
169 | -82
170 | -95
171 | -95
172 | -89
173 | -89
174 | -89
175 | -89
176 | -90
177 | -90
178 | -85
179 | -85
180 | -81
181 | -81
182 | -92
183 | -92
184 | -82
185 | -82
186 | -91
187 | -91
188 | -77
189 | -77
190 | -85
191 | -85
192 | -81
193 | -81
194 | -89
195 | -89
196 | -93
197 | -93
198 | -88
199 | -88
200 | -83
201 | -83
202 | -89
203 | -89
204 | -89
205 | -89
206 | -83
207 | -83
208 | -89
209 | -89
210 | -86
211 | -86
212 | -81
213 | -81
214 | -85
215 | -85
216 | -85
217 | -85
218 | -83
219 | -83
220 | -96
221 | -96
222 | -90
223 | -90
224 | -85
225 | -85
226 | -91
227 | -91
228 | -83
229 | -83
230 | -92
231 | -92
232 | -88
233 | -88
234 | -92
235 | -92
236 | -85
237 | -85
238 | -91
239 | -91
240 | -72
241 | -72
242 | -74
243 | -74
244 | -87
245 | -87
246 | -91
247 | -91
248 | -78
249 | -78
250 | -86
251 | -86
252 | -82
253 | -82
254 | -84
255 | -84
256 | -81
257 | -81
258 | -82
259 | -82
260 | -91
261 | -91
262 | -77
263 | -77
264 | -82
265 | -82
266 | -79
267 | -79
268 | -84
269 | -84
270 | -89
271 | -89
272 | -82
273 | -82
274 | -76
275 | -76
276 | -79
277 | -79
278 | -86
279 | -86
280 | -87
281 | -87
282 | -78
283 | -78
284 | -79
285 | -79
286 | -82
287 | -82
288 | -75
289 | -75
290 | -93
291 | -93
292 | -79
293 | -79
294 | -82
295 | -82
296 | -81
297 | -81
298 | -78
299 | -78
300 | -77
301 | -77
302 | -72
303 | -72
304 | -84
305 | -84
306 | -93
307 | -93
308 | -83
309 | -83
310 | -75
311 | -75
312 | -74
313 | -74
314 | -85
315 | -85
316 | -91
317 | -91
318 | -96
319 | -96
320 | -95
321 | -95
322 | -99
323 | -99
324 | -96
325 | -96
326 | -101
327 | -101
328 | -95
329 | -95
330 | -94
331 | -94
332 | -91
333 | -91
334 | -96
335 | -96
336 | -93
337 | -93
338 | -93
339 | -93
340 | -97
341 | -97
342 | -93
343 | -93
344 | -90
345 | -90
346 | -96
347 | -96
348 | -95
349 | -95
350 | -93
351 | -93
352 | -97
353 | -97
354 | -94
355 | -94
356 | -95
357 | -95
358 | -95
359 | -95
360 | -95
361 | -93
362 | -93
363 | -92
364 | -92
365 | -90
366 | -90
367 | -90
368 | -90
369 | -92
370 | -92
371 | -94
372 | -94
373 | -94
374 | -94
375 | -92
376 | -92
377 | -92
378 | -93
379 | -93
380 | -93
381 | -98
382 | -98
383 | -96
384 | -96
385 | -96
386 | -96
387 | -92
388 | -92
389 | -94
390 | -94
391 | -93
392 | -93
393 | -99
394 | -99
395 | -101
396 | -101
397 | -96
398 | -96
399 | -94
400 | -94
401 | -99
402 | -99
403 | -96
404 | -96
405 | -99
406 | -99
407 | -99
408 | -99
409 | -102
410 | -102
411 | -105
412 | -105
413 | -104
414 | -104
415 | -101
416 | -101
417 | -104
418 | -104
419 | -104
420 | -100
421 | -100
422 | -100
423 | -100
424 | -100
425 | -100
426 | -96
427 | -96
428 | -99
429 | -99
430 | -99
431 | -99
432 | -100
433 | -100
434 | -100
435 | -100
436 | -95
437 | -95
438 | -100
439 | -100
440 | -107
441 | -107
442 | -99
443 | -99
444 | -97
445 | -97
446 | -89
447 | -89
448 | -96
449 | -96
450 | -101
451 | -101
452 | -95
453 | -95
454 | -104
455 | -104
456 | -92
457 | -92
458 | -101
459 | -101
460 | -109
461 | -109
462 | -108
463 | -108
464 | -99
465 | -99
466 | -93
467 | -93
468 | -103
469 | -103
470 | -106
471 | -106
472 | -98
473 | -98
474 | -106
475 | -106
476 | -103
477 | -103
478 | -105
479 | -105
480 | -107
481 | -107
482 | -106
483 | -106
484 | -106
485 | -106
486 | -109
487 | -109
488 | -108
489 | -108
490 | -106
491 | -106
492 | -107
493 | -107
494 | -104
495 | -104
496 | -102
497 | -102
498 | -104
499 | -104
500 | -101
501 | -101
502 | -100
503 | -100
504 | -108
505 | -108
506 | -103
507 | -103
508 | -107
509 | -107
510 | -108
511 | -108
512 | -108
513 | -108
514 | -106
515 | -106
516 | -108
517 | -108
518 | -104
519 | -104
520 | -106
521 | -106
522 | -106
523 | -106
524 | -103
525 | -103
526 | -102
527 | -102
528 | -99
529 | -99
530 | -103
531 | -103
532 | -110
533 | -110
534 | -111
535 | -111
536 | -119
537 | -119
538 | -110
539 | -110
540 | -114
541 | -114
542 | -113
543 | -113
544 | -117
545 | -117
546 | -113
547 | -113
548 | -112
549 | -112
550 | -116
551 | -116
552 | -108
553 | -108
554 | -109
555 | -109
556 | -106
557 | -106
558 | -106
559 | -106
560 | -108
561 | -108
562 | -109
563 | -109
564 | -110
565 | -110
566 | -107
567 | -107
568 | -107
569 | -107
570 | -112
571 | -112
572 | -114
573 | -114
574 | -111
575 | -111
576 | -110
577 | -110
578 | -111
579 | -111
580 | -108
581 | -108
582 | -114
583 | -114
584 | -112
585 | -112
586 | -113
587 | -113
588 | -110
589 | -110
590 | -112
591 | -112
592 | -109
593 | -109
594 | -105
595 | -105
596 | -106
597 | -106
598 | -103
599 | -103
600 | -101
601 | -101
602 | -109
603 | -109
604 | -105
605 | -105
606 | -106
607 | -106
608 | -105
609 | -105
610 | -104
611 | -104
612 | -105
613 | -105
614 | -105
615 | -105
616 | -112
617 | -112
618 | -112
619 | -112
620 | -109
621 | -109
622 | -109
623 | -109
624 | -104
625 | -104
626 | -99
627 | -99
628 | -99
629 | -99
630 | -101
631 | -101
632 | -97
633 | -97
634 | -97
635 | -97
636 | -99
637 | -99
638 | -100
639 | -100
640 | -101
641 | -101
642 | -97
643 | -97
644 | -99
645 | -99
646 | -100
647 | -100
648 | -101
649 | -101
650 | -102
651 | -102
652 | -101
653 | -101
654 | -98
655 | -98
656 | -101
657 | -101
658 | -100
659 | -100
660 | -101
661 | -101
662 | -101
663 | -101
664 | -101
665 | -101
666 | -102
667 | -102
668 | -100
669 | -100
670 | -101
671 | -101
672 | -102
673 | -102
674 | -100
675 | -100
676 | -100
677 | -100
678 | -99
679 | -99
680 | -101
681 | -101
682 | -100
683 | -100
684 | -100
685 | -100
686 | -102
687 | -102
688 | -101
689 | -101
690 | -100
691 | -100
692 | -100
693 | -100
694 | -97
695 | -97
696 | -100
697 | -100
698 | -100
699 | -100
700 | -95
701 | -95
702 | -103
703 | -103
704 | -108
705 | -108
706 | -101
707 | -101
708 | -107
709 | -107
710 | -105
711 | -105
712 | -97
713 | -97
714 | -97
715 | -97
716 | -100
717 | -100
718 | -104
719 | -104
720 | -99
721 | -99
722 | -100
723 | -100
724 | -106
725 | -106
726 | -101
727 | -101
728 | -104
729 | -104
730 | -104
731 | -104
732 | -102
733 | -102
734 | -104
735 | -104
736 | -99
737 | -99
738 | -100
739 | -100
740 | -104
741 | -104
742 | -105
743 | -105
744 | -104
745 | -104
746 | -106
747 | -106
748 | -104
749 | -104
750 | -105
751 | -105
752 | -104
753 | -104
754 | -104
755 | -104
756 | -104
757 | -104
758 | -103
759 | -103
760 | -104
761 | -104
762 | -107
763 | -107
764 | -110
765 | -110
766 | -105
767 | -105
768 | -107
769 | -107
770 | -105
771 | -105
772 | -105
773 | -105
774 | -104
775 | -104
776 | -104
777 | -104
778 | -103
779 | -103
780 | -105
781 | -105
782 | -109
783 | -109
784 | -105
785 | -105
786 | -104
787 | -104
788 | -105
789 | -105
790 | -104
791 | -104
792 | -108
793 | -108
794 | -101
795 | -101
796 | -103
797 | -103
798 | -107
799 | -107
800 | -98
801 | -98
802 | -99
803 | -99
804 | -91
805 | -91
806 | -97
807 | -97
808 | -93
809 | -93
810 | -99
811 | -99
812 | -90
813 | -90
814 | -92
815 | -92
816 | -92
817 | -92
818 | -84
819 | -84
820 | -82
821 | -82
822 | -95
823 | -95
824 | -96
825 | -96
826 | -93
827 | -93
828 | -93
829 | -93
830 | -92
831 | -92
832 | -95
833 | -95
834 | -104
835 | -104
836 | -89
837 | -89
838 | -92
839 | -92
840 | -99
841 | -99
842 | -97
843 | -97
844 | -102
845 | -102
846 | -94
847 | -94
848 | -95
849 | -95
850 | -99
851 | -99
852 | -103
853 | -103
854 | -92
855 | -92
856 | -102
857 | -102
858 | -99
859 | -99
860 | -94
861 | -94
862 | -99
863 | -99
864 | -109
865 | -109
866 | -104
867 | -104
868 | -102
869 | -102
870 | -108
871 | -108
872 | -105
873 | -105
874 | -101
875 | -101
876 | -100
877 | -100
878 | -107
879 | -107
880 | -99
881 | -99
882 | -95
883 | -95
884 | -91
885 | -91
886 | -103
887 | -103
888 | -108
889 | -108
890 | -101
891 | -101
892 | -98
893 | -98
894 | -105
895 | -105
896 | -104
897 | -104
898 | -100
899 | -100
900 | -106
901 | -106
902 | -105
903 | -105
904 | -105
905 | -105
906 | -109
907 | -109
908 | -106
909 | -106
910 | -106
911 | -106
912 | -106
913 | -106
914 | -101
915 | -101
916 | -96
917 | -96
918 | -98
919 | -98
920 | -95
921 | -95
922 | -94
923 | -94
924 | -94
925 | -94
926 | -96
927 | -96
928 | -100
929 | -100
930 | -105
931 | -105
932 | -103
933 | -103
934 | -103
935 | -103
936 | -104
937 | -104
938 | -106
939 | -106
940 | -109
941 | -109
942 | -107
943 | -107
944 | -107
945 | -107
946 | -104
947 | -104
948 | -109
949 | -109
950 | -108
951 | -108
952 | -106
953 | -106
954 | -111
955 | -111
956 | -109
957 | -109
958 | -110
959 | -110
960 | -103
961 | -103
962 | -105
963 | -105
964 | -97
965 | -97
966 | -94
967 | -94
968 | -97
969 | -97
970 | -93
971 | -93
972 | -96
973 | -96
974 | -98
975 | -98
976 | -102
977 | -102
978 | -105
979 | -105
980 | -102
981 | -102
982 | -103
983 | -103
984 | -105
985 | -105
986 | -108
987 | -108
988 | -104
989 | -104
990 | -107
991 | -107
992 | -107
993 | -107
994 | -104
995 | -104
996 | -104
997 | -104
998 | -101
999 | -101
1000 | -105
1001 | -105
1002 | -111
1003 | -111
1004 | -110
1005 | -110
1006 | -113
1007 | -113
1008 | -113
1009 | -113
1010 | -112
1011 | -112
1012 | -111
1013 | -111
1014 | -110
1015 | -110
1016 | -111
1017 | -111
1018 | -104
1019 | -104
1020 | -102
1021 | -102
1022 | -106
1023 | -106
1024 | -109
1025 | -109
1026 | -114
1027 | -114
1028 | -112
1029 | -112
1030 | -110
1031 | -110
1032 | -111
1033 | -111
1034 | -111
1035 | -111
1036 | -110
1037 | -110
1038 | -114
1039 | -114
1040 | -113
1041 | -113
1042 | -113
1043 | -113
1044 | -113
1045 | -113
1046 | -112
1047 | -112
1048 | -112
1049 | -112
1050 | -112
1051 | -112
1052 | -115
1053 | -115
1054 | -116
1055 | -116
1056 | -118
1057 | -118
1058 | -120
1059 | -120
1060 | -122
1061 | -122
1062 | -116
1063 | -116
1064 | -115
1065 | -115
1066 | -118
1067 | -118
1068 | -112
1069 | -112
1070 | -116
1071 | -116
1072 | -112
1073 | -112
1074 | -110
1075 | -110
1076 | -106
1077 | -106
1078 | -111
1079 | -111
1080 | -104
1081 | -104
1082 | -112
1083 | -112
1084 | -113
1085 | -113
1086 | -113
1087 | -113
1088 | -113
1089 | -113
1090 | -113
1091 | -113
1092 | -110
1093 | -110
1094 | -108
1095 | -108
1096 | -99
1097 | -99
1098 | -96
1099 | -96
1100 | -93
1101 | -93
1102 | -90
1103 | -90
1104 | -90
1105 | -90
1106 | -93
1107 | -93
1108 | -93
1109 | -93
1110 | -87
1111 | -87
1112 | -87
1113 | -87
1114 | -83
1115 | -83
1116 | -91
1117 | -91
1118 | -81
1119 | -81
1120 | -77
1121 | -77
1122 | -79
1123 | -79
1124 | -87
1125 | -87
1126 | -88
1127 | -88
1128 | -85
1129 | -85
1130 | -87
1131 | -87
1132 | -87
1133 | -87
1134 | -87
1135 | -87
1136 | -81
1137 | -81
1138 | -70
1139 | -70
1140 | -75
1141 | -75
1142 | -81
1143 | -81
1144 | -77
1145 | -77
1146 | -87
1147 | -87
1148 | -92
1149 | -92
1150 | -81
1151 | -81
1152 | -82
1153 | -82
1154 | -87
1155 | -87
1156 | -85
1157 | -85
1158 | -83
1159 | -83
1160 | -88
1161 | -88
1162 | -87
1163 | -87
1164 | -89
1165 | -89
1166 | -90
1167 | -90
1168 | -91
1169 | -91
1170 | -89
1171 | -89
1172 | -90
1173 | -90
1174 |
--------------------------------------------------------------------------------
/DataSet/ATT_4G_bus.txt:
--------------------------------------------------------------------------------
1 | -107
2 | -107
3 | -107
4 | -105
5 | -105
6 | -110
7 | -110
8 | -104
9 | -104
10 | -111
11 | -111
12 | -105
13 | -105
14 | -105
15 | -110
16 | -110
17 | -109
18 | -109
19 | -110
20 | -110
21 | -109
22 | -109
23 | -107
24 | -107
25 | -106
26 | -106
27 | -106
28 | -104
29 | -104
30 | -104
31 | -101
32 | -101
33 | -97
34 | -97
35 | -97
36 | -97
37 | -102
38 | -102
39 | -102
40 | -102
41 | -102
42 | -102
43 | -102
44 | -108
45 | -108
46 | -107
47 | -107
48 | -105
49 | -105
50 | -107
51 | -107
52 | -105
53 | -105
54 | -105
55 | -102
56 | -102
57 | -102
58 | -102
59 | -106
60 | -106
61 | -103
62 | -103
63 | -100
64 | -100
65 | -100
66 | -103
67 | -103
68 | -99
69 | -99
70 | -99
71 | -99
72 | -100
73 | -100
74 | -107
75 | -107
76 | -102
77 | -102
78 | -105
79 | -105
80 | -105
81 | -103
82 | -103
83 | -101
84 | -101
85 | -98
86 | -98
87 | -100
88 | -98
89 | -103
90 | -101
91 | -102
92 | -103
93 | -103
94 | -104
95 | -104
96 | -103
97 | -103
98 | -101
99 | -101
100 | -99
101 | -99
102 | -106
103 | -106
104 | -109
105 | -109
106 | -105
107 | -105
108 | -107
109 | -107
110 | -107
111 | -107
112 | -107
113 | -107
114 | -107
115 | -111
116 | -111
117 | -106
118 | -106
119 | -109
120 | -109
121 | -110
122 | -110
123 | -105
124 | -105
125 | -109
126 | -109
127 | -109
128 | -111
129 | -111
130 | -107
131 | -107
132 | -106
133 | -106
134 | -111
135 | -111
136 | -115
137 | -115
138 | -111
139 | -111
140 | -111
141 | -111
142 | -111
143 | -113
144 | -113
145 | -108
146 | -108
147 | -105
148 | -105
149 | -105
150 | -105
151 | -101
152 | -101
153 | -101
154 | -101
155 | -101
156 | -103
157 | -103
158 | -104
159 | -104
160 | -98
161 | -98
162 | -102
163 | -102
164 | -100
165 | -100
166 | -100
167 | -100
168 | -100
169 | -93
170 | -93
171 | -100
172 | -100
173 | -99
174 | -99
175 | -98
176 | -98
177 | -100
178 | -100
179 | -100
180 | -98
181 | -103
182 | -103
183 | -99
184 | -103
185 | -103
186 | -107
187 | -106
188 | -106
189 | -108
190 | -108
191 | -109
192 | -109
193 | -110
194 | -110
195 | -106
196 | -106
197 | -106
198 | -103
199 | -103
200 | -107
201 | -107
202 | -102
203 | -102
204 | -101
205 | -101
206 | -104
207 | -104
208 | -104
209 | -104
210 | -104
211 | -102
212 | -102
213 | -104
214 | -102
215 | -97
216 | -109
217 | -103
218 | -103
219 | -103
220 | -103
221 | -99
222 | -99
223 | -102
224 | -102
225 | -98
226 | -98
227 | -98
228 | -103
229 | -103
230 | -96
231 | -96
232 | -95
233 | -95
234 | -96
235 | -96
236 | -93
237 | -93
238 | -93
239 | -93
240 | -93
241 | -93
242 | -98
243 | -98
244 | -98
245 | -98
246 | -98
247 | -98
248 | -97
249 | -97
250 | -97
251 | -109
252 | -109
253 | -109
254 | -109
255 | -109
256 | -108
257 | -108
258 | -108
259 | -108
260 | -106
261 | -106
262 | -106
263 | -106
264 | -102
265 | -102
266 | -103
267 | -103
268 | -103
269 | -104
270 | -104
271 | -102
272 | -102
273 | -104
274 | -104
275 | -104
276 | -104
277 | -105
278 | -105
279 | -104
280 | -104
281 | -106
282 | -106
283 | -104
284 | -104
285 | -104
286 | -104
287 | -104
288 | -104
289 | -105
290 | -105
291 | -104
292 | -104
293 | -105
294 | -105
295 | -105
296 | -105
297 | -105
298 | -105
299 | -99
300 | -99
301 | -103
302 | -103
303 | -103
304 | -103
305 | -103
306 | -102
307 | -102
308 | -102
309 | -102
310 | -102
311 | -102
312 | -102
313 | -102
314 | -102
315 | -105
316 | -105
317 | -105
318 | -105
319 | -100
320 | -100
321 | -100
322 | -100
323 | -105
324 | -105
325 | -105
326 | -105
327 | -105
328 | -109
329 | -109
330 | -109
331 | -109
332 | -104
333 | -104
334 | -104
335 | -104
336 | -104
337 | -104
338 | -104
339 | -104
340 | -102
341 | -102
342 | -102
343 | -102
344 | -96
345 | -96
346 | -96
347 | -96
348 | -96
349 | -104
350 | -104
351 | -104
352 | -104
353 | -106
354 | -106
355 | -106
356 | -106
357 | -107
358 | -107
359 | -107
360 | -107
361 | -107
362 | -106
363 | -106
364 | -106
365 | -106
366 | -109
367 | -109
368 | -109
369 | -109
370 | -106
371 | -106
372 | -106
373 | -106
374 | -108
375 | -108
376 | -108
377 | -108
378 | -108
379 | -107
380 | -107
381 | -107
382 | -107
383 | -105
384 | -105
385 | -105
386 | -105
387 | -106
388 | -106
389 | -106
390 | -106
391 | -106
392 | -100
393 | -100
394 | -100
395 | -100
396 | -101
397 | -101
398 | -101
399 | -101
400 | -103
401 | -103
402 | -103
403 | -103
404 | -98
405 | -98
406 | -98
407 | -98
408 | -98
409 | -103
410 | -103
411 | -103
412 | -103
413 | -97
414 | -97
415 | -97
416 | -97
417 | -96
418 | -96
419 | -96
420 | -96
421 | -99
422 | -99
423 | -99
424 | -99
425 | -99
426 | -102
427 | -102
428 | -102
429 | -102
430 | -103
431 | -103
432 | -103
433 | -103
434 | -101
435 | -101
436 | -101
437 | -101
438 | -101
439 | -101
440 | -101
441 | -101
442 | -101
443 | -105
444 | -105
445 | -105
446 | -105
447 | -102
448 | -102
449 | -102
450 | -102
451 | -105
452 | -105
453 | -105
454 | -105
455 | -105
456 | -104
457 | -104
458 | -104
459 | -104
460 | -108
461 | -108
462 | -108
463 | -108
464 | -112
465 | -112
466 | -112
467 | -112
468 | -108
469 | -108
470 | -108
471 | -108
472 | -108
473 | -108
474 | -108
475 | -108
476 | -108
477 | -109
478 | -109
479 | -109
480 | -109
481 | -110
482 | -110
483 | -110
484 | -110
485 | -110
486 | -110
487 | -110
488 | -110
489 | -103
490 | -103
491 | -103
492 | -103
493 | -103
494 | -104
495 | -104
496 | -104
497 | -104
498 | -103
499 | -103
500 | -103
501 | -103
502 | -110
503 | -110
504 | -110
505 | -110
506 | -110
507 | -110
508 | -110
509 | -110
510 | -110
511 | -109
512 | -109
513 | -109
514 | -109
515 | -109
516 | -109
517 | -109
518 | -109
519 | -109
520 | -109
521 | -109
522 | -109
523 | -107
524 | -107
525 | -107
526 | -107
527 | -107
528 | -106
529 | -106
530 | -106
531 | -106
532 | -104
533 | -104
534 | -104
535 | -104
536 | -103
537 | -103
538 | -103
539 | -103
540 | -102
541 | -102
542 | -102
543 | -102
544 | -102
545 | -102
546 | -102
547 | -102
548 | -102
549 | -105
550 | -105
551 | -105
552 | -105
553 | -99
554 | -99
555 | -99
556 | -99
557 | -105
558 | -105
559 | -105
560 | -105
561 | -105
562 | -103
563 | -103
564 | -103
565 | -103
566 | -99
567 | -99
568 | -99
569 | -99
570 | -102
571 | -102
572 | -102
573 | -102
574 | -100
575 | -100
576 | -100
577 | -100
578 | -101
579 | -101
580 | -101
581 | -101
582 | -101
583 | -96
584 | -96
585 | -96
586 | -96
587 | -102
588 | -102
589 | -102
590 | -102
591 | -108
592 | -108
593 | -108
594 | -108
595 | -105
596 | -105
597 | -105
598 | -105
599 | -105
600 | -103
601 | -103
602 | -103
603 | -103
604 | -99
605 | -99
606 | -99
607 | -99
608 | -102
609 | -102
610 | -102
611 | -102
612 | -107
613 | -107
614 | -107
615 | -107
616 | -107
617 | -108
618 | -108
619 | -108
620 | -108
621 | -111
622 | -111
623 | -111
624 | -111
625 | -107
626 | -107
627 | -107
628 | -107
629 | -102
630 | -102
631 | -102
632 | -102
633 | -103
634 | -103
635 | -103
636 | -103
637 | -103
638 | -104
639 | -104
640 | -104
641 | -104
642 | -106
643 | -106
644 | -106
645 | -106
646 | -107
647 | -107
648 | -107
649 | -106
650 | -106
651 | -106
652 | -106
653 | -107
654 | -107
655 | -107
656 | -107
657 | -105
658 | -105
659 | -105
660 | -105
661 | -98
662 | -98
663 | -98
664 | -98
665 | -104
666 | -104
667 | -104
668 | -104
669 | -104
670 | -108
671 | -108
672 | -108
673 | -108
674 | -105
675 | -105
676 | -105
677 | -105
678 | -109
679 | -109
680 | -109
681 | -109
682 | -108
683 | -108
684 | -108
685 | -108
686 | -108
687 | -110
688 | -110
689 | -110
690 | -110
691 | -113
692 | -113
693 | -113
694 | -113
695 | -113
696 | -113
697 | -113
698 | -113
699 | -111
700 | -111
701 | -111
702 | -111
703 | -107
704 | -107
705 | -107
706 | -107
707 | -108
708 | -108
709 | -108
710 | -108
711 | -110
712 | -110
713 | -110
714 | -110
715 | -113
716 | -113
717 | -113
718 | -113
719 | -113
720 | -112
721 | -112
722 | -112
723 | -112
724 | -115
725 | -115
726 | -115
727 | -115
728 | -112
729 | -112
730 | -112
731 | -112
732 | -113
733 | -113
734 | -113
735 | -113
736 | -113
737 | -116
738 | -116
739 | -116
740 | -116
741 | -114
742 | -114
743 | -114
744 | -114
745 | -109
746 | -109
747 | -109
748 | -109
749 | -107
750 | -107
751 | -107
752 | -107
753 | -107
754 | -112
755 | -112
756 | -112
757 | -112
758 | -112
759 | -112
760 | -112
761 | -112
762 | -112
763 | -112
764 | -112
765 | -108
766 | -108
767 | -108
768 | -108
769 | -109
770 | -109
771 | -109
772 | -109
773 | -109
774 | -109
775 | -109
776 | -109
777 | -108
778 | -108
779 | -108
780 | -108
781 | -109
782 | -109
783 | -113
784 | -113
785 | -109
786 | -109
787 | -114
788 | -114
789 | -109
790 | -109
791 | -110
792 | -110
793 | -109
794 | -109
795 | -109
796 | -109
797 | -109
798 | -109
799 | -109
800 | -109
801 | -109
802 | -109
803 | -108
804 | -108
805 | -108
806 | -109
807 | -109
808 | -109
809 | -109
810 | -109
811 | -113
812 | -113
813 | -113
814 | -113
815 | -112
816 | -112
817 | -112
818 | -112
819 | -105
820 | -105
821 | -105
822 | -105
823 | -106
824 | -106
825 | -106
826 | -106
827 | -106
828 | -108
829 | -108
830 | -108
831 | -108
832 | -103
833 | -103
834 | -103
835 | -103
836 | -103
837 | -103
838 | -103
839 | -103
840 | -104
841 | -104
842 | -104
843 | -104
844 | -104
845 | -107
846 | -107
847 | -107
848 | -107
849 | -106
850 | -106
851 | -106
852 | -106
853 | -103
854 | -103
855 | -103
856 | -100
857 | -100
858 | -100
859 | -100
860 | -101
861 | -101
862 | -101
863 | -102
864 | -102
865 | -101
866 | -101
867 | -103
868 | -103
869 | -103
870 | -103
871 | -100
872 | -100
873 | -100
874 | -99
875 | -99
876 | -97
877 | -97
878 | -97
879 | -97
880 | -98
881 | -109
882 | -109
883 | -109
884 | -109
885 | -109
886 | -109
887 | -109
888 | -109
889 | -109
890 | -103
891 | -103
892 | -103
893 | -103
894 | -100
895 | -100
896 | -100
897 | -100
898 | -102
899 | -102
900 | -102
901 | -102
902 | -102
903 | -102
904 | -102
905 | -102
906 | -102
907 | -102
908 | -102
909 | -102
910 | -106
911 | -106
912 | -106
913 | -106
914 | -106
915 | -105
916 | -105
917 | -105
918 | -105
919 | -108
920 | -108
921 | -108
922 | -108
923 | -110
924 | -110
925 | -110
926 | -110
927 | -110
928 | -110
929 | -110
930 | -110
931 | -110
932 | -110
933 | -110
934 | -110
935 | -107
936 | -107
937 | -107
938 | -106
939 | -106
940 | -108
941 | -108
942 | -106
943 | -106
944 | -106
945 | -104
946 | -104
947 | -104
948 | -104
949 | -101
950 | -101
951 | -101
952 | -101
953 | -101
954 | -103
955 | -103
956 | -103
957 | -103
958 | -96
959 | -96
960 | -96
961 | -96
962 | -102
963 | -102
964 | -102
965 | -102
966 | -99
967 | -99
968 | -99
969 | -99
970 | -99
971 | -99
972 | -99
973 | -99
974 | -97
975 | -97
976 | -97
977 | -97
978 | -91
979 | -91
980 | -91
981 | -91
982 | -91
983 | -91
984 | -91
985 | -91
986 | -94
987 | -94
988 | -94
989 | -94
990 | -94
991 | -94
992 | -94
993 | -94
994 | -94
995 | -94
996 | -94
997 | -94
998 | -94
999 | -94
1000 | -94
1001 | -94
1002 | -94
1003 | -100
1004 | -100
1005 | -100
1006 | -100
1007 | -99
1008 | -99
1009 | -99
1010 | -99
1011 | -101
1012 | -101
1013 | -101
1014 | -101
1015 | -101
1016 | -98
1017 | -98
1018 | -98
1019 | -98
1020 | -92
1021 | -92
1022 | -92
1023 | -92
1024 | -96
1025 | -96
1026 | -96
1027 | -96
1028 | -102
1029 | -102
1030 | -102
1031 | -102
1032 | -110
1033 | -110
1034 | -110
1035 | -110
1036 | -109
1037 | -109
1038 | -109
1039 | -109
1040 | -109
1041 | -108
1042 | -108
1043 | -108
1044 | -108
1045 | -108
1046 | -108
1047 | -108
1048 | -106
1049 | -106
1050 | -103
1051 | -103
1052 | -107
1053 | -107
1054 | -107
1055 | -107
1056 | -107
1057 | -107
1058 | -107
1059 | -106
1060 | -106
1061 | -106
1062 | -107
1063 | -107
1064 | -110
1065 | -110
1066 | -110
1067 | -110
1068 | -113
1069 | -113
1070 | -113
1071 | -113
1072 | -116
1073 | -116
1074 | -116
1075 | -116
1076 | -113
1077 | -113
1078 | -113
1079 | -113
1080 | -112
1081 | -112
1082 | -112
1083 | -112
1084 | -123
1085 | -123
1086 | -123
1087 | -123
1088 | -124
1089 | -124
1090 | -124
1091 | -124
1092 | -122
1093 | -122
1094 | -122
1095 | -122
1096 | -122
1097 | -125
1098 | -125
1099 | -125
1100 | -125
1101 | -127
1102 | -127
1103 | -127
1104 | -127
1105 | -126
1106 | -126
1107 | -126
1108 | -126
1109 | -126
1110 | -126
1111 | -126
1112 | -126
1113 | -125
1114 | -125
1115 | -125
1116 | -125
1117 | -120
1118 | -120
1119 | -120
1120 | -120
1121 | -120
1122 | -116
1123 | -116
1124 | -116
1125 | -116
1126 | -124
1127 | -124
1128 | -124
1129 | -124
1130 | -122
1131 | -122
1132 | -122
1133 | -122
1134 | -123
1135 | -123
1136 | -123
1137 | -123
1138 | -120
1139 | -120
1140 | -120
1141 | -120
1142 | -124
1143 | -124
1144 | -124
1145 | -124
1146 | -124
1147 | -125
1148 | -125
1149 | -125
1150 | -125
1151 | -125
1152 | -125
1153 | -125
1154 | -124
1155 | -124
1156 | -124
1157 | -124
1158 | -118
1159 | -118
1160 | -118
1161 | -118
1162 | -116
1163 | -116
1164 | -116
1165 | -116
1166 | -117
1167 | -117
1168 | -117
1169 | -117
1170 | -126
1171 | -126
1172 | -126
1173 | -126
1174 | -126
1175 | -130
1176 | -130
1177 | -130
1178 | -130
1179 | -129
1180 | -129
1181 | -129
1182 | -129
1183 | -131
1184 | -131
1185 | -131
1186 | -131
1187 | -132
1188 | -132
1189 | -132
1190 | -132
1191 | -132
1192 | -132
1193 | -132
1194 | -132
1195 | -110
1196 | -110
1197 | -110
1198 | -110
1199 | -112
1200 | -112
1201 | -112
1202 | -112
1203 | -112
1204 | -112
1205 | -112
1206 | -112
1207 | -112
1208 | -114
1209 | -114
1210 | -114
1211 | -114
1212 | -113
1213 | -113
1214 | -113
1215 | -113
1216 | -111
1217 | -111
1218 | -111
1219 | -111
1220 | -109
1221 | -109
1222 | -109
1223 | -109
1224 | -110
1225 | -110
1226 | -110
1227 | -110
1228 | -112
1229 | -112
1230 | -112
1231 | -112
1232 | -115
1233 | -115
1234 | -115
1235 | -115
1236 | -115
1237 | -118
1238 | -118
1239 | -118
1240 | -118
1241 | -112
1242 | -112
1243 | -112
1244 | -112
1245 | -114
1246 | -114
1247 | -114
1248 | -114
1249 | -102
1250 | -102
1251 | -102
1252 | -102
1253 | -112
1254 | -112
1255 | -112
1256 | -112
1257 | -113
1258 | -113
1259 | -113
1260 | -113
1261 | -111
1262 | -111
1263 | -111
1264 | -111
1265 | -109
1266 | -109
1267 | -109
1268 | -109
1269 | -109
1270 | -113
1271 | -113
1272 | -113
1273 | -113
1274 | -114
1275 | -114
1276 | -114
1277 | -114
1278 | -112
1279 | -112
1280 | -112
1281 | -112
1282 | -112
1283 | -112
1284 | -112
1285 | -112
1286 | -109
1287 | -109
1288 | -109
1289 | -109
1290 | -107
1291 | -107
1292 | -107
1293 | -107
1294 | -105
1295 | -105
1296 | -105
1297 | -105
1298 | -111
1299 | -111
1300 | -111
1301 | -111
1302 | -111
1303 | -111
1304 | -111
1305 | -111
1306 | -110
1307 | -110
1308 | -110
1309 | -110
1310 | -112
1311 | -112
1312 | -112
1313 | -112
1314 | -106
1315 | -106
1316 | -106
1317 | -106
1318 | -106
1319 | -105
1320 | -105
1321 | -105
1322 | -105
1323 | -107
1324 | -107
1325 | -107
1326 | -107
1327 | -104
1328 | -104
1329 | -104
1330 | -104
1331 | -103
1332 | -103
1333 | -103
1334 | -103
1335 | -103
1336 | -103
1337 | -103
1338 | -103
1339 | -107
1340 | -107
1341 | -107
1342 | -107
1343 | -106
1344 | -106
1345 | -106
1346 | -106
1347 | -103
1348 | -103
1349 | -103
1350 | -103
1351 | -103
1352 | -103
1353 | -103
1354 | -103
1355 | -103
1356 | -109
1357 | -109
1358 | -109
1359 | -109
1360 | -109
1361 | -109
1362 | -109
1363 | -109
1364 | -110
1365 | -110
1366 | -110
1367 | -110
1368 | -108
1369 | -108
1370 | -108
1371 | -108
1372 | -101
1373 | -101
1374 | -101
1375 | -101
1376 | -101
1377 | -101
1378 | -101
1379 | -101
1380 | -101
1381 | -101
1382 | -101
1383 | -101
1384 | -101
1385 | -102
1386 | -102
1387 | -102
1388 | -102
1389 | -98
1390 | -98
1391 | -98
1392 | -98
1393 | -100
1394 | -100
1395 | -100
1396 | -100
1397 | -99
1398 | -99
1399 | -99
1400 | -99
1401 | -96
1402 | -96
1403 | -96
1404 | -96
1405 | -96
1406 | -96
1407 | -96
1408 | -96
1409 | -96
1410 | -96
1411 | -96
1412 | -96
1413 | -96
1414 | -94
1415 | -94
1416 | -94
1417 | -94
1418 | -94
1419 | -94
1420 | -94
1421 | -94
1422 | -94
1423 | -94
1424 | -94
1425 | -94
1426 | -89
1427 | -89
1428 | -89
1429 | -89
1430 | -95
1431 | -95
1432 | -95
1433 | -95
1434 | -98
1435 | -98
1436 | -98
1437 | -98
1438 | -95
1439 | -95
1440 | -95
1441 | -95
1442 | -100
1443 | -100
1444 | -100
1445 | -100
1446 | -100
1447 | -94
1448 | -94
1449 | -94
1450 | -94
1451 | -90
1452 | -90
1453 | -90
1454 | -90
1455 | -97
1456 | -97
1457 | -97
1458 | -97
1459 | -94
1460 | -94
1461 | -94
1462 | -94
1463 | -91
1464 | -91
1465 | -91
1466 | -91
1467 | -100
1468 | -100
1469 | -100
1470 | -100
1471 | -98
1472 | -98
1473 | -98
1474 | -98
1475 | -98
1476 | -100
1477 | -100
1478 | -100
1479 | -100
1480 | -91
1481 | -91
1482 | -91
1483 | -91
1484 | -93
1485 | -93
1486 | -93
1487 | -93
1488 | -93
1489 | -93
1490 | -93
1491 | -93
1492 | -91
1493 | -91
1494 | -91
1495 | -91
1496 | -97
1497 | -97
1498 | -97
1499 | -97
1500 | -92
1501 | -92
1502 | -92
1503 | -92
1504 | -92
1505 | -93
1506 | -93
1507 | -93
1508 | -93
1509 | -93
1510 | -93
1511 | -93
1512 | -93
1513 | -95
1514 | -95
1515 | -95
1516 | -95
1517 | -97
1518 | -97
1519 | -97
1520 |
--------------------------------------------------------------------------------
/Code/seq2seq_unguided_LSTM.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
3 | #--
4 | import numpy as np
5 | import tensorflow as tf
6 | import matplotlib.pyplot as plt
7 | import sys
8 | import copy
9 | import random
10 |
11 | from fileprocessor import *
12 | from preprocessor import *
13 | from calculateError import *
14 |
15 | from tensorflow.contrib import rnn
16 | from tensorflow.python.ops import variable_scope
17 | from tensorflow.python.framework import dtypes
18 |
19 | # obtaining the Data ------------------------
20 |
21 | x_length = 20 # the input sequence length
22 | y_length = 10 # the output sequence length
23 | percentage = 0.8 # the percentage of data used for training
24 | filename = "../DataSet/4G_bus_TMobile.txt" # set this to the dataset on which the model is to be run
25 | name_flag = "4G_bus_TMobile" # the name flag for the test case
26 | save_path_name = os.getcwd() # the pwd to current directory
27 | save_object_name = name_flag # the state name to be saved
28 |
29 | X_train_data, Y_train_data, X_test_data, Y_test_data = getData(filename,x_length,y_length,percentage)
30 |
31 | X_train = np.array(X_train_data)
32 | Y_train = np.array(Y_train_data)
33 | X_test = np.array(X_test_data)
34 | Y_test = np.array(Y_test_data)
35 |
36 | #----- create a new random sample from training set ---
37 |
38 | X_train_random_data = []
39 | Y_train_random_data = []
40 | sample_percentage = 0.1 # 10% of the train sample is selected
41 | sample_size = int(round(len(X_train_data)*sample_percentage))
42 | indices = random.sample(xrange(len(X_train_data)),sample_size)
43 |
44 | for i in range(len(indices)):
45 | X_train_random_data.append(X_train_data[int(indices[i])])
46 | Y_train_random_data.append(Y_train_data[int(indices[i])])
47 |
48 | X_train_random = np.array(X_train_random_data)
49 | Y_train_random = np.array(Y_train_random_data)
50 |
51 | name = "Seq2Seq_unguided_"+name_flag+"_LSTM_Y_test_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
52 | writetofile(name,Y_test_data)
53 |
54 | #--------------------------------------------
55 |
56 | learning_rate = 0.01 # learning rate parameter
57 | lambda_l2_reg = 0.003 # l2 regularization parameter
58 |
59 | hidden_size = 100 # LSTM hidden node size
60 | input_dim = 1 # the number of input signals
61 | output_dim = 1 # the number of output signals
62 |
63 | num_stacked_layers = 2 # 2 stacked layers
64 | gradient_clipping = 2.5 # gradient clipping parameter
65 |
66 | #---------------------------------------------
67 |
68 | # when feed_previous = True, the decoder uses the previous output as an input
69 | def graph(feed_previous = False):
70 | tf.reset_default_graph() # resets the previous graph
71 |
72 | global_step = tf.Variable(initial_value = 0, name= "global_step", trainable= False, collections = [tf.GraphKeys.GLOBAL_STEP,tf.GraphKeys.GLOBAL_VARIABLES])
73 |
74 | weights = {
75 | 'out' : tf.get_variable('Weights_out', shape = [hidden_size,output_dim], dtype = tf.float32, initializer = tf.truncated_normal_initializer()),
76 | }
77 |
78 | biases = {
79 | 'out' : tf.get_variable('Biases_out', shape = [output_dim], dtype = tf.float32, initializer = tf.constant_initializer(0.)),
80 | }
81 |
82 | with tf.variable_scope('Seq2seq'):
83 | # Encoder : inputs
84 | enc_inp = [
85 | tf.placeholder(tf.float32, shape=(None,input_dim), name="inp_{}".format(t))
86 | for t in range(x_length)
87 | ]
88 |
89 | # Decoder : target outputs
90 | target_seq = [
91 | tf.placeholder(tf.float32, shape=(None,output_dim), name="y_{}".format(t))
92 | for t in range(y_length)
93 | ]
94 |
95 | # add an "END" token at the end of the sequence
96 | # two types of training:
97 | # guided : the dec_inp (ground-truth targets) are fed into the decoder as inputs
98 | # unguided : only the first element is fed in; later inputs are the decoder's own previous outputs (also used for testing)
99 |
100 | #-- Method with END -> works
101 | #dec_inp = [tf.zeros_like(target_seq[0],dtype=tf.float32, name="END")] + target_seq[:-1]
102 |
103 | #-- Method without END, worked when the target inputs were actual values
104 | #dec_inp = target_seq
105 |
106 | #-- new method
107 | # instead of giving an "END" symbol, feed the last value of the given input sequence
108 | # as the first input to the decoder
109 |
110 | dec_inp = [enc_inp[-1]] + target_seq[:-1]
111 |
112 | #-- building the LSTM cell
113 | with tf.variable_scope('LSTMCell'):
114 | cells = []
115 | for i in range(num_stacked_layers):
116 | with tf.variable_scope('RNN_{}'.format(i)):
117 | cells.append(tf.contrib.rnn.LSTMCell(hidden_size))
118 | cell = tf.contrib.rnn.MultiRNNCell(cells)
119 |
120 | def _rnn_decoder(decoder_inputs,
121 | initial_state,
122 | cell,
123 | loop_function=None,
124 | scope=None):
125 | """RNN decoder for the sequence-to-sequence model.
126 | Args:
127 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
128 | initial_state: 2D Tensor with shape [batch_size x cell.state_size].
129 | cell: rnn_cell.RNNCell defining the cell function and size.
130 | loop_function: If not None, this function will be applied to the i-th output
131 | in order to generate the i+1-st input, and decoder_inputs will be ignored,
132 | except for the first element ("GO" symbol). This can be used for decoding,
133 | but also for training to emulate http://arxiv.org/abs/1506.03099.
134 | Signature -- loop_function(prev, i) = next
135 | * prev is a 2D Tensor of shape [batch_size x output_size],
136 | * i is an integer, the step number (when advanced control is needed),
137 | * next is a 2D Tensor of shape [batch_size x input_size].
138 | scope: VariableScope for the created subgraph; defaults to "rnn_decoder".
139 | Returns:
140 | A tuple of the form (outputs, state), where:
141 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
142 | shape [batch_size x output_size] containing generated outputs.
143 | state: The state of each cell at the final time-step.
144 | It is a 2D Tensor of shape [batch_size x cell.state_size].
145 | (Note that in some cases, like basic RNN cell or GRU cell, outputs and
146 | states can be the same. They are different for LSTM cells though.)
147 | """
148 | with variable_scope.variable_scope(scope or "rnn_decoder"):
149 | state = initial_state
150 | outputs = []
151 | prev = None
152 | for i, inp in enumerate(decoder_inputs):
153 | if loop_function is not None and prev is not None:
154 | with variable_scope.variable_scope("loop_function", reuse=True):
155 | inp = loop_function(prev, i)
156 | if i > 0:
157 | variable_scope.get_variable_scope().reuse_variables()
158 | output, state = cell(inp, state)
159 | outputs.append(output)
160 | if loop_function is not None:
161 | prev = output
162 | return outputs, state
163 |
164 | def _basic_rnn_seq2seq(encoder_inputs,
165 | decoder_inputs,
166 | cell,
167 | feed_previous,
168 | dtype=dtypes.float32,
169 | scope=None):
170 | """Basic RNN sequence-to-sequence model.
171 | This model first runs an RNN to encode encoder_inputs into a state vector,
172 | then runs decoder, initialized with the last encoder state, on decoder_inputs.
173 | Encoder and decoder use the same RNN cell type, but don't share parameters.
174 | Args:
175 | encoder_inputs: A list of 2D Tensors [batch_size x input_size].
176 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
177 | feed_previous: Boolean; if True, only the first of decoder_inputs will be
178 | used (the "GO" symbol), all other inputs will be generated by the previous
179 | decoder output using _loop_function below. If False, decoder_inputs are used
180 | as given (the standard decoder case).
181 | dtype: The dtype of the initial state of the RNN cell (default: tf.float32).
182 | scope: VariableScope for the created subgraph; default: "basic_rnn_seq2seq".
183 | Returns:
184 | A tuple of the form (outputs, state), where:
185 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
186 | shape [batch_size x output_size] containing the generated outputs.
187 | state: The state of each decoder cell in the final time-step.
188 | It is a 2D Tensor of shape [batch_size x cell.state_size].
189 | """
190 | with variable_scope.variable_scope(scope or "basic_rnn_seq2seq"):
191 | enc_cell = copy.deepcopy(cell)
192 | _, enc_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
193 | if feed_previous:
194 | return _rnn_decoder(decoder_inputs, enc_state, cell, _loop_function)
195 | else:
196 | return _rnn_decoder(decoder_inputs, enc_state, cell)
197 |
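# _loop_function maps the previous decoder cell output back to the 1-D signal space
# through the shared output projection, so the decoder can feed on its own predictions
# during un-guided decoding.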
198 | def _loop_function(prev,_):
199 | return tf.matmul(prev,weights['out']) + biases['out']
200 |
201 | dec_outputs, dec_memory = _basic_rnn_seq2seq(enc_inp,dec_inp,cell,feed_previous=feed_previous)
202 | reshaped_outputs = [tf.matmul(i,weights['out'])+biases['out'] for i in dec_outputs]
203 |
204 | # Training loss and optimizer
205 | with tf.variable_scope('Loss'):
206 | # L2 loss
207 | output_loss = tf.reduce_mean(tf.squared_difference(reshaped_outputs,target_seq))
208 | # L2 regularization for weights and biases
209 | reg_loss = 0
210 | for tf_var in tf.trainable_variables():
211 | if 'Biases_' in tf_var.name or 'Weights_' in tf_var.name:
212 | reg_loss += tf.reduce_mean(tf.nn.l2_loss(tf_var))
213 |
214 | loss = output_loss + lambda_l2_reg * reg_loss
215 |
216 | with tf.variable_scope('Optimizer'):
217 | optimizer = tf.contrib.layers.optimize_loss(loss=loss,learning_rate=learning_rate,global_step=global_step,optimizer='Adam',clip_gradients=gradient_clipping)
218 |
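# note: the Saver *class* (not an instance) is stored here; it is instantiated later
# via rnn_model['saver']() once the graph has been built.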
219 | saver = tf.train.Saver
220 |
221 | return dict(enc_inp = enc_inp,target_seq= target_seq,train_op=optimizer,loss=loss,saver=saver,reshaped_outputs = reshaped_outputs)
222 |
223 |
224 | #-----------------------------------------------------
225 | # un-guided training method
226 | ep = 0
227 | loss_t = 300 # initial placeholder loss; overwritten by the first training step
228 | avg_rmse_lim = 3
229 | LOSS_LIMIT = avg_rmse_lim * avg_rmse_lim
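# the loss is mean squared error plus a small L2 penalty, so LOSS_LIMIT = 3^2 = 9
# roughly corresponds to stopping once the average RMSE drops below avg_rmse_lim.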
230 | CONTINUE_FLAG = True
231 | EPOCH_LIMIT = 50000
232 |
233 | rnn_model = graph(feed_previous=True) # un-guided training model
234 | saver = tf.train.Saver()
235 |
236 | init = tf.global_variables_initializer()
237 |
238 | Y_found = []
239 | train_loss = []
240 | train_RMSE = []
241 |
242 | past_loss_values = []
243 | epoch_range = 5
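# early stopping: keep a sliding window of the last `epoch_range` losses and stop
# once every value in the window is below LOSS_LIMIT (or EPOCH_LIMIT is reached).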
244 |
245 | with tf.Session() as sess:
246 | print "--- tensorflow session started ---"
247 | init.run()
248 | # -- training
249 | while CONTINUE_FLAG:
250 | #-----------------------------------
251 | feed_dict = {rnn_model['enc_inp'][t]:X_train[:,t].reshape(-1,input_dim) for t in range(x_length)}
252 | feed_dict.update({rnn_model['target_seq'][t]:Y_train[:,t].reshape(-1,output_dim) for t in range(y_length)})
253 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
254 | train_loss.append(loss_t)
255 | if ep % 100 == 0:
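# the decoder emits y_length (= 10) step outputs; reshape to (10, batch) and
# transpose to (batch, 10) so each row lines up with one training window for RMSE.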
256 | temp_output = np.reshape(out_t,(10,-1))
257 | temp_output = temp_output.transpose()
258 | temp_y_found = temp_output.tolist()
259 | temp_err = RMSE(Y_train_data,temp_y_found)
260 | train_RMSE.append(temp_err)
261 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
262 | #-------------------- STATE LOGGER--------------------------------
263 | # log state of identified values every 2000 epochs
264 | if ep % 2000 == 0:
265 | print "-- state logged @ epoch :",ep
266 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
267 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
268 |
269 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
270 | writeErrResult(name,train_loss) # write out the accumulated train loss
271 |
272 | temp_saver = rnn_model['saver']()
273 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
274 | #-----------------------------------------------------------------
275 | #-- condition to stop training
276 | #-- condition to keep track of past losses
277 | if ep < epoch_range:
278 | past_loss_values.append(loss_t)
279 | else:
280 | past_loss_values.pop(0)
281 | past_loss_values.append(loss_t)
282 | # increase the epoch count
283 | ep += 1
284 | #-- find if the entire range of previous losses are below a threshold
285 | count = 0
286 | for val in past_loss_values:
287 | if val < LOSS_LIMIT:
288 | count += 1
289 | #-- stopping condition for training
290 | if count >= epoch_range or ep >= EPOCH_LIMIT:
291 | CONTINUE_FLAG = False
292 | print "-- training stopped @ epoch :",ep
293 | print "--- randomized training started ---"
294 | CONTINUE_FLAG = True # reset the continue flag
295 | while CONTINUE_FLAG:
296 | #-----------------------------------
297 | feed_dict = {rnn_model['enc_inp'][t]:X_train_random[:,t].reshape(-1,input_dim) for t in range(x_length)}
298 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_random[:,t].reshape(-1,output_dim) for t in range(y_length)})
299 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
300 | train_loss.append(loss_t)
301 | if ep % 100 == 0:
302 | temp_output = np.reshape(out_t,(10,-1))
303 | temp_output = temp_output.transpose()
304 | temp_y_found = temp_output.tolist()
305 | temp_err = RMSE(Y_train_random,temp_y_found)
306 | train_RMSE.append(temp_err)
307 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
308 | #-------------------- STATE LOGGER--------------------------------
309 | # log state of identified values every 2000 epochs
310 | if ep % 2000 == 0:
311 | print "-- state logged @ epoch :",ep
312 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
313 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
314 |
315 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
316 | writeErrResult(name,train_loss) # write out the accumulated train loss
317 |
318 | temp_saver = rnn_model['saver']()
319 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
320 | #-----------------------------------------------------------------
321 | #-- condition to stop training
322 | #-- condition to keep track of past losses
323 | if ep < epoch_range:
324 | past_loss_values.append(loss_t)
325 | else:
326 | past_loss_values.pop(0)
327 | past_loss_values.append(loss_t)
328 | # increase the epoch count
329 | ep += 1
330 | #-- find if the entire range of previous losses are below a threshold
331 | count = 0
332 | for val in past_loss_values:
333 | if val < LOSS_LIMIT:
334 | count += 1
335 | #-- stopping condition for training
336 | if count >= epoch_range or ep >= EPOCH_LIMIT:
337 | CONTINUE_FLAG = False
338 | print "-- randomized training stopped @ epoch :",ep
339 |
340 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
341 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
342 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
343 | writeErrResult(name,train_loss) # write out the accumulated train loss
344 |
345 | print "--- training complete ---"
346 | temp_saver = rnn_model['saver']()
347 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
348 | print "--- session saved ---"
349 |
350 | loss_t,out_t = sess.run([rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
351 | temp_output = np.reshape(out_t,(10,-1))
352 | temp_output = temp_output.transpose()
353 | temp_y_found = temp_output.tolist()
354 | temp_err = RMSE(Y_train_random,temp_y_found)
355 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_TRAIN_FOUND_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
356 | writeErrResult(name,temp_err)
357 |
358 | print "--- testing started ---"
359 | feed_dict2 = {rnn_model['enc_inp'][t]:X_test[:,t].reshape(-1,input_dim) for t in range(x_length)}
360 |
361 | Y_temp = np.zeros((len(X_test),y_length), dtype=np.float)
362 | feed_dict2.update({rnn_model['target_seq'][t]:Y_temp[:,t].reshape(-1,output_dim) for t in range(y_length)})
363 | #--
364 | #print np.array(rnn_model['reshaped_outputs']).shape
365 | out_t = sess.run([rnn_model['reshaped_outputs']],feed_dict2)
366 | print "prediction size: ", np.array(out_t).shape
367 | matrix = np.reshape(out_t,(10,-1))
368 | print "reshaped output: ", matrix.shape
369 | matrix = matrix.transpose()
370 | print "transposed matrix: ",matrix.shape
371 | Y_found = matrix.tolist()
372 | # -- testing
373 |
374 | #------- saving the outputs of Y from testing
375 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_Y_found_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
376 | writetofile(name,Y_found)
377 |
378 | err = RMSE(Y_test_data,Y_found)
379 |
380 | name = "Seq2seq_unguided_"+name_flag+"_LSTM_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
381 | writeErrResult(name,err)
382 |
383 | print "----- run complete-------"
--------------------------------------------------------------------------------
/Code/seq2seq_unguided_GRU.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
3 | #--
4 | import numpy as np
5 | import tensorflow as tf
6 | import matplotlib.pyplot as plt
7 | import sys # for debugging
8 | import copy
9 | import random
10 |
11 | from fileprocessor import *
12 | from preprocessor import *
13 | from calculateError import *
14 |
15 | from tensorflow.contrib import rnn
16 | from tensorflow.python.ops import variable_scope
17 | from tensorflow.python.framework import dtypes
18 |
19 | # obtaining the Data ------------------------
20 |
21 | x_length = 20 # the input sequence length
22 | y_length = 10 # the output sequence length
23 | percentage = 0.8 # the percentage of data used for training
24 | filename = "../DataSet/4G_bus_TMobile.txt" # Set this to the dataset on which the model is to be run
25 | name_flag = "4G_Bus_TMobile" # the name flag for the test case
26 | save_path_name = os.getcwd() # the pwd to current directory
27 | save_object_name = name_flag # the state name to be saved
28 |
29 | X_train_data, Y_train_data, X_test_data, Y_test_data = getData(filename,x_length,y_length,percentage)
30 |
31 | X_train = np.array(X_train_data)
32 | Y_train = np.array(Y_train_data)
33 | X_test = np.array(X_test_data)
34 | Y_test = np.array(Y_test_data)
35 |
36 | #----- create a new random sample from training set ---
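# a 10% subset of the training windows is drawn without replacement and used for a
# second, "randomized" training pass after the full-batch pass below.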
37 |
38 | X_train_random_data = []
39 | Y_train_random_data = []
40 | sample_percentage = 0.1 # 10% of the train sample is selected
41 | sample_size = int(round(len(X_train_data)*sample_percentage))
42 | indices = random.sample(xrange(len(X_train_data)),sample_size)
43 |
44 | for i in range(len(indices)):
45 | X_train_random_data.append(X_train_data[int(indices[i])])
46 | Y_train_random_data.append(Y_train_data[int(indices[i])])
47 |
48 | X_train_random = np.array(X_train_random_data)
49 | Y_train_random = np.array(Y_train_random_data)
50 |
51 | name = "Seq2Seq_unguided_"+name_flag+"_Y_test_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
52 | writetofile(name,Y_test_data)
53 |
54 | #--------------------------------------------
55 |
56 | learning_rate = 0.01 # learning rate parameter
57 | lambda_l2_reg = 0.003 # l2 regularization parameter
58 |
59 | hidden_size = 100 # GRU hidden node size
60 | input_dim = 1 # the number of input signals
61 | output_dim = 1 # the number of output signals
62 |
63 | num_stacked_layers = 2 # 2 stacked layers
64 | gradient_clipping = 2.5 # gradient clipping parameter
65 |
66 | #---------------------------------------------
67 |
68 | # when feed_previous = True, the decoder uses the previous output as an input
69 | def graph(feed_previous = False):
70 | tf.reset_default_graph() # resets the previous graph
71 |
72 | global_step = tf.Variable(initial_value = 0, name= "global_step", trainable= False, collections = [tf.GraphKeys.GLOBAL_STEP,tf.GraphKeys.GLOBAL_VARIABLES])
73 |
74 | weights = {
75 | 'out' : tf.get_variable('Weights_out', shape = [hidden_size,output_dim], dtype = tf.float32, initializer = tf.truncated_normal_initializer()),
76 | }
77 |
78 | biases = {
79 | 'out' : tf.get_variable('Biases_out', shape = [output_dim], dtype = tf.float32, initializer = tf.constant_initializer(0.)),
80 | }
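# a single dense projection (hidden_size -> output_dim) is shared by the decoder
# loop function and by the final reshaped_outputs below.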
81 |
82 | with tf.variable_scope('Seq2seq'):
83 | # Encoder : inputs
84 | enc_inp = [
85 | tf.placeholder(tf.float32, shape=(None,input_dim), name="inp_{}".format(t))
86 | for t in range(x_length)
87 | ]
88 |
89 | # Decoder : target outputs
90 | target_seq = [
91 | tf.placeholder(tf.float32, shape=(None,output_dim), name="y_{}".format(t))
92 | for t in range(y_length)
93 | ]
94 |
95 | # add an "END" token at the end of the sequence
96 | # two types of training
97 | # guided : the dec_inp are fed into the decoder as inputs
98 | # unguided : only the first element will be fed into it (used for testing)
99 |
100 | #-- Method with END -> works
101 | #dec_inp = [tf.zeros_like(target_seq[0],dtype=tf.float32, name="END")] + target_seq[:-1]
102 |
103 | #-- Method without END, worked when the target_inputs were actual values
104 | #dec_inp = target_seq
105 |
106 | #-- new method
107 | # instead of giving an END symbol, feed the last value of the input sequence
108 | # as the first input to the decoder
109 |
110 | dec_inp = [enc_inp[-1]] + target_seq[:-1]
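# with feed_previous=False the decoder therefore sees the true previous target at each
# step (teacher forcing); with feed_previous=True only dec_inp[0] is used and later
# inputs come from the decoder's own projected outputs via _loop_function.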
111 |
112 | #-- building the GRU cell
113 | with tf.variable_scope('GRUCell'):
114 | cells = []
115 | for i in range(num_stacked_layers):
116 | with tf.variable_scope('RNN_{}'.format(i)):
117 | cells.append(tf.contrib.rnn.GRUCell(hidden_size))
118 | cell = tf.contrib.rnn.MultiRNNCell(cells)
119 |
120 | def _rnn_decoder(decoder_inputs,
121 | initial_state,
122 | cell,
123 | loop_function=None,
124 | scope=None):
125 | """RNN decoder for the sequence-to-sequence model.
126 | Args:
127 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
128 | initial_state: 2D Tensor with shape [batch_size x cell.state_size].
129 | cell: rnn_cell.RNNCell defining the cell function and size.
130 | loop_function: If not None, this function will be applied to the i-th output
131 | in order to generate the i+1-st input, and decoder_inputs will be ignored,
132 | except for the first element ("GO" symbol). This can be used for decoding,
133 | but also for training to emulate http://arxiv.org/abs/1506.03099.
134 | Signature -- loop_function(prev, i) = next
135 | * prev is a 2D Tensor of shape [batch_size x output_size],
136 | * i is an integer, the step number (when advanced control is needed),
137 | * next is a 2D Tensor of shape [batch_size x input_size].
138 | scope: VariableScope for the created subgraph; defaults to "rnn_decoder".
139 | Returns:
140 | A tuple of the form (outputs, state), where:
141 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
142 | shape [batch_size x output_size] containing generated outputs.
143 | state: The state of each cell at the final time-step.
144 | It is a 2D Tensor of shape [batch_size x cell.state_size].
145 | (Note that in some cases, like basic RNN cell or GRU cell, outputs and
146 | states can be the same. They are different for LSTM cells though.)
147 | """
148 | with variable_scope.variable_scope(scope or "rnn_decoder"):
149 | state = initial_state
150 | outputs = []
151 | prev = None
152 | for i, inp in enumerate(decoder_inputs):
153 | if loop_function is not None and prev is not None:
154 | with variable_scope.variable_scope("loop_function", reuse=True):
155 | inp = loop_function(prev, i)
156 | if i > 0:
157 | variable_scope.get_variable_scope().reuse_variables()
158 | output, state = cell(inp, state)
159 | outputs.append(output)
160 | if loop_function is not None:
161 | prev = output
162 | return outputs, state
163 |
164 | def _basic_rnn_seq2seq(encoder_inputs,
165 | decoder_inputs,
166 | cell,
167 | feed_previous,
168 | dtype=dtypes.float32,
169 | scope=None):
170 | """Basic RNN sequence-to-sequence model.
171 | This model first runs an RNN to encode encoder_inputs into a state vector,
172 | then runs decoder, initialized with the last encoder state, on decoder_inputs.
173 | Encoder and decoder use the same RNN cell type, but don't share parameters.
174 | Args:
175 | encoder_inputs: A list of 2D Tensors [batch_size x input_size].
176 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
177 | feed_previous: Boolean; if True, only the first of decoder_inputs will be
178 | used (the "GO" symbol), all other inputs will be generated by the previous
179 | decoder output using _loop_function below. If False, decoder_inputs are used
180 | as given (the standard decoder case).
181 | dtype: The dtype of the initial state of the RNN cell (default: tf.float32).
182 | scope: VariableScope for the created subgraph; default: "basic_rnn_seq2seq".
183 | Returns:
184 | A tuple of the form (outputs, state), where:
185 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
186 | shape [batch_size x output_size] containing the generated outputs.
187 | state: The state of each decoder cell in the final time-step.
188 | It is a 2D Tensor of shape [batch_size x cell.state_size].
189 | """
190 | with variable_scope.variable_scope(scope or "basic_rnn_seq2seq"):
191 | enc_cell = copy.deepcopy(cell)
192 | _, enc_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
193 | if feed_previous:
194 | return _rnn_decoder(decoder_inputs, enc_state, cell, _loop_function)
195 | else:
196 | return _rnn_decoder(decoder_inputs, enc_state, cell)
197 |
198 | def _loop_function(prev,_):
199 | return tf.matmul(prev,weights['out']) + biases['out']
200 |
201 | dec_outputs, dec_memory = _basic_rnn_seq2seq(enc_inp,dec_inp,cell,feed_previous=feed_previous)
202 | reshaped_outputs = [tf.matmul(i,weights['out'])+biases['out'] for i in dec_outputs]
203 |
204 | # Training loss and optimizer
205 | with tf.variable_scope('Loss'):
206 | # L2 loss
207 | output_loss = tf.reduce_mean(tf.squared_difference(reshaped_outputs,target_seq))
208 | # L2 regularization for weights and biases
209 | reg_loss = 0
210 | for tf_var in tf.trainable_variables():
211 | if 'Biases_' in tf_var.name or 'Weights_' in tf_var.name:
212 | reg_loss += tf.reduce_mean(tf.nn.l2_loss(tf_var))
213 |
214 | loss = output_loss + lambda_l2_reg * reg_loss
215 |
216 | with tf.variable_scope('Optimizer'):
217 | optimizer = tf.contrib.layers.optimize_loss(loss=loss,learning_rate=learning_rate,global_step=global_step,optimizer='Adam',clip_gradients=gradient_clipping)
218 |
219 | saver = tf.train.Saver
220 |
221 | return dict(enc_inp = enc_inp,target_seq= target_seq,train_op=optimizer,loss=loss,saver=saver,reshaped_outputs = reshaped_outputs)
222 |
223 |
224 | #-----------------------------------------------------
225 | # un-guided training method
226 | ep = 0
227 | loss_t = 300 # initial placeholder loss; overwritten by the first training step
228 | avg_rmse_lim = 3
229 | LOSS_LIMIT = avg_rmse_lim * avg_rmse_lim
230 | CONTINUE_FLAG = True
231 | EPOCH_LIMIT = 50000
232 |
233 | rnn_model = graph(feed_previous=True) # un-guided training model
234 | saver = tf.train.Saver()
235 |
236 | init = tf.global_variables_initializer()
237 |
238 | Y_found = []
239 | train_loss = []
240 | train_RMSE = []
241 |
242 | past_loss_values = []
243 | epoch_range = 5
244 |
245 | with tf.Session() as sess:
246 | print "--- gru: tensorflow session started ---"
247 | init.run()
248 | # -- training
249 | while CONTINUE_FLAG:
250 | #-----------------------------------
251 | feed_dict = {rnn_model['enc_inp'][t]:X_train[:,t].reshape(-1,input_dim) for t in range(x_length)}
252 | feed_dict.update({rnn_model['target_seq'][t]:Y_train[:,t].reshape(-1,output_dim) for t in range(y_length)})
253 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
254 | train_loss.append(loss_t)
255 | if ep % 100 == 0:
256 | temp_output = np.reshape(out_t,(10,-1))
257 | temp_output = temp_output.transpose()
258 | temp_y_found = temp_output.tolist()
259 | temp_err = RMSE(Y_train_data,temp_y_found)
260 | train_RMSE.append(temp_err)
261 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
262 | #-------------------- STATE LOGGER--------------------------------
263 | # log state of identified values every 2000 epochs
264 | if ep % 2000 == 0:
265 | print "-- state logged @ epoch :",ep
266 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
267 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
268 |
269 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
270 | writeErrResult(name,train_loss) # write out the accumulated train loss
271 |
272 | temp_saver = rnn_model['saver']()
273 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
274 | #-----------------------------------------------------------------
275 | #-- condition to stop training
276 | #-- condition to keep track of past losses
277 | if ep < epoch_range:
278 | past_loss_values.append(loss_t)
279 | else:
280 | past_loss_values.pop(0)
281 | past_loss_values.append(loss_t)
282 | # increase the epoch count
283 | ep += 1
284 | #-- find if the entire range of previous losses are below a threshold
285 | count = 0
286 | for val in past_loss_values:
287 | if val < LOSS_LIMIT:
288 | count += 1
289 | #-- stopping condition for training
290 | if count >= epoch_range or ep >= EPOCH_LIMIT:
291 | CONTINUE_FLAG = False
292 | print "-- training stopped @ epoch :",ep
293 | print "--- randomized training started ---"
294 | CONTINUE_FLAG = True # reset the continue flag
295 | while CONTINUE_FLAG:
296 | #-----------------------------------
297 | feed_dict = {rnn_model['enc_inp'][t]:X_train_random[:,t].reshape(-1,input_dim) for t in range(x_length)}
298 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_random[:,t].reshape(-1,output_dim) for t in range(y_length)})
299 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
300 | train_loss.append(loss_t)
301 | if ep % 100 == 0:
302 | temp_output = np.reshape(out_t,(10,-1))
303 | temp_output = temp_output.transpose()
304 | temp_y_found = temp_output.tolist()
305 | temp_err = RMSE(Y_train_random,temp_y_found)
306 | train_RMSE.append(temp_err)
307 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
308 | #-------------------- STATE LOGGER--------------------------------
309 | # log state of identified values every 2000 epochs
310 | if ep % 2000 == 0:
311 | print "-- state logged @ epoch :",ep
312 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
313 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
314 |
315 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
316 | writeErrResult(name,train_loss) # write out the accumulated train loss
317 |
318 | temp_saver = rnn_model['saver']()
319 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
320 | #-----------------------------------------------------------------
321 | #-- condition to stop training
322 | #-- condition to keep track of past losses
323 | if ep < epoch_range:
324 | past_loss_values.append(loss_t)
325 | else:
326 | past_loss_values.pop(0)
327 | past_loss_values.append(loss_t)
328 | # increase the epoch count
329 | ep += 1
330 | #-- find if the entire range of previous losses are below a threshold
331 | count = 0
332 | for val in past_loss_values:
333 | if val < LOSS_LIMIT:
334 | count += 1
335 | #-- stopping condition for training
336 | if count >= epoch_range or ep >= EPOCH_LIMIT:
337 | CONTINUE_FLAG = False
338 | print "-- randomized training stopped @ epoch :",ep
339 |
340 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
341 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
342 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
343 | writeErrResult(name,train_loss) # write out the accumulated train loss
344 |
345 | print "--- training complete ---"
346 | temp_saver = rnn_model['saver']()
347 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
348 | print "--- session saved ---"
349 |
350 | loss_t,out_t = sess.run([rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
351 | temp_output = np.reshape(out_t,(10,-1))
352 | temp_output = temp_output.transpose()
353 | temp_y_found = temp_output.tolist()
354 | temp_err = RMSE(Y_train_random,temp_y_found)
355 | name = "Seq2seq_unguided_"+name_flag+"_TRAIN_FOUND_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
356 | writeErrResult(name,temp_err)
357 |
358 | print "--- testing started ---"
359 | feed_dict2 = {rnn_model['enc_inp'][t]:X_test[:,t].reshape(-1,input_dim) for t in range(x_length)}
360 |
361 | Y_temp = np.zeros((len(X_test),y_length), dtype=np.float)
362 | feed_dict2.update({rnn_model['target_seq'][t]:Y_temp[:,t].reshape(-1,output_dim) for t in range(y_length)})
363 | #--
364 | #print np.array(rnn_model['reshaped_outputs']).shape
365 | out_t = sess.run([rnn_model['reshaped_outputs']],feed_dict2)
366 | print "prediction size: ", np.array(out_t).shape
367 | matrix = np.reshape(out_t,(10,-1))
368 | print "reshaped output: ", matrix.shape
369 | matrix = matrix.transpose()
370 | print "transposed matrix: ",matrix.shape
371 | Y_found = matrix.tolist()
372 | # -- testing
373 |
374 | #------- saving the outputs of Y from testing
375 | name = "Seq2seq_unguided_"+name_flag+"_Y_found_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
376 | writetofile(name,Y_found)
377 |
378 | err = RMSE(Y_test_data,Y_found)
379 |
380 | name = "Seq2seq_unguided_"+name_flag+"_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
381 | writeErrResult(name,err)
382 |
383 | print "----- run complete-------"
384 |
--------------------------------------------------------------------------------
/Code/seq2seq_guided_LSTM.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
3 | #--
4 | import numpy as np
5 | import tensorflow as tf
6 | import matplotlib.pyplot as plt
7 | import sys # for debugging
8 | import copy
9 | import random
10 |
11 | from fileprocessor import *
12 | from preprocessor import *
13 | from calculateError import *
14 |
15 | from tensorflow.contrib import rnn
16 | from tensorflow.python.ops import variable_scope
17 | from tensorflow.python.framework import dtypes
18 |
19 | # obtaining the Data ------------------------
20 |
21 | x_length = 20 # the input sequence length
22 | y_length = 10 # the output sequence length
23 | percentage = 0.8 # the percentage of data used for training
24 | filename = "../DataSet/4G_bus_TMobile.txt" # Set this to the dataset on which the model is to be run
25 | name_flag = "4G_bus_TMobile" # the name flag for the test case
26 | save_path_name = os.getcwd() # the pwd to current directory
27 | save_object_name = name_flag # the state name to be saved
28 |
29 | X_train_data, Y_train_data, X_test_data, Y_test_data = getData(filename,x_length,y_length,percentage)
30 |
31 | X_train = np.array(X_train_data)
32 | Y_train = np.array(Y_train_data)
33 | X_test = np.array(X_test_data)
34 | Y_test = np.array(Y_test_data)
35 |
36 | #----- create a new random sample from training set ---
37 |
38 | X_train_random_data = []
39 | Y_train_random_data = []
40 | sample_percentage = 0.1 # 10% of the train sample is selected
41 | sample_size = int(round(len(X_train_data)*sample_percentage))
42 | indices = random.sample(xrange(len(X_train_data)),sample_size)
43 |
44 |
45 | for i in range(len(indices)):
46 | X_train_random_data.append(X_train_data[int(indices[i])])
47 | Y_train_random_data.append(Y_train_data[int(indices[i])])
48 | #print("-- generated the test cases --")
49 | #sys.exit(0)
50 | X_train_random = np.array(X_train_random_data)
51 | Y_train_random = np.array(Y_train_random_data)
52 |
53 | name = "Seq2Seq_"+name_flag+"_LSTM_Y_test_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
54 | writetofile(name,Y_test_data)
55 |
56 | #--------------------------------------------
57 |
58 | learning_rate = 0.01 # learning rate parameter
59 | lambda_l2_reg = 0.003 # l2 regularization parameter
60 |
61 | hidden_size = 200 # LSTM hidden node size
62 | input_dim = 1 # the number of input signals
63 | output_dim = 1 # the number of output signals
64 |
65 | num_stacked_layers = 2 # 2 stacked layers
66 | gradient_clipping = 2.5 # gradient clipping parameter
67 |
68 | #---------------------------------------------
69 |
70 | # when feed_previous = True, the decoder uses the previous output as an input
71 | def graph(feed_previous = False):
72 | tf.reset_default_graph() # resets the previous graph
73 |
74 | global_step = tf.Variable(initial_value = 0, name= "global_step", trainable= False, collections = [tf.GraphKeys.GLOBAL_STEP,tf.GraphKeys.GLOBAL_VARIABLES])
75 |
76 | weights = {
77 | 'out' : tf.get_variable('Weights_out', shape = [hidden_size,output_dim], dtype = tf.float32, initializer = tf.truncated_normal_initializer()),
78 | }
79 |
80 | biases = {
81 | 'out' : tf.get_variable('Biases_out', shape = [output_dim], dtype = tf.float32, initializer = tf.constant_initializer(0.)),
82 | }
83 |
84 | with tf.variable_scope('Seq2seq'):
85 | # Encoder : inputs
86 | enc_inp = [
87 | tf.placeholder(tf.float32, shape=(None,input_dim), name="inp_{}".format(t))
88 | for t in range(x_length)
89 | ]
90 |
91 | # Decoder : target outputs
92 | target_seq = [
93 | tf.placeholder(tf.float32, shape=(None,output_dim), name="y_{}".format(t))
94 | for t in range(y_length)
95 | ]
96 |
97 | # add an "END" token at the end of the sequence
98 | # two types of training
99 | # guided : the dec_inp are fed into the decoder as inputs
100 | # unguided : only the first element will be fed into it (used for testing)
101 |
102 | #-- Method with END -> works
103 | #dec_inp = [tf.zeros_like(target_seq[0],dtype=tf.float32, name="END")] + target_seq[:-1]
104 |
105 | #-- Method without END, worked when the target_inputs were actual values
106 | #dec_inp = target_seq
107 |
108 | #-- new method
109 | # instead of giving an END symbol, feed the last value of the input sequence
110 | # as the first input to the decoder
111 |
112 | dec_inp = [enc_inp[-1]] + target_seq[:-1]
113 |
114 | #-- building the LSTM cell
115 | with tf.variable_scope('LSTMCell'):
116 | cells = []
117 | for i in range(num_stacked_layers):
118 | with tf.variable_scope('RNN_{}'.format(i)):
119 | cells.append(tf.contrib.rnn.LSTMCell(hidden_size))
120 | cell = tf.contrib.rnn.MultiRNNCell(cells)
121 |
122 | def _rnn_decoder(decoder_inputs,
123 | initial_state,
124 | cell,
125 | loop_function=None,
126 | scope=None):
127 | """RNN decoder for the sequence-to-sequence model.
128 | Args:
129 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
130 | initial_state: 2D Tensor with shape [batch_size x cell.state_size].
131 | cell: rnn_cell.RNNCell defining the cell function and size.
132 | loop_function: If not None, this function will be applied to the i-th output
133 | in order to generate the i+1-st input, and decoder_inputs will be ignored,
134 | except for the first element ("GO" symbol). This can be used for decoding,
135 | but also for training to emulate http://arxiv.org/abs/1506.03099.
136 | Signature -- loop_function(prev, i) = next
137 | * prev is a 2D Tensor of shape [batch_size x output_size],
138 | * i is an integer, the step number (when advanced control is needed),
139 | * next is a 2D Tensor of shape [batch_size x input_size].
140 | scope: VariableScope for the created subgraph; defaults to "rnn_decoder".
141 | Returns:
142 | A tuple of the form (outputs, state), where:
143 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
144 | shape [batch_size x output_size] containing generated outputs.
145 | state: The state of each cell at the final time-step.
146 | It is a 2D Tensor of shape [batch_size x cell.state_size].
147 | (Note that in some cases, like basic RNN cell or GRU cell, outputs and
148 | states can be the same. They are different for LSTM cells though.)
149 | """
150 | with variable_scope.variable_scope(scope or "rnn_decoder"):
151 | state = initial_state
152 | outputs = []
153 | prev = None
154 | for i, inp in enumerate(decoder_inputs):
155 | if loop_function is not None and prev is not None:
156 | with variable_scope.variable_scope("loop_function", reuse=True):
157 | inp = loop_function(prev, i)
158 | if i > 0:
159 | variable_scope.get_variable_scope().reuse_variables()
160 | output, state = cell(inp, state)
161 | outputs.append(output)
162 | if loop_function is not None:
163 | prev = output
164 | return outputs, state
165 |
166 | def _basic_rnn_seq2seq(encoder_inputs,
167 | decoder_inputs,
168 | cell,
169 | feed_previous,
170 | dtype=dtypes.float32,
171 | scope=None):
172 | """Basic RNN sequence-to-sequence model.
173 | This model first runs an RNN to encode encoder_inputs into a state vector,
174 | then runs decoder, initialized with the last encoder state, on decoder_inputs.
175 | Encoder and decoder use the same RNN cell type, but don't share parameters.
176 | Args:
177 | encoder_inputs: A list of 2D Tensors [batch_size x input_size].
178 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
179 | feed_previous: Boolean; if True, only the first of decoder_inputs will be
180 | used (the "GO" symbol), all other inputs will be generated by the previous
181 | decoder output using _loop_function below. If False, decoder_inputs are used
182 | as given (the standard decoder case).
183 | dtype: The dtype of the initial state of the RNN cell (default: tf.float32).
184 | scope: VariableScope for the created subgraph; default: "basic_rnn_seq2seq".
185 | Returns:
186 | A tuple of the form (outputs, state), where:
187 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
188 | shape [batch_size x output_size] containing the generated outputs.
189 | state: The state of each decoder cell in the final time-step.
190 | It is a 2D Tensor of shape [batch_size x cell.state_size].
191 | """
192 | with variable_scope.variable_scope(scope or "basic_rnn_seq2seq"):
193 | enc_cell = copy.deepcopy(cell)
194 | _, enc_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
195 | if feed_previous:
196 | return _rnn_decoder(decoder_inputs, enc_state, cell, _loop_function)
197 | else:
198 | return _rnn_decoder(decoder_inputs, enc_state, cell)
199 |
200 | def _loop_function(prev,_):
201 | return tf.matmul(prev,weights['out']) + biases['out']
202 |
203 | dec_outputs, dec_memory = _basic_rnn_seq2seq(enc_inp,dec_inp,cell,feed_previous=feed_previous)
204 | reshaped_outputs = [tf.matmul(i,weights['out'])+biases['out'] for i in dec_outputs]
205 |
206 | # Training loss and optimizer
207 | with tf.variable_scope('Loss'):
208 | # L2 loss
209 |
210 | output_loss = tf.reduce_mean(tf.squared_difference(reshaped_outputs,target_seq))
211 | # L2 regularization for weights and biases
212 | reg_loss = 0
213 | for tf_var in tf.trainable_variables():
214 | if 'Biases_' in tf_var.name or 'Weights_' in tf_var.name:
215 | reg_loss += tf.reduce_mean(tf.nn.l2_loss(tf_var))
216 |
217 | loss = output_loss + lambda_l2_reg * reg_loss
218 |
219 | with tf.variable_scope('Optimizer'):
220 | optimizer = tf.contrib.layers.optimize_loss(loss=loss,learning_rate=learning_rate,global_step=global_step,optimizer='Adam',clip_gradients=gradient_clipping)
221 |
222 | saver = tf.train.Saver
223 |
224 | return dict(enc_inp = enc_inp,target_seq= target_seq,train_op=optimizer,loss=loss,saver=saver,reshaped_outputs = reshaped_outputs)
225 |
226 |
227 | #-----------------------------------------------------
228 | # guided training method (followed by randomized un-guided fine-tuning)
229 | ep = 0
230 | loss_t = 300 # initial placeholder loss; overwritten by the first training step
231 | avg_rmse_lim = 3
232 | LOSS_LIMIT = avg_rmse_lim * avg_rmse_lim
233 | CONTINUE_FLAG = True
234 | EPOCH_LIMIT = 5000
235 | MIN_EPOCH_LIM = 1000
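# the guided (teacher-forced) phase runs for at least MIN_EPOCH_LIM epochs and at most
# EPOCH_LIMIT epochs, in addition to the sliding loss-window check below.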
236 |
237 | rnn_model = graph(feed_previous=False) # for guided model
238 | saver = tf.train.Saver()
239 |
240 | init = tf.global_variables_initializer()
241 |
242 | Y_found = []
243 | train_loss = []
244 | train_RMSE = []
245 |
246 | past_loss_values = []
247 | epoch_range = 5
248 |
249 | with tf.Session() as sess:
250 | print "--- tensorflow session started ---"
251 | init.run()
252 | # -- training
253 | while CONTINUE_FLAG:
254 | #-----------------------------------
255 | feed_dict = {rnn_model['enc_inp'][t]:X_train[:,t].reshape(-1,input_dim) for t in range(x_length)}
256 | feed_dict.update({rnn_model['target_seq'][t]:Y_train[:,t].reshape(-1,output_dim) for t in range(y_length)})
257 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
258 | train_loss.append(loss_t)
259 | if ep % 100 == 0:
260 | temp_output = np.reshape(out_t,(10,-1))
261 | temp_output = temp_output.transpose()
262 | temp_y_found = temp_output.tolist()
263 | temp_err = RMSE(Y_train_data,temp_y_found)
264 | train_RMSE.append(temp_err)
265 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
266 | #-------------------- STATE LOGGER--------------------------------
267 | # log state of identified values every 2000 epochs
268 | if ep % 2000 == 0:
269 | print "-- state logged @ epoch :",ep
270 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
271 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
272 |
273 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
274 | writeErrResult(name,train_loss) # write out the accumulated train loss
275 |
276 | temp_saver = rnn_model['saver']()
277 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
278 | #-----------------------------------------------------------------
279 | #-- condition to stop training
280 | #-- condition to keep track of past losses
281 | if ep < epoch_range:
282 | past_loss_values.append(loss_t)
283 | else:
284 | past_loss_values.pop(0)
285 | past_loss_values.append(loss_t)
286 | # increase the epoch count
287 | ep += 1
288 | #-- find if the entire range of previous losses are below a threshold
289 | count = 0
290 | for val in past_loss_values:
291 | if val < LOSS_LIMIT:
292 | count += 1
293 | #-- stopping condition for training
294 | if (count >= epoch_range or ep >= EPOCH_LIMIT) and ep >= MIN_EPOCH_LIM:
295 | CONTINUE_FLAG = False
296 | print "-- training stopped @ epoch :",ep
297 | temp_saver = rnn_model['saver']()
298 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
299 | sess.close()
300 | print "-- checkpoint saved --"
301 |
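# rebuild the graph with feed_previous=True so the decoder consumes its own predictions,
# then restore the weights learned during the guided phase from the saved checkpoint.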
302 | tf.reset_default_graph()
303 | rnn_model = graph(feed_previous=True) # the model will take the previous output as the next input
304 | init = tf.global_variables_initializer()
305 | CONTINUE_FLAG = True # reset the continue flag
306 |
307 | with tf.Session() as sess:
308 | sess.run(init)
309 | saver = rnn_model['saver']().restore(sess,os.path.join(save_path_name,save_object_name))
310 | print "--- randomized training started ---"
311 | while CONTINUE_FLAG:
312 | #-----------------------------------
313 | feed_dict = {rnn_model['enc_inp'][t]:X_train_random[:,t].reshape(-1,input_dim) for t in range(x_length)}
314 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_random[:,t].reshape(-1,output_dim) for t in range(y_length)})
315 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
316 | train_loss.append(loss_t)
317 | if ep % 100 == 0:
318 | temp_output = np.reshape(out_t,(10,-1))
319 | temp_output = temp_output.transpose()
320 | temp_y_found = temp_output.tolist()
321 | temp_err = RMSE(Y_train_random,temp_y_found)
322 | train_RMSE.append(temp_err)
323 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
324 | #-------------------- STATE LOGGER--------------------------------
325 | # log state of identified values every 2000 epochs
326 | if ep % 2000 == 0:
327 | print "-- state logged @ epoch :",ep
328 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
329 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
330 |
331 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
332 | writeErrResult(name,train_loss) # write out the accumulated train loss
333 |
334 | temp_saver = rnn_model['saver']()
335 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
336 | #-----------------------------------------------------------------
337 | #-- condition to stop training
338 | #-- condition to keep track of past losses
339 | if ep < epoch_range:
340 | past_loss_values.append(loss_t)
341 | else:
342 | past_loss_values.pop(0)
343 | past_loss_values.append(loss_t)
344 | # increase the epoch count
345 | ep += 1
346 | #-- find if the entire range of previous losses are below a threshold
347 | count = 0
348 | for val in past_loss_values:
349 | if val < LOSS_LIMIT:
350 | count += 1
351 | #-- stopping condition for training
352 | if (count >= epoch_range or ep >= EPOCH_LIMIT) and ep >= MIN_EPOCH_LIM:
353 | CONTINUE_FLAG = False
354 | print "-- randomized training stopped @ epoch :",ep
355 |
356 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
357 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
358 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
359 | writeErrResult(name,train_loss) # write out the accumulated train loss
360 |
361 | print "--- training complete ---"
362 | temp_saver = rnn_model['saver']()
363 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
364 | print "--- session saved ---"
365 |
366 | loss_t,out_t = sess.run([rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
367 | temp_output = np.reshape(out_t,(10,-1))
368 | temp_output = temp_output.transpose()
369 | temp_y_found = temp_output.tolist()
370 | temp_err = RMSE(Y_train_random,temp_y_found)
371 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_FOUND_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
372 | writeErrResult(name,temp_err)
373 |
374 | print "--- testing started ---"
375 | feed_dict2 = {rnn_model['enc_inp'][t]:X_test[:,t].reshape(-1,input_dim) for t in range(x_length)}
376 |
377 | Y_temp = np.zeros((len(X_test),y_length), dtype=np.float)
378 | feed_dict2.update({rnn_model['target_seq'][t]:Y_temp[:,t].reshape(-1,output_dim) for t in range(y_length)})
379 | #--
380 | #print np.array(rnn_model['reshaped_outputs']).shape
381 | out_t = sess.run([rnn_model['reshaped_outputs']],feed_dict2)
382 | print "prediction size: ", np.array(out_t).shape
383 | matrix = np.reshape(out_t,(10,-1))
384 | print "reshaped output: ", matrix.shape
385 | matrix = matrix.transpose()
386 | print "transposed matrix: ",matrix.shape
387 | Y_found = matrix.tolist()
388 | # -- testing
389 |
390 | #------- saving the outputs of Y from testing
391 | name = name_flag+"_"+str(x_length)+"_"+str(y_length)+"_pred.txt"
392 | writetofile(name,Y_found)
393 |
394 | err = RMSE(Y_test_data,Y_found)
395 |
396 | name = name_flag+"_"+str(x_length)+"_"+str(y_length)+"_test_rmse.txt"
397 | writeErrResult(name,err)
398 |
399 | print "----- run complete-------"
--------------------------------------------------------------------------------
/Code/seq2seq_curriculum_LSTM.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
3 | #--
4 | import numpy as np
5 | import tensorflow as tf
6 | import matplotlib.pyplot as plt
7 | import sys # for debugging
8 | import copy
9 | import random
10 |
11 | from fileprocessor import *
12 | from preprocessor import *
13 | from calculateError import *
14 |
15 | from tensorflow.contrib import rnn
16 | from tensorflow.python.ops import variable_scope
17 | from tensorflow.python.framework import dtypes
18 |
19 | # obtaining the Data ------------------------
20 |
21 | x_length = 20 # the input sequence length
22 | y_length = 10 # the output sequence length
23 | percentage = 0.8 # the percentage of data used for training
24 | filename = "../DataSet/4G_bus_TMobile.txt" # Set this to the dataset on which the model is to be run
25 | name_flag = "4G_bus_TMobile" # the name flag for the test case
26 | save_path_name = os.getcwd() # the pwd to current directory
27 | save_object_name = name_flag # the state name to be saved
28 |
29 | X_train_data, Y_train_data, X_test_data, Y_test_data = getData(filename,x_length,y_length,percentage)
30 |
31 | X_train = np.array(X_train_data)
32 | Y_train = np.array(Y_train_data)
33 | X_test = np.array(X_test_data)
34 | Y_test = np.array(Y_test_data)
35 |
36 | #-- creating the guided training data set
37 |
38 | guided_training_percentage = 0.3 # 30% of the total training data will be used for guided training
39 | sz = len(X_train_data) # the total number of windows available for training
40 | guided_data_size = int(round(guided_training_percentage * sz))
41 | unguided_data_size = sz - guided_data_size
42 |
43 | X_train_guided_data = X_train_data[0:guided_data_size]
44 | X_train_unguided_data = X_train_data[guided_data_size:-1]
45 |
46 | X_train_guided = np.array(X_train_guided_data)
47 | X_train_unguided = np.array(X_train_unguided_data)
48 |
49 | Y_train_guided_data = Y_train_data[0:guided_data_size]
50 | Y_train_unguided_data = Y_train_data[guided_data_size:-1]
51 |
52 | Y_train_guided = np.array(Y_train_guided_data)
53 | Y_train_unguided = np.array(Y_train_unguided_data)
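# curriculum split: the first 30% of the training windows are used with teacher forcing
# (guided phase), the rest with the decoder fed its own outputs (un-guided phase).
# note: the [guided_data_size:-1] slices above drop the final training window.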
54 |
55 | #----- create a new random sample from training set ---
56 |
57 | X_train_random_data = []
58 | Y_train_random_data = []
59 | sample_percentage = 0.1 # 10% of the train sample is selected
60 | sample_size = int(round(len(X_train_data)*sample_percentage))
61 | indices = random.sample(xrange(len(X_train_data)),sample_size)
62 |
63 |
64 | for i in range(len(indices)):
65 | X_train_random_data.append(X_train_data[int(indices[i])])
66 | Y_train_random_data.append(Y_train_data[int(indices[i])])
67 |
68 | X_train_random = np.array(X_train_random_data)
69 | Y_train_random = np.array(Y_train_random_data)
70 |
71 | name = "Seq2Seq_"+name_flag+"_LSTM_Y_test_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
72 | writetofile(name,Y_test_data)
73 |
74 | #--------------------------------------------
75 |
76 | learning_rate = 0.01 # learning rate parameter
77 | lambda_l2_reg = 0.003 # l2 regularization parameter
78 |
79 | hidden_size = 200 # LSTM hidden node size
80 | input_dim = 1 # the number of input signals
81 | output_dim = 1 # the number of output signals
82 |
83 | num_stacked_layers = 2 # 2 stacked layers
84 | gradient_clipping = 2.5 # gradient clipping parameter
85 |
86 | #---------------------------------------------
87 |
88 | # when feed_previous = True, the decoder uses the previous output as an input
89 | def graph(feed_previous = False):
90 | tf.reset_default_graph() # resets the previous graph
91 |
92 | global_step = tf.Variable(initial_value = 0, name= "global_step", trainable= False, collections = [tf.GraphKeys.GLOBAL_STEP,tf.GraphKeys.GLOBAL_VARIABLES])
93 |
94 | weights = {
95 | 'out' : tf.get_variable('Weights_out', shape = [hidden_size,output_dim], dtype = tf.float32, initializer = tf.truncated_normal_initializer()),
96 | }
97 |
98 | biases = {
99 | 'out' : tf.get_variable('Biases_out', shape = [output_dim], dtype = tf.float32, initializer = tf.constant_initializer(0.)),
100 | }
101 |
102 | with tf.variable_scope('Seq2seq'):
103 | # Encoder : inputs
104 | enc_inp = [
105 | tf.placeholder(tf.float32, shape=(None,input_dim), name="inp_{}".format(t))
106 | for t in range(x_length)
107 | ]
108 |
109 | # Decoder : target outputs
110 | target_seq = [
111 | tf.placeholder(tf.float32, shape=(None,output_dim), name="y_{}".format(t))
112 | for t in range(y_length)
113 | ]
114 |
115 | # add an "END" token at the end of the sequence
116 | # two types of training
117 | # guided : the dec_inp are fed into the decoder as inputs
118 | # unguided : only the first element will be fed into it (used for testing)
119 |
120 | #-- Method with END -> works
121 | #dec_inp = [tf.zeros_like(target_seq[0],dtype=tf.float32, name="END")] + target_seq[:-1]
122 |
123 | #-- Method without END, worked when the target_inputs were actual values
124 | #dec_inp = target_seq
125 |
126 | #-- new method
127 | # instead of giving an END symbol, feed the last value of the input sequence
128 | # as the first input to the decoder
129 |
130 | dec_inp = [enc_inp[-1]] + target_seq[:-1]
131 |
132 | #-- building the LSTM cell
133 | with tf.variable_scope('LSTMCell'):
134 | cells = []
135 | for i in range(num_stacked_layers):
136 | with tf.variable_scope('RNN_{}'.format(i)):
137 | cells.append(tf.contrib.rnn.LSTMCell(hidden_size))
138 | cell = tf.contrib.rnn.MultiRNNCell(cells)
139 |
140 | def _rnn_decoder(decoder_inputs,
141 | initial_state,
142 | cell,
143 | loop_function=None,
144 | scope=None):
145 | """RNN decoder for the sequence-to-sequence model.
146 | Args:
147 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
148 | initial_state: 2D Tensor with shape [batch_size x cell.state_size].
149 | cell: rnn_cell.RNNCell defining the cell function and size.
150 | loop_function: If not None, this function will be applied to the i-th output
151 | in order to generate the i+1-st input, and decoder_inputs will be ignored,
152 | except for the first element ("GO" symbol). This can be used for decoding,
153 | but also for training to emulate http://arxiv.org/abs/1506.03099.
154 | Signature -- loop_function(prev, i) = next
155 | * prev is a 2D Tensor of shape [batch_size x output_size],
156 | * i is an integer, the step number (when advanced control is needed),
157 | * next is a 2D Tensor of shape [batch_size x input_size].
158 | scope: VariableScope for the created subgraph; defaults to "rnn_decoder".
159 | Returns:
160 | A tuple of the form (outputs, state), where:
161 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
162 | shape [batch_size x output_size] containing generated outputs.
163 | state: The state of each cell at the final time-step.
164 | It is a 2D Tensor of shape [batch_size x cell.state_size].
165 | (Note that in some cases, like basic RNN cell or GRU cell, outputs and
166 | states can be the same. They are different for LSTM cells though.)
167 | """
168 | with variable_scope.variable_scope(scope or "rnn_decoder"):
169 | state = initial_state
170 | outputs = []
171 | prev = None
172 | for i, inp in enumerate(decoder_inputs):
173 | if loop_function is not None and prev is not None:
174 | with variable_scope.variable_scope("loop_function", reuse=True):
175 | inp = loop_function(prev, i)
176 | if i > 0:
177 | variable_scope.get_variable_scope().reuse_variables()
178 | output, state = cell(inp, state)
179 | outputs.append(output)
180 | if loop_function is not None:
181 | prev = output
182 | return outputs, state
183 |
184 | def _basic_rnn_seq2seq(encoder_inputs,
185 | decoder_inputs,
186 | cell,
187 | feed_previous,
188 | dtype=dtypes.float32,
189 | scope=None):
190 | """Basic RNN sequence-to-sequence model.
191 | This model first runs an RNN to encode encoder_inputs into a state vector,
192 | then runs decoder, initialized with the last encoder state, on decoder_inputs.
193 | Encoder and decoder use the same RNN cell type, but don't share parameters.
194 | Args:
195 | encoder_inputs: A list of 2D Tensors [batch_size x input_size].
196 | decoder_inputs: A list of 2D Tensors [batch_size x input_size].
197 | feed_previous: Boolean; if True, only the first of decoder_inputs will be
198 | used (the "GO" symbol), all other inputs will be generated by the previous
199 | decoder output using _loop_function below. If False, decoder_inputs are used
200 | as given (the standard decoder case).
201 | dtype: The dtype of the initial state of the RNN cell (default: tf.float32).
202 | scope: VariableScope for the created subgraph; default: "basic_rnn_seq2seq".
203 | Returns:
204 | A tuple of the form (outputs, state), where:
205 | outputs: A list of the same length as decoder_inputs of 2D Tensors with
206 | shape [batch_size x output_size] containing the generated outputs.
207 | state: The state of each decoder cell in the final time-step.
208 | It is a 2D Tensor of shape [batch_size x cell.state_size].
209 | """
210 | with variable_scope.variable_scope(scope or "basic_rnn_seq2seq"):
211 | enc_cell = copy.deepcopy(cell)
212 | _, enc_state = rnn.static_rnn(enc_cell, encoder_inputs, dtype=dtype)
213 | if feed_previous:
214 | return _rnn_decoder(decoder_inputs, enc_state, cell, _loop_function)
215 | else:
216 | return _rnn_decoder(decoder_inputs, enc_state, cell)
217 |
218 | def _loop_function(prev,_):
219 | return tf.matmul(prev,weights['out']) + biases['out']
220 |
221 | dec_outputs, dec_memory = _basic_rnn_seq2seq(enc_inp,dec_inp,cell,feed_previous=feed_previous)
222 | reshaped_outputs = [tf.matmul(i,weights['out'])+biases['out'] for i in dec_outputs]
223 |
224 | # Training loss and optimizer
225 | with tf.variable_scope('Loss'):
226 | # L2 loss
227 | #output_loss = 0
228 | #for _y, _Y in zip(reshaped_outputs,target_seq):
229 | # output_loss += tf.reduce_mean(tf.pow(_y - _Y, 2))
230 | output_loss = tf.reduce_mean(tf.squared_difference(reshaped_outputs,target_seq))
231 | # L2 regularization for weights and biases
232 | reg_loss = 0
233 | for tf_var in tf.trainable_variables():
234 | if 'Biases_' in tf_var.name or 'Weights_' in tf_var.name:
235 | reg_loss += tf.reduce_mean(tf.nn.l2_loss(tf_var))
236 |
237 | loss = output_loss + lambda_l2_reg * reg_loss
238 |
239 | with tf.variable_scope('Optimizer'):
240 | optimizer = tf.contrib.layers.optimize_loss(loss=loss,learning_rate=learning_rate,global_step=global_step,optimizer='Adam',clip_gradients=gradient_clipping)
241 |
242 | saver = tf.train.Saver
243 |
244 | return dict(enc_inp = enc_inp,target_seq= target_seq,train_op=optimizer,loss=loss,saver=saver,reshaped_outputs = reshaped_outputs)
245 |
246 |
247 | #-----------------------------------------------------
248 | # curriculum training method
249 |
250 | ep = 0
251 | loss_t = 300 # initial placeholder loss; overwritten by the first training step
252 | avg_rmse_lim = 3
253 | LOSS_LIMIT = avg_rmse_lim * avg_rmse_lim
254 | CONTINUE_FLAG = True
255 | EPOCH_LIMIT = 5000
256 | MIN_EPOCH_LIM = 1000
257 |
258 | Y_found = []
259 | train_loss = []
260 | train_RMSE = []
261 | random_train_RMSE = []
262 |
263 | past_loss_values = []
264 | epoch_range = 20
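# the curriculum variant waits for 20 consecutive losses below LOSS_LIMIT
# (versus 5 in the other scripts) before ending a training phase.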
265 |
266 | #------------------------------------------------------
267 | # Guided Training
268 | #------------------------------------------------------
269 | rnn_model = graph(feed_previous=False) # false for guided training
270 | saver = tf.train.Saver()
271 |
272 | init = tf.global_variables_initializer()
273 |
274 | with tf.Session() as sess:
275 | print "--- tensorflow session started ---"
276 | print "--- guided training started ---"
277 | #init.run()
278 | sess.run(init)
279 | # -- training
280 | while CONTINUE_FLAG:
281 | #-----------------------------------
282 | feed_dict = {rnn_model['enc_inp'][t]:X_train_guided[:,t].reshape(-1,input_dim) for t in range(x_length)}
283 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_guided[:,t].reshape(-1,output_dim) for t in range(y_length)})
284 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
285 | train_loss.append(loss_t)
286 | if ep % 100 == 0:
287 | temp_output = np.reshape(out_t,(10,-1))
288 | temp_output = temp_output.transpose()
289 | temp_y_found = temp_output.tolist()
290 | temp_err = RMSE(Y_train_guided_data,temp_y_found)
291 | train_RMSE.append(temp_err)
292 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
293 | #-------------------- STATE LOGGER--------------------------------
294 | # log state of identified values every 2000 epochs
295 | if ep % 2000 == 0:
296 | print "-- state logged @ epoch :",ep
297 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
298 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
299 |
300 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
301 | writeErrResult(name,train_loss) # write out the accumulated train loss
302 |
303 | temp_saver = rnn_model['saver']()
304 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
305 | #-----------------------------------------------------------------
306 | #-- condition to stop training
307 | #-- condition to keep track of past losses
308 | if ep < epoch_range:
309 | past_loss_values.append(loss_t)
310 | else:
311 | past_loss_values.pop(0)
312 | past_loss_values.append(loss_t)
313 | # increase the epoch count
314 | ep += 1
315 | #-- find if the entire range of previous losses are below a threshold
316 | count = 0
317 | for val in past_loss_values:
318 | if val < LOSS_LIMIT:
319 | count += 1
320 | #-- stopping condition for training
321 | if (count >= epoch_range or ep >= EPOCH_LIMIT) and ep >= MIN_EPOCH_LIM:
322 | CONTINUE_FLAG = False
323 | print "-- guided training stopped @ epoch :",ep
324 | temp_saver = rnn_model['saver']()
325 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
326 | print "--- checkpoint saved ---"
327 | sess.close() # redundant inside the with-block, but harmless
328 |
329 | #------------------------------------------------------
330 | # Un-guided Training
331 | #------------------------------------------------------
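# Un-guided phase: the default graph is reset and rebuilt with feed_previous=True, so the
# decoder now consumes its own previous predictions. The weights learned during the guided
# phase are restored from the saved checkpoint before training continues, first on the
# un-guided training set and then on the randomized set.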
332 | #rnn_model = graph(feed_previous=False)
333 | tf.reset_default_graph()
334 | rnn_model = graph(feed_previous=True) # un-guided training model
335 | init = tf.global_variables_initializer()
336 |
337 | CONTINUE_FLAG = True
338 |
339 | with tf.Session() as sess:
340 | print "--- unguided training started ---"
341 | sess.run(init)
342 | rnn_model['saver']().restore(sess,os.path.join(save_path_name,save_object_name)) # restore the weights learned during guided training
343 | print "--- loaded saved state ---"
344 | # -- training
345 | while CONTINUE_FLAG:
346 | #-----------------------------------
347 | feed_dict = {rnn_model['enc_inp'][t]:X_train_unguided[:,t].reshape(-1,input_dim) for t in range(x_length)}
348 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_unguided[:,t].reshape(-1,output_dim) for t in range(y_length)})
349 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
350 | train_loss.append(loss_t)
351 | if ep % 100 == 0:
352 | temp_output = np.reshape(out_t,(10,-1))
353 | temp_output = temp_output.transpose()
354 | temp_y_found = temp_output.tolist()
355 | temp_err = RMSE(Y_train_unguided_data,temp_y_found)
356 | train_RMSE.append(temp_err)
357 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
358 | #-------------------- STATE LOGGER--------------------------------
359 | # log state of identified values every 2000 epochs
360 | if ep % 2000 == 0:
361 | print "-- state logged @ epoch :",ep
362 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
363 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
364 |
365 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
366 | writeErrResult(name,train_loss) # write the full train-loss history
367 |
368 | temp_saver = rnn_model['saver']()
369 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
370 | #-----------------------------------------------------------------
371 | #-- condition to stop training
372 | #-- condition to keep track of past losses
373 | if ep < epoch_range:
374 | past_loss_values.append(loss_t)
375 | else:
376 | past_loss_values.pop(0)
377 | past_loss_values.append(loss_t)
378 | # increase the epoch count
379 | ep += 1
380 | #-- find if the entire range of previous losses are below a threshold
381 | count = 0
382 | for val in past_loss_values:
383 | if val < LOSS_LIMIT:
384 | count += 1
385 | #-- stopping condition for training
386 | if (count >= epoch_range or ep >= EPOCH_LIMIT) and ep >= MIN_EPOCH_LIM:
387 | CONTINUE_FLAG = False
388 | print "-- un-guided training stopped @ epoch :",ep
389 | print "--- randomized training started ---"
390 | CONTINUE_FLAG = True # reset the continue flag
391 | while CONTINUE_FLAG:
392 | #-----------------------------------
393 | feed_dict = {rnn_model['enc_inp'][t]:X_train_random[:,t].reshape(-1,input_dim) for t in range(x_length)}
394 | feed_dict.update({rnn_model['target_seq'][t]:Y_train_random[:,t].reshape(-1,output_dim) for t in range(y_length)})
395 | train_t,loss_t,out_t = sess.run([rnn_model['train_op'],rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
396 | train_loss.append(loss_t)
397 | if ep % 100 == 0:
398 | temp_output = np.reshape(out_t,(10,-1))
399 | temp_output = temp_output.transpose()
400 | temp_y_found = temp_output.tolist()
401 | temp_err = RMSE(Y_train_random,temp_y_found); random_train_RMSE.append(temp_err); train_RMSE.append(temp_err) # recompute the RMSE for the randomized phase (temp_err was previously left stale) and record it
402 | print ep," loss :",loss_t ," output size :",np.array(out_t).shape
403 | #-------------------- STATE LOGGER--------------------------------
404 | # log state of identified values every 2000 epochs
405 | if ep % 2000 == 0:
406 | print "-- state logged @ epoch :",ep
407 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
408 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
409 |
410 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
411 | writeErrResult(name,train_loss) # write the full train-loss history
412 |
413 | temp_saver = rnn_model['saver']()
414 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
415 | #-----------------------------------------------------------------
416 | #-- condition to stop training
417 | #-- condition to keep track of past losses
418 | if ep < epoch_range:
419 | past_loss_values.append(loss_t)
420 | else:
421 | past_loss_values.pop(0)
422 | past_loss_values.append(loss_t)
423 | # increase the epoch count
424 | ep += 1
425 | #-- find if the entire range of previous losses are below a threshold
426 | count = 0
427 | for val in past_loss_values:
428 | if val < LOSS_LIMIT:
429 | count += 1
430 | #-- stopping condition for training
431 | if (count >= epoch_range or ep >= EPOCH_LIMIT) and ep >= MIN_EPOCH_LIM:
432 | CONTINUE_FLAG = False
433 | print "-- randomized training stopped @ epoch :",ep
434 | name = "Seq2seq_"+name_flag+"_LSTM_RANDOM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
435 | writetofile(name,random_train_RMSE) # save the randomized-phase RMSE values (recorded every 100th epoch)
436 |
437 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
438 | writetofile(name,train_RMSE) # save the train RMSE values (recorded every 100th epoch)
439 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_LOSS_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
440 | writeErrResult(name,train_loss) # write the full train-loss history
441 |
442 | print "--- All training complete ---"
443 | temp_saver = rnn_model['saver']()
444 | save_path = temp_saver.save(sess,os.path.join(save_path_name,save_object_name))
445 | print "--- session saved ---"
446 |
447 | loss_t,out_t = sess.run([rnn_model['loss'],rnn_model['reshaped_outputs']],feed_dict)
448 | temp_output = np.reshape(out_t,(10,-1))
449 | temp_output = temp_output.transpose()
450 | temp_y_found = temp_output.tolist()
451 | temp_err = RMSE(Y_train_random,temp_y_found)
452 | name = "Seq2seq_"+name_flag+"_LSTM_TRAIN_FOUND_RMSE_x_"+str(x_length)+"_y_"+str(y_length)+"data.txt"
453 | writeErrResult(name,temp_err)
454 |
455 | print "--- testing started ---"
456 | feed_dict2 = {rnn_model['enc_inp'][t]:X_test[:,t].reshape(-1,input_dim) for t in range(x_length)}
457 |
458 | Y_temp = np.zeros((len(X_test),y_length), dtype=np.float)
459 | feed_dict2.update({rnn_model['target_seq'][t]:Y_temp[:,t].reshape(-1,output_dim) for t in range(y_length)})
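# With feed_previous=True the decoder does not use the target placeholders when producing
# predictions, so target_seq is fed an all-zero matrix here only to populate the feed_dict;
# the test predictions are read from reshaped_outputs alone.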
460 | #--
461 | #print np.array(rnn_model['reshaped_outputs']).shape
462 | out_t = sess.run([rnn_model['reshaped_outputs']],feed_dict2)
463 | print "prediction size: ", np.array(out_t).shape
464 | matrix = np.reshape(out_t,(10,-1))
465 | print "reshaped output: ", matrix.shape
466 | matrix = matrix.transpose()
467 | print "transposed matrix: ",matrix.shape
468 | Y_found = matrix.tolist()
469 | # -- testing
470 |
471 | #------- saving the outputs of Y from testing
472 | name = name_flag+"_"+str(x_length)+"_"+str(y_length)+"_pred.txt"
473 | writetofile(name,Y_found)
474 |
475 | err = RMSE(Y_test_data,Y_found)
476 |
477 | name = name_flag+"_"+str(x_length)+"_"+str(y_length)+"_test_rmse.txt"
478 | writeErrResult(name,err)
479 |
480 | print "----- run complete -----"
--------------------------------------------------------------------------------
/DataSet/s21_average_sweep.txt:
--------------------------------------------------------------------------------
1 | -56.2838132001079
2 | -55.0956905655113
3 | -53.7073891746474
4 | -52.8959272444462
5 | -52.5204805318990
6 | -51.8862330445598
7 | -51.3439554406128
8 | -50.8041911669796
9 | -49.8518564095250
10 | -49.0511250626660
11 | -48.9234282582538
12 | -49.0554136113578
13 | -49.0478951169528
14 | -48.8868043955849
15 | -48.9842209674213
16 | -48.5336701184504
17 | -48.4231513144278
18 | -48.5404134916505
19 | -48.4553062168588
20 | -48.4048713788040
21 | -48.3904148720803
22 | -48.6571317272491
23 | -49.2155784371836
24 | -49.7306797163067
25 | -50.1559561055438
26 | -50.0712695261817
27 | -50.4738313977078
28 | -50.8977935255918
29 | -51.4564144006758
30 | -51.6426425910953
31 | -52.0386430278233
32 | -52.4134831799110
33 | -52.6582037674039
34 | -52.3806323980294
35 | -52.2066114060274
36 | -52.0785151191611
37 | -50.8610883639503
38 | -50.4400175601999
39 | -50.4363503394992
40 | -49.7067148982940
41 | -49.2451266974039
42 | -49.7234759501738
43 | -50.2132386588032
44 | -49.0630107581714
45 | -47.9260763485498
46 | -47.2868970309274
47 | -47.2746793170403
48 | -47.6472506602122
49 | -48.2872934465727
50 | -48.7624807480794
51 | -49.2724254551170
52 | -49.7858059326615
53 | -49.7531208660137
54 | -48.9752599380125
55 | -48.4031682337188
56 | -47.8439289434668
57 | -47.6680786905426
58 | -47.2492053047816
59 | -47.1597316482443
60 | -47.2601416689138
61 | -47.1818212935755
62 | -46.9656536938409
63 | -46.9036199936532
64 | -46.6456288304309
65 | -46.3276420330370
66 | -46.3750862782380
67 | -46.3630117009045
68 | -46.0976922349348
69 | -46.2913590698783
70 | -46.6906950849077
71 | -47.0464708848954
72 | -47.7400521118040
73 | -48.8594555228266
74 | -49.8052405849418
75 | -49.8377509885431
76 | -49.5804191961204
77 | -49.5802815463896
78 | -49.5242604118966
79 | -49.5852654889503
80 | -49.4694816036569
81 | -48.7716516344114
82 | -48.4570329358732
83 | -48.8391628739336
84 | -50.1011829289390
85 | -51.8348168511257
86 | -53.1405646217163
87 | -54.3593385780302
88 | -55.6974600804705
89 | -57.6748532999419
90 | -60.5763236093412
91 | -59.1303012017886
92 | -58.4525197265004
93 | -57.8109542817564
94 | -56.4149132685764
95 | -55.9248761844664
96 | -55.5055659717229
97 | -55.4476778110176
98 | -56.3666278490251
99 | -57.7676985343348
100 | -57.6638809567728
101 | -58.7898805720571
102 | -59.6656259462439
103 | -58.8124110817020
104 | -56.7693972632193
105 | -56.0766685905117
106 | -55.9400726487759
107 | -56.1368890542454
108 | -56.2882519314926
109 | -56.9262034143394
110 | -58.0635943104347
111 | -57.6343934290336
112 | -56.8711066841179
113 | -57.3668836238005
114 | -58.3322938220540
115 | -58.3780215919405
116 | -57.0061825341705
117 | -54.7002639433488
118 | -53.8983987104871
119 | -53.5370343170158
120 | -53.0785076468652
121 | -53.2411138551099
122 | -53.4466075077927
123 | -53.6847006341978
124 | -53.6846190103202
125 | -53.5847773780218
126 | -54.1443625280027
127 | -54.3169512918250
128 | -53.0343347059900
129 | -52.1889388856425
130 | -52.3261157623922
131 | -53.4403398657794
132 | -54.4815894460180
133 | -55.6312847500175
134 | -54.7935201442964
135 | -53.3243055025313
136 | -52.9363333997995
137 | -52.6652847121181
138 | -52.3753459176945
139 | -51.7421341587017
140 | -51.2344748329424
141 | -51.0725191728448
142 | -51.1892414921176
143 | -50.7918094070476
144 | -49.8204667712088
145 | -48.9231446103464
146 | -49.1111847663991
147 | -49.6159318713507
148 | -50.3345886369741
149 | -50.6950948599471
150 | -50.1263898592465
151 | -49.3441927727390
152 | -48.7160762214927
153 | -47.9695563667559
154 | -47.4303811797901
155 | -47.2347052273526
156 | -47.1234577480509
157 | -46.8149661706596
158 | -46.8714119039101
159 | -47.1122647862145
160 | -47.4236864891429
161 | -48.0494291007795
162 | -48.6772618608072
163 | -48.4201292345358
164 | -48.1026546703208
165 | -48.2980821710614
166 | -48.2741056229975
167 | -48.3322229596412
168 | -49.0192152956385
169 | -49.6209952569341
170 | -49.7590450656001
171 | -50.1197969818384
172 | -50.2232140220547
173 | -50.0992234814154
174 | -49.9448479789007
175 | -49.9251552232921
176 | -49.9987388398394
177 | -50.1770329023574
178 | -50.5316439474267
179 | -50.8174859008638
180 | -51.6213568397375
181 | -52.9732510833672
182 | -54.0862700183523
183 | -54.5296537447956
184 | -53.1155229921563
185 | -52.0787248809840
186 | -52.2978949397350
187 | -53.2103578784842
188 | -54.4291162724533
189 | -55.7604679442675
190 | -55.8054810007243
191 | -56.2140910116022
192 | -55.6848905091991
193 | -54.5792827805688
194 | -52.7449216555536
195 | -52.0329874089778
196 | -51.6441235934767
197 | -50.9562767488488
198 | -50.6292220314341
199 | -51.1177686824407
200 | -51.9053389263416
201 | -51.7056176121317
202 | -51.5533585211257
203 | -51.7450793792019
204 | -52.2760680121441
205 | -53.3565705107188
206 | -54.1159504406124
207 | -53.2576843655142
208 | -52.1618756993895
209 | -51.7058143169204
210 | -52.0875710501012
211 | -52.1934441271254
212 | -52.1581022922462
213 | -51.9553715186652
214 | -51.7929080641084
215 | -51.8409361848437
216 | -51.3954498275919
217 | -50.2118978130170
218 | -49.3001125288530
219 | -48.8253064973154
220 | -48.6927121124952
221 | -48.6396827224128
222 | -48.6216081745721
223 | -48.2711992768552
224 | -48.1000090928539
225 | -48.4439350017542
226 | -48.6145305787887
227 | -48.3329903201238
228 | -48.7685495993440
229 | -49.6374200903262
230 | -49.7810286006190
231 | -50.1829645511920
232 | -50.9055206087431
233 | -51.3340862970541
234 | -51.3480835732722
235 | -50.7297682986753
236 | -50.5437078135751
237 | -50.0973521799090
238 | -49.2965756071642
239 | -49.3829085634791
240 | -49.8656658198196
241 | -50.4983037517449
242 | -51.3214255976301
243 | -51.7665395932682
244 | -52.2600867901882
245 | -52.7931357014240
246 | -53.0746225266648
247 | -52.8662087801668
248 | -52.8676826861302
249 | -53.4990693136758
250 | -54.1322665749036
251 | -54.5133631418994
252 | -55.1608145069976
253 | -55.2655461511422
254 | -54.9087740598537
255 | -54.5953094588025
256 | -54.5480536001494
257 | -55.0670598524448
258 | -55.3631803148958
259 | -55.1675717804628
260 | -55.3232851751306
261 | -55.3379929624886
262 | -54.9184119449991
263 | -54.5701799328522
264 | -54.2993734595816
265 | -54.6301844627829
266 | -55.4775705600509
267 | -56.3576526539631
268 | -57.5440895405028
269 | -57.3506867844660
270 | -56.9635718606036
271 | -56.3232209627176
272 | -56.1281346918380
273 | -56.8881301608136
274 | -58.0387842932656
275 | -58.4379406989057
276 | -57.9494441217643
277 | -57.5966593819896
278 | -57.3138428380487
279 | -57.1636399350199
280 | -56.5077712073678
281 | -55.5322666616435
282 | -55.0847448063417
283 | -55.3886287499686
284 | -55.0429546696319
285 | -55.0543698092336
286 | -55.3642395163305
287 | -56.3229385029812
288 | -57.3171606467628
289 | -58.2230801490686
290 | -61.8203351808405
291 | -63.9533383458401
292 | -59.4019315309137
293 | -56.4470812859542
294 | -55.4710082672617
295 | -54.3760859172533
296 | -52.4919091105717
297 | -51.4144586744107
298 | -50.8641096378750
299 | -51.0288343925870
300 | -51.1328008093480
301 | -51.1292850226581
302 | -51.0732532577025
303 | -51.4468297593101
304 | -53.0650204880374
305 | -54.2690574071438
306 | -53.8629628310308
307 | -52.9157247210320
308 | -52.2915536832524
309 | -51.7988291738578
310 | -51.6278669391466
311 | -52.2829612423754
312 | -52.9571414548794
313 | -52.4174514042095
314 | -51.7485657184258
315 | -51.6049091448476
316 | -52.0442230529838
317 | -52.0188097074798
318 | -52.0232861698733
319 | -51.8016161220506
320 | -51.4449129877200
321 | -50.8491156057194
322 | -50.5920102666569
323 | -51.2835483612803
324 | -52.0018174358843
325 | -52.6950571293243
326 | -53.7355353290050
327 | -53.3657619338071
328 | -53.0236112673933
329 | -53.0670944153847
330 | -53.0492854755781
331 | -52.4125126429710
332 | -51.7858150347448
333 | -52.1916363505856
334 | -52.7705873570683
335 | -52.9354129656707
336 | -52.5992576132962
337 | -52.6640945715424
338 | -52.9494175792950
339 | -54.4972377718174
340 | -55.5814201962807
341 | -55.6358489707571
342 | -55.7947890504265
343 | -56.5234816312005
344 | -56.6425674336365
345 | -55.9765975042545
346 | -55.3004041951439
347 | -54.1561360421455
348 | -53.3392780865990
349 | -53.2572684359710
350 | -53.8456523581541
351 | -54.6823190094996
352 | -54.2664621168856
353 | -54.1095192637983
354 | -54.0690843102747
355 | -54.1540210109667
356 | -54.0463398058357
357 | -55.0319900951686
358 | -56.2714811165074
359 | -56.6128462679149
360 | -56.2001225432030
361 | -55.2353256915926
362 | -53.3845224882117
363 | -52.3482090089767
364 | -52.1274790433896
365 | -52.0960408608435
366 | -51.4581901035222
367 | -51.1715741967074
368 | -51.1424450635258
369 | -51.8912531509912
370 | -51.9151082936490
371 | -51.2267936093435
372 | -50.5657824574457
373 | -50.7691833057873
374 | -51.1273664194808
375 | -51.8435611598356
376 | -53.0548289840257
377 | -53.6729843022299
378 | -53.2151333723735
379 | -53.0938483793547
380 | -53.7369299261278
381 | -54.8042514609236
382 | -55.2648768566016
383 | -55.1132633680913
384 | -55.2065626495545
385 | -55.5860021994874
386 | -55.5670426272830
387 | -56.3025979267242
388 | -56.3021993103944
389 | -55.7495156763494
390 | -55.3225155772736
391 | -54.7363874674439
392 | -54.7140483188617
393 | -55.6108741522485
394 | -57.0862612265521
395 | -58.0680973566218
396 | -59.0666150657834
397 | -59.3327245834177
398 | -59.4740046515026
399 | -59.4267294581410
400 | -61.7090226316552
401 | -63.5284160779037
402 | -65.1388602307386
403 | -67.5987250541067
404 | -62.9361007388757
405 | -59.4953702583175
406 | -57.9404599426474
407 | -58.2000745612949
408 | -59.6344056993809
409 | -59.9001455436884
410 | -59.1436756160423
411 | -57.7889676180603
412 | -57.1298214276739
413 | -57.1315627739618
414 | -57.2184787160651
415 | -57.0964781209172
416 | -56.6615318376549
417 | -56.2920267771350
418 | -57.1723201872726
419 | -59.1586713007718
420 | -59.4977875852224
421 | -60.3167923602976
422 | -60.7434811284187
423 | -61.8512494625098
424 | -65.6818156314916
425 | -64.6997762383643
426 | -61.5325868879093
427 | -59.5108034454456
428 | -59.1663576029642
429 | -58.6159570051390
430 | -58.3501752800618
431 | -58.9248144008242
432 | -57.3160926448638
433 | -55.5444327147708
434 | -54.1719547981804
435 | -53.5352908922905
436 | -53.4003537506288
437 | -53.3951152984347
438 | -53.7993571642781
439 | -54.6022591270766
440 | -54.4477345792615
441 | -54.0810524428150
442 | -54.6712683929387
443 | -55.5964987818710
444 | -56.4384829833985
445 | -56.6132638322278
446 | -55.9219605745571
447 | -55.7973116401208
448 | -56.0095739473559
449 | -55.8932390486929
450 | -55.1697500193800
451 | -53.5522160088646
452 | -52.6632038767423
453 | -52.1445065429125
454 | -52.7200048937751
455 | -53.3287387989198
456 | -53.6525783216703
457 | -54.2603480756159
458 | -55.2619084061773
459 | -55.9733293152720
460 | -56.3348088151082
461 | -56.8078201305483
462 | -57.0776691780253
463 | -57.3788652170087
464 | -59.1016338332168
465 | -61.0240686977157
466 | -60.6939760260840
467 | -58.1700532953300
468 | -55.7072803988970
469 | -54.3151358818086
470 | -54.1089198260931
471 | -53.5901296485382
472 | -53.7179717877000
473 | -53.8330776263971
474 | -53.2845068573489
475 | -52.3090158394030
476 | -51.8513985918384
477 | -51.7461473040241
478 | -51.9342888323408
479 | -52.7547935325438
480 | -53.2685870758481
481 | -53.7071278959644
482 | -54.3434537549487
483 | -54.7336201744294
484 | -55.4179553615812
485 | -56.1958991202509
486 | -57.2339698472354
487 | -58.0535888365649
488 | -58.4616741742824
489 | -59.7216441508744
490 | -61.6609854354839
491 | -64.6351555231476
492 | -67.3361825249905
493 | -67.8739271672866
494 | -65.8246388621161
495 | -63.8729462261926
496 | -63.0794499124222
497 | -62.2861809416209
498 | -61.6077872770051
499 | -61.7168757956434
500 | -63.1062918899217
501 | -65.3151741054308
502 | -67.5989027203932
503 | -68.7842334868748
504 | -66.5703600367773
505 | -62.7444170995376
506 | -60.7960323935848
507 | -60.0870933850669
508 | -60.6595072081897
509 | -60.8611311439314
510 | -60.9795582880791
511 | -60.7792638947328
512 | -59.7516978055115
513 | -58.3802664105300
514 | -57.3765573253138
515 | -57.5016713417697
516 | -57.9821222366419
517 | -58.8946602738761
518 | -60.2100931867488
519 | -61.1875374881263
520 | -62.4180646890073
521 | -59.4637943443433
522 | -57.5833187391108
523 | -57.9922837388193
524 | -59.6969912223380
525 | -60.8770712769350
526 | -60.6447063618747
527 | -59.8556793847244
528 | -58.3441263600070
529 | -57.0146082983288
530 | -56.1148181109967
531 | -55.6932216034653
532 | -55.7345437728197
533 | -55.6641915928962
534 | -55.8805853657248
535 | -56.1564257865391
536 | -55.8230708577410
537 | -55.7332470643481
538 | -55.9008312831755
539 | -56.2942396803193
540 | -56.3764617265361
541 | -56.3462505768445
542 | -56.5389037264497
543 | -56.4738386176619
544 | -56.1675474667469
545 | -55.8481029334332
546 | -55.4333537832659
547 | -54.6075735062039
548 | -53.0543120186042
549 | -51.9026938493357
550 | -52.0469845262084
551 | -52.2156307969917
552 | -51.7979693987728
553 | -51.3712336796950
554 | -50.5098408892394
555 | -49.8127122685108
556 | -49.8458760547282
557 | -49.9172136567674
558 | -49.6787705820160
559 | -49.7027139278540
560 | -50.0447661893172
561 | -50.5738812216765
562 | -50.8444000104348
563 | -51.6725988341588
564 | -52.6520361501712
565 | -53.4544954394396
566 | -53.8388589412379
567 | -53.4853628640474
568 | -53.5986280697455
569 | -53.8659504165454
570 | -54.5024551439048
571 | -54.6869800513882
572 | -53.8097351395190
573 | -52.6354728019433
574 | -51.8161542647129
575 | -51.0946265687436
576 | -50.7250555711989
577 | -50.8381387345738
578 | -50.6868800783601
579 | -50.3046275200483
580 | -50.0573389457838
581 | -50.1457130184384
582 | -50.7991756249999
583 | -50.9712268202748
584 | -49.7954263475416
585 | -48.5838889894907
586 | -47.9502101862968
587 | -47.6506090974371
588 | -47.4080894229850
589 | -47.2905716534570
590 | -47.3312719086576
591 | -47.1872569744680
592 | -47.0690947622734
593 | -47.2975436965076
594 | -47.8930366400132
595 | -47.9700024355495
596 | -47.4074244504539
597 | -46.6089344782895
598 | -46.5357497131219
599 | -46.8816820257243
600 | -47.1386791322070
601 | -47.3959449006275
602 | -47.7678821078970
603 | -48.3436967929704
604 | -48.5959823704482
605 | -48.8711416596028
606 | -49.3382658542975
607 | -49.2985011816317
608 | -48.9004148970551
609 | -48.8646259024023
610 | -49.3952719958035
611 | -49.6389848149734
612 | -49.7541804705625
613 | -50.0360223886218
614 | -50.0758989682472
615 | -49.6448476683349
616 | -49.1392478019212
617 | -48.8555593315202
618 | -48.7479193590646
619 | -48.4726366490851
620 | -48.0438614136667
621 | -47.7619723987618
622 | -47.6993019558815
623 | -48.0052796501540
624 | -48.7581085396051
625 | -49.2448922905500
626 | -49.1440696625620
627 | -48.7958962517825
628 | -48.4121513925362
629 | -48.2970746685160
630 | -48.4482149307389
631 | -48.8728429309068
632 | -49.1464181256068
633 | -49.2479971376426
634 | -49.3834906257670
635 | -49.8001458754592
636 | -49.9536412774848
637 | -49.5521871499923
638 | -48.9672432098827
639 | -49.4915104113600
640 | -51.0877539671627
641 | -52.1690313763700
642 | -52.6693072733970
643 | -52.4851497436348
644 | -51.9912391335044
645 | -51.4876695570837
646 | -51.6616333485612
647 | -52.2794194991618
648 | -52.9122594124174
649 | -53.0524893242819
650 | -52.0071629333352
651 | -51.4355496138251
652 | -51.3371474540060
653 | -52.0330245919621
654 | -52.6558688154400
655 | -53.7734514947122
656 | -54.4410493985024
657 | -54.1239574657292
658 | -54.1753372649054
659 | -55.0080564976408
660 | -55.1402369957412
661 | -54.2030634841925
662 | -52.4754802583408
663 | -51.7039075565903
664 | -51.6333646540221
665 | -52.3808956914562
666 | -53.1483402047377
667 | -53.2606204256099
668 | -53.4609355560196
669 | -54.1036707804901
670 | -54.7449578701241
671 | -55.5131028390850
672 | -56.3624868192740
673 | -57.2029516489085
674 | -56.3493157623446
675 | -54.3656562472848
676 | -53.1434776832526
677 | -53.1384135885218
678 | -53.6020155376185
679 | -53.2336598797944
680 | -52.2810888446620
681 | -51.2762993959688
682 | -50.7370426028796
683 | -50.9351848088865
684 | -51.1660365825124
685 | -51.4625559437793
686 | -51.7134722404443
687 | -51.8556613422279
688 | -52.1320043307362
689 | -52.6322269086531
690 | -53.3163295534850
691 | -54.1789508961146
692 | -55.5360887043850
693 | -55.9991473577668
694 | -55.9386135577112
695 | -55.1831534961424
696 | -53.3762858998186
697 | -52.0786791077353
698 | -51.6965434826883
699 | -51.1452217201643
700 | -50.5394490395471
701 | -50.3178777599554
702 | -49.9698173411522
703 | -49.3700391482648
704 | -48.8997650697686
705 | -48.3734937032375
706 | -48.1440063790875
707 | -48.4300429937135
708 | -48.7654462575464
709 | -48.8024983856775
710 | -48.6475677199816
711 | -48.2852082071657
712 | -47.9637240385320
713 | -47.8370027995620
714 | -47.7850616135006
715 | -47.6680508586431
716 | -47.7546000689892
717 | -47.9002769625283
718 | -48.0243383302400
719 | -48.3999526225085
720 | -48.5453719724314
721 | -48.9168957321288
722 | -49.4758700853652
723 | -49.4105737799931
724 | -49.2004369993763
725 | -49.0355204967360
726 | -49.3942635100564
727 | -49.9534544186046
728 | -50.3941023545918
729 | -50.1228412832117
730 | -49.5419308694342
731 | -48.9658592467223
732 | -49.1358548379772
733 | -49.2452940373481
734 | -49.1591016636802
735 | -49.2670540710847
736 | -49.1768696332767
737 | -49.0547036049927
738 | -48.3689528104999
739 | -48.1750469620141
740 | -48.9664616179283
741 | -49.3399774019150
742 | -49.2355745543868
743 | -48.7444381326196
744 | -48.1235852279675
745 | -47.6217620853794
746 | -47.4687036952535
747 | -47.5951292641660
748 | -47.7606757740160
749 | -48.0272781686340
750 | -48.3524184046909
751 | -48.8133035613391
752 | -49.4720960452109
753 | -49.9277212122301
754 | -49.8050721015401
755 | -49.3040343101833
756 | -49.1581163586526
757 | -49.2171815481302
758 | -50.0286846758652
759 | -50.9806937384057
760 | -51.1519363508735
761 | -50.3817126938913
762 | -49.9237266979651
763 | -50.5172959176427
764 | -51.3311263811668
765 | -51.2371362666264
766 | -51.2070738446282
767 | -51.0395897178681
768 | -50.5532003667690
769 | -49.8605174979627
770 | -49.0594481969068
771 | -48.6366625338410
772 | -48.2196319634645
773 | -47.9665196238043
774 | -47.9618013115201
775 | -47.6267903951265
776 | -47.2945338586339
777 | -46.9635915938868
778 | -46.7005311416945
779 | -46.7923584721408
780 | -47.3825692190020
781 | -47.5574375136123
782 | -47.6980098621002
783 | -48.1246775007500
784 | -48.5900684499100
785 | -48.6603183910625
786 | -48.7486348398479
787 | -48.7660764542508
788 | -48.8312215399253
789 | -49.2587143505105
790 | -49.4896896121933
791 | -49.5618218966999
792 | -49.5101546821518
793 | -49.3108608018315
794 | -49.6030472484713
795 | -49.7836037322325
796 | -49.6607567869740
797 | -50.1110030947260
798 | -50.4658030092903
799 | -50.9119117593588
800 | -51.2212577358531
801 | -51.3512593759874
802 | -51.5711174302837
803 | -51.5610443108967
804 | -51.8220379747095
805 | -52.3172639645796
806 | -52.9328671519799
807 | -52.9863982585486
808 | -52.5165012028823
809 | -52.3352468898704
810 | -52.1553017698750
811 | -52.2228081703721
812 | -52.5798219129055
813 | -53.0350652341048
814 | -53.0549302964198
815 | -53.0223675475612
816 | -52.8770194078152
817 | -52.7112516322546
818 | -52.5021251508788
819 | -52.6416903118636
820 | -52.8302065424942
821 | -53.5767885776129
822 | -54.1496548492548
823 | -54.7456207594568
824 | -55.1586879972362
825 | -54.9978723563307
826 | -55.3259647970182
827 | -56.1012447732089
828 | -56.0483034575265
829 | -55.2655670886457
830 | -54.6654861328603
831 | -54.4947186970572
832 | -54.5381814394620
833 | -54.4853078809819
834 | -54.6656446672502
835 | -54.9188020334165
836 | -55.1137786021635
837 | -55.6888176233898
838 | -56.7886408841234
839 | -56.8437474165624
840 | -56.3137449757942
841 | -56.1431121521087
842 | -56.4649246044748
843 | -57.1575255201161
844 | -57.3955484856606
845 | -58.0135835637677
846 | -59.8635516441522
847 | -61.8711764310111
848 | -61.6683810591258
849 | -60.5485331261733
850 | -59.8263375539866
851 | -59.1130009631051
852 | -57.6036605175868
853 | -56.9788040559370
854 | -57.2837240901213
855 | -56.8244401316574
856 | -56.1769924636595
857 | -56.1132067054206
858 | -55.7039474470799
859 | -54.8567985507575
860 | -54.7281860642135
861 | -54.4837835799012
862 | -54.1317890413724
863 | -54.2804153648391
864 | -55.3153978806190
865 | -56.1044929645689
866 | -57.2493116932601
867 | -58.3091403838365
868 | -59.4602254250230
869 | -59.7614883300541
870 | -59.6247043680489
871 | -58.8825178521067
872 | -58.2385197816049
873 | -58.3339064895622
874 | -59.2476711366893
875 | -59.6695489914286
876 | -58.9009779764525
877 | -57.2914830185827
878 | -55.7161316739904
879 | -54.9169860643841
880 | -54.3264061657297
881 | -54.4275104825382
882 | -55.2604957701694
883 | -56.6189158416680
884 | -56.9742377275374
885 | -57.1851651596625
886 | -57.9375539244844
887 | -58.4534550069198
888 | -59.0187914653689
889 | -58.7543609645882
890 | -58.1823570263701
891 | -57.7980563640620
892 | -56.9198208276275
893 | -56.3347669620642
894 | -55.7770192587641
895 | -55.9409547072964
896 | -55.7029975567545
897 | -55.5934723472635
898 | -55.4922639400877
899 | -56.1138368615761
900 | -57.5519876919883
901 | -58.2874511093577
902 | -58.5134774202864
903 | -58.4996921962990
904 | -58.6045691435937
905 | -58.5347312165030
906 | -58.8318642343133
907 | -59.2712917198754
908 | -59.3784421379197
909 | -58.8297055183007
910 | -58.1403043108668
911 | -57.5172838332293
912 | -57.0784997685929
913 | -57.0383553304223
914 | -57.8859075273446
915 | -58.7553086021144
916 | -58.5447790946546
917 | -58.0705999297282
918 | -57.3277570416603
919 | -56.4853529282085
920 | -55.8057540045673
921 | -55.1104465785929
922 | -54.6348301964656
923 | -54.5570236110588
924 | -54.7307140029718
925 | -55.0448258918025
926 | -55.4158651071877
927 | -55.7005030811655
928 | -55.6428459853677
929 | -56.1372825336540
930 | -56.2736004330784
931 | -55.8893211680545
932 | -55.9841622797536
933 | -55.9918379124592
934 | -56.4820651097672
935 | -57.4212914213524
936 | -58.1555927704765
937 | -58.5600815622679
938 | -59.3905335190438
939 | -60.7130950886474
940 | -62.4441349232447
941 | -64.0770529748554
942 | -64.5339539103560
943 | -65.9129433997162
944 | -67.7802649680132
945 | -69.0396899564186
946 | -66.7101149424404
947 | -64.3446000274395
948 | -62.9865157636456
949 | -62.5575988892006
950 | -62.4353081310049
951 | -62.5618304493378
952 | -64.4404926493728
953 | -65.9032593899093
954 | -65.7284908180851
955 | -64.7753206702860
956 | -64.4887548650813
957 | -64.9093160045738
958 | -64.3770326962248
959 | -64.7663366439465
960 | -66.1766933467885
961 | -63.8627415985765
962 | -62.5594720177661
963 | -61.9399470761746
964 | -61.5623899941991
965 | -62.3733351813305
966 | -63.5887824583983
967 | -66.1255941190581
968 | -67.2796246974143
969 | -68.1811704776338
970 | -69.2234867944798
971 | -68.7620946998242
972 | -66.8233008865125
973 | -64.5502087956498
974 | -63.3829880119880
975 | -63.4618748254275
976 | -64.1128889695113
977 | -64.5685941367043
978 | -63.9921069405645
979 | -63.9129429310180
980 | -64.4464736401399
981 | -63.4011885364310
982 | -62.4706257440233
983 | -61.4782451526870
984 | -61.1533010167919
985 | -61.3934027241474
986 | -62.8091422484525
987 | -63.6755099656340
988 | -62.9016333040074
989 | -62.1155455824559
990 | -61.7994507183779
991 | -61.6910094507187
992 | -61.5811033699261
993 | -61.3828707270631
994 | -60.8444226532247
995 | -60.5107047172838
996 | -60.0501182306086
997 | -59.8973711321276
998 | -60.6885169874288
999 | -62.6325291944830
1000 | -64.4048335796185
1001 | -65.0966644322391
--------------------------------------------------------------------------------
/DataSet/s31_average_sweep.txt:
--------------------------------------------------------------------------------
1 | -72.9560224092630
2 | -72.3898645964990
3 | -72.5085093448917
4 | -71.0239804329129
5 | -67.8808874940181
6 | -65.9439461401353
7 | -65.6456922736741
8 | -66.2684469397691
9 | -67.9506275947012
10 | -69.1314358722105
11 | -70.7860309403523
12 | -66.9596282561805
13 | -64.3369545191511
14 | -62.3192716561974
15 | -60.8197108013081
16 | -59.4782495397860
17 | -58.6256946675348
18 | -58.2102578083404
19 | -58.8901816540356
20 | -60.3727503341498
21 | -61.3864964370536
22 | -62.7746313299463
23 | -63.6622363579593
24 | -63.0939588043666
25 | -64.1824974793200
26 | -64.5017827615797
27 | -62.1389768731163
28 | -60.2924617323723
29 | -61.2168649310859
30 | -62.2633393358733
31 | -61.3650571520979
32 | -60.5565313551135
33 | -59.5963022217177
34 | -59.0063295375950
35 | -58.9701082305096
36 | -59.7524831138226
37 | -60.7256282947685
38 | -61.9771625551973
39 | -61.5538544590323
40 | -61.5272533646882
41 | -61.8322723800200
42 | -62.8937531360824
43 | -64.8409677452257
44 | -64.3218475221971
45 | -63.2519548948468
46 | -63.9871567867828
47 | -66.8941110277757
48 | -70.1632499639751
49 | -70.9282065957862
50 | -71.4718871803809
51 | -71.9656263149232
52 | -71.6094091259626
53 | -70.1922578105655
54 | -67.3957977854967
55 | -64.7954692808473
56 | -64.0809003438755
57 | -64.3548352280296
58 | -64.2438781932260
59 | -63.8602746490880
60 | -62.2740339851219
61 | -61.9765491540262
62 | -64.1671109204025
63 | -66.4085714422084
64 | -66.3592463821410
65 | -67.9554718019364
66 | -73.3347885354710
67 | -69.0754343234990
68 | -66.3613004135968
69 | -66.4937203715870
70 | -67.1308195721705
71 | -66.2521472107835
72 | -67.6170963056870
73 | -67.2729557683545
74 | -63.8752464108539
75 | -61.6169337325830
76 | -60.6300846930564
77 | -60.4720720154931
78 | -60.8371713162126
79 | -61.7220934586831
80 | -62.5302009384668
81 | -62.8814288383289
82 | -63.7434002060199
83 | -64.1920091010688
84 | -62.7590623948366
85 | -64.1955581390498
86 | -63.6878937314672
87 | -62.9542084201453
88 | -61.9918219454039
89 | -61.4110760937023
90 | -61.6957840625531
91 | -61.3932428534108
92 | -61.2314111703973
93 | -61.0177840509104
94 | -61.7118399758410
95 | -61.9677826802950
96 | -60.8713200012744
97 | -59.9551031875366
98 | -59.1854972039054
99 | -59.3179655526818
100 | -59.1729878553962
101 | -59.9850768914651
102 | -59.3796887871208
103 | -57.9889292998921
104 | -57.4776017950448
105 | -58.5001952507935
106 | -60.0295394575850
107 | -61.8360917470498
108 | -62.2984789484041
109 | -61.3203676980972
110 | -61.1584961525796
111 | -63.3653356626369
112 | -67.0862761926901
113 | -69.5407807138889
114 | -73.8026155796414
115 | -73.4998215996122
116 | -67.6027191219254
117 | -63.9347490479014
118 | -63.0416912836843
119 | -62.1984319516414
120 | -62.9751301388608
121 | -65.2884354956171
122 | -70.4106994249921
123 | -72.0021376743196
124 | -70.4884294949503
125 | -72.1664113309757
126 | -75.8355909012019
127 | -73.1905470578397
128 | -72.3382937162585
129 | -71.7250861653245
130 | -69.2571153044959
131 | -71.8645694266355
132 | -73.0713476554226
133 | -71.7246238235836
134 | -70.1933705093512
135 | -70.3042552012880
136 | -73.0035405331420
137 | -73.2109243079793
138 | -66.1704607168653
139 | -63.1446501528084
140 | -62.7456842204676
141 | -62.0063980051977
142 | -60.1393935527181
143 | -58.8521176955374
144 | -58.1319926192394
145 | -57.1007465427077
146 | -56.1062771535789
147 | -56.0709194897765
148 | -56.2339508170405
149 | -56.9814412290196
150 | -58.1475355889122
151 | -59.8933431933392
152 | -62.3123415301400
153 | -65.1617533570613
154 | -64.3697843620015
155 | -61.7176203945752
156 | -60.1905294821723
157 | -58.6892633690612
158 | -57.1024190008085
159 | -57.1928705443978
160 | -57.7093481309517
161 | -56.5448929571071
162 | -56.6185673576101
163 | -57.6922068223198
164 | -58.6323012156350
165 | -58.3873166211280
166 | -57.6697961735090
167 | -56.9281617010823
168 | -57.8143439751646
169 | -58.8356716046755
170 | -59.5498245660224
171 | -60.1527010887778
172 | -61.6444171726890
173 | -64.0308280801288
174 | -63.3875157007197
175 | -63.3274595881317
176 | -63.3893971518142
177 | -62.7807107652691
178 | -63.0948607441411
179 | -62.7990635295971
180 | -61.7731507299693
181 | -60.3448322840022
182 | -59.9321793425834
183 | -59.5474812374454
184 | -59.7461763764253
185 | -60.6416914223722
186 | -61.4065003007887
187 | -60.3542589785420
188 | -59.1624737267106
189 | -60.2290542417165
190 | -63.3036820138858
191 | -62.9312203034607
192 | -61.7882723922801
193 | -61.0143889289550
194 | -60.3565480203570
195 | -60.6779186992741
196 | -61.1547495786279
197 | -61.4418641721140
198 | -62.5718709500539
199 | -62.3930781692222
200 | -61.2594332188517
201 | -61.1068379463911
202 | -61.3296278339121
203 | -62.2206406094195
204 | -63.3239772497260
205 | -63.8503412572729
206 | -63.8617532519477
207 | -63.3253476255522
208 | -61.9306392130101
209 | -62.3801295343194
210 | -62.7476689602003
211 | -62.8147353000790
212 | -62.0040716197986
213 | -61.1601021893337
214 | -60.7539860860906
215 | -60.5399624931052
216 | -60.9960146045114
217 | -61.2777046738447
218 | -61.0515054825817
219 | -60.0683127355632
220 | -59.7443665962122
221 | -58.6150690071325
222 | -57.9214860732615
223 | -58.3204553907988
224 | -58.3267481618140
225 | -58.4660036150849
226 | -58.8050180428010
227 | -59.2651477517792
228 | -60.2563673629717
229 | -61.6058225591540
230 | -61.6781989529541
231 | -62.5437481059071
232 | -64.4376277675803
233 | -62.1640227791487
234 | -59.6683872468096
235 | -59.3375895907768
236 | -59.3801868666986
237 | -59.1506628661426
238 | -59.5313630248839
239 | -60.4659139706800
240 | -60.5339423411029
241 | -59.3335276744169
242 | -59.2719313814396
243 | -60.3167810035405
244 | -60.2302574884343
245 | -59.6396812378015
246 | -59.1733907383350
247 | -59.3639507255266
248 | -59.0960147447095
249 | -58.8869420555355
250 | -59.7832884426417
251 | -60.8036245976567
252 | -61.3888526047942
253 | -60.9322447304315
254 | -61.3519612934824
255 | -62.7764406997054
256 | -65.6252884877284
257 | -64.7639991070405
258 | -63.5289652320816
259 | -61.9826592329447
260 | -62.1329224759042
261 | -63.0417465191297
262 | -61.5265894605124
263 | -61.0610875808412
264 | -60.3232622622015
265 | -60.2051086942835
266 | -59.8866791211298
267 | -59.5277894044105
268 | -58.8141629739178
269 | -59.0675006221169
270 | -61.6410795169230
271 | -63.8263700216116
272 | -61.8614728466383
273 | -60.2258805883966
274 | -60.2248415717653
275 | -60.3086585205273
276 | -57.9806587927616
277 | -55.7652815956025
278 | -55.2117446580895
279 | -55.5615883495019
280 | -55.7690360036835
281 | -56.3525938785814
282 | -57.7676304575523
283 | -58.1665768236338
284 | -58.6934817607994
285 | -59.2516728093466
286 | -60.3651578207941
287 | -62.4480052420783
288 | -63.5541366955827
289 | -64.4518569344338
290 | -64.9212558169219
291 | -65.3297180972135
292 | -67.8484885185503
293 | -68.6619811564980
294 | -64.0352000904495
295 | -60.7355155712379
296 | -59.2653170625916
297 | -57.7576027793385
298 | -56.6566087339559
299 | -55.8620897699595
300 | -55.8770998689015
301 | -56.2720549431000
302 | -55.5685777707215
303 | -54.9488715281554
304 | -55.3111458780849
305 | -55.8676189041878
306 | -55.1369929570972
307 | -55.1240574003086
308 | -55.6797379773356
309 | -55.3444997048468
310 | -55.6094056407023
311 | -55.9223519508031
312 | -55.6438925755057
313 | -55.4640044352563
314 | -56.3032688092523
315 | -57.2558960442467
316 | -58.2362358997291
317 | -58.7730008505114
318 | -58.6461942802235
319 | -58.2359700942030
320 | -57.5665373520261
321 | -56.0080110837274
322 | -55.2855281354722
323 | -55.2583961113467
324 | -55.3299928165323
325 | -55.3199760613424
326 | -55.0078968424451
327 | -54.3503004270943
328 | -54.6432387651598
329 | -54.7559038885213
330 | -54.7246127727637
331 | -54.8756105703993
332 | -55.0248856433056
333 | -55.4317119337384
334 | -56.3208114370053
335 | -57.9211844642480
336 | -59.9636703096971
337 | -60.0608879525414
338 | -59.0338375104022
339 | -58.4590297348295
340 | -58.3664149809264
341 | -58.6533174463592
342 | -58.2897457415066
343 | -57.4423271082966
344 | -57.1475261422114
345 | -58.4757005083124
346 | -59.2849234865682
347 | -58.4941011458745
348 | -56.9642686515262
349 | -55.8135810486104
350 | -55.7616001079816
351 | -56.0358886920786
352 | -56.5950579266067
353 | -56.6825802075405
354 | -57.2551366351529
355 | -59.4628561399035
356 | -60.5628855228048
357 | -61.1174635861234
358 | -61.7322726772292
359 | -63.4977993180112
360 | -65.3139596523142
361 | -66.3540541013360
362 | -63.4894711124243
363 | -61.0126940179352
364 | -61.1713389345876
365 | -60.6719097766232
366 | -60.5023590463856
367 | -59.8924899980897
368 | -58.0050644061817
369 | -57.0359577849856
370 | -57.1484661212535
371 | -58.0005609377450
372 | -58.7091144680253
373 | -59.7571856210873
374 | -60.0340030574659
375 | -58.7904317124251
376 | -57.4375479790023
377 | -56.8937242223081
378 | -56.2780033911655
379 | -56.0530150480633
380 | -56.4799617488600
381 | -56.7592645735702
382 | -57.5060605834133
383 | -59.2886243322956
384 | -61.3588245714870
385 | -61.7428837040242
386 | -62.6228265996704
387 | -62.5678254555175
388 | -62.2237336683968
389 | -63.0543814723890
390 | -65.5142738681499
391 | -66.8788683607202
392 | -67.1702309901443
393 | -64.3732512239556
394 | -62.8405716663429
395 | -61.6099962103294
396 | -61.1102765991294
397 | -60.6461247618432
398 | -60.8457122741428
399 | -62.4885183546835
400 | -63.4129214975035
401 | -63.6570797108408
402 | -63.5121201830922
403 | -63.7982604359057
404 | -63.8829946471053
405 | -64.7820057569143
406 | -64.6472592656889
407 | -63.3300735086604
408 | -63.4876777775314
409 | -65.3978962153619
410 | -66.1646228583229
411 | -63.6988606861676
412 | -61.5445436765015
413 | -61.4633743155382
414 | -62.6236421860140
415 | -65.6153842221757
416 | -69.9242808531021
417 | -73.3488854069236
418 | -68.8304295554075
419 | -67.0459541017459
420 | -65.7980916491090
421 | -63.8790754229692
422 | -64.3051036992586
423 | -63.2863502191107
424 | -63.9672230775193
425 | -64.6901658126769
426 | -64.6821991227991
427 | -64.2963444928295
428 | -63.6643057693875
429 | -65.8541605080215
430 | -69.0879135112489
431 | -71.6222091970280
432 | -71.0273501856312
433 | -67.7599167987310
434 | -65.9889456557383
435 | -64.7116406241850
436 | -64.6048248819367
437 | -66.8404208319077
438 | -69.0804229335618
439 | -67.9978237058302
440 | -69.3712848375971
441 | -71.1992619004580
442 | -71.4934326057220
443 | -70.6654126253575
444 | -69.8027884354588
445 | -70.1755311577397
446 | -69.8706684552651
447 | -67.9156543176797
448 | -68.1934437418420
449 | -71.1162543400050
450 | -71.5364250048739
451 | -66.1281893581129
452 | -62.8597980303354
453 | -61.8388898443388
454 | -61.1690899196707
455 | -60.9706427779351
456 | -60.7644330056376
457 | -60.9341713573816
458 | -60.0338984137947
459 | -60.1471248907520
460 | -59.0783091841617
461 | -58.0549188872538
462 | -57.3613980961747
463 | -57.1553135665626
464 | -56.9244721823804
465 | -56.4287039737319
466 | -55.8722608113586
467 | -55.2375951967499
468 | -55.8369943117802
469 | -57.3713089708467
470 | -59.5996769025465
471 | -63.1939743434736
472 | -68.4575135344570
473 | -68.6756652960366
474 | -68.2025791594891
475 | -66.8536798776129
476 | -64.8862139291606
477 | -64.7196437110451
478 | -65.3331228881495
479 | -64.9100272686862
480 | -63.2234478238185
481 | -62.9393473861423
482 | -66.3517647757124
483 | -67.5737420257502
484 | -66.2796239901151
485 | -66.1186498049736
486 | -70.1165908563072
487 | -70.7261396856831
488 | -68.4955257152304
489 | -67.5766656172163
490 | -66.5746149446613
491 | -67.8910239872997
492 | -71.1074187620208
493 | -71.4189555809221
494 | -70.0869975211583
495 | -69.3590264930097
496 | -71.6979557559704
497 | -74.0434066173485
498 | -73.2266222748990
499 | -72.2009629389295
500 | -69.6156202789959
501 | -67.1831171239869
502 | -65.8531915857167
503 | -63.7164170613307
504 | -61.7052777030125
505 | -60.6198170555092
506 | -60.1072646932037
507 | -61.2791837304458
508 | -62.7153906768288
509 | -63.1645207229396
510 | -62.4511290192119
511 | -61.6060531612647
512 | -61.5401611722054
513 | -61.4760634974211
514 | -61.4536554239334
515 | -60.9365548023385
516 | -59.6606653992170
517 | -58.6054739183043
518 | -57.8510017506447
519 | -58.0297602792758
520 | -57.6873221246702
521 | -57.2497522172865
522 | -56.8312595231698
523 | -56.4170790937731
524 | -56.0536518824791
525 | -56.6240291112503
526 | -57.5522778686332
527 | -57.9045219675964
528 | -58.5373351125045
529 | -59.1241368186547
530 | -59.7404038555219
531 | -61.2039583330536
532 | -63.3886134607058
533 | -62.4018279498824
534 | -60.9958345220989
535 | -60.7880281052747
536 | -61.4844968489436
537 | -62.9910035692151
538 | -63.4636639463227
539 | -65.8780951986845
540 | -69.6990661785996
541 | -66.7346101213025
542 | -62.0660826946230
543 | -61.6894167571848
544 | -63.6935820980628
545 | -65.0341241893686
546 | -64.4663166776656
547 | -62.8944157215526
548 | -60.7966036732315
549 | -60.0562106757718
550 | -60.3284450882151
551 | -62.3506562760336
552 | -64.4230879028239
553 | -66.5458206689671
554 | -64.6528651201388
555 | -63.9247118462049
556 | -64.0106625625873
557 | -64.6445087935795
558 | -65.5749966379628
559 | -68.0507107989108
560 | -72.1675438913962
561 | -72.7529404959442
562 | -70.6731967896522
563 | -71.5900091751766
564 | -68.1397177803047
565 | -64.6062534570326
566 | -62.9813210592353
567 | -63.4593975586274
568 | -65.1653739437101
569 | -66.0254297229907
570 | -68.5687465883727
571 | -66.8352087316422
572 | -62.4143980721995
573 | -60.5299981473313
574 | -59.4312413219610
575 | -58.5572052686739
576 | -57.1428423312179
577 | -56.0770715050480
578 | -56.5066100440072
579 | -57.1466327983986
580 | -56.4152786890769
581 | -56.3141840134945
582 | -57.0366811366872
583 | -57.5440082029918
584 | -57.0459434874010
585 | -57.0977328602795
586 | -57.8198603793549
587 | -57.7252660378040
588 | -57.8464205241892
589 | -58.6732220890313
590 | -60.7755042106600
591 | -62.0091802226877
592 | -60.4264513693619
593 | -59.4961890125245
594 | -58.7619851395804
595 | -58.8214795813474
596 | -59.7486928770199
597 | -60.4547713066375
598 | -60.6922249916094
599 | -64.1699995882438
600 | -65.7214506175001
601 | -62.6687050843341
602 | -61.7871654269500
603 | -61.8353042094354
604 | -62.0458166311317
605 | -60.9824428306998
606 | -60.9800159329093
607 | -60.0070054062533
608 | -59.2683167080280
609 | -58.4558481203579
610 | -58.0574245993474
611 | -56.7025901808160
612 | -56.7274106131585
613 | -57.3714639911182
614 | -56.9577710210607
615 | -56.2976929764045
616 | -57.3750789892691
617 | -57.9960691650448
618 | -58.0730236046937
619 | -58.9403996872596
620 | -59.1301040614665
621 | -58.8039329540464
622 | -59.2318308467340
623 | -60.8974828773105
624 | -64.6895527052844
625 | -70.3030421798641
626 | -72.5377769306987
627 | -70.8749066148697
628 | -68.0854805275237
629 | -66.6727557111057
630 | -69.0497878732420
631 | -72.6688804771061
632 | -72.7538765552737
633 | -72.3289628276701
634 | -72.7643816398805
635 | -67.1116984944769
636 | -64.4551907784958
637 | -64.9123715369727
638 | -65.9321678121420
639 | -64.4965985182166
640 | -62.7485207642428
641 | -61.6512963616285
642 | -61.2545211071647
643 | -60.8276911754292
644 | -62.7499869546417
645 | -67.3213099230853
646 | -74.9230114097433
647 | -70.1681794856697
648 | -69.5652204916547
649 | -67.7748827183383
650 | -65.8856961437259
651 | -65.4211174712457
652 | -64.2061707482971
653 | -62.6251610161942
654 | -61.6706762110649
655 | -62.1812122774246
656 | -64.8940321708255
657 | -67.8345351646941
658 | -68.4807342338256
659 | -69.4808885870408
660 | -67.7394223901561
661 | -65.3172294934340
662 | -63.5750739512438
663 | -62.1845466264604
664 | -62.3387517005327
665 | -62.6989094108667
666 | -60.9251436264830
667 | -59.3317919300858
668 | -58.7126283425830
669 | -58.9169192671749
670 | -58.8896649815000
671 | -59.1608453135874
672 | -59.4567162640950
673 | -60.2212946702165
674 | -59.2600112365080
675 | -59.9107048672373
676 | -61.5187532418615
677 | -63.7402844060794
678 | -65.6473637121224
679 | -64.4531417693309
680 | -63.4691165098343
681 | -62.6026722706675
682 | -63.1471307215833
683 | -64.2362857798954
684 | -66.1053295672349
685 | -67.1444633834620
686 | -65.3809723990959
687 | -64.8817881658565
688 | -66.3792359234124
689 | -66.8044272206063
690 | -63.7568272254753
691 | -61.1896745686238
692 | -59.9812800688617
693 | -60.1770275039091
694 | -60.3494244699008
695 | -59.9412495591077
696 | -59.5438444987162
697 | -60.7586776283033
698 | -62.5966970125742
699 | -65.6972162845539
700 | -68.3053810458346
701 | -67.1336490171294
702 | -65.0638097355345
703 | -64.2158390591079
704 | -62.5661409199482
705 | -61.5872028140004
706 | -60.4227210438701
707 | -60.6332144894312
708 | -62.0862283433383
709 | -62.8015884075367
710 | -62.9804587663677
711 | -61.3269868607762
712 | -58.2541798475250
713 | -57.5236685016663
714 | -57.8954841800034
715 | -57.9988471485855
716 | -58.2656750195651
717 | -59.5964396452813
718 | -60.0826146655504
719 | -58.7557670663967
720 | -57.4065729672902
721 | -56.6289551163311
722 | -56.3867834258724
723 | -56.6182139936980
724 | -56.5865581762729
725 | -56.6119849859222
726 | -55.9108368804137
727 | -55.5035548207961
728 | -55.6185936098022
729 | -57.0660139214407
730 | -60.0130128876586
731 | -62.9741471686410
732 | -61.7758585037136
733 | -60.9130858478461
734 | -59.5385334251901
735 | -57.6622634088253
736 | -56.0068750359406
737 | -55.0095263049931
738 | -54.4973687398712
739 | -54.0405276815548
740 | -53.8066545366736
741 | -54.1570794188512
742 | -54.3162302554796
743 | -55.2124652677864
744 | -56.9511675308798
745 | -57.8947437958197
746 | -57.7407082046120
747 | -56.4123093032566
748 | -56.0808484780480
749 | -56.5196388983505
750 | -56.8979488980250
751 | -57.3405273615562
752 | -57.9681987008126
753 | -58.8691751015092
754 | -59.7641646820066
755 | -58.5751882862251
756 | -57.9841501397173
757 | -58.8592160791683
758 | -59.4465730789925
759 | -61.0232864074104
760 | -60.8061313718922
761 | -58.2616358087374
762 | -56.8835575677448
763 | -55.8580615591767
764 | -55.5661904392857
765 | -55.7470685075561
766 | -55.7329453905347
767 | -55.0520432834595
768 | -55.2432296713147
769 | -56.4145825456104
770 | -56.8747213758370
771 | -57.2986182245031
772 | -58.2527929930408
773 | -60.7132883768542
774 | -61.9263341699964
775 | -61.5953829538262
776 | -60.7978781937670
777 | -61.1864081931502
778 | -60.7136578844586
779 | -59.4528924906160
780 | -57.9573564989626
781 | -57.0385977205373
782 | -57.2774857754746
783 | -57.4219221907672
784 | -56.2023300580265
785 | -56.6897048366673
786 | -58.1357967797300
787 | -60.6029042848582
788 | -63.4703991901645
789 | -63.3703677137870
790 | -62.1865434759970
791 | -62.6429768783951
792 | -62.2592464781564
793 | -60.5914516563259
794 | -59.2959374749719
795 | -58.0764553922421
796 | -57.2997854059457
797 | -57.7369848919012
798 | -58.4576750590443
799 | -57.7278503486336
800 | -56.3755160364259
801 | -55.7903338260588
802 | -56.5542425259780
803 | -58.4547688415452
804 | -60.0997653852527
805 | -59.9943384634918
806 | -58.3224323816190
807 | -58.0154678151941
808 | -59.0405662390076
809 | -58.9070733562241
810 | -58.5561874008750
811 | -57.2392899956955
812 | -55.9785163266058
813 | -55.1985691918653
814 | -55.3481923155732
815 | -55.7509212969059
816 | -56.1679111476196
817 | -55.0125175127632
818 | -55.1515787867731
819 | -56.8460434141076
820 | -58.0801440693425
821 | -58.1815936301068
822 | -58.4323519756062
823 | -58.1706467403988
824 | -57.7305449575785
825 | -58.6546295212144
826 | -60.9798635729438
827 | -61.7035008481859
828 | -62.0073843246495
829 | -63.1386511146830
830 | -62.6241828081967
831 | -61.8086691766320
832 | -62.0002198473882
833 | -61.5650706109476
834 | -60.7889532783009
835 | -60.0416023466710
836 | -59.7671603013016
837 | -59.8097305098352
838 | -60.7368126115236
839 | -62.4538294560753
840 | -62.8922504686261
841 | -61.3892381592264
842 | -61.7519293038005
843 | -62.5036493751857
844 | -64.5423278726300
845 | -67.5605126615266
846 | -66.4399989474345
847 | -64.6936671765069
848 | -64.5021066398994
849 | -65.2437981403015
850 | -65.6923862354124
851 | -66.8326860904920
852 | -66.8472051891914
853 | -64.9395443593445
854 | -62.7216341227691
855 | -61.0122106298599
856 | -60.2276874279664
857 | -60.3058574496934
858 | -60.2797729299262
859 | -59.1477458559468
860 | -58.0907688881640
861 | -57.8881320129115
862 | -58.2670352769497
863 | -59.4232495199989
864 | -60.0046786118135
865 | -59.7425195667285
866 | -61.1218114779659
867 | -62.7580723223539
868 | -63.3417762332897
869 | -61.5840992976260
870 | -60.1843230860425
871 | -59.2529504717774
872 | -59.6096744238422
873 | -61.1451105963731
874 | -62.7106817072184
875 | -64.9576477407476
876 | -66.6989065288520
877 | -68.3442634011236
878 | -65.4970884560110
879 | -63.5551140918248
880 | -62.8140556238042
881 | -63.9098776373550
882 | -63.6871767960584
883 | -63.4675574213676
884 | -63.4573945584507
885 | -63.2819001428008
886 | -63.6058527534239
887 | -62.6511933270576
888 | -61.0546829477431
889 | -61.0445250087945
890 | -60.1971213065847
891 | -60.1541083415133
892 | -61.6601378319498
893 | -62.0842984442045
894 | -62.2643574326889
895 | -63.2685275305615
896 | -64.8153714471599
897 | -65.2595878210562
898 | -64.8576170759044
899 | -63.8560737076842
900 | -63.8854072612321
901 | -65.0558820181254
902 | -64.5895634190822
903 | -63.2077493910053
904 | -60.7131776254374
905 | -58.8865286080762
906 | -57.9267196132795
907 | -57.7670722330467
908 | -57.2545808622580
909 | -56.6301480244344
910 | -56.1185297360942
911 | -56.6709542526522
912 | -57.4438752977495
913 | -58.0973924524344
914 | -58.7298483880097
915 | -59.2664702630422
916 | -60.0287898202114
917 | -62.0122299085932
918 | -62.7648963406262
919 | -62.1597419543641
920 | -62.3974466852457
921 | -62.6322662730721
922 | -61.3568445393323
923 | -59.9384268259480
924 | -59.5418735765665
925 | -59.7089024654835
926 | -60.0804781103808
927 | -60.1793754150845
928 | -60.6890547449865
929 | -60.9195283539213
930 | -60.7551993394326
931 | -61.0625907060764
932 | -64.1970910137865
933 | -67.1057193211825
934 | -64.6001651584611
935 | -62.4633997439872
936 | -61.4021144553541
937 | -61.1181742070644
938 | -60.5688499708080
939 | -60.9642232328754
940 | -62.1813390555504
941 | -63.9338509213370
942 | -65.4418523451364
943 | -65.2703671600831
944 | -64.6573669786984
945 | -64.0884632453980
946 | -63.3787722144269
947 | -63.1828297370778
948 | -63.4479810760068
949 | -62.7174593661150
950 | -62.4148856706383
951 | -62.6870308586301
952 | -63.0435203799881
953 | -63.2847918009115
954 | -62.5442180596286
955 | -60.8773178555864
956 | -59.4999011363542
957 | -59.4731718080418
958 | -59.8936285415250
959 | -59.7484928249584
960 | -59.8734278197325
961 | -60.1032787037787
962 | -60.1194553055209
963 | -60.9026513041654
964 | -62.3312532685446
965 | -64.7643932230830
966 | -66.4518022801802
967 | -69.1919095541398
968 | -70.6433504840039
969 | -68.2648715744747
970 | -67.0415243986576
971 | -66.1824197433000
972 | -66.1021743756620
973 | -67.4349692269374
974 | -67.3495219716344
975 | -65.8750920727852
976 | -65.3603817148970
977 | -65.5358158823464
978 | -66.9178884182008
979 | -65.9723692163874
980 | -64.4059654901719
981 | -63.4802966320657
982 | -63.6543498852713
983 | -64.5502871758266
984 | -65.1045578080645
985 | -65.5336388027633
986 | -67.3506368213153
987 | -69.8184227113437
988 | -72.0827543221022
989 | -74.6200605454788
990 | -71.8732719953563
991 | -70.4487141622884
992 | -72.1248412905910
993 | -72.3843538601859
994 | -71.1696569578838
995 | -69.1856249426814
996 | -66.0986069633160
997 | -64.9672903234162
998 | -65.3700476310614
999 | -66.7680319380141
1000 | -68.2952103267948
1001 | -67.9134982551826
--------------------------------------------------------------------------------
/DataSet/s41_average_sweep.txt:
--------------------------------------------------------------------------------
1 | -65.9222889801529
2 | -65.7174739096996
3 | -65.2958958455183
4 | -64.6308253244592
5 | -63.8082634314894
6 | -63.6523685620348
7 | -64.4641594773277
8 | -64.8164601496076
9 | -65.8093205865828
10 | -65.5680930472034
11 | -65.1958220251634
12 | -64.8162835717285
13 | -64.8267775450781
14 | -66.8989814350653
15 | -70.6974446210294
16 | -69.2606419515079
17 | -66.7113115184927
18 | -67.1364399700453
19 | -69.4455129587250
20 | -72.8838361942340
21 | -75.9725382663496
22 | -74.6916061850038
23 | -74.2576201879606
24 | -73.6725797075053
25 | -72.7830751736087
26 | -71.9775490344185
27 | -70.1870621583601
28 | -69.0623542899168
29 | -67.4599531530408
30 | -65.7288150778606
31 | -64.0817361099003
32 | -63.9903136439607
33 | -65.7201473658967
34 | -66.7078983678334
35 | -66.9392140744303
36 | -67.7255312706746
37 | -68.2977604695949
38 | -66.4342007491214
39 | -66.3181424941780
40 | -66.9715418341209
41 | -68.5428333597015
42 | -70.3834780129302
43 | -69.0549008222108
44 | -67.7950233473513
45 | -68.7749014188670
46 | -70.4524445328647
47 | -71.4292469742844
48 | -68.5458913163685
49 | -68.1871242529924
50 | -71.5479708889717
51 | -71.5420716799249
52 | -70.6978694568474
53 | -69.9504105861845
54 | -69.5178117801102
55 | -70.9851232895990
56 | -68.4704265727043
57 | -65.5780894691316
58 | -64.9843220797402
59 | -63.9972206824993
60 | -64.2641328754145
61 | -64.7573771601654
62 | -64.8935145169278
63 | -63.7968344605532
64 | -64.1687485141406
65 | -63.7781944566669
66 | -61.7499730965523
67 | -60.4489891851337
68 | -59.9292099983239
69 | -59.8848291306175
70 | -60.5070363812133
71 | -61.9297222595950
72 | -62.7594943295126
73 | -62.8443910656825
74 | -62.9943310793413
75 | -61.9186471961476
76 | -60.5276831067827
77 | -60.6154560250879
78 | -60.3067880842259
79 | -60.3483829022421
80 | -60.4104757565770
81 | -60.5399458507150
82 | -61.5942015551260
83 | -64.2254737226117
84 | -69.2094714885884
85 | -72.7029951050046
86 | -68.7693457504764
87 | -67.1685341256187
88 | -66.7728125073739
89 | -65.4857218247083
90 | -65.9213878224257
91 | -65.6488908147630
92 | -65.1688285166176
93 | -67.3251280902575
94 | -71.2756312437233
95 | -71.3041324145711
96 | -69.9240360802437
97 | -70.7511061183160
98 | -71.5173874171649
99 | -71.2907924831087
100 | -69.6843697288734
101 | -67.2387341131902
102 | -65.6568656598251
103 | -64.3561103924635
104 | -64.7737647902042
105 | -67.5243455596429
106 | -65.7416204342016
107 | -64.5202574766572
108 | -62.4987910418408
109 | -60.8693956017869
110 | -60.0305586297479
111 | -61.0742038803201
112 | -61.2814363754590
113 | -60.8092723241005
114 | -60.1309016319389
115 | -60.1923821619036
116 | -60.1176355089703
117 | -60.2750956324582
118 | -60.7785621474453
119 | -60.9987176502117
120 | -61.6162011530944
121 | -63.0072099755859
122 | -64.7411963817933
123 | -64.8356816644115
124 | -66.7186650759693
125 | -70.4568386761285
126 | -73.4670897812825
127 | -74.0465339786973
128 | -74.9581695212120
129 | -72.2304707421485
130 | -73.4473839808955
131 | -72.4411130212252
132 | -71.8633474064336
133 | -72.4240008778945
134 | -70.3576823382303
135 | -66.6431590948869
136 | -65.8266230408529
137 | -66.4634573980885
138 | -66.3517210409150
139 | -65.3270321717407
140 | -66.1836326342953
141 | -67.4254392202911
142 | -65.8432524432710
143 | -66.6095449106895
144 | -67.8846181637406
145 | -66.1868971130192
146 | -64.3419680859339
147 | -64.3386402518407
148 | -63.5434577353041
149 | -63.9603963617626
150 | -64.4142433730843
151 | -64.3198225195927
152 | -64.8907463007897
153 | -64.5538711151262
154 | -65.5463720574808
155 | -68.1934342052924
156 | -68.5604839379199
157 | -67.0724095882337
158 | -67.5688253608326
159 | -68.4779530614754
160 | -68.5848782229099
161 | -68.8329521731568
162 | -70.7439857731094
163 | -71.9107240227321
164 | -74.5596684001046
165 | -71.6997344649097
166 | -68.2387449159942
167 | -66.9292802919334
168 | -66.1435746284096
169 | -66.8678057522082
170 | -68.1170068883617
171 | -70.5388034547427
172 | -70.5710622439246
173 | -71.3505288180689
174 | -69.4913738373757
175 | -66.6341346522549
176 | -66.8653524889607
177 | -68.9510634661431
178 | -71.2987563243869
179 | -72.3442779710265
180 | -69.2420485549292
181 | -66.9826114369965
182 | -66.5476436216144
183 | -67.2013242887992
184 | -69.9959376497832
185 | -71.5459101922063
186 | -74.3619521184074
187 | -72.8222433930449
188 | -72.0356972711952
189 | -71.2855742126198
190 | -70.2074446008909
191 | -70.9784554157315
192 | -72.0780581299936
193 | -73.9518262841792
194 | -72.0824538043817
195 | -69.7313502906585
196 | -68.6220014881977
197 | -70.2875859916803
198 | -66.3842292714284
199 | -64.5501609908974
200 | -64.8824770329998
201 | -64.7547710206318
202 | -63.6602355581732
203 | -62.7729738768730
204 | -63.2854527165460
205 | -62.2306514811708
206 | -60.4077757110944
207 | -60.1776545275815
208 | -60.8796994828568
209 | -61.3395986809070
210 | -60.7434734143328
211 | -61.4650598349043
212 | -62.4504199086994
213 | -62.6531669979013
214 | -62.5167333218065
215 | -63.4823526481205
216 | -65.0412231685325
217 | -66.8212033783166
218 | -66.8599318482367
219 | -66.1319733049379
220 | -68.0398927351372
221 | -67.3096043064319
222 | -65.1914854398678
223 | -63.2539055983451
224 | -62.6169823637717
225 | -61.9639268443579
226 | -61.3594746904407
227 | -61.5651679359709
228 | -62.3551087306361
229 | -61.4298621684372
230 | -61.0143717757291
231 | -60.8968302056647
232 | -61.1418796718401
233 | -61.1773762885171
234 | -61.7308090591827
235 | -61.8424091187857
236 | -61.4075897684388
237 | -62.2995049087904
238 | -62.8797578321861
239 | -63.9751420882175
240 | -65.7486239769740
241 | -67.7383487991065
242 | -67.9255321085678
243 | -67.5613927489018
244 | -65.8184555216758
245 | -63.3176837128410
246 | -62.8124898577018
247 | -63.5232500554216
248 | -66.1659457345305
249 | -69.8266196188603
250 | -69.5470622782238
251 | -66.9228076489274
252 | -65.7464903885736
253 | -64.1607230350556
254 | -63.8902382547645
255 | -64.4593927948577
256 | -66.2033664031677
257 | -65.3621035798284
258 | -64.8881204489303
259 | -65.9623327984271
260 | -67.9264511398552
261 | -68.8740515285727
262 | -67.2080461642108
263 | -64.1758876791444
264 | -62.4327865452629
265 | -61.6079414588993
266 | -61.2305137570637
267 | -61.2048982254951
268 | -62.7795116822043
269 | -64.3015175176605
270 | -64.4086296124219
271 | -62.2795318749129
272 | -61.9768721262103
273 | -63.2733372154022
274 | -64.9253363146629
275 | -64.8825292641941
276 | -63.3969207805162
277 | -63.1604768980865
278 | -63.8255047895986
279 | -65.5669237637011
280 | -67.7994864144933
281 | -69.3945489126255
282 | -69.3931782127577
283 | -70.0730759378002
284 | -71.1546812294887
285 | -69.0970645118666
286 | -67.6005955035925
287 | -67.3903718727411
288 | -67.5499669432515
289 | -66.7044086902276
290 | -68.7326026465122
291 | -70.8226783452313
292 | -70.1842596429386
293 | -68.6062164349601
294 | -67.4121163282563
295 | -68.2323278126841
296 | -70.4042957388201
297 | -67.0923931784742
298 | -63.2790327080792
299 | -61.4339873293933
300 | -60.5859065841027
301 | -60.5004266059166
302 | -60.1719919697946
303 | -60.3479556194223
304 | -60.5561144501555
305 | -60.0392854016921
306 | -59.8975168148781
307 | -59.8420979880685
308 | -59.5362094246973
309 | -59.6966490726950
310 | -60.5782139137722
311 | -61.3857300329905
312 | -61.4449650949816
313 | -60.5641910348509
314 | -60.7550040699382
315 | -61.5328582982475
316 | -62.7648077812217
317 | -64.1950495710480
318 | -66.1485765577387
319 | -66.4503029781202
320 | -65.8981211979269
321 | -66.2723686207227
322 | -66.9880383851913
323 | -69.3242173507197
324 | -67.2526478914072
325 | -65.1132529192656
326 | -63.1661942599061
327 | -61.2690461444166
328 | -59.6834469965571
329 | -59.7674899155742
330 | -60.1938936208540
331 | -60.7852774409866
332 | -61.4317841793467
333 | -62.2509268207491
334 | -64.3254411245336
335 | -63.2723717219807
336 | -62.2757374399579
337 | -62.8103277617582
338 | -63.3432767984838
339 | -64.1475430682892
340 | -65.9035874541840
341 | -68.2083151106869
342 | -67.6515685340049
343 | -66.6192672490784
344 | -65.5920598652817
345 | -65.9237452132579
346 | -67.1873320232134
347 | -68.3324501752880
348 | -68.0573699429363
349 | -67.2237696750639
350 | -66.5453573153141
351 | -66.2518795645578
352 | -66.4110375873211
353 | -66.1727529639186
354 | -65.5483727180785
355 | -64.9541284789460
356 | -64.7733228426826
357 | -66.7974269960360
358 | -70.1042671536009
359 | -71.0225584695897
360 | -69.2749723772479
361 | -70.5679192155741
362 | -69.5978542792797
363 | -68.9053298660362
364 | -68.1651509870280
365 | -69.0730826536663
366 | -71.2068829157184
367 | -75.4341133951890
368 | -73.3011985300904
369 | -71.4374359912876
370 | -70.9901561506389
371 | -71.9088469939575
372 | -72.9839859304360
373 | -72.6435405600707
374 | -68.6527157009508
375 | -66.5314999180183
376 | -66.3551913845126
377 | -67.9608924830552
378 | -67.2958627556282
379 | -66.1554134908075
380 | -65.4956633847934
381 | -65.0221551737773
382 | -65.7689142018916
383 | -65.5110539251569
384 | -65.8425853876316
385 | -65.7555770124668
386 | -65.5592037400406
387 | -66.3045286185002
388 | -67.8780839899344
389 | -69.7033360614209
390 | -72.2448817739369
391 | -73.3784001702929
392 | -74.4174340055687
393 | -74.1960148789425
394 | -72.9790080544949
395 | -70.2680964817830
396 | -68.5963059524294
397 | -68.2816193570110
398 | -68.1893564864389
399 | -68.9464403180570
400 | -71.5206934637551
401 | -73.7904665639904
402 | -72.6902462168849
403 | -74.2123071556716
404 | -74.1449142699613
405 | -73.8117705723443
406 | -74.3256342292962
407 | -74.3907241713044
408 | -72.5772517975483
409 | -70.5925729030752
410 | -70.6697405995766
411 | -71.5573029518623
412 | -72.3384691025530
413 | -71.0098136654822
414 | -71.5671342087032
415 | -70.1452553103781
416 | -67.1853178237544
417 | -65.6029617650005
418 | -64.9897798919795
419 | -65.1497765943040
420 | -64.9827157272380
421 | -64.4661238366736
422 | -64.3337017309275
423 | -64.6488443691068
424 | -64.8810812230202
425 | -67.0920021670490
426 | -72.1328959401327
427 | -67.8955934882839
428 | -65.3852989047578
429 | -64.4172380805775
430 | -62.9376153779615
431 | -61.0331960912551
432 | -60.1058733227902
433 | -60.7461235530091
434 | -61.4624072892752
435 | -61.2265780010312
436 | -61.7367207295972
437 | -63.1239693000831
438 | -64.9019225516184
439 | -66.4438558428228
440 | -66.7630885466119
441 | -66.4884304206102
442 | -69.0916640552973
443 | -67.7378125054805
444 | -65.7659242406524
445 | -65.5924746348702
446 | -64.7948909609122
447 | -64.5379276159583
448 | -66.0851571735879
449 | -69.3006696345243
450 | -67.5446145993187
451 | -66.2024260528894
452 | -64.6428775603122
453 | -65.0788057452528
454 | -64.2323143477706
455 | -63.7547006444438
456 | -62.9456816440569
457 | -61.5327517881979
458 | -61.5071153532867
459 | -61.6208872869530
460 | -60.8822434221044
461 | -60.3317180533066
462 | -60.2801652868952
463 | -60.3960686310566
464 | -60.8389843647880
465 | -61.2204579714915
466 | -63.2564789233777
467 | -65.5845528287221
468 | -65.4991854854053
469 | -64.8559359614592
470 | -65.4431382741981
471 | -65.5856050963294
472 | -67.3077250208744
473 | -69.2291688356605
474 | -70.5589325887332
475 | -69.1819365136728
476 | -69.9608631326317
477 | -70.5490304348679
478 | -69.0539146643582
479 | -68.9960790661464
480 | -68.1252656609285
481 | -68.4355012222704
482 | -68.3269759473497
483 | -69.3836931010249
484 | -71.4627134120138
485 | -72.0200567566161
486 | -71.3323527061568
487 | -71.4252859164959
488 | -72.1116436197420
489 | -73.6775624228393
490 | -72.5576224114838
491 | -69.7589022681696
492 | -69.4807994782088
493 | -67.8734670542719
494 | -66.9021624169530
495 | -65.4630837242275
496 | -65.9974544198428
497 | -67.0775720446781
498 | -65.0337043873923
499 | -64.4385360450935
500 | -66.4568495520871
501 | -68.3535157531376
502 | -69.8095121825032
503 | -69.0577498552727
504 | -68.6696621047237
505 | -70.5877351762909
506 | -69.7655514725966
507 | -69.4821584272919
508 | -71.3510959957070
509 | -72.0665446064598
510 | -73.0669458211374
511 | -73.0796952807820
512 | -71.7092728642838
513 | -71.1059231205011
514 | -69.1532660808818
515 | -67.4506904863625
516 | -66.6839241619318
517 | -67.1672224638449
518 | -68.1372733939319
519 | -66.6098981519338
520 | -64.4586357357664
521 | -64.0594222919528
522 | -63.9688455607346
523 | -63.4937105366241
524 | -62.6864339356451
525 | -61.0953195826795
526 | -61.3110938465722
527 | -62.5395186251714
528 | -65.1525423809416
529 | -68.5965441751617
530 | -65.6755968639812
531 | -63.9470515734672
532 | -61.9403277766136
533 | -61.4526654344812
534 | -62.3406595709860
535 | -62.8300959645207
536 | -63.0759567145569
537 | -63.0850625212024
538 | -62.8907165201135
539 | -63.2001752933679
540 | -63.3034650810218
541 | -63.0050517185864
542 | -63.6339832178577
543 | -64.5191633979755
544 | -63.8407642755555
545 | -64.3828111799951
546 | -65.7762085053337
547 | -68.1354872678930
548 | -70.1138733835015
549 | -72.2489639717985
550 | -72.1839680350025
551 | -70.7638859733428
552 | -70.4224665286477
553 | -71.2155808214268
554 | -71.7444968811457
555 | -70.7485722453790
556 | -71.4959697061638
557 | -70.1625469708684
558 | -69.3544178631877
559 | -70.0916552624580
560 | -70.2881545955896
561 | -70.4341927732789
562 | -69.4863729781927
563 | -68.8711018261770
564 | -67.9978277967053
565 | -67.5926789437014
566 | -67.1579349999265
567 | -67.1655869084837
568 | -67.5340498417846
569 | -66.9366597475075
570 | -67.4605110815504
571 | -69.2100461414982
572 | -71.8873035133859
573 | -72.1307474091862
574 | -72.4207676813861
575 | -72.7439283215782
576 | -73.4080879588673
577 | -72.3281846128727
578 | -73.2198181598668
579 | -71.8769547748286
580 | -69.1897777891640
581 | -68.3034796585934
582 | -68.8471663452988
583 | -69.0457775052394
584 | -68.8679888658306
585 | -68.6422469730807
586 | -68.9201175243073
587 | -72.3615215837053
588 | -72.5577317871423
589 | -69.7246200336495
590 | -70.0947434260966
591 | -70.7153228784937
592 | -70.0755207522118
593 | -69.4916353484746
594 | -68.7294334745406
595 | -68.5332442181455
596 | -70.2073010417269
597 | -69.5060653759443
598 | -69.1726591182491
599 | -68.3579823261617
600 | -67.3710029065480
601 | -65.7934471977953
602 | -65.8244751813386
603 | -66.3295225939512
604 | -66.5843821051124
605 | -66.4581258635214
606 | -66.9689388685869
607 | -67.5371328786932
608 | -67.8001516537307
609 | -67.9603299741267
610 | -68.0913529893534
611 | -66.1362804779971
612 | -64.8756516079331
613 | -63.9231106688479
614 | -63.2869432780443
615 | -64.5271006186603
616 | -64.5320499926933
617 | -64.6426635209185
618 | -65.2764640662736
619 | -64.4633717730733
620 | -63.1747947031357
621 | -62.9551041493437
622 | -62.1937321259386
623 | -61.6808619319757
624 | -60.7624973751155
625 | -61.1740290548097
626 | -62.4908337521071
627 | -62.5560530197675
628 | -61.8729847619553
629 | -61.7580264491567
630 | -61.4933955901056
631 | -62.5364223588552
632 | -63.5306198485181
633 | -64.1082337233332
634 | -64.0287994387188
635 | -65.0769088201127
636 | -66.6315772612335
637 | -68.1822593203719
638 | -65.8244610714646
639 | -64.1817470448909
640 | -63.5547089504507
641 | -64.6720198004970
642 | -67.2320785990029
643 | -70.3535399916794
644 | -72.1406665957744
645 | -72.0308853061897
646 | -72.2072836674818
647 | -71.4068468530508
648 | -70.0782320422597
649 | -70.0025386995297
650 | -68.3196689615859
651 | -67.8793366519429
652 | -68.9488564242786
653 | -67.4796467815149
654 | -66.0411683035399
655 | -65.3565152757054
656 | -65.2559035648001
657 | -65.5333078892722
658 | -68.0708402443476
659 | -68.9900027052553
660 | -68.9941695315758
661 | -70.5796235930499
662 | -71.7107843324131
663 | -72.4198147123640
664 | -73.3216756242994
665 | -73.2055045215621
666 | -72.4738314778857
667 | -71.4188341111724
668 | -69.3252650949263
669 | -67.3303382935006
670 | -66.4412113049771
671 | -67.1640927282078
672 | -69.7949110150850
673 | -72.6198087373639
674 | -72.9844966099550
675 | -72.3401377940338
676 | -70.0482743488160
677 | -68.8435379350278
678 | -67.4976241900319
679 | -67.5876850616483
680 | -66.8876017804504
681 | -68.3417460110832
682 | -68.7465546362867
683 | -69.8747607589801
684 | -70.4224041437979
685 | -70.9975358572808
686 | -70.3231229853830
687 | -70.4704384805355
688 | -71.9241363480739
689 | -71.7618067536178
690 | -70.2132161139527
691 | -70.0500134756486
692 | -68.9179690442902
693 | -68.2450797005361
694 | -67.9142210045341
695 | -66.8287012202415
696 | -64.8961887526120
697 | -65.4462045820933
698 | -65.5279622484359
699 | -64.3591527734728
700 | -63.6794477495301
701 | -62.8041053359271
702 | -64.2391115513163
703 | -66.2312309181100
704 | -68.2618771461839
705 | -66.9074070368832
706 | -66.4943407310172
707 | -67.1394204055391
708 | -67.4390264370075
709 | -66.8960747278757
710 | -66.7770094020318
711 | -69.1839350206165
712 | -71.1540077374981
713 | -71.8026075079919
714 | -70.0832791637137
715 | -68.6063841313848
716 | -68.8049043253042
717 | -70.0186851781960
718 | -71.9704451104925
719 | -71.8666052200597
720 | -69.1611974148011
721 | -65.1988446530945
722 | -63.8452462986002
723 | -63.9759687939204
724 | -65.1007295482824
725 | -66.2841912638105
726 | -68.0815981730837
727 | -68.3087002525561
728 | -66.4645934748936
729 | -64.8865866802298
730 | -64.4387042420793
731 | -64.5195033758378
732 | -65.0914749630589
733 | -68.5430299978219
734 | -70.7433733801909
735 | -67.6100774348277
736 | -67.1458103190567
737 | -67.4088278537677
738 | -69.4097959828912
739 | -70.9550891254137
740 | -70.6323187852307
741 | -71.2608814085379
742 | -70.4746155587095
743 | -70.3029335945767
744 | -70.3908018775776
745 | -73.2150535192975
746 | -73.5284856596797
747 | -72.4960149265075
748 | -70.4313567883514
749 | -69.9090162273773
750 | -68.9377942953405
751 | -70.3280783848074
752 | -72.4936725368230
753 | -69.8711698272097
754 | -67.8973495706650
755 | -65.8497774104583
756 | -63.4024113622761
757 | -64.2306914671372
758 | -65.3680082956877
759 | -67.8465481099429
760 | -67.1359612718675
761 | -66.2321647026449
762 | -66.1267263306452
763 | -65.0573752051971
764 | -65.5062136189868
765 | -66.1770383318599
766 | -67.1055882358160
767 | -69.1095138933276
768 | -70.1642170048847
769 | -71.7947412936345
770 | -72.8778738368738
771 | -72.3666173615967
772 | -70.8645719250483
773 | -70.6821707614657
774 | -69.1103895494430
775 | -66.9101526891534
776 | -66.3229027386283
777 | -65.0201054041315
778 | -66.0234549654635
779 | -67.6785409995421
780 | -67.4185381467783
781 | -65.0507122060316
782 | -63.1105101181569
783 | -63.1631235706701
784 | -63.7020440608589
785 | -65.4187602681223
786 | -68.2782274014988
787 | -69.1710924818436
788 | -68.9277625625634
789 | -67.2850427233960
790 | -66.2971707571485
791 | -66.1436028449844
792 | -65.7294045630259
793 | -66.6165319348289
794 | -67.8874627785957
795 | -67.3194941329087
796 | -66.5737233770608
797 | -65.3382492390084
798 | -66.1581158227805
799 | -66.8902242370654
800 | -68.5512809920369
801 | -70.8952159987132
802 | -72.5155053193768
803 | -71.8319052905604
804 | -71.2311655610940
805 | -69.4958732688331
806 | -66.7068220644159
807 | -65.7961449681386
808 | -66.9566967929407
809 | -68.8717886527525
810 | -70.0516127580320
811 | -69.0635174463332
812 | -67.5745121579113
813 | -66.8167645439744
814 | -66.7883648766658
815 | -66.5825989577396
816 | -65.0760502291684
817 | -65.0224739650723
818 | -65.1119204145567
819 | -65.4121652019997
820 | -67.1200905785453
821 | -68.0386298455006
822 | -67.4809878431724
823 | -65.4235600235080
824 | -64.7133534411216
825 | -63.6161069683347
826 | -63.3282114611386
827 | -64.3475474835834
828 | -66.6403896269062
829 | -68.1844330842941
830 | -70.2608399167546
831 | -71.6548199295203
832 | -71.7478372166871
833 | -69.8846409830192
834 | -70.2345163683610
835 | -70.4557297894706
836 | -71.1231808151597
837 | -70.7999306179207
838 | -70.1646885460852
839 | -71.9658318961598
840 | -73.0900080811767
841 | -73.0287272393296
842 | -72.9487498035354
843 | -73.7554600489205
844 | -73.3841671820723
845 | -73.9837029559226
846 | -73.5921261053726
847 | -72.0253538044040
848 | -73.4202677351636
849 | -74.0677986504686
850 | -73.1811205286198
851 | -71.8172086031872
852 | -69.9942671629594
853 | -68.0641871742091
854 | -68.6619607868992
855 | -69.1589054302111
856 | -70.3632153319256
857 | -70.5759487060312
858 | -71.1324387690254
859 | -71.0387772810528
860 | -72.2950675791209
861 | -72.1484872129192
862 | -72.2584119525845
863 | -72.5235015860855
864 | -73.3168119656511
865 | -71.8824829118600
866 | -72.2250161739228
867 | -72.4256527038127
868 | -70.2157267193612
869 | -69.8476369744421
870 | -68.1509743658248
871 | -66.9000495963569
872 | -65.7298224230618
873 | -64.8603639535736
874 | -64.4077869808848
875 | -64.1959050205161
876 | -64.2272319044568
877 | -64.6419437925793
878 | -64.9715008921608
879 | -65.8406281997525
880 | -65.3252614815567
881 | -65.3306771485347
882 | -67.5482749396169
883 | -68.8717515773156
884 | -68.7596981636582
885 | -67.9520078580828
886 | -66.4368108836833
887 | -66.4401405764221
888 | -67.8719129434279
889 | -69.8460101969544
890 | -69.7282256317519
891 | -69.8198634511188
892 | -69.1933370333532
893 | -68.6442958560193
894 | -68.8532342424393
895 | -70.9472765741557
896 | -70.9611323620149
897 | -71.4079404440058
898 | -70.3986118821343
899 | -68.4665920756793
900 | -67.5536776196112
901 | -66.8728977576633
902 | -66.3051984510374
903 | -66.4681037775732
904 | -67.8728815106460
905 | -69.8769491389359
906 | -70.5899052656496
907 | -70.4383242401935
908 | -67.7442354915123
909 | -66.3785175286251
910 | -64.8581547869013
911 | -63.9755586010107
912 | -62.6920438699489
913 | -62.7770102802296
914 | -63.5032923566850
915 | -63.9365884910749
916 | -65.1129056844914
917 | -67.3969893655353
918 | -67.4165177039265
919 | -64.6835470063098
920 | -62.8017765619028
921 | -62.5677403071969
922 | -63.4761277563807
923 | -64.0482735950189
924 | -65.5696034516070
925 | -66.8948246984208
926 | -67.0640111181210
927 | -67.0213775313013
928 | -65.6016277455815
929 | -65.8078745451012
930 | -67.4167228291735
931 | -70.8886816915896
932 | -73.1287930982641
933 | -72.7372984246190
934 | -72.5248530745746
935 | -71.9516584946426
936 | -74.1447774047447
937 | -73.7710849512981
938 | -72.9832342655473
939 | -72.0637072882245
940 | -70.8889908619211
941 | -70.9894643384495
942 | -70.1764781068436
943 | -70.7821910485218
944 | -70.8879990889209
945 | -69.5671602802950
946 | -68.3785476433800
947 | -68.2433883413591
948 | -68.7877522628679
949 | -68.2593498307059
950 | -67.8818238066968
951 | -68.4845683421396
952 | -69.0111563543173
953 | -68.8309813383844
954 | -67.9455427241666
955 | -68.3162313133772
956 | -70.0909707383341
957 | -71.3225244952133
958 | -72.1453998173232
959 | -73.6358440475196
960 | -73.5100688829408
961 | -72.7377636895775
962 | -71.5367531576177
963 | -71.7296147260351
964 | -74.1202478082071
965 | -73.5537719221007
966 | -74.2633354017238
967 | -74.4432955112413
968 | -75.1859948340745
969 | -75.4286529964673
970 | -74.1988964388815
971 | -75.0918049839185
972 | -74.1188630015408
973 | -72.1579911437430
974 | -69.9729194569892
975 | -69.3269369891776
976 | -68.8954297378152
977 | -68.3854873665248
978 | -69.4598862499221
979 | -69.3254478815085
980 | -69.5164623592286
981 | -69.5593149209187
982 | -70.0944014336304
983 | -70.0933412717264
984 | -69.3102845559480
985 | -68.3167909850511
986 | -68.2645810101118
987 | -69.9605835601712
988 | -69.6955403366694
989 | -69.8568967547313
990 | -70.3977351215095
991 | -70.6187091377738
992 | -72.3752914065859
993 | -72.5011576084503
994 | -72.8745812563532
995 | -75.1990509069776
996 | -74.0127965271599
997 | -72.4161743437553
998 | -71.3432264807008
999 | -71.5177617273360
1000 | -72.6610406695256
1001 | -72.6163125882056
--------------------------------------------------------------------------------
/DataSet/4G_bus_TMobile.txt:
--------------------------------------------------------------------------------
1 | -93
2 | -93
3 | -93
4 | -98
5 | -98
6 | -96
7 | -96
8 | -96
9 | -96
10 | -92
11 | -92
12 | -94
13 | -94
14 | -93
15 | -93
16 | -99
17 | -99
18 | -101
19 | -101
20 | -96
21 | -96
22 | -94
23 | -94
24 | -99
25 | -99
26 | -96
27 | -96
28 | -99
29 | -99
30 | -99
31 | -99
32 | -102
33 | -102
34 | -105
35 | -105
36 | -104
37 | -104
38 | -101
39 | -101
40 | -104
41 | -104
42 | -104
43 | -100
44 | -100
45 | -100
46 | -100
47 | -100
48 | -100
49 | -96
50 | -96
51 | -99
52 | -99
53 | -99
54 | -99
55 | -100
56 | -100
57 | -100
58 | -100
59 | -95
60 | -95
61 | -100
62 | -100
63 | -107
64 | -107
65 | -99
66 | -99
67 | -97
68 | -97
69 | -89
70 | -89
71 | -96
72 | -96
73 | -101
74 | -101
75 | -95
76 | -95
77 | -104
78 | -104
79 | -92
80 | -92
81 | -101
82 | -101
83 | -109
84 | -109
85 | -108
86 | -108
87 | -99
88 | -99
89 | -93
90 | -93
91 | -103
92 | -103
93 | -106
94 | -106
95 | -98
96 | -98
97 | -106
98 | -106
99 | -103
100 | -103
101 | -105
102 | -105
103 | -107
104 | -107
105 | -106
106 | -106
107 | -106
108 | -106
109 | -109
110 | -109
111 | -108
112 | -108
113 | -106
114 | -106
115 | -107
116 | -107
117 | -104
118 | -104
119 | -102
120 | -102
121 | -104
122 | -104
123 | -101
124 | -101
125 | -100
126 | -100
127 | -108
128 | -108
129 | -103
130 | -103
131 | -107
132 | -107
133 | -108
134 | -108
135 | -108
136 | -108
137 | -106
138 | -106
139 | -108
140 | -108
141 | -104
142 | -104
143 | -106
144 | -106
145 | -106
146 | -106
147 | -103
148 | -103
149 | -102
150 | -102
151 | -99
152 | -99
153 | -103
154 | -103
155 | -110
156 | -110
157 | -111
158 | -111
159 | -119
160 | -119
161 | -110
162 | -110
163 | -114
164 | -114
165 | -113
166 | -113
167 | -117
168 | -117
169 | -113
170 | -113
171 | -112
172 | -112
173 | -116
174 | -116
175 | -108
176 | -108
177 | -109
178 | -109
179 | -106
180 | -106
181 | -106
182 | -106
183 | -108
184 | -108
185 | -109
186 | -109
187 | -110
188 | -110
189 | -107
190 | -107
191 | -107
192 | -107
193 | -112
194 | -112
195 | -114
196 | -114
197 | -111
198 | -111
199 | -110
200 | -110
201 | -111
202 | -111
203 | -108
204 | -108
205 | -114
206 | -114
207 | -112
208 | -112
209 | -113
210 | -113
211 | -110
212 | -110
213 | -112
214 | -112
215 | -109
216 | -109
217 | -105
218 | -105
219 | -106
220 | -106
221 | -103
222 | -103
223 | -101
224 | -101
225 | -109
226 | -109
227 | -105
228 | -105
229 | -106
230 | -106
231 | -105
232 | -105
233 | -104
234 | -104
235 | -105
236 | -105
237 | -105
238 | -105
239 | -112
240 | -112
241 | -112
242 | -112
243 | -109
244 | -109
245 | -109
246 | -109
247 | -104
248 | -104
249 | -99
250 | -99
251 | -99
252 | -99
253 | -101
254 | -101
255 | -97
256 | -97
257 | -97
258 | -97
259 | -99
260 | -99
261 | -100
262 | -100
263 | -101
264 | -101
265 | -97
266 | -97
267 | -99
268 | -99
269 | -100
270 | -100
271 | -101
272 | -101
273 | -102
274 | -102
275 | -101
276 | -101
277 | -98
278 | -98
279 | -101
280 | -101
281 | -100
282 | -100
283 | -101
284 | -101
285 | -101
286 | -101
287 | -101
288 | -101
289 | -102
290 | -102
291 | -100
292 | -100
293 | -101
294 | -101
295 | -102
296 | -102
297 | -100
298 | -100
299 | -100
300 | -100
301 | -99
302 | -99
303 | -101
304 | -101
305 | -100
306 | -100
307 | -100
308 | -100
309 | -102
310 | -102
311 | -101
312 | -101
313 | -100
314 | -100
315 | -100
316 | -100
317 | -97
318 | -97
319 | -100
320 | -100
321 | -100
322 | -100
323 | -95
324 | -95
325 | -103
326 | -103
327 | -108
328 | -108
329 | -101
330 | -101
331 | -107
332 | -107
333 | -105
334 | -105
335 | -97
336 | -97
337 | -97
338 | -97
339 | -100
340 | -100
341 | -104
342 | -104
343 | -99
344 | -99
345 | -100
346 | -100
347 | -106
348 | -106
349 | -101
350 | -101
351 | -104
352 | -104
353 | -104
354 | -104
355 | -102
356 | -102
357 | -104
358 | -104
359 | -99
360 | -99
361 | -100
362 | -100
363 | -104
364 | -104
365 | -105
366 | -105
367 | -104
368 | -104
369 | -106
370 | -106
371 | -104
372 | -104
373 | -105
374 | -105
375 | -104
376 | -104
377 | -104
378 | -104
379 | -104
380 | -104
381 | -103
382 | -103
383 | -104
384 | -104
385 | -107
386 | -107
387 | -110
388 | -110
389 | -105
390 | -105
391 | -107
392 | -107
393 | -105
394 | -105
395 | -105
396 | -105
397 | -104
398 | -104
399 | -104
400 | -104
401 | -103
402 | -103
403 | -105
404 | -105
405 | -109
406 | -109
407 | -105
408 | -105
409 | -104
410 | -104
411 | -105
412 | -105
413 | -104
414 | -104
415 | -108
416 | -108
417 | -101
418 | -101
419 | -103
420 | -103
421 | -107
422 | -107
423 | -98
424 | -98
425 | -99
426 | -99
427 | -91
428 | -91
429 | -97
430 | -97
431 | -93
432 | -93
433 | -99
434 | -99
435 | -90
436 | -90
437 | -92
438 | -92
439 | -92
440 | -92
441 | -84
442 | -84
443 | -82
444 | -82
445 | -95
446 | -95
447 | -96
448 | -96
449 | -93
450 | -93
451 | -93
452 | -93
453 | -92
454 | -92
455 | -95
456 | -95
457 | -104
458 | -104
459 | -89
460 | -89
461 | -92
462 | -92
463 | -99
464 | -99
465 | -97
466 | -97
467 | -102
468 | -102
469 | -94
470 | -94
471 | -95
472 | -95
473 | -99
474 | -99
475 | -103
476 | -103
477 | -92
478 | -92
479 | -102
480 | -102
481 | -99
482 | -99
483 | -94
484 | -94
485 | -99
486 | -99
487 | -109
488 | -109
489 | -104
490 | -104
491 | -102
492 | -102
493 | -108
494 | -108
495 | -105
496 | -105
497 | -101
498 | -101
499 | -100
500 | -100
501 | -107
502 | -107
503 | -99
504 | -99
505 | -95
506 | -95
507 | -91
508 | -91
509 | -103
510 | -103
511 | -108
512 | -108
513 | -101
514 | -101
515 | -98
516 | -98
517 | -105
518 | -105
519 | -104
520 | -104
521 | -100
522 | -100
523 | -106
524 | -106
525 | -105
526 | -105
527 | -105
528 | -105
529 | -109
530 | -109
531 | -106
532 | -106
533 | -106
534 | -106
535 | -106
536 | -106
537 | -101
538 | -101
539 | -96
540 | -96
541 | -98
542 | -98
543 | -95
544 | -95
545 | -94
546 | -94
547 | -94
548 | -94
549 | -96
550 | -96
551 | -100
552 | -100
553 | -105
554 | -105
555 | -103
556 | -103
557 | -103
558 | -103
559 | -104
560 | -104
561 | -106
562 | -106
563 | -109
564 | -109
565 | -107
566 | -107
567 | -107
568 | -107
569 | -104
570 | -104
571 | -109
572 | -109
573 | -108
574 | -108
575 | -106
576 | -106
577 | -111
578 | -111
579 | -109
580 | -109
581 | -110
582 | -110
583 | -103
584 | -103
585 | -105
586 | -105
587 | -97
588 | -97
589 | -94
590 | -94
591 | -97
592 | -97
593 | -93
594 | -93
595 | -96
596 | -96
597 | -98
598 | -98
599 | -102
600 | -102
601 | -105
602 | -105
603 | -102
604 | -102
605 | -103
606 | -103
607 | -105
608 | -105
609 | -108
610 | -108
611 | -104
612 | -104
613 | -107
614 | -107
615 | -107
616 | -107
617 | -104
618 | -104
619 | -104
620 | -104
621 | -101
622 | -101
623 | -105
624 | -105
625 | -111
626 | -111
627 | -110
628 | -110
629 | -113
630 | -113
631 | -113
632 | -113
633 | -112
634 | -112
635 | -111
636 | -111
637 | -110
638 | -110
639 | -111
640 | -111
641 | -104
642 | -104
643 | -102
644 | -102
645 | -106
646 | -106
647 | -109
648 | -109
649 | -114
650 | -114
651 | -112
652 | -112
653 | -110
654 | -110
655 | -111
656 | -111
657 | -111
658 | -111
659 | -110
660 | -110
661 | -114
662 | -114
663 | -113
664 | -113
665 | -113
666 | -113
667 | -113
668 | -113
669 | -112
670 | -112
671 | -112
672 | -112
673 | -112
674 | -112
675 | -115
676 | -115
677 | -116
678 | -116
679 | -118
680 | -118
681 | -120
682 | -120
683 | -122
684 | -122
685 | -116
686 | -116
687 | -115
688 | -115
689 | -118
690 | -118
691 | -112
692 | -112
693 | -116
694 | -116
695 | -112
696 | -112
697 | -110
698 | -110
699 | -106
700 | -106
701 | -111
702 | -111
703 | -104
704 | -104
705 | -112
706 | -112
707 | -113
708 | -113
709 | -113
710 | -113
711 | -113
712 | -113
713 | -113
714 | -113
715 | -110
716 | -110
717 | -108
718 | -108
719 | -99
720 | -99
721 | -96
722 | -96
723 | -93
724 | -93
725 | -90
726 | -90
727 | -90
728 | -90
729 | -93
730 | -93
731 | -93
732 | -93
733 | -87
734 | -87
735 | -87
736 | -87
737 | -83
738 | -83
739 | -91
740 | -91
741 | -81
742 | -81
743 | -77
744 | -77
745 | -79
746 | -79
747 | -87
748 | -87
749 | -88
750 | -88
751 | -85
752 | -85
753 | -87
754 | -87
755 | -87
756 | -87
757 | -87
758 | -87
759 | -81
760 | -81
761 | -70
762 | -70
763 | -75
764 | -75
765 | -81
766 | -81
767 | -77
768 | -77
769 | -87
770 | -87
771 | -92
772 | -92
773 | -81
774 | -81
775 | -82
776 | -82
777 | -87
778 | -87
779 | -85
780 | -85
781 | -83
782 | -83
783 | -88
784 | -88
785 | -87
786 | -87
787 | -89
788 | -89
789 | -90
790 | -90
791 | -91
792 | -91
793 | -89
794 | -89
795 | -90
796 | -90
797 | -82
798 | -82
799 | -82
800 | -82
801 | -82
802 | -82
803 | -82
804 | -82
805 | -84
806 | -84
807 | -89
808 | -89
809 | -84
810 | -84
811 | -86
812 | -86
813 | -85
814 | -85
815 | -81
816 | -81
817 | -92
818 | -92
819 | -85
820 | -85
821 | -81
822 | -81
823 | -79
824 | -79
825 | -78
826 | -78
827 | -78
828 | -78
829 | -79
830 | -79
831 | -80
832 | -80
833 | -85
834 | -85
835 | -80
836 | -80
837 | -80
838 | -80
839 | -93
840 | -93
841 | -89
842 | -89
843 | -89
844 | -89
845 | -88
846 | -88
847 | -91
848 | -91
849 | -89
850 | -89
851 | -93
852 | -93
853 | -85
854 | -85
855 | -92
856 | -92
857 | -92
858 | -92
859 | -86
860 | -86
861 | -90
862 | -90
863 | -85
864 | -85
865 | -85
866 | -85
867 | -87
868 | -87
869 | -86
870 | -86
871 | -87
872 | -87
873 | -92
874 | -92
875 | -87
876 | -87
877 | -90
878 | -90
879 | -91
880 | -91
881 | -93
882 | -93
883 | -93
884 | -93
885 | -88
886 | -88
887 | -90
888 | -90
889 | -86
890 | -86
891 | -91
892 | -91
893 | -88
894 | -88
895 | -87
896 | -87
897 | -90
898 | -90
899 | -88
900 | -88
901 | -86
902 | -86
903 | -86
904 | -86
905 | -87
906 | -87
907 | -90
908 | -90
909 | -87
910 | -87
911 | -90
912 | -90
913 | -86
914 | -86
915 | -81
916 | -81
917 | -81
918 | -81
919 | -79
920 | -79
921 | -89
922 | -89
923 | -81
924 | -81
925 | -83
926 | -83
927 | -83
928 | -83
929 | -83
930 | -83
931 | -86
932 | -86
933 | -80
934 | -80
935 | -93
936 | -93
937 | -93
938 | -93
939 | -89
940 | -89
941 | -90
942 | -90
943 | -93
944 | -93
945 | -94
946 | -94
947 | -94
948 | -94
949 | -91
950 | -91
951 | -90
952 | -90
953 | -91
954 | -91
955 | -90
956 | -90
957 | -88
958 | -88
959 | -90
960 | -90
961 | -90
962 | -90
963 | -93
964 | -93
965 | -87
966 | -87
967 | -88
968 | -88
969 | -82
970 | -82
971 | -88
972 | -88
973 | -84
974 | -84
975 | -92
976 | -92
977 | -85
978 | -85
979 | -83
980 | -83
981 | -88
982 | -88
983 | -86
984 | -86
985 | -90
986 | -90
987 | -92
988 | -92
989 | -92
990 | -92
991 | -96
992 | -96
993 | -98
994 | -98
995 | -93
996 | -93
997 | -97
998 | -97
999 | -97
1000 | -97
1001 | -104
1002 | -104
1003 | -109
1004 | -109
1005 | -111
1006 | -111
1007 | -115
1008 | -115
1009 | -114
1010 | -114
1011 | -112
1012 | -112
1013 | -119
1014 | -119
1015 | -121
1016 | -121
1017 | -123
1018 | -123
1019 | -121
1020 | -121
1021 | -124
1022 | -124
1023 | -123
1024 | -123
1025 | -122
1026 | -122
1027 | -121
1028 | -121
1029 | -123
1030 | -123
1031 | -125
1032 | -125
1033 | -124
1034 | -124
1035 | -123
1036 | -123
1037 | -124
1038 | -124
1039 | -123
1040 | -123
1041 | -113
1042 | -113
1043 | -115
1044 | -115
1045 | -117
1046 | -117
1047 | -117
1048 | -117
1049 | -114
1050 | -114
1051 | -113
1052 | -113
1053 | -107
1054 | -107
1055 | -109
1056 | -109
1057 | -111
1058 | -111
1059 | -112
1060 | -112
1061 | -113
1062 | -113
1063 | -115
1064 | -115
1065 | -113
1066 | -113
1067 | -107
1068 | -107
1069 | -109
1070 | -109
1071 | -107
1072 | -107
1073 | -104
1074 | -104
1075 | -106
1076 | -106
1077 | -105
1078 | -105
1079 | -109
1080 | -109
1081 | -110
1082 | -110
1083 | -104
1084 | -104
1085 | -107
1086 | -107
1087 | -101
1088 | -101
1089 | -102
1090 | -102
1091 | -103
1092 | -103
1093 | -103
1094 | -103
1095 | -103
1096 | -103
1097 | -99
1098 | -99
1099 | -100
1100 | -100
1101 | -91
1102 | -91
1103 | -96
1104 | -96
1105 | -99
1106 | -99
1107 | -105
1108 | -105
1109 | -105
1110 | -105
1111 | -100
1112 | -100
1113 | -112
1114 | -112
1115 | -113
1116 | -113
1117 | -109
1118 | -109
1119 | -103
1120 | -103
1121 | -103
1122 | -103
1123 | -103
1124 | -103
1125 | -103
1126 | -103
1127 | -97
1128 | -97
1129 | -98
1130 | -98
1131 | -101
1132 | -101
1133 | -101
1134 | -101
1135 | -102
1136 | -102
1137 | -101
1138 | -101
1139 | -98
1140 | -98
1141 | -102
1142 | -102
1143 | -108
1144 | -108
1145 | -110
1146 | -110
1147 | -105
1148 | -105
1149 | -98
1150 | -98
1151 | -96
1152 | -96
1153 | -95
1154 | -95
1155 | -100
1156 | -100
1157 | -94
1158 | -94
1159 | -94
1160 | -94
1161 | -99
1162 | -99
1163 | -103
1164 | -103
1165 | -106
1166 | -106
1167 | -103
1168 | -103
1169 | -99
1170 | -99
1171 | -104
1172 | -104
1173 | -94
1174 | -94
1175 | -103
1176 | -103
1177 | -100
1178 | -100
1179 | -103
1180 | -103
1181 | -97
1182 | -97
1183 | -102
1184 | -102
1185 | -101
1186 | -101
1187 | -102
1188 | -102
1189 | -97
1190 | -97
1191 | -101
1192 | -101
1193 | -103
1194 | -103
1195 | -109
1196 | -109
1197 | -105
1198 | -105
1199 | -107
1200 | -107
1201 | -102
1202 | -102
1203 | -107
1204 | -107
1205 | -102
1206 | -102
1207 | -105
1208 | -105
1209 | -101
1210 | -101
1211 | -103
1212 | -103
1213 | -103
1214 | -103
1215 | -98
1216 | -98
1217 | -98
1218 | -98
1219 | -99
1220 | -99
1221 | -101
1222 | -101
1223 | -100
1224 | -100
1225 | -99
1226 | -99
1227 | -94
1228 | -94
1229 | -96
1230 | -96
1231 | -93
1232 | -93
1233 | -97
1234 | -97
1235 | -97
1236 | -97
1237 | -94
1238 | -94
1239 | -99
1240 | -99
1241 | -101
1242 | -101
1243 | -100
1244 | -100
1245 | -99
1246 | -99
1247 | -101
1248 | -101
1249 | -102
1250 | -102
1251 | -98
1252 | -98
1253 | -99
1254 | -99
1255 | -102
1256 | -102
1257 | -100
1258 | -100
1259 | -100
1260 | -100
1261 | -102
1262 | -102
1263 | -103
1264 | -103
1265 | -97
1266 | -97
1267 | -94
1268 | -94
1269 | -93
1270 | -93
1271 | -88
1272 | -88
1273 | -91
1274 | -91
1275 | -92
1276 | -92
1277 | -94
1278 | -94
1279 | -87
1280 | -87
1281 | -102
1282 | -102
1283 | -84
1284 | -84
1285 | -96
1286 | -96
1287 | -103
1288 | -103
1289 | -103
1290 | -103
1291 | -100
1292 | -100
1293 | -98
1294 | -98
1295 | -98
1296 | -98
1297 | -97
1298 | -97
1299 | -99
1300 | -99
1301 | -100
1302 | -100
1303 | -98
1304 | -98
1305 | -104
1306 | -104
1307 | -101
1308 | -101
1309 | -102
1310 | -102
1311 | -103
1312 | -103
1313 | -105
1314 | -105
1315 | -102
1316 | -102
1317 | -99
1318 | -99
1319 | -95
1320 | -95
1321 | -94
1322 | -94
1323 | -95
1324 | -95
1325 | -95
1326 | -95
1327 | -95
1328 | -95
1329 | -101
1330 | -101
1331 | -95
1332 | -95
1333 | -99
1334 | -99
1335 | -108
1336 | -108
1337 | -105
1338 | -105
1339 | -103
1340 | -103
1341 | -100
1342 | -100
1343 | -103
1344 | -103
1345 | -101
1346 | -101
1347 | -107
1348 | -107
1349 | -108
1350 | -108
1351 | -98
1352 | -98
1353 | -103
1354 | -103
1355 | -99
1356 | -99
1357 | -98
1358 | -98
1359 | -102
1360 | -102
1361 | -92
1362 | -92
1363 | -95
1364 | -95
1365 | -101
1366 | -101
1367 | -111
1368 | -111
1369 | -106
1370 | -106
1371 | -104
1372 | -104
1373 | -103
1374 | -103
1375 | -104
1376 | -104
1377 | -106
1378 | -106
1379 | -102
1380 | -102
1381 | -105
1382 | -105
1383 | -104
1384 | -104
1385 | -102
1386 | -102
1387 | -109
1388 | -109
1389 | -103
1390 | -103
1391 | -102
1392 | -102
1393 | -109
1394 | -109
1395 | -107
1396 | -107
1397 | -106
1398 | -106
1399 | -110
1400 | -110
1401 | -109
1402 | -109
1403 | -106
1404 | -106
1405 | -112
1406 | -112
1407 | -115
1408 | -115
1409 | -112
1410 | -112
1411 | -113
1412 | -113
1413 | -108
1414 | -108
1415 | -110
1416 | -110
1417 | -106
1418 | -106
1419 | -111
1420 | -111
1421 | -111
1422 | -111
1423 | -113
1424 | -113
1425 | -111
1426 | -111
1427 | -108
1428 | -108
1429 | -113
1430 | -113
1431 | -106
1432 | -106
1433 | -111
1434 | -111
1435 | -117
1436 | -117
1437 | -114
1438 | -114
1439 | -116
1440 | -116
1441 | -112
1442 | -112
1443 | -112
1444 | -112
1445 | -108
1446 | -108
1447 | -113
1448 | -113
1449 | -112
1450 | -112
1451 | -113
1452 | -113
1453 | -113
1454 | -113
1455 | -105
1456 | -105
1457 | -108
1458 | -108
1459 | -107
1460 | -107
1461 | -108
1462 | -108
1463 | -107
1464 | -107
1465 | -107
1466 | -107
1467 | -106
1468 | -106
1469 | -107
1470 | -107
1471 | -113
1472 | -113
1473 | -111
1474 | -111
1475 | -110
1476 | -110
1477 | -115
1478 | -115
1479 | -118
1480 | -118
1481 | -113
1482 | -113
1483 | -113
1484 | -113
1485 | -118
1486 | -118
1487 | -115
1488 | -115
1489 | -103
1490 | -103
1491 | -103
1492 | -103
1493 | -105
1494 | -105
1495 | -107
1496 | -107
1497 | -103
1498 | -103
1499 | -110
1500 | -110
1501 | -104
1502 | -104
1503 | -109
1504 | -109
1505 | -102
1506 | -102
1507 | -105
1508 | -105
1509 | -106
1510 | -106
1511 | -106
1512 | -106
1513 | -105
1514 | -105
1515 | -111
1516 | -111
1517 | -110
1518 | -110
1519 | -113
1520 | -113
1521 | -105
1522 | -105
1523 | -105
1524 | -105
1525 | -111
1526 | -111
1527 | -105
1528 | -105
1529 | -102
1530 | -102
1531 | -90
1532 | -90
1533 | -104
1534 | -104
1535 | -99
1536 | -99
1537 | -95
1538 | -95
1539 | -93
1540 | -93
1541 | -103
1542 | -103
1543 | -99
1544 | -99
1545 | -88
1546 | -88
1547 | -99
1548 | -99
1549 | -96
1550 | -96
1551 | -101
1552 | -101
1553 | -101
1554 | -101
1555 | -95
1556 | -95
1557 | -94
1558 | -94
1559 | -97
1560 | -97
1561 | -92
1562 | -92
1563 | -92
1564 | -99
1565 | -99
1566 | -101
1567 | -101
1568 | -96
1569 | -96
1570 | -96
1571 | -96
1572 | -96
1573 | -96
1574 | -96
1575 | -98
1576 | -98
1577 | -98
1578 | -98
1579 | -98
1580 | -98
1581 | -96
1582 | -96
1583 | -98
1584 | -98
1585 | -97
1586 | -97
1587 | -96
1588 | -96
1589 | -98
1590 | -98
1591 | -97
1592 | -97
1593 | -101
1594 | -101
1595 | -95
1596 | -95
1597 | -95
1598 | -95
1599 | -99
1600 | -99
1601 | -101
1602 | -101
1603 | -103
1604 | -103
1605 | -105
1606 | -105
1607 | -85
1608 | -85
1609 | -98
1610 | -98
1611 | -99
1612 | -99
1613 | -99
1614 | -99
1615 | -97
1616 | -97
1617 | -94
1618 | -94
1619 | -88
1620 | -88
1621 | -93
1622 | -93
1623 | -103
1624 | -103
1625 | -94
1626 | -94
1627 | -98
1628 | -98
1629 | -102
1630 | -102
1631 | -104
1632 | -104
1633 | -105
1634 | -105
1635 | -104
1636 | -104
1637 | -106
1638 | -106
1639 | -103
1640 | -103
1641 | -105
1642 | -105
1643 | -109
1644 | -109
1645 | -106
1646 | -106
1647 | -109
1648 | -109
1649 | -107
1650 | -107
1651 | -105
1652 | -105
1653 | -107
1654 | -107
1655 | -101
1656 | -101
1657 | -105
1658 | -105
1659 | -99
1660 | -99
1661 | -105
1662 | -105
1663 | -106
1664 | -106
1665 | -109
1666 | -109
1667 | -108
1668 | -108
1669 | -108
1670 | -108
1671 | -108
1672 | -108
1673 | -105
1674 | -105
1675 | -109
1676 | -109
1677 | -114
1678 | -114
1679 | -119
1680 | -119
1681 | -110
1682 | -110
1683 | -111
1684 | -111
1685 | -110
1686 | -110
1687 | -110
1688 | -110
1689 | -110
1690 | -110
1691 | -111
1692 | -111
1693 | -110
1694 | -110
1695 | -105
1696 | -105
1697 | -105
1698 | -105
1699 | -107
1700 | -107
1701 | -107
1702 | -107
1703 | -106
1704 | -106
1705 | -110
1706 | -110
1707 | -115
1708 | -115
1709 | -109
1710 | -109
1711 | -114
1712 | -114
1713 | -107
1714 | -107
1715 | -109
1716 | -109
1717 | -109
1718 | -109
1719 | -112
1720 | -112
1721 | -115
1722 | -115
1723 | -114
1724 | -114
1725 | -98
1726 | -98
1727 | -100
1728 | -100
1729 | -102
1730 | -102
1731 | -102
1732 | -102
1733 | -100
1734 | -100
1735 | -98
1736 | -98
1737 | -97
1738 | -97
1739 | -103
1740 | -103
1741 | -106
1742 | -106
1743 | -113
1744 | -113
1745 | -109
1746 | -109
1747 | -108
1748 | -108
1749 | -105
1750 | -105
1751 | -105
1752 | -105
1753 | -104
1754 | -104
1755 | -100
1756 | -100
1757 | -101
1758 | -101
1759 | -100
1760 | -100
1761 | -101
1762 | -101
1763 | -98
1764 | -98
1765 | -98
1766 | -98
1767 | -98
1768 | -98
1769 | -99
1770 | -99
1771 | -99
1772 | -99
1773 | -99
1774 | -99
1775 | -98
1776 | -98
1777 | -98
1778 | -98
1779 | -101
1780 | -101
1781 | -97
1782 | -97
1783 | -99
1784 | -99
1785 | -99
1786 | -99
1787 | -97
1788 | -97
1789 | -98
1790 | -98
1791 | -97
1792 | -97
1793 | -98
1794 | -98
1795 | -98
1796 | -98
1797 | -98
1798 | -98
1799 | -99
1800 | -99
1801 | -98
1802 | -98
1803 | -98
1804 | -98
1805 | -96
1806 | -96
1807 | -96
1808 | -96
1809 | -97
1810 | -97
1811 | -96
1812 | -96
1813 | -98
1814 | -98
1815 | -96
1816 | -96
1817 | -102
1818 | -102
1819 | -104
1820 | -104
1821 | -106
1822 | -106
1823 | -104
1824 | -104
1825 | -103
1826 | -103
1827 | -102
1828 | -102
1829 | -106
1830 | -106
1831 | -102
1832 | -102
1833 | -104
1834 | -104
1835 | -101
1836 | -101
1837 | -99
1838 | -99
1839 | -103
1840 | -103
1841 | -107
1842 | -107
1843 | -101
1844 | -101
1845 | -102
1846 | -102
1847 | -105
1848 | -105
1849 | -99
1850 | -99
1851 | -102
1852 | -102
1853 | -104
1854 | -104
1855 | -104
1856 | -104
1857 | -100
1858 | -100
1859 | -100
1860 | -100
1861 | -104
1862 | -104
1863 | -101
1864 | -101
1865 | -104
1866 | -104
1867 | -103
1868 | -103
1869 | -105
1870 | -105
1871 | -102
1872 | -102
1873 | -98
1874 | -98
1875 | -103
1876 | -103
1877 | -101
1878 | -101
1879 | -103
1880 | -103
1881 | -103
1882 | -103
1883 | -102
1884 | -102
1885 | -105
1886 | -105
1887 | -106
1888 | -106
1889 | -110
1890 | -110
1891 | -105
1892 | -105
1893 | -103
1894 | -103
1895 | -105
1896 | -105
1897 | -109
1898 | -109
1899 | -88
1900 | -88
1901 | -93
1902 | -93
1903 | -89
1904 | -89
1905 | -82
1906 | -82
1907 | -87
1908 | -87
1909 | -101
1910 | -101
1911 | -99
1912 | -99
1913 | -95
1914 | -95
1915 | -96
1916 | -96
1917 | -100
1918 | -100
1919 | -98
1920 | -98
1921 | -94
1922 | -94
1923 | -99
1924 | -99
1925 | -93
1926 | -93
1927 | -91
1928 | -91
1929 | -99
1930 | -99
1931 | -91
1932 | -91
1933 | -99
1934 | -99
1935 | -88
1936 | -88
1937 | -98
1938 | -98
1939 | -98
1940 | -98
1941 | -92
1942 | -92
1943 | -105
1944 | -105
1945 | -102
1946 | -102
1947 | -100
1948 | -100
1949 | -95
1950 | -95
1951 | -101
1952 | -101
1953 | -103
1954 | -103
1955 | -103
1956 | -103
1957 | -100
1958 | -100
1959 | -90
1960 | -90
1961 | -97
1962 | -97
1963 | -103
1964 | -103
1965 | -101
1966 | -101
1967 | -101
1968 | -101
1969 | -100
1970 | -100
1971 | -94
1972 | -94
1973 | -96
1974 | -96
1975 | -101
1976 | -101
1977 | -103
1978 | -103
1979 | -98
1980 | -98
1981 | -101
1982 | -101
1983 | -96
1984 | -96
1985 | -101
1986 | -101
1987 | -98
1988 | -98
1989 | -103
1990 | -103
1991 | -99
1992 | -99
1993 | -99
1994 | -99
1995 | -92
1996 | -92
1997 | -91
1998 | -91
1999 | -91
2000 | -91
2001 | -99
2002 | -99
2003 | -100
2004 | -100
2005 | -99
2006 | -99
2007 | -102
2008 | -102
2009 | -103
2010 | -103
2011 | -102
2012 | -102
2013 | -97
2014 | -97
2015 | -100
2016 | -100
2017 | -102
2018 | -102
2019 | -106
2020 | -106
2021 | -103
2022 | -103
2023 | -105
2024 | -105
2025 | -111
2026 | -111
2027 | -112
2028 | -112
2029 | -104
2030 | -104
2031 | -108
2032 | -108
2033 | -100
2034 | -100
2035 | -104
2036 | -104
2037 | -103
2038 | -103
2039 | -97
2040 | -97
2041 | -95
2042 | -95
2043 | -99
2044 | -99
2045 | -97
2046 | -97
2047 | -98
2048 | -98
2049 | -97
2050 | -97
2051 | -99
2052 | -99
2053 | -99
2054 | -99
2055 | -101
2056 | -101
2057 | -97
2058 | -97
2059 | -93
2060 | -93
2061 | -104
2062 | -104
2063 | -102
2064 | -102
2065 | -102
2066 | -102
2067 | -98
2068 | -98
2069 | -103
2070 | -103
2071 | -105
2072 | -105
2073 | -104
2074 | -104
2075 | -105
2076 | -105
2077 | -102
2078 | -102
2079 | -103
2080 | -103
2081 | -103
2082 | -103
2083 | -105
2084 | -105
2085 | -108
2086 | -108
2087 | -109
2088 | -109
2089 | -112
2090 | -112
2091 | -108
2092 | -108
2093 | -115
2094 | -115
2095 | -114
2096 | -114
2097 | -103
2098 | -103
2099 | -108
2100 | -108
2101 | -99
2102 | -99
2103 | -105
2104 | -105
2105 | -111
2106 | -111
2107 | -113
2108 | -113
2109 | -109
2110 | -109
2111 | -114
2112 | -114
2113 | -111
2114 | -111
2115 | -115
2116 | -115
2117 | -115
2118 | -115
2119 | -119
2120 | -119
2121 | -120
2122 | -120
2123 | -122
2124 | -122
2125 | -122
2126 | -122
2127 | -119
2128 | -119
2129 | -110
2130 | -110
2131 | -106
2132 | -106
2133 | -106
2134 | -106
2135 | -104
2136 | -104
2137 | -101
2138 | -101
2139 | -105
2140 | -105
2141 | -101
2142 | -101
2143 | -99
2144 | -99
2145 | -100
2146 | -100
2147 | -99
2148 | -99
2149 | -100
2150 | -100
2151 | -104
2152 | -104
2153 | -101
2154 | -101
2155 | -95
2156 | -95
2157 | -94
2158 | -94
2159 | -92
2160 | -92
2161 | -91
2162 | -91
2163 | -90
2164 | -90
2165 | -89
2166 | -89
2167 | -89
2168 | -89
2169 | -86
2170 | -86
2171 | -89
2172 | -89
2173 | -84
2174 | -84
2175 | -80
2176 | -80
2177 | -83
2178 | -83
2179 | -74
2180 | -74
2181 | -83
2182 | -83
2183 | -85
2184 | -85
2185 | -83
2186 | -83
2187 | -88
2188 | -88
2189 | -85
2190 | -85
2191 | -82
2192 | -82
2193 | -77
2194 | -77
2195 | -78
2196 | -78
2197 | -83
2198 | -83
2199 | -81
2200 | -81
2201 | -86
2202 | -86
2203 | -84
2204 | -84
2205 | -80
2206 | -80
2207 | -91
2208 | -91
2209 | -94
2210 | -94
2211 | -85
2212 | -85
2213 | -83
2214 | -83
2215 | -78
2216 | -78
2217 | -89
2218 | -89
2219 | -86
2220 | -86
2221 | -90
2222 | -90
2223 | -89
2224 | -89
2225 | -84
2226 | -84
2227 | -85
2228 | -85
2229 | -86
2230 | -86
2231 | -86
2232 | -100
2233 | -99
2234 | -99
2235 | -100
2236 | -100
2237 | -95
2238 | -95
2239 | -99
2240 | -99
2241 | -101
2242 | -101
2243 | -101
2244 | -101
2245 | -104
2246 | -104
2247 | -105
2248 | -105
2249 | -99
2250 | -99
2251 | -103
2252 | -103
2253 | -99
2254 | -99
2255 | -103
2256 | -103
2257 | -101
2258 | -101
2259 | -95
2260 | -95
2261 | -104
2262 | -104
2263 | -100
2264 | -100
2265 | -106
2266 | -106
2267 | -99
2268 | -99
2269 | -89
2270 | -89
2271 | -89
2272 | -89
2273 | -102
2274 | -102
2275 | -97
2276 | -97
2277 | -102
2278 | -102
2279 | -105
2280 | -105
2281 | -104
2282 | -104
2283 | -110
2284 | -110
2285 | -108
2286 | -108
2287 | -111
2288 | -111
2289 | -108
2290 | -108
2291 | -108
2292 | -108
2293 | -109
2294 | -109
2295 | -110
2296 | -110
2297 | -112
2298 | -112
2299 | -109
2300 | -109
2301 | -106
2302 | -106
2303 | -107
2304 | -107
2305 | -105
2306 | -105
2307 | -104
2308 | -104
2309 | -107
2310 | -107
2311 | -106
2312 | -106
2313 | -111
2314 | -111
2315 | -108
2316 | -108
2317 | -110
2318 | -110
2319 | -106
2320 | -106
2321 | -110
2322 | -110
2323 | -108
2324 | -108
2325 | -110
2326 | -110
2327 | -114
2328 | -114
2329 | -117
2330 | -117
2331 | -122
2332 | -122
2333 | -115
2334 | -115
2335 | -117
2336 | -117
2337 | -108
2338 | -108
2339 | -115
2340 | -115
2341 | -114
2342 | -114
2343 | -121
2344 | -121
2345 | -111
2346 | -111
2347 | -107
2348 | -107
2349 | -107
2350 | -107
2351 | -112
2352 | -112
2353 | -109
2354 | -109
2355 | -118
2356 | -118
2357 | -104
2358 | -104
2359 | -114
2360 | -114
2361 | -112
2362 | -112
2363 | -111
2364 | -111
2365 | -111
2366 | -111
2367 | -108
2368 | -108
2369 | -112
2370 | -112
2371 | -109
2372 | -109
2373 | -116
2374 | -116
2375 | -111
2376 | -111
2377 | -107
2378 | -107
2379 | -106
2380 | -106
2381 | -105
2382 | -105
2383 | -105
2384 | -105
2385 | -103
2386 | -103
2387 | -106
2388 | -106
2389 | -102
2390 | -102
2391 | -101
2392 | -101
2393 | -105
2394 | -105
2395 | -99
2396 | -99
2397 | -108
2398 | -108
2399 | -115
2400 | -115
2401 | -108
2402 | -108
2403 | -107
2404 | -107
2405 | -104
2406 | -104
2407 | -109
2408 | -109
2409 | -102
2410 | -102
2411 | -104
2412 | -104
2413 | -104
2414 | -104
2415 | -106
2416 | -106
2417 | -105
2418 | -105
2419 | -105
2420 | -105
2421 | -100
2422 | -100
2423 | -102
2424 | -102
2425 | -103
2426 | -103
2427 | -102
2428 | -102
2429 | -99
2430 | -99
2431 | -103
2432 | -103
2433 | -94
2434 | -94
2435 | -95
2436 | -95
2437 | -97
2438 | -97
2439 | -100
2440 | -100
2441 | -99
2442 | -99
2443 | -97
2444 | -97
2445 | -98
2446 | -98
2447 | -98
2448 | -98
2449 | -99
2450 | -99
2451 | -99
2452 | -99
2453 | -98
2454 | -98
2455 | -99
2456 | -99
2457 | -98
2458 | -98
2459 | -99
2460 | -99
2461 | -99
2462 | -99
2463 | -99
2464 | -99
2465 | -99
2466 | -99
2467 | -98
2468 | -98
2469 | -97
2470 | -97
2471 | -98
2472 | -98
2473 | -100
2474 | -100
2475 | -99
2476 | -99
2477 | -97
2478 | -97
2479 | -101
2480 | -101
2481 | -95
2482 | -95
2483 | -99
2484 | -99
2485 | -102
2486 | -102
2487 | -100
2488 | -100
2489 | -109
2490 | -109
2491 | -104
2492 | -104
2493 | -107
2494 | -107
2495 | -104
2496 | -104
2497 | -95
2498 | -95
2499 | -104
2500 | -104
2501 | -102
2502 | -102
2503 | -104
2504 | -104
2505 | -111
2506 | -111
2507 | -102
2508 | -102
2509 | -106
2510 | -106
2511 | -107
2512 | -107
2513 | -107
2514 | -107
2515 | -102
2516 | -102
2517 | -103
2518 | -103
2519 | -104
2520 | -104
2521 | -101
2522 | -101
2523 | -110
2524 | -110
2525 | -110
2526 | -110
2527 | -107
2528 | -107
2529 | -108
2530 | -108
2531 | -107
2532 | -107
2533 | -105
2534 | -105
2535 | -112
2536 | -112
2537 | -112
2538 | -112
2539 | -112
2540 | -112
2541 | -112
2542 | -112
2543 | -112
2544 | -112
2545 | -112
2546 | -112
2547 | -112
2548 | -112
2549 | -108
2550 | -108
2551 | -112
2552 | -112
2553 | -118
2554 | -118
2555 | -118
2556 | -118
2557 | -119
2558 | -119
2559 | -112
2560 | -112
2561 | -111
2562 | -111
2563 | -112
2564 | -112
2565 | -110
2566 | -110
2567 | -112
2568 | -112
2569 | -112
2570 | -112
2571 | -110
2572 | -110
2573 | -112
2574 | -112
2575 | -112
2576 | -112
2577 | -112
2578 | -112
2579 | -106
2580 | -106
2581 | -111
2582 | -111
2583 | -110
2584 | -110
2585 | -106
2586 | -106
2587 | -95
2588 | -95
2589 | -99
2590 | -99
2591 | -100
2592 | -100
2593 | -89
2594 | -89
2595 | -92
2596 | -92
2597 | -89
2598 | -89
2599 | -90
2600 | -90
2601 | -97
2602 | -97
2603 | -103
2604 | -103
2605 | -97
2606 | -97
2607 | -94
2608 | -94
2609 | -98
2610 | -98
2611 | -102
2612 | -102
2613 | -102
2614 | -102
2615 | -105
2616 | -105
2617 | -103
2618 | -103
2619 | -93
2620 | -93
2621 | -102
2622 | -102
2623 | -104
2624 | -104
2625 | -98
2626 | -98
2627 | -94
2628 | -94
2629 | -100
2630 | -100
2631 | -104
2632 | -104
2633 | -105
2634 | -105
2635 | -102
2636 | -102
2637 | -100
2638 | -100
2639 | -97
2640 | -97
2641 | -101
2642 | -101
2643 | -92
2644 | -92
2645 | -102
2646 | -102
2647 | -103
2648 | -103
2649 | -100
2650 | -100
2651 | -106
2652 | -106
2653 | -100
2654 | -100
2655 | -94
2656 | -94
2657 | -101
2658 | -101
2659 | -96
2660 | -96
2661 | -95
2662 | -95
2663 | -98
2664 | -98
2665 | -95
2666 | -95
2667 | -99
2668 | -99
2669 | -101
2670 | -101
2671 | -103
2672 | -103
2673 | -100
2674 | -100
2675 | -109
2676 | -109
2677 | -110
2678 | -110
2679 | -103
2680 | -103
2681 | -103
2682 | -103
2683 | -94
2684 | -94
2685 | -104
2686 | -104
2687 | -96
2688 | -96
2689 | -101
2690 | -101
2691 | -99
2692 | -99
2693 | -94
2694 | -94
2695 | -99
2696 | -99
2697 | -102
2698 | -102
2699 | -101
2700 | -101
2701 | -106
2702 | -106
2703 | -102
2704 | -102
2705 | -103
2706 | -103
2707 | -105
2708 | -105
2709 | -99
2710 | -99
2711 | -106
2712 | -106
2713 | -107
2714 | -107
2715 | -104
2716 | -104
2717 | -102
2718 | -102
2719 | -106
2720 | -106
2721 | -107
2722 | -107
2723 | -114
2724 | -114
2725 | -108
2726 | -108
2727 | -107
2728 | -107
2729 | -107
2730 | -107
2731 | -99
2732 | -99
2733 | -100
2734 | -100
2735 | -100
2736 | -100
2737 | -101
2738 | -101
2739 | -98
2740 | -98
2741 | -100
2742 | -100
2743 | -103
2744 | -103
2745 | -105
2746 | -105
2747 | -103
2748 | -103
2749 | -103
2750 | -103
2751 | -103
2752 | -103
2753 | -101
2754 | -101
2755 | -104
2756 | -104
2757 | -104
2758 | -104
2759 | -104
2760 | -104
2761 | -104
2762 | -104
2763 | -106
2764 | -106
2765 | -110
2766 | -110
2767 | -107
2768 | -107
2769 | -106
2770 | -106
2771 | -107
2772 | -107
2773 | -108
2774 | -108
2775 | -111
2776 | -111
2777 | -111
2778 | -111
2779 | -114
2780 | -114
2781 | -109
2782 | -109
2783 | -110
2784 | -110
2785 | -109
2786 | -109
2787 | -110
2788 | -110
2789 | -106
2790 | -106
2791 | -110
2792 | -110
2793 | -114
2794 | -114
2795 | -114
2796 | -114
2797 | -111
2798 | -111
2799 | -112
2800 | -112
2801 | -113
2802 | -113
2803 | -117
2804 | -117
2805 | -112
2806 | -112
2807 | -115
2808 | -115
2809 | -116
2810 | -116
2811 | -117
2812 | -117
2813 | -120
2814 | -120
2815 | -118
2816 | -118
2817 | -122
2818 | -122
2819 | -116
2820 | -116
2821 | -121
2822 | -121
2823 | -114
2824 | -114
2825 | -118
2826 | -118
2827 | -116
2828 | -116
2829 | -115
2830 | -115
2831 | -107
2832 | -107
2833 | -112
2834 | -112
2835 | -112
2836 | -112
2837 | -111
2838 | -111
2839 | -108
2840 | -108
2841 | -108
2842 | -108
2843 | -106
2844 | -106
2845 | -105
2846 | -105
2847 | -104
2848 | -104
2849 | -104
2850 | -104
2851 | -105
2852 | -105
2853 | -100
2854 | -100
2855 | -93
2856 | -93
2857 | -93
2858 | -93
2859 | -93
2860 | -93
2861 | -92
2862 | -92
2863 | -87
2864 | -87
2865 | -89
2866 | -89
2867 | -87
2868 | -87
2869 | -85
2870 | -85
2871 | -86
2872 | -86
2873 | -83
2874 | -83
2875 | -77
2876 | -77
2877 | -81
2878 | -81
2879 | -84
2880 | -84
2881 | -89
2882 | -89
2883 | -89
2884 | -89
2885 | -90
2886 | -90
2887 | -90
2888 | -90
2889 | -82
2890 | -82
2891 | -83
2892 | -83
2893 | -87
2894 | -87
2895 | -84
2896 | -84
2897 | -83
2898 | -83
2899 | -83
2900 | -83
2901 | -83
2902 | -83
2903 | -84
2904 | -84
2905 | -86
2906 | -86
2907 | -86
2908 | -86
2909 | -84
2910 | -84
2911 | -84
2912 | -84
2913 | -84
2914 | -84
2915 | -86
2916 | -86
2917 | -81
2918 | -81
2919 | -85
2920 | -85
2921 | -85
2922 | -85
2923 | -90
2924 | -90
2925 | -86
2926 | -86
2927 | -87
2928 | -87
2929 | -86
2930 | -86
2931 | -86
2932 | -86
2933 | -87
2934 | -87
2935 | -83
2936 | -83
2937 | -92
2938 | -92
2939 | -85
2940 | -85
2941 | -90
2942 | -90
2943 | -88
2944 | -88
--------------------------------------------------------------------------------