├── .gitignore
├── Csv
│   ├── 1113_12_12_train20_param.csv
│   └── 1113_12_12_train20_train.csv
├── Dockerfile
├── LICENSE
├── Pkl
│   ├── test20.pkl
│   └── train20.pkl
├── Pt
│   └── train20_1113_12_12_step14999_act.pt
├── README.md
├── actor.py
├── config.py
├── critic.py
├── data.py
├── docker.sh
├── env.py
├── search.py
├── test.py
└── train.py
/.gitignore:
--------------------------------------------------------------------------------
1 | /__pycache__/
2 |
--------------------------------------------------------------------------------
/Csv/1113_12_12_train20_param.csv:
--------------------------------------------------------------------------------
1 | mode,train
2 | batch,512
3 | city_t,20
4 | steps,15000
5 | embed,128
6 | hidden,128
7 | clip_logits,10
8 | softmax_T,1.0
9 | optim,Adam
10 | init_min,-0.08
11 | init_max,0.08
12 | n_glimpse,1
13 | n_process,3
14 | decode_type,sampling
15 | lr,0.001
16 | is_lr_decay,True
17 | lr_decay,0.96
18 | lr_decay_step,5000.0
19 | act_model_path,None
20 | seed,1
21 | alpha,0.99
22 | islogger,True
23 | issaver,True
24 | log_step,10
25 | log_dir,./Csv/
26 | model_dir,./Pt/
27 | pkl_dir,./Pkl/
28 | cuda_dv,0
29 | dump_date,1113_11_04
30 | task,train20
31 | pkl_path,./Pkl/train20.pkl
32 | n_samples,7680000
33 |
--------------------------------------------------------------------------------
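Note: the parameter file above is a flat key,value dump, so it can be read back into a config dict in a few lines. The sketch below is illustrative only, not part of this repo (the actual argument handling lives in config.py); the type-coercion rules are assumptions inferred from the values visible in the file.

```python
import csv

def load_params(path="./Csv/1113_12_12_train20_param.csv"):
    """Read a key,value param dump into a dict.

    Illustrative sketch only -- the repo's own config handling is in
    config.py; the coercion rules below are assumptions based on the
    values shown in the file (bools, None, ints, floats, strings).
    """
    params = {}
    with open(path, newline="") as f:
        for row in csv.reader(f):
            if len(row) != 2:
                continue  # skip blank or malformed lines
            key, raw = row
            if raw in ("True", "False"):
                val = raw == "True"
            elif raw == "None":
                val = None
            else:
                try:
                    val = int(raw)
                except ValueError:
                    try:
                        val = float(raw)
                    except ValueError:
                        val = raw  # paths and names stay as strings
            params[key] = val
    return params

# e.g. load_params()["batch"] -> 512, ["lr"] -> 0.001, ["task"] -> "train20"
```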
/Csv/1113_12_12_train20_train.csv:
--------------------------------------------------------------------------------
1 | step,actor loss,critic loss,average distance,time
2 | 10,-435.9835,107.7158,10.4426,0min5sec
3 | 20,-365.6867,83.2985,10.4262,0min5sec
4 | 30,-217.5953,58.8234,10.4142,0min5sec
5 | 40,-169.4671,44.9938,10.3186,0min5sec
6 | 50,-133.4929,36.5082,9.9990,0min5sec
7 | 60,-111.7530,30.7207,9.6348,0min5sec
8 | 70,-96.0632,26.5670,9.3487,0min5sec
9 | 80,-84.2144,23.4380,9.1194,0min5sec
10 | 90,-74.9880,21.0006,8.9431,0min5sec
11 | 100,-67.5947,19.0446,8.7991,0min5sec
12 | 110,-61.5110,17.4387,8.6815,0min5sec
13 | 120,-56.4081,16.0987,8.5826,0min5sec
14 | 130,-52.0913,14.9708,8.5005,0min5sec
15 | 140,-48.4517,14.0090,8.4264,0min5sec
16 | 150,-45.2023,13.1713,8.3660,0min5sec
17 | 160,-42.3922,12.4328,8.3116,0min5sec
18 | 170,-39.9252,11.7765,8.2635,0min5sec
19 | 180,-37.7238,11.1933,8.2221,0min5sec
20 | 190,-35.7641,10.6734,8.1895,0min5sec
21 | 200,-34.0016,10.2041,8.1568,0min5sec
22 | 210,-32.3933,9.7770,8.1264,0min5sec
23 | 220,-30.9310,9.3883,8.0983,0min5sec
24 | 230,-29.5925,9.0337,8.0734,0min5sec
25 | 240,-28.3605,8.7105,8.0497,0min5sec
26 | 250,-27.2630,8.4142,8.0275,0min5sec
27 | 260,-26.2136,8.1404,8.0081,0min5sec
28 | 270,-25.2441,7.8871,7.9872,0min5sec
29 | 280,-24.3512,7.6486,7.9670,0min5sec
30 | 290,-23.5131,7.4242,7.9473,0min5sec
31 | 300,-22.7338,7.2153,7.9266,0min5sec
32 | 310,-21.9987,7.0205,7.9038,0min5sec
33 | 320,-21.3229,6.8396,7.8813,0min5sec
34 | 330,-20.6881,6.6686,7.8585,0min5sec
35 | 340,-20.0781,6.5084,7.8347,0min5sec
36 | 350,-19.5044,6.3554,7.8088,0min5sec
37 | 360,-18.9597,6.2098,7.7837,0min5sec
38 | 370,-18.4586,6.0708,7.7623,0min5sec
39 | 380,-17.9777,5.9386,7.7389,0min5sec
40 | 390,-17.5173,5.8149,7.7138,0min5sec
41 | 400,-17.0814,5.6960,7.6900,0min5sec
42 | 410,-16.6780,5.5826,7.6658,0min5sec
43 | 420,-16.2910,5.4753,7.6432,0min5sec
44 | 430,-15.9039,5.3727,7.6235,0min5sec
45 | 440,-15.5462,5.2753,7.6022,0min5sec
46 | 450,-15.1969,5.1798,7.5787,0min5sec
47 | 460,-14.8816,5.0849,7.5484,0min5sec
48 | 470,-14.5686,4.9909,7.5100,0min5sec
49 | 480,-14.3007,4.9025,7.4728,0min5sec
50 | 490,-14.0072,4.8136,7.4297,0min5sec
51 | 500,-13.7339,4.7264,7.3887,0min5sec
52 | 510,-13.4721,4.6418,7.3432,0min5sec
53 | 520,-13.2224,4.5598,7.2975,0min5sec
54 | 530,-12.9775,4.4799,7.2518,0min5sec
55 | 540,-12.7423,4.4029,7.2066,0min5sec
56 | 550,-12.5152,4.3281,7.1615,0min5sec
57 | 560,-12.2964,4.2559,7.1178,0min5sec
58 | 570,-12.0846,4.1862,7.0747,0min5sec
59 | 580,-11.8818,4.1187,7.0326,0min5sec
60 | 590,-11.6853,4.0533,6.9916,0min5sec
61 | 600,-11.4926,3.9903,6.9509,0min5sec
62 | 610,-11.3039,3.9294,6.9108,0min5sec
63 | 620,-11.1230,3.8699,6.8717,0min5sec
64 | 630,-10.9488,3.8128,6.8347,0min5sec
65 | 640,-10.7808,3.7571,6.7977,0min5sec
66 | 650,-10.6180,3.7026,6.7615,0min5sec
67 | 660,-10.4601,3.6498,6.7259,0min5sec
68 | 670,-10.3073,3.5985,6.6915,0min5sec
69 | 680,-10.1580,3.5487,6.6575,0min5sec
70 | 690,-10.0137,3.5006,6.6252,0min5sec
71 | 700,-9.8721,3.4538,6.5934,0min5sec
72 | 710,-9.7347,3.4086,6.5627,0min5sec
73 | 720,-9.6013,3.3642,6.5325,0min5sec
74 | 730,-9.4717,3.3208,6.5026,0min5sec
75 | 740,-9.3457,3.2787,6.4736,0min5sec
76 | 750,-9.2235,3.2377,6.4454,0min5sec
77 | 760,-9.1041,3.1977,6.4177,0min5sec
78 | 770,-8.9866,3.1588,6.3907,0min5sec
79 | 780,-8.8752,3.1212,6.3648,0min5sec
80 | 790,-8.7645,3.0842,6.3392,0min5sec
81 | 800,-8.6560,3.0481,6.3137,0min5sec
82 | 810,-8.5515,3.0128,6.2890,0min5sec
83 | 820,-8.4472,2.9785,6.2652,0min5sec
84 | 830,-8.3474,2.9453,6.2416,0min5sec
85 | 840,-8.2497,2.9129,6.2188,0min5sec
86 | 850,-8.1534,2.8811,6.1964,0min5sec
87 | 860,-8.0609,2.8500,6.1748,0min5sec
88 | 870,-7.9699,2.8194,6.1537,0min5sec
89 | 880,-7.8809,2.7894,6.1325,0min5sec
90 | 890,-7.7940,2.7602,6.1122,0min5sec
91 | 900,-7.7095,2.7316,6.0922,0min5sec
92 | 910,-7.6254,2.7036,6.0723,0min5sec
93 | 920,-7.5439,2.6762,6.0527,0min5sec
94 | 930,-7.4638,2.6495,6.0337,0min5sec
95 | 940,-7.3856,2.6231,6.0148,0min5sec
96 | 950,-7.3090,2.5974,5.9965,0min5sec
97 | 960,-7.2343,2.5722,5.9785,0min5sec
98 | 970,-7.1602,2.5476,5.9609,0min5sec
99 | 980,-7.0887,2.5236,5.9438,0min5sec
100 | 990,-7.0183,2.4998,5.9270,0min5sec
101 | 1000,-6.9488,2.4767,5.9105,0min5sec
102 | 1010,-6.8814,2.4539,5.8942,0min5sec
103 | 1020,-6.8140,2.4316,5.8779,0min5sec
104 | 1030,-6.7488,2.4097,5.8621,0min5sec
105 | 1040,-6.6846,2.3882,5.8468,0min5sec
106 | 1050,-6.6225,2.3672,5.8320,0min5sec
107 | 1060,-6.5606,2.3466,5.8174,0min5sec
108 | 1070,-6.5003,2.3265,5.8030,0min5sec
109 | 1080,-6.4422,2.3068,5.7886,0min5sec
110 | 1090,-6.3839,2.2873,5.7745,0min5sec
111 | 1100,-6.3273,2.2682,5.7606,0min5sec
112 | 1110,-6.2713,2.2493,5.7469,0min5sec
113 | 1120,-6.2147,2.2311,5.7340,0min5sec
114 | 1130,-6.1611,2.2129,5.7208,0min5sec
115 | 1140,-6.1079,2.1951,5.7080,0min5sec
116 | 1150,-6.0555,2.1776,5.6953,0min5sec
117 | 1160,-6.0039,2.1603,5.6829,0min5sec
118 | 1170,-5.9537,2.1433,5.6707,0min5sec
119 | 1180,-5.9040,2.1266,5.6586,0min5sec
120 | 1190,-5.8562,2.1103,5.6468,0min5sec
121 | 1200,-5.8071,2.0942,5.6352,0min5sec
122 | 1210,-5.7602,2.0784,5.6237,0min5sec
123 | 1220,-5.7138,2.0627,5.6122,0min5sec
124 | 1230,-5.6682,2.0474,5.6010,0min5sec
125 | 1240,-5.6235,2.0323,5.5900,0min5sec
126 | 1250,-5.5794,2.0174,5.5791,0min5sec
127 | 1260,-5.5359,2.0028,5.5684,0min5sec
128 | 1270,-5.4926,1.9883,5.5578,0min5sec
129 | 1280,-5.4505,1.9741,5.5472,0min5sec
130 | 1290,-5.4091,1.9601,5.5368,0min5sec
131 | 1300,-5.3679,1.9463,5.5267,0min5sec
132 | 1310,-5.3275,1.9328,5.5170,0min5sec
133 | 1320,-5.2874,1.9195,5.5073,0min5sec
134 | 1330,-5.2483,1.9063,5.4977,0min5sec
135 | 1340,-5.2101,1.8934,5.4881,0min5sec
136 | 1350,-5.1720,1.8806,5.4788,0min5sec
137 | 1360,-5.1348,1.8680,5.4695,0min5sec
138 | 1370,-5.0974,1.8556,5.4605,0min5sec
139 | 1380,-5.0611,1.8434,5.4516,0min5sec
140 | 1390,-5.0250,1.8314,5.4427,0min5sec
141 | 1400,-4.9899,1.8195,5.4339,0min5sec
142 | 1410,-4.9545,1.8078,5.4253,0min5sec
143 | 1420,-4.9201,1.7963,5.4168,0min5sec
144 | 1430,-4.8865,1.7850,5.4086,0min5sec
145 | 1440,-4.8530,1.7737,5.4003,0min5sec
146 | 1450,-4.8201,1.7626,5.3920,0min5sec
147 | 1460,-4.7877,1.7517,5.3839,0min5sec
148 | 1470,-4.7559,1.7410,5.3760,0min5sec
149 | 1480,-4.7244,1.7304,5.3681,0min5sec
150 | 1490,-4.6932,1.7198,5.3602,0min5sec
151 | 1500,-4.6625,1.7095,5.3524,0min5sec
152 | 1510,-4.6321,1.6992,5.3448,0min5sec
153 | 1520,-4.6019,1.6892,5.3374,0min5sec
154 | 1530,-4.5723,1.6792,5.3300,0min5sec
155 | 1540,-4.5429,1.6695,5.3227,0min5sec
156 | 1550,-4.5147,1.6600,5.3155,0min5sec
157 | 1560,-4.4860,1.6505,5.3082,0min5sec
158 | 1570,-4.4582,1.6410,5.3010,0min5sec
159 | 1580,-4.4302,1.6317,5.2940,0min5sec
160 | 1590,-4.4028,1.6226,5.2871,0min5sec
161 | 1600,-4.3758,1.6134,5.2801,0min5sec
162 | 1610,-4.3495,1.6044,5.2733,0min5sec
163 | 1620,-4.3231,1.5955,5.2665,0min5sec
164 | 1630,-4.2972,1.5867,5.2598,0min5sec
165 | 1640,-4.2712,1.5780,5.2533,0min5sec
166 | 1650,-4.2457,1.5694,5.2470,0min5sec
167 | 1660,-4.2206,1.5610,5.2407,0min5sec
168 | 1670,-4.1958,1.5526,5.2344,0min5sec
169 | 1680,-4.1715,1.5443,5.2283,0min5sec
170 | 1690,-4.1473,1.5361,5.2223,0min5sec
171 | 1700,-4.1236,1.5282,5.2165,0min5sec
172 | 1710,-4.0998,1.5202,5.2104,0min5sec
173 | 1720,-4.0767,1.5124,5.2046,0min5sec
174 | 1730,-4.0535,1.5046,5.1989,0min5sec
175 | 1740,-4.0304,1.4968,5.1932,0min5sec
176 | 1750,-4.0078,1.4892,5.1876,0min5sec
177 | 1760,-3.9856,1.4816,5.1822,0min5sec
178 | 1770,-3.9636,1.4742,5.1767,0min5sec
179 | 1780,-3.9418,1.4668,5.1713,0min5sec
180 | 1790,-3.9204,1.4595,5.1658,0min5sec
181 | 1800,-3.8991,1.4523,5.1603,0min5sec
182 | 1810,-3.8778,1.4451,5.1549,0min5sec
183 | 1820,-3.8570,1.4380,5.1495,0min5sec
184 | 1830,-3.8364,1.4310,5.1442,0min5sec
185 | 1840,-3.8161,1.4241,5.1390,0min5sec
186 | 1850,-3.7956,1.4173,5.1341,0min5sec
187 | 1860,-3.7758,1.4106,5.1290,0min5sec
188 | 1870,-3.7559,1.4040,5.1238,0min5sec
189 | 1880,-3.7364,1.3973,5.1186,0min5sec
190 | 1890,-3.7170,1.3908,5.1138,0min5sec
191 | 1900,-3.6978,1.3845,5.1091,0min5sec
192 | 1910,-3.6786,1.3781,5.1042,0min5sec
193 | 1920,-3.6597,1.3718,5.0995,0min5sec
194 | 1930,-3.6412,1.3656,5.0948,0min5sec
195 | 1940,-3.6228,1.3593,5.0900,0min5sec
196 | 1950,-3.6045,1.3532,5.0854,0min5sec
197 | 1960,-3.5863,1.3471,5.0809,0min5sec
198 | 1970,-3.5684,1.3410,5.0763,0min5sec
199 | 1980,-3.5508,1.3350,5.0717,0min5sec
200 | 1990,-3.5334,1.3291,5.0671,0min5sec
201 | 2000,-3.5162,1.3232,5.0626,0min5sec
202 | 2010,-3.4990,1.3174,5.0581,0min5sec
203 | 2020,-3.4820,1.3116,5.0537,0min5sec
204 | 2030,-3.4651,1.3060,5.0493,0min5sec
205 | 2040,-3.4484,1.3003,5.0449,0min5sec
206 | 2050,-3.4320,1.2948,5.0407,0min5sec
207 | 2060,-3.4156,1.2893,5.0364,0min5sec
208 | 2070,-3.3996,1.2838,5.0322,0min5sec
209 | 2080,-3.3836,1.2784,5.0280,0min5sec
210 | 2090,-3.3678,1.2730,5.0238,0min5sec
211 | 2100,-3.3521,1.2677,5.0198,0min5sec
212 | 2110,-3.3367,1.2625,5.0159,0min5sec
213 | 2120,-3.3213,1.2573,5.0120,0min5sec
214 | 2130,-3.3061,1.2521,5.0082,0min5sec
215 | 2140,-3.2909,1.2470,5.0043,0min5sec
216 | 2150,-3.2759,1.2420,5.0005,0min5sec
217 | 2160,-3.2612,1.2370,4.9967,0min5sec
218 | 2170,-3.2465,1.2320,4.9929,0min5sec
219 | 2180,-3.2319,1.2271,4.9891,0min5sec
220 | 2190,-3.2173,1.2223,4.9855,0min5sec
221 | 2200,-3.2029,1.2174,4.9818,0min5sec
222 | 2210,-3.1887,1.2126,4.9783,0min5sec
223 | 2220,-3.1747,1.2078,4.9745,0min5sec
224 | 2230,-3.1607,1.2031,4.9709,0min5sec
225 | 2240,-3.1469,1.1985,4.9674,0min5sec
226 | 2250,-3.1332,1.1938,4.9639,0min5sec
227 | 2260,-3.1196,1.1892,4.9603,0min5sec
228 | 2270,-3.1061,1.1847,4.9568,0min5sec
229 | 2280,-3.0927,1.1801,4.9532,0min5sec
230 | 2290,-3.0796,1.1757,4.9497,0min5sec
231 | 2300,-3.0665,1.1713,4.9464,0min5sec
232 | 2310,-3.0536,1.1669,4.9432,0min5sec
233 | 2320,-3.0407,1.1627,4.9401,0min5sec
234 | 2330,-3.0280,1.1584,4.9368,0min5sec
235 | 2340,-3.0152,1.1541,4.9335,0min5sec
236 | 2350,-3.0026,1.1498,4.9302,0min5sec
237 | 2360,-2.9901,1.1456,4.9270,0min5sec
238 | 2370,-2.9777,1.1415,4.9239,0min5sec
239 | 2380,-2.9655,1.1373,4.9207,0min5sec
240 | 2390,-2.9534,1.1332,4.9174,0min5sec
241 | 2400,-2.9413,1.1292,4.9143,0min5sec
242 | 2410,-2.9294,1.1251,4.9112,0min5sec
243 | 2420,-2.9176,1.1211,4.9081,0min5sec
244 | 2430,-2.9057,1.1172,4.9050,0min5sec
245 | 2440,-2.8940,1.1133,4.9020,0min5sec
246 | 2450,-2.8825,1.1094,4.8990,0min5sec
247 | 2460,-2.8710,1.1055,4.8959,0min5sec
248 | 2470,-2.8596,1.1016,4.8929,0min5sec
249 | 2480,-2.8483,1.0978,4.8899,0min5sec
250 | 2490,-2.8371,1.0940,4.8869,0min5sec
251 | 2500,-2.8260,1.0902,4.8839,0min5sec
252 | 2510,-2.8151,1.0865,4.8811,0min5sec
253 | 2520,-2.8042,1.0828,4.8782,0min5sec
254 | 2530,-2.7934,1.0791,4.8753,0min5sec
255 | 2540,-2.7826,1.0755,4.8726,0min5sec
256 | 2550,-2.7719,1.0720,4.8699,0min5sec
257 | 2560,-2.7614,1.0684,4.8671,0min5sec
258 | 2570,-2.7507,1.0648,4.8644,0min5sec
259 | 2580,-2.7403,1.0612,4.8617,0min5sec
260 | 2590,-2.7298,1.0577,4.8590,0min5sec
261 | 2600,-2.7197,1.0542,4.8563,0min5sec
262 | 2610,-2.7095,1.0508,4.8535,0min5sec
263 | 2620,-2.6995,1.0473,4.8510,0min5sec
264 | 2630,-2.6894,1.0440,4.8484,0min5sec
265 | 2640,-2.6794,1.0406,4.8458,0min5sec
266 | 2650,-2.6695,1.0372,4.8432,0min5sec
267 | 2660,-2.6595,1.0339,4.8406,0min5sec
268 | 2670,-2.6497,1.0306,4.8382,0min5sec
269 | 2680,-2.6401,1.0274,4.8357,0min5sec
270 | 2690,-2.6304,1.0242,4.8334,0min5sec
271 | 2700,-2.6209,1.0210,4.8309,0min5sec
272 | 2710,-2.6116,1.0179,4.8285,0min5sec
273 | 2720,-2.6021,1.0147,4.8262,0min5sec
274 | 2730,-2.5929,1.0116,4.8237,0min5sec
275 | 2740,-2.5836,1.0084,4.8213,0min5sec
276 | 2750,-2.5744,1.0053,4.8188,0min5sec
277 | 2760,-2.5654,1.0021,4.8164,0min5sec
278 | 2770,-2.5564,0.9991,4.8139,0min5sec
279 | 2780,-2.5477,0.9960,4.8116,0min5sec
280 | 2790,-2.5386,0.9930,4.8092,0min5sec
281 | 2800,-2.5298,0.9899,4.8067,0min5sec
282 | 2810,-2.5209,0.9869,4.8043,0min5sec
283 | 2820,-2.5124,0.9839,4.8021,0min5sec
284 | 2830,-2.5037,0.9810,4.7998,0min5sec
285 | 2840,-2.4952,0.9781,4.7975,0min5sec
286 | 2850,-2.4866,0.9751,4.7952,0min5sec
287 | 2860,-2.4782,0.9722,4.7929,0min5sec
288 | 2870,-2.4698,0.9693,4.7908,0min5sec
289 | 2880,-2.4613,0.9665,4.7886,0min5sec
290 | 2890,-2.4531,0.9636,4.7863,0min5sec
291 | 2900,-2.4450,0.9608,4.7840,0min5sec
292 | 2910,-2.4366,0.9579,4.7818,0min5sec
293 | 2920,-2.4286,0.9552,4.7796,0min5sec
294 | 2930,-2.4205,0.9524,4.7775,0min5sec
295 | 2940,-2.4123,0.9496,4.7754,0min5sec
296 | 2950,-2.4047,0.9469,4.7733,0min5sec
297 | 2960,-2.3967,0.9442,4.7712,0min5sec
298 | 2970,-2.3889,0.9416,4.7693,0min5sec
299 | 2980,-2.3811,0.9389,4.7672,0min5sec
300 | 2990,-2.3734,0.9362,4.7653,0min5sec
301 | 3000,-2.3657,0.9336,4.7632,0min5sec
302 | 3010,-2.3582,0.9309,4.7611,0min5sec
303 | 3020,-2.3505,0.9283,4.7591,0min5sec
304 | 3030,-2.3429,0.9257,4.7569,0min5sec
305 | 3040,-2.3354,0.9230,4.7549,0min5sec
306 | 3050,-2.3280,0.9205,4.7528,0min5sec
307 | 3060,-2.3205,0.9179,4.7508,0min5sec
308 | 3070,-2.3130,0.9153,4.7488,0min5sec
309 | 3080,-2.3058,0.9129,4.7470,0min5sec
310 | 3090,-2.2989,0.9105,4.7451,0min5sec
311 | 3100,-2.2917,0.9080,4.7431,0min5sec
312 | 3110,-2.2844,0.9055,4.7411,0min5sec
313 | 3120,-2.2773,0.9030,4.7392,0min5sec
314 | 3130,-2.2703,0.9006,4.7373,0min5sec
315 | 3140,-2.2632,0.8982,4.7353,0min5sec
316 | 3150,-2.2563,0.8957,4.7334,0min5sec
317 | 3160,-2.2493,0.8933,4.7314,0min5sec
318 | 3170,-2.2425,0.8910,4.7296,0min5sec
319 | 3180,-2.2356,0.8886,4.7277,0min5sec
320 | 3190,-2.2286,0.8862,4.7258,0min5sec
321 | 3200,-2.2219,0.8838,4.7240,0min5sec
322 | 3210,-2.2151,0.8815,4.7221,0min5sec
323 | 3220,-2.2084,0.8791,4.7202,0min5sec
324 | 3230,-2.2018,0.8768,4.7184,0min5sec
325 | 3240,-2.1952,0.8745,4.7166,0min5sec
326 | 3250,-2.1886,0.8723,4.7148,0min5sec
327 | 3260,-2.1821,0.8700,4.7130,0min5sec
328 | 3270,-2.1755,0.8677,4.7112,0min5sec
329 | 3280,-2.1690,0.8655,4.7095,0min5sec
330 | 3290,-2.1626,0.8633,4.7077,0min5sec
331 | 3300,-2.1563,0.8611,4.7059,0min5sec
332 | 3310,-2.1499,0.8590,4.7041,0min5sec
333 | 3320,-2.1436,0.8568,4.7025,0min5sec
334 | 3330,-2.1373,0.8546,4.7008,0min5sec
335 | 3340,-2.1311,0.8525,4.6991,0min5sec
336 | 3350,-2.1248,0.8503,4.6973,0min5sec
337 | 3360,-2.1186,0.8482,4.6956,0min5sec
338 | 3370,-2.1125,0.8461,4.6941,0min5sec
339 | 3380,-2.1064,0.8439,4.6925,0min5sec
340 | 3390,-2.1005,0.8418,4.6908,0min5sec
341 | 3400,-2.0944,0.8397,4.6891,0min5sec
342 | 3410,-2.0885,0.8377,4.6874,0min5sec
343 | 3420,-2.0825,0.8356,4.6859,0min5sec
344 | 3430,-2.0766,0.8336,4.6843,0min5sec
345 | 3440,-2.0706,0.8316,4.6826,0min5sec
346 | 3450,-2.0649,0.8296,4.6810,0min5sec
347 | 3460,-2.0591,0.8275,4.6793,0min5sec
348 | 3470,-2.0533,0.8255,4.6777,0min5sec
349 | 3480,-2.0476,0.8236,4.6763,0min5sec
350 | 3490,-2.0420,0.8216,4.6747,0min5sec
351 | 3500,-2.0364,0.8196,4.6731,0min5sec
352 | 3510,-2.0309,0.8177,4.6716,0min5sec
353 | 3520,-2.0252,0.8157,4.6701,0min5sec
354 | 3530,-2.0194,0.8138,4.6685,0min5sec
355 | 3540,-2.0140,0.8119,4.6670,0min5sec
356 | 3550,-2.0085,0.8100,4.6654,0min5sec
357 | 3560,-2.0030,0.8081,4.6639,0min5sec
358 | 3570,-1.9976,0.8062,4.6623,0min5sec
359 | 3580,-1.9921,0.8043,4.6608,0min5sec
360 | 3590,-1.9868,0.8025,4.6593,0min5sec
361 | 3600,-1.9814,0.8006,4.6578,0min5sec
362 | 3610,-1.9761,0.7988,4.6564,0min5sec
363 | 3620,-1.9708,0.7969,4.6549,0min5sec
364 | 3630,-1.9655,0.7950,4.6534,0min5sec
365 | 3640,-1.9603,0.7932,4.6519,0min5sec
366 | 3650,-1.9550,0.7914,4.6504,0min5sec
367 | 3660,-1.9498,0.7896,4.6490,0min5sec
368 | 3670,-1.9447,0.7878,4.6476,0min5sec
369 | 3680,-1.9396,0.7861,4.6461,0min5sec
370 | 3690,-1.9346,0.7843,4.6447,0min5sec
371 | 3700,-1.9295,0.7826,4.6434,0min5sec
372 | 3710,-1.9244,0.7808,4.6420,0min5sec
373 | 3720,-1.9194,0.7790,4.6406,0min5sec
374 | 3730,-1.9144,0.7773,4.6393,0min5sec
375 | 3740,-1.9095,0.7756,4.6381,0min5sec
376 | 3750,-1.9046,0.7739,4.6368,0min5sec
377 | 3760,-1.8997,0.7722,4.6355,0min5sec
378 | 3770,-1.8948,0.7705,4.6341,0min5sec
379 | 3780,-1.8899,0.7688,4.6328,0min5sec
380 | 3790,-1.8851,0.7672,4.6314,0min5sec
381 | 3800,-1.8803,0.7655,4.6302,0min5sec
382 | 3810,-1.8755,0.7638,4.6289,0min5sec
383 | 3820,-1.8708,0.7621,4.6277,0min5sec
384 | 3830,-1.8661,0.7605,4.6264,0min5sec
385 | 3840,-1.8614,0.7589,4.6251,0min5sec
386 | 3850,-1.8569,0.7573,4.6239,0min5sec
387 | 3860,-1.8522,0.7557,4.6226,0min5sec
388 | 3870,-1.8476,0.7540,4.6214,0min5sec
389 | 3880,-1.8430,0.7525,4.6201,0min5sec
390 | 3890,-1.8384,0.7509,4.6188,0min5sec
391 | 3900,-1.8338,0.7493,4.6175,0min5sec
392 | 3910,-1.8294,0.7477,4.6164,0min5sec
393 | 3920,-1.8249,0.7462,4.6151,0min5sec
394 | 3930,-1.8204,0.7446,4.6139,0min5sec
395 | 3940,-1.8158,0.7431,4.6127,0min5sec
396 | 3950,-1.8115,0.7416,4.6114,0min5sec
397 | 3960,-1.8071,0.7400,4.6102,0min5sec
398 | 3970,-1.8026,0.7385,4.6089,0min5sec
399 | 3980,-1.7982,0.7370,4.6077,0min5sec
400 | 3990,-1.7938,0.7355,4.6065,0min5sec
401 | 4000,-1.7894,0.7339,4.6053,0min5sec
402 | 4010,-1.7851,0.7325,4.6042,0min5sec
403 | 4020,-1.7806,0.7310,4.6031,0min5sec
404 | 4030,-1.7764,0.7296,4.6019,0min5sec
405 | 4040,-1.7720,0.7281,4.6007,0min5sec
406 | 4050,-1.7678,0.7266,4.5996,0min5sec
407 | 4060,-1.7635,0.7251,4.5984,0min5sec
408 | 4070,-1.7593,0.7237,4.5972,0min5sec
409 | 4080,-1.7551,0.7222,4.5960,0min5sec
410 | 4090,-1.7509,0.7208,4.5948,0min5sec
411 | 4100,-1.7468,0.7193,4.5937,0min5sec
412 | 4110,-1.7427,0.7179,4.5926,0min5sec
413 | 4120,-1.7388,0.7165,4.5914,0min5sec
414 | 4130,-1.7346,0.7150,4.5903,0min5sec
415 | 4140,-1.7306,0.7136,4.5891,0min5sec
416 | 4150,-1.7266,0.7122,4.5880,0min5sec
417 | 4160,-1.7226,0.7108,4.5869,0min5sec
418 | 4170,-1.7186,0.7094,4.5858,0min5sec
419 | 4180,-1.7146,0.7080,4.5848,0min5sec
420 | 4190,-1.7106,0.7067,4.5838,0min5sec
421 | 4200,-1.7067,0.7053,4.5827,0min5sec
422 | 4210,-1.7027,0.7040,4.5816,0min5sec
423 | 4220,-1.6988,0.7026,4.5805,0min5sec
424 | 4230,-1.6949,0.7012,4.5794,0min5sec
425 | 4240,-1.6911,0.6999,4.5784,0min5sec
426 | 4250,-1.6872,0.6985,4.5773,0min5sec
427 | 4260,-1.6834,0.6972,4.5762,0min5sec
428 | 4270,-1.6796,0.6959,4.5752,0min5sec
429 | 4280,-1.6757,0.6946,4.5741,0min5sec
430 | 4290,-1.6721,0.6932,4.5731,0min5sec
431 | 4300,-1.6682,0.6919,4.5720,0min5sec
432 | 4310,-1.6646,0.6906,4.5710,0min5sec
433 | 4320,-1.6608,0.6893,4.5700,0min5sec
434 | 4330,-1.6571,0.6880,4.5689,0min5sec
435 | 4340,-1.6534,0.6867,4.5678,0min5sec
436 | 4350,-1.6497,0.6854,4.5667,0min5sec
437 | 4360,-1.6459,0.6842,4.5657,0min5sec
438 | 4370,-1.6423,0.6829,4.5647,0min5sec
439 | 4380,-1.6387,0.6817,4.5637,0min5sec
440 | 4390,-1.6350,0.6804,4.5627,0min5sec
441 | 4400,-1.6314,0.6792,4.5618,0min5sec
442 | 4410,-1.6279,0.6779,4.5608,0min5sec
443 | 4420,-1.6243,0.6767,4.5598,0min5sec
444 | 4430,-1.6208,0.6755,4.5588,0min5sec
445 | 4440,-1.6173,0.6742,4.5578,0min5sec
446 | 4450,-1.6138,0.6730,4.5568,0min5sec
447 | 4460,-1.6103,0.6718,4.5558,0min5sec
448 | 4470,-1.6068,0.6706,4.5549,0min5sec
449 | 4480,-1.6033,0.6694,4.5539,0min5sec
450 | 4490,-1.5999,0.6682,4.5530,0min5sec
451 | 4500,-1.5964,0.6670,4.5520,0min5sec
452 | 4510,-1.5930,0.6659,4.5511,0min5sec
453 | 4520,-1.5896,0.6647,4.5502,0min5sec
454 | 4530,-1.5862,0.6635,4.5492,0min5sec
455 | 4540,-1.5828,0.6623,4.5483,0min5sec
456 | 4550,-1.5795,0.6612,4.5473,0min5sec
457 | 4560,-1.5762,0.6600,4.5463,0min5sec
458 | 4570,-1.5728,0.6588,4.5453,0min5sec
459 | 4580,-1.5694,0.6577,4.5444,0min5sec
460 | 4590,-1.5661,0.6565,4.5434,0min5sec
461 | 4600,-1.5627,0.6554,4.5425,0min5sec
462 | 4610,-1.5595,0.6543,4.5416,0min5sec
463 | 4620,-1.5562,0.6532,4.5407,0min5sec
464 | 4630,-1.5530,0.6520,4.5398,0min5sec
465 | 4640,-1.5498,0.6509,4.5390,0min5sec
466 | 4650,-1.5465,0.6498,4.5381,0min5sec
467 | 4660,-1.5433,0.6487,4.5372,0min5sec
468 | 4670,-1.5401,0.6476,4.5363,0min5sec
469 | 4680,-1.5369,0.6465,4.5354,0min5sec
470 | 4690,-1.5338,0.6454,4.5345,0min5sec
471 | 4700,-1.5307,0.6443,4.5336,0min5sec
472 | 4710,-1.5275,0.6432,4.5327,0min5sec
473 | 4720,-1.5245,0.6422,4.5319,0min5sec
474 | 4730,-1.5214,0.6411,4.5310,0min5sec
475 | 4740,-1.5182,0.6400,4.5300,0min5sec
476 | 4750,-1.5152,0.6389,4.5291,0min5sec
477 | 4760,-1.5120,0.6379,4.5283,0min5sec
478 | 4770,-1.5090,0.6368,4.5274,0min5sec
479 | 4780,-1.5060,0.6357,4.5265,0min5sec
480 | 4790,-1.5030,0.6347,4.5257,0min5sec
481 | 4800,-1.5000,0.6337,4.5248,0min5sec
482 | 4810,-1.4970,0.6326,4.5240,0min5sec
483 | 4820,-1.4940,0.6315,4.5231,0min5sec
484 | 4830,-1.4910,0.6305,4.5223,0min5sec
485 | 4840,-1.4880,0.6295,4.5214,0min5sec
486 | 4850,-1.4851,0.6284,4.5206,0min5sec
487 | 4860,-1.4822,0.6274,4.5197,0min5sec
488 | 4870,-1.4793,0.6264,4.5189,0min5sec
489 | 4880,-1.4764,0.6254,4.5181,0min5sec
490 | 4890,-1.4735,0.6243,4.5172,0min5sec
491 | 4900,-1.4706,0.6233,4.5163,0min5sec
492 | 4910,-1.4676,0.6223,4.5155,0min5sec
493 | 4920,-1.4648,0.6213,4.5146,0min5sec
494 | 4930,-1.4619,0.6203,4.5138,0min5sec
495 | 4940,-1.4591,0.6193,4.5129,0min5sec
496 | 4950,-1.4562,0.6183,4.5121,0min5sec
497 | 4960,-1.4534,0.6173,4.5113,0min5sec
498 | 4970,-1.4506,0.6163,4.5105,0min5sec
499 | 4980,-1.4478,0.6153,4.5096,0min5sec
500 | 4990,-1.4449,0.6143,4.5088,0min5sec
501 | 5000,-1.4421,0.6134,4.5080,0min5sec
502 | 5010,-1.4393,0.6124,4.5072,0min5sec
503 | 5020,-1.4366,0.6114,4.5064,0min5sec
504 | 5030,-1.4338,0.6104,4.5056,0min5sec
505 | 5040,-1.4311,0.6095,4.5047,0min5sec
506 | 5050,-1.4283,0.6085,4.5039,0min5sec
507 | 5060,-1.4256,0.6076,4.5031,0min5sec
508 | 5070,-1.4229,0.6066,4.5023,0min5sec
509 | 5080,-1.4201,0.6057,4.5014,0min5sec
510 | 5090,-1.4174,0.6047,4.5007,0min5sec
511 | 5100,-1.4147,0.6038,4.4999,0min5sec
512 | 5110,-1.4121,0.6029,4.4992,0min5sec
513 | 5120,-1.4094,0.6020,4.4984,0min5sec
514 | 5130,-1.4067,0.6010,4.4977,0min5sec
515 | 5140,-1.4041,0.6001,4.4969,0min5sec
516 | 5150,-1.4014,0.5992,4.4962,0min5sec
517 | 5160,-1.3988,0.5983,4.4954,0min5sec
518 | 5170,-1.3962,0.5973,4.4947,0min5sec
519 | 5180,-1.3936,0.5964,4.4939,0min5sec
520 | 5190,-1.3910,0.5955,4.4931,0min5sec
521 | 5200,-1.3884,0.5946,4.4924,0min5sec
522 | 5210,-1.3858,0.5937,4.4916,0min5sec
523 | 5220,-1.3832,0.5928,4.4909,0min5sec
524 | 5230,-1.3806,0.5919,4.4901,0min5sec
525 | 5240,-1.3781,0.5910,4.4894,0min5sec
526 | 5250,-1.3756,0.5902,4.4887,0min5sec
527 | 5260,-1.3730,0.5893,4.4879,0min5sec
528 | 5270,-1.3705,0.5884,4.4872,0min5sec
529 | 5280,-1.3680,0.5876,4.4865,0min5sec
530 | 5290,-1.3654,0.5867,4.4858,0min5sec
531 | 5300,-1.3629,0.5858,4.4850,0min5sec
532 | 5310,-1.3604,0.5850,4.4843,0min5sec
533 | 5320,-1.3579,0.5841,4.4835,0min5sec
534 | 5330,-1.3554,0.5832,4.4828,0min5sec
535 | 5340,-1.3529,0.5824,4.4820,0min5sec
536 | 5350,-1.3505,0.5815,4.4813,0min5sec
537 | 5360,-1.3480,0.5807,4.4806,0min5sec
538 | 5370,-1.3457,0.5799,4.4800,0min5sec
539 | 5380,-1.3433,0.5791,4.4794,0min5sec
540 | 5390,-1.3409,0.5782,4.4787,0min5sec
541 | 5400,-1.3385,0.5774,4.4781,0min5sec
542 | 5410,-1.3361,0.5766,4.4774,0min5sec
543 | 5420,-1.3338,0.5757,4.4767,0min5sec
544 | 5430,-1.3314,0.5749,4.4761,0min5sec
545 | 5440,-1.3290,0.5741,4.4754,0min5sec
546 | 5450,-1.3267,0.5732,4.4747,0min5sec
547 | 5460,-1.3243,0.5724,4.4740,0min5sec
548 | 5470,-1.3220,0.5716,4.4733,0min5sec
549 | 5480,-1.3197,0.5708,4.4726,0min5sec
550 | 5490,-1.3173,0.5700,4.4719,0min5sec
551 | 5500,-1.3150,0.5691,4.4712,0min5sec
552 | 5510,-1.3127,0.5683,4.4705,0min5sec
553 | 5520,-1.3104,0.5675,4.4698,0min5sec
554 | 5530,-1.3082,0.5667,4.4691,0min5sec
555 | 5540,-1.3059,0.5660,4.4684,0min5sec
556 | 5550,-1.3036,0.5652,4.4678,0min5sec
557 | 5560,-1.3014,0.5644,4.4672,0min5sec
558 | 5570,-1.2991,0.5636,4.4665,0min5sec
559 | 5580,-1.2969,0.5629,4.4659,0min5sec
560 | 5590,-1.2947,0.5621,4.4653,0min5sec
561 | 5600,-1.2924,0.5613,4.4647,0min5sec
562 | 5610,-1.2902,0.5605,4.4640,0min5sec
563 | 5620,-1.2880,0.5598,4.4633,0min5sec
564 | 5630,-1.2858,0.5590,4.4626,0min5sec
565 | 5640,-1.2836,0.5582,4.4620,0min5sec
566 | 5650,-1.2813,0.5575,4.4614,0min5sec
567 | 5660,-1.2792,0.5567,4.4607,0min5sec
568 | 5670,-1.2771,0.5559,4.4600,0min5sec
569 | 5680,-1.2749,0.5552,4.4593,0min5sec
570 | 5690,-1.2727,0.5544,4.4587,0min5sec
571 | 5700,-1.2706,0.5537,4.4580,0min5sec
572 | 5710,-1.2685,0.5529,4.4574,0min5sec
573 | 5720,-1.2663,0.5522,4.4568,0min5sec
574 | 5730,-1.2642,0.5514,4.4561,0min5sec
575 | 5740,-1.2621,0.5506,4.4555,0min5sec
576 | 5750,-1.2600,0.5499,4.4549,0min5sec
577 | 5760,-1.2578,0.5491,4.4542,0min5sec
578 | 5770,-1.2558,0.5484,4.4536,0min5sec
579 | 5780,-1.2537,0.5477,4.4530,0min5sec
580 | 5790,-1.2516,0.5469,4.4524,0min5sec
581 | 5800,-1.2495,0.5462,4.4518,0min5sec
582 | 5810,-1.2474,0.5455,4.4511,0min5sec
583 | 5820,-1.2454,0.5448,4.4505,0min5sec
584 | 5830,-1.2433,0.5440,4.4499,0min5sec
585 | 5840,-1.2413,0.5433,4.4493,0min5sec
586 | 5850,-1.2393,0.5426,4.4487,0min5sec
587 | 5860,-1.2372,0.5419,4.4482,0min5sec
588 | 5870,-1.2352,0.5412,4.4475,0min5sec
589 | 5880,-1.2332,0.5405,4.4469,0min5sec
590 | 5890,-1.2311,0.5398,4.4463,0min5sec
591 | 5900,-1.2291,0.5391,4.4458,0min5sec
592 | 5910,-1.2271,0.5384,4.4453,0min5sec
593 | 5920,-1.2251,0.5378,4.4447,0min5sec
594 | 5930,-1.2232,0.5371,4.4441,0min5sec
595 | 5940,-1.2212,0.5364,4.4435,0min5sec
596 | 5950,-1.2192,0.5357,4.4429,0min5sec
597 | 5960,-1.2171,0.5350,4.4423,0min5sec
598 | 5970,-1.2152,0.5343,4.4417,0min5sec
599 | 5980,-1.2133,0.5336,4.4411,0min5sec
600 | 5990,-1.2113,0.5329,4.4405,0min5sec
601 | 6000,-1.2094,0.5323,4.4399,0min5sec
602 | 6010,-1.2075,0.5316,4.4393,0min5sec
603 | 6020,-1.2055,0.5309,4.4387,0min5sec
604 | 6030,-1.2036,0.5302,4.4382,0min5sec
605 | 6040,-1.2016,0.5296,4.4376,0min5sec
606 | 6050,-1.1997,0.5289,4.4371,0min5sec
607 | 6060,-1.1979,0.5283,4.4365,0min5sec
608 | 6070,-1.1960,0.5276,4.4359,0min5sec
609 | 6080,-1.1941,0.5270,4.4354,0min5sec
610 | 6090,-1.1922,0.5263,4.4348,0min5sec
611 | 6100,-1.1904,0.5257,4.4342,0min5sec
612 | 6110,-1.1885,0.5250,4.4337,0min5sec
613 | 6120,-1.1866,0.5243,4.4331,0min5sec
614 | 6130,-1.1848,0.5237,4.4326,0min5sec
615 | 6140,-1.1829,0.5230,4.4320,0min5sec
616 | 6150,-1.1811,0.5224,4.4314,0min5sec
617 | 6160,-1.1792,0.5217,4.4308,0min5sec
618 | 6170,-1.1774,0.5211,4.4302,0min5sec
619 | 6180,-1.1756,0.5204,4.4297,0min5sec
620 | 6190,-1.1737,0.5198,4.4291,0min5sec
621 | 6200,-1.1718,0.5192,4.4286,0min5sec
622 | 6210,-1.1701,0.5185,4.4280,0min5sec
623 | 6220,-1.1683,0.5179,4.4275,0min5sec
624 | 6230,-1.1664,0.5173,4.4270,0min5sec
625 | 6240,-1.1646,0.5166,4.4264,0min5sec
626 | 6250,-1.1628,0.5160,4.4259,0min5sec
627 | 6260,-1.1611,0.5154,4.4254,0min5sec
628 | 6270,-1.1592,0.5148,4.4249,0min5sec
629 | 6280,-1.1575,0.5142,4.4243,0min5sec
630 | 6290,-1.1557,0.5135,4.4238,0min5sec
631 | 6300,-1.1539,0.5129,4.4233,0min5sec
632 | 6310,-1.1522,0.5123,4.4227,0min5sec
633 | 6320,-1.1504,0.5117,4.4222,0min5sec
634 | 6330,-1.1486,0.5110,4.4216,0min5sec
635 | 6340,-1.1469,0.5104,4.4211,0min5sec
636 | 6350,-1.1452,0.5098,4.4207,0min5sec
637 | 6360,-1.1435,0.5092,4.4202,0min5sec
638 | 6370,-1.1418,0.5086,4.4197,0min5sec
639 | 6380,-1.1400,0.5080,4.4192,0min5sec
640 | 6390,-1.1383,0.5074,4.4186,0min5sec
641 | 6400,-1.1366,0.5068,4.4181,0min5sec
642 | 6410,-1.1349,0.5062,4.4176,0min5sec
643 | 6420,-1.1332,0.5056,4.4170,0min5sec
644 | 6430,-1.1315,0.5050,4.4165,0min5sec
645 | 6440,-1.1298,0.5044,4.4160,0min5sec
646 | 6450,-1.1281,0.5038,4.4155,0min5sec
647 | 6460,-1.1264,0.5032,4.4150,0min5sec
648 | 6470,-1.1247,0.5026,4.4144,0min5sec
649 | 6480,-1.1231,0.5020,4.4139,0min5sec
650 | 6490,-1.1214,0.5014,4.4134,0min5sec
651 | 6500,-1.1198,0.5009,4.4129,0min5sec
652 | 6510,-1.1181,0.5003,4.4124,0min5sec
653 | 6520,-1.1164,0.4997,4.4119,0min5sec
654 | 6530,-1.1148,0.4991,4.4114,0min5sec
655 | 6540,-1.1132,0.4986,4.4109,0min5sec
656 | 6550,-1.1116,0.4980,4.4104,0min5sec
657 | 6560,-1.1100,0.4974,4.4099,0min5sec
658 | 6570,-1.1083,0.4968,4.4094,0min5sec
659 | 6580,-1.1067,0.4963,4.4089,0min5sec
660 | 6590,-1.1050,0.4957,4.4084,0min5sec
661 | 6600,-1.1034,0.4951,4.4079,0min5sec
662 | 6610,-1.1018,0.4946,4.4075,0min5sec
663 | 6620,-1.1002,0.4940,4.4070,0min5sec
664 | 6630,-1.0986,0.4935,4.4066,0min5sec
665 | 6640,-1.0970,0.4929,4.4061,0min5sec
666 | 6650,-1.0953,0.4923,4.4056,0min5sec
667 | 6660,-1.0938,0.4918,4.4052,0min5sec
668 | 6670,-1.0923,0.4912,4.4047,0min5sec
669 | 6680,-1.0907,0.4907,4.4042,0min5sec
670 | 6690,-1.0891,0.4901,4.4038,0min5sec
671 | 6700,-1.0876,0.4896,4.4034,0min5sec
672 | 6710,-1.0860,0.4891,4.4029,0min5sec
673 | 6720,-1.0845,0.4885,4.4025,0min5sec
674 | 6730,-1.0829,0.4880,4.4021,0min5sec
675 | 6740,-1.0813,0.4875,4.4016,0min5sec
676 | 6750,-1.0798,0.4869,4.4012,0min5sec
677 | 6760,-1.0783,0.4864,4.4007,0min5sec
678 | 6770,-1.0768,0.4859,4.4003,0min5sec
679 | 6780,-1.0752,0.4853,4.3999,0min5sec
680 | 6790,-1.0737,0.4848,4.3995,0min5sec
681 | 6800,-1.0722,0.4843,4.3990,0min5sec
682 | 6810,-1.0707,0.4838,4.3986,0min5sec
683 | 6820,-1.0691,0.4832,4.3982,0min5sec
684 | 6830,-1.0676,0.4827,4.3978,0min5sec
685 | 6840,-1.0661,0.4822,4.3974,0min5sec
686 | 6850,-1.0646,0.4817,4.3970,0min5sec
687 | 6860,-1.0631,0.4811,4.3965,0min5sec
688 | 6870,-1.0616,0.4806,4.3961,0min5sec
689 | 6880,-1.0601,0.4801,4.3957,0min5sec
690 | 6890,-1.0587,0.4796,4.3952,0min5sec
691 | 6900,-1.0572,0.4790,4.3948,0min5sec
692 | 6910,-1.0557,0.4785,4.3943,0min5sec
693 | 6920,-1.0543,0.4780,4.3939,0min5sec
694 | 6930,-1.0528,0.4775,4.3934,0min5sec
695 | 6940,-1.0514,0.4770,4.3930,0min5sec
696 | 6950,-1.0499,0.4764,4.3925,0min5sec
697 | 6960,-1.0485,0.4759,4.3921,0min5sec
698 | 6970,-1.0470,0.4754,4.3916,0min5sec
699 | 6980,-1.0456,0.4749,4.3912,0min5sec
700 | 6990,-1.0441,0.4744,4.3907,0min5sec
701 | 7000,-1.0427,0.4738,4.3903,0min5sec
702 | 7010,-1.0413,0.4733,4.3898,0min5sec
703 | 7020,-1.0398,0.4728,4.3894,0min5sec
704 | 7030,-1.0384,0.4723,4.3889,0min5sec
705 | 7040,-1.0370,0.4718,4.3885,0min5sec
706 | 7050,-1.0356,0.4713,4.3881,0min5sec
707 | 7060,-1.0342,0.4708,4.3877,0min5sec
708 | 7070,-1.0328,0.4702,4.3872,0min5sec
709 | 7080,-1.0314,0.4697,4.3868,0min5sec
710 | 7090,-1.0300,0.4692,4.3863,0min5sec
711 | 7100,-1.0286,0.4688,4.3859,0min5sec
712 | 7110,-1.0272,0.4683,4.3855,0min5sec
713 | 7120,-1.0258,0.4678,4.3851,0min5sec
714 | 7130,-1.0244,0.4673,4.3847,0min5sec
715 | 7140,-1.0230,0.4668,4.3842,0min5sec
716 | 7150,-1.0216,0.4663,4.3839,0min5sec
717 | 7160,-1.0203,0.4658,4.3834,0min5sec
718 | 7170,-1.0189,0.4653,4.3830,0min5sec
719 | 7180,-1.0175,0.4648,4.3826,0min5sec
720 | 7190,-1.0162,0.4643,4.3822,0min5sec
721 | 7200,-1.0148,0.4638,4.3818,0min5sec
722 | 7210,-1.0134,0.4633,4.3813,0min5sec
723 | 7220,-1.0120,0.4629,4.3809,0min5sec
724 | 7230,-1.0107,0.4624,4.3805,0min5sec
725 | 7240,-1.0094,0.4619,4.3801,0min5sec
726 | 7250,-1.0081,0.4614,4.3797,0min5sec
727 | 7260,-1.0067,0.4609,4.3793,0min5sec
728 | 7270,-1.0054,0.4604,4.3789,0min5sec
729 | 7280,-1.0040,0.4600,4.3785,0min5sec
730 | 7290,-1.0027,0.4595,4.3781,0min5sec
731 | 7300,-1.0014,0.4590,4.3777,0min5sec
732 | 7310,-1.0001,0.4586,4.3772,0min5sec
733 | 7320,-0.9988,0.4581,4.3768,0min5sec
734 | 7330,-0.9975,0.4576,4.3764,0min5sec
735 | 7340,-0.9962,0.4571,4.3760,0min5sec
736 | 7350,-0.9949,0.4567,4.3756,0min5sec
737 | 7360,-0.9936,0.4562,4.3752,0min5sec
738 | 7370,-0.9923,0.4557,4.3748,0min5sec
739 | 7380,-0.9910,0.4553,4.3745,0min5sec
740 | 7390,-0.9897,0.4548,4.3740,0min5sec
741 | 7400,-0.9884,0.4543,4.3736,0min5sec
742 | 7410,-0.9871,0.4539,4.3732,0min5sec
743 | 7420,-0.9857,0.4534,4.3728,0min5sec
744 | 7430,-0.9845,0.4530,4.3724,0min5sec
745 | 7440,-0.9832,0.4525,4.3720,0min5sec
746 | 7450,-0.9819,0.4520,4.3716,0min5sec
747 | 7460,-0.9807,0.4516,4.3712,0min5sec
748 | 7470,-0.9794,0.4511,4.3708,0min5sec
749 | 7480,-0.9781,0.4506,4.3704,0min5sec
750 | 7490,-0.9770,0.4502,4.3700,0min5sec
751 | 7500,-0.9758,0.4497,4.3696,0min5sec
752 | 7510,-0.9746,0.4493,4.3692,0min5sec
753 | 7520,-0.9733,0.4489,4.3688,0min5sec
754 | 7530,-0.9721,0.4484,4.3685,0min5sec
755 | 7540,-0.9708,0.4480,4.3681,0min5sec
756 | 7550,-0.9695,0.4476,4.3677,0min5sec
757 | 7560,-0.9683,0.4471,4.3673,0min5sec
758 | 7570,-0.9671,0.4467,4.3670,0min5sec
759 | 7580,-0.9658,0.4462,4.3666,0min5sec
760 | 7590,-0.9646,0.4458,4.3663,0min5sec
761 | 7600,-0.9634,0.4453,4.3659,0min5sec
762 | 7610,-0.9622,0.4449,4.3656,0min5sec
763 | 7620,-0.9610,0.4445,4.3652,0min5sec
764 | 7630,-0.9597,0.4440,4.3649,0min5sec
765 | 7640,-0.9585,0.4436,4.3645,0min5sec
766 | 7650,-0.9573,0.4432,4.3641,0min5sec
767 | 7660,-0.9561,0.4427,4.3638,0min5sec
768 | 7670,-0.9549,0.4423,4.3634,0min5sec
769 | 7680,-0.9538,0.4419,4.3630,0min5sec
770 | 7690,-0.9526,0.4414,4.3627,0min5sec
771 | 7700,-0.9514,0.4410,4.3623,0min5sec
772 | 7710,-0.9502,0.4405,4.3619,0min5sec
773 | 7720,-0.9491,0.4401,4.3615,0min5sec
774 | 7730,-0.9479,0.4397,4.3612,0min5sec
775 | 7740,-0.9467,0.4393,4.3608,0min5sec
776 | 7750,-0.9455,0.4388,4.3604,0min5sec
777 | 7760,-0.9443,0.4384,4.3600,0min5sec
778 | 7770,-0.9432,0.4380,4.3597,0min5sec
779 | 7780,-0.9420,0.4376,4.3593,0min5sec
780 | 7790,-0.9408,0.4372,4.3589,0min5sec
781 | 7800,-0.9397,0.4367,4.3586,0min5sec
782 | 7810,-0.9385,0.4363,4.3582,0min5sec
783 | 7820,-0.9374,0.4359,4.3578,0min5sec
784 | 7830,-0.9362,0.4355,4.3575,0min5sec
785 | 7840,-0.9351,0.4350,4.3571,0min5sec
786 | 7850,-0.9339,0.4346,4.3568,0min5sec
787 | 7860,-0.9328,0.4342,4.3564,0min5sec
788 | 7870,-0.9317,0.4338,4.3561,0min5sec
789 | 7880,-0.9305,0.4334,4.3557,0min5sec
790 | 7890,-0.9294,0.4330,4.3554,0min5sec
791 | 7900,-0.9283,0.4326,4.3550,0min5sec
792 | 7910,-0.9272,0.4322,4.3546,0min5sec
793 | 7920,-0.9261,0.4318,4.3542,0min5sec
794 | 7930,-0.9249,0.4314,4.3539,0min5sec
795 | 7940,-0.9238,0.4310,4.3535,0min5sec
796 | 7950,-0.9227,0.4306,4.3532,0min5sec
797 | 7960,-0.9216,0.4302,4.3528,0min5sec
798 | 7970,-0.9205,0.4298,4.3525,0min5sec
799 | 7980,-0.9194,0.4294,4.3522,0min5sec
800 | 7990,-0.9183,0.4290,4.3518,0min5sec
801 | 8000,-0.9173,0.4286,4.3515,0min5sec
802 | 8010,-0.9161,0.4282,4.3512,0min5sec
803 | 8020,-0.9151,0.4278,4.3509,0min5sec
804 | 8030,-0.9140,0.4274,4.3505,0min5sec
805 | 8040,-0.9129,0.4270,4.3502,0min5sec
806 | 8050,-0.9119,0.4266,4.3498,0min5sec
807 | 8060,-0.9108,0.4262,4.3495,0min5sec
808 | 8070,-0.9097,0.4258,4.3491,0min5sec
809 | 8080,-0.9086,0.4254,4.3488,0min5sec
810 | 8090,-0.9075,0.4250,4.3485,0min5sec
811 | 8100,-0.9065,0.4246,4.3481,0min5sec
812 | 8110,-0.9054,0.4242,4.3477,0min5sec
813 | 8120,-0.9043,0.4238,4.3474,0min5sec
814 | 8130,-0.9032,0.4234,4.3470,0min5sec
815 | 8140,-0.9022,0.4230,4.3467,0min5sec
816 | 8150,-0.9011,0.4227,4.3464,0min5sec
817 | 8160,-0.9001,0.4223,4.3460,0min5sec
818 | 8170,-0.8990,0.4219,4.3457,0min5sec
819 | 8180,-0.8979,0.4215,4.3453,0min5sec
820 | 8190,-0.8969,0.4211,4.3450,0min5sec
821 | 8200,-0.8959,0.4207,4.3447,0min5sec
822 | 8210,-0.8948,0.4203,4.3443,0min5sec
823 | 8220,-0.8938,0.4200,4.3440,0min5sec
824 | 8230,-0.8927,0.4196,4.3436,0min5sec
825 | 8240,-0.8917,0.4192,4.3433,0min5sec
826 | 8250,-0.8907,0.4188,4.3429,0min5sec
827 | 8260,-0.8897,0.4184,4.3426,0min5sec
828 | 8270,-0.8887,0.4181,4.3423,0min5sec
829 | 8280,-0.8876,0.4177,4.3420,0min5sec
830 | 8290,-0.8866,0.4173,4.3416,0min5sec
831 | 8300,-0.8856,0.4169,4.3413,0min5sec
832 | 8310,-0.8846,0.4166,4.3410,0min5sec
833 | 8320,-0.8836,0.4162,4.3406,0min5sec
834 | 8330,-0.8826,0.4158,4.3403,0min5sec
835 | 8340,-0.8816,0.4155,4.3400,0min5sec
836 | 8350,-0.8805,0.4151,4.3397,0min5sec
837 | 8360,-0.8795,0.4147,4.3394,0min5sec
838 | 8370,-0.8786,0.4144,4.3391,0min5sec
839 | 8380,-0.8776,0.4140,4.3388,0min5sec
840 | 8390,-0.8766,0.4136,4.3384,0min5sec
841 | 8400,-0.8756,0.4132,4.3381,0min5sec
842 | 8410,-0.8746,0.4129,4.3378,0min5sec
843 | 8420,-0.8736,0.4125,4.3375,0min5sec
844 | 8430,-0.8726,0.4121,4.3372,0min5sec
845 | 8440,-0.8717,0.4118,4.3369,0min5sec
846 | 8450,-0.8707,0.4114,4.3366,0min5sec
847 | 8460,-0.8697,0.4110,4.3363,0min5sec
848 | 8470,-0.8687,0.4107,4.3360,0min5sec
849 | 8480,-0.8677,0.4103,4.3356,0min5sec
850 | 8490,-0.8668,0.4099,4.3353,0min5sec
851 | 8500,-0.8658,0.4096,4.3350,0min5sec
852 | 8510,-0.8649,0.4092,4.3347,0min5sec
853 | 8520,-0.8639,0.4088,4.3344,0min5sec
854 | 8530,-0.8629,0.4085,4.3341,0min5sec
855 | 8540,-0.8620,0.4081,4.3338,0min5sec
856 | 8550,-0.8610,0.4078,4.3335,0min5sec
857 | 8560,-0.8600,0.4074,4.3331,0min5sec
858 | 8570,-0.8591,0.4071,4.3329,0min5sec
859 | 8580,-0.8581,0.4067,4.3326,0min5sec
860 | 8590,-0.8571,0.4064,4.3323,0min5sec
861 | 8600,-0.8562,0.4060,4.3320,0min5sec
862 | 8610,-0.8553,0.4057,4.3316,0min5sec
863 | 8620,-0.8543,0.4053,4.3313,0min5sec
864 | 8630,-0.8534,0.4050,4.3310,0min5sec
865 | 8640,-0.8524,0.4046,4.3307,0min5sec
866 | 8650,-0.8515,0.4043,4.3303,0min5sec
867 | 8660,-0.8506,0.4039,4.3300,0min5sec
868 | 8670,-0.8496,0.4036,4.3297,0min5sec
869 | 8680,-0.8487,0.4032,4.3294,0min5sec
870 | 8690,-0.8478,0.4028,4.3291,0min5sec
871 | 8700,-0.8468,0.4025,4.3288,0min5sec
872 | 8710,-0.8459,0.4021,4.3285,0min5sec
873 | 8720,-0.8449,0.4018,4.3282,0min5sec
874 | 8730,-0.8440,0.4015,4.3279,0min5sec
875 | 8740,-0.8430,0.4011,4.3276,0min5sec
876 | 8750,-0.8421,0.4008,4.3273,0min5sec
877 | 8760,-0.8412,0.4004,4.3271,0min5sec
878 | 8770,-0.8402,0.4001,4.3268,0min5sec
879 | 8780,-0.8393,0.3997,4.3265,0min5sec
880 | 8790,-0.8384,0.3994,4.3262,0min5sec
881 | 8800,-0.8375,0.3991,4.3259,0min5sec
882 | 8810,-0.8367,0.3987,4.3257,0min5sec
883 | 8820,-0.8357,0.3984,4.3254,0min5sec
884 | 8830,-34.2708,0.3981,4.3251,0min5sec
885 | 8840,-34.2321,0.3978,4.3248,0min5sec
886 | 8850,-34.1934,0.3974,4.3245,0min5sec
887 | 8860,-34.1549,0.3971,4.3242,0min5sec
888 | 8870,-34.1164,0.3968,4.3240,0min5sec
889 | 8880,-34.0780,0.3964,4.3237,0min5sec
890 | 8890,-34.0398,0.3961,4.3234,0min5sec
891 | 8900,-34.0015,0.3958,4.3231,0min5sec
892 | 8910,-33.9634,0.3955,4.3228,0min5sec
893 | 8920,-33.9254,0.3951,4.3225,0min5sec
894 | 8930,-33.8874,0.3948,4.3223,0min5sec
895 | 8940,-33.8496,0.3945,4.3220,0min5sec
896 | 8950,-33.8118,0.3942,4.3217,0min5sec
897 | 8960,-33.7741,0.3938,4.3214,0min5sec
898 | 8970,-33.7365,0.3935,4.3211,0min5sec
899 | 8980,-33.6990,0.3932,4.3209,0min5sec
900 | 8990,-33.6615,0.3929,4.3206,0min5sec
901 | 9000,-33.6241,0.3926,4.3203,0min5sec
902 | 9010,-33.5869,0.3922,4.3200,0min5sec
903 | 9020,-33.5497,0.3919,4.3197,0min5sec
904 | 9030,-33.5126,0.3916,4.3194,0min5sec
905 | 9040,-33.4756,0.3913,4.3192,0min5sec
906 | 9050,-33.4386,0.3909,4.3189,0min5sec
907 | 9060,-33.4017,0.3906,4.3186,0min5sec
908 | 9070,-33.3650,0.3903,4.3183,0min5sec
909 | 9080,-33.3283,0.3900,4.3181,0min5sec
910 | 9090,-33.2917,0.3896,4.3178,0min5sec
911 | 9100,-33.2551,0.3893,4.3175,0min5sec
912 | 9110,-33.2187,0.3890,4.3172,0min5sec
913 | 9120,-33.1823,0.3887,4.3170,0min5sec
914 | 9130,-33.1460,0.3883,4.3167,0min5sec
915 | 9140,-33.1098,0.3880,4.3164,0min5sec
916 | 9150,-33.0737,0.3877,4.3161,0min5sec
917 | 9160,-33.0376,0.3874,4.3158,0min5sec
918 | 9170,-33.0016,0.3870,4.3155,0min5sec
919 | 9180,-32.9657,0.3867,4.3153,0min5sec
920 | 9190,-32.9299,0.3864,4.3150,0min5sec
921 | 9200,-32.8942,0.3861,4.3147,0min5sec
922 | 9210,-32.8585,0.3858,4.3145,0min5sec
923 | 9220,-32.8229,0.3855,4.3142,0min5sec
924 | 9230,-32.7873,0.3851,4.3139,0min5sec
925 | 9240,-32.7519,0.3848,4.3136,0min5sec
926 | 9250,-32.7165,0.3845,4.3134,0min5sec
927 | 9260,-32.6812,0.3842,4.3131,0min5sec
928 | 9270,-32.6460,0.3839,4.3128,0min5sec
929 | 9280,-32.6109,0.3835,4.3125,0min5sec
930 | 9290,-32.5758,0.3832,4.3122,0min5sec
931 | 9300,-32.5408,0.3829,4.3120,0min5sec
932 | 9310,-32.5059,0.3826,4.3117,0min5sec
933 | 9320,-32.4710,0.3823,4.3114,0min5sec
934 | 9330,-32.4363,0.3820,4.3112,0min5sec
935 | 9340,-32.4016,0.3817,4.3109,0min5sec
936 | 9350,-32.3670,0.3814,4.3106,0min5sec
937 | 9360,-32.3324,0.3811,4.3104,0min5sec
938 | 9370,-32.2980,0.3807,4.3101,0min5sec
939 | 9380,-32.2636,0.3804,4.3099,0min5sec
940 | 9390,-32.2293,0.3801,4.3096,0min5sec
941 | 9400,-32.1950,0.3798,4.3093,0min5sec
942 | 9410,-32.1609,0.3795,4.3090,0min5sec
943 | 9420,-32.1268,0.3792,4.3088,0min5sec
944 | 9430,-32.0927,0.3789,4.3085,0min5sec
945 | 9440,-32.0588,0.3786,4.3082,0min5sec
946 | 9450,-32.0249,0.3783,4.3080,0min5sec
947 | 9460,-31.9910,0.3780,4.3077,0min5sec
948 | 9470,-31.9573,0.3777,4.3074,0min5sec
949 | 9480,-31.9236,0.3774,4.3072,0min5sec
950 | 9490,-31.8901,0.3771,4.3069,0min5sec
951 | 9500,-31.8565,0.3768,4.3067,0min5sec
952 | 9510,-31.8231,0.3765,4.3064,0min5sec
953 | 9520,-31.7897,0.3762,4.3061,0min5sec
954 | 9530,-31.7564,0.3759,4.3059,0min5sec
955 | 9540,-31.7231,0.3756,4.3056,0min5sec
956 | 9550,-31.6900,0.3753,4.3054,0min5sec
957 | 9560,-31.6568,0.3750,4.3051,0min5sec
958 | 9570,-31.6238,0.3747,4.3049,0min5sec
959 | 9580,-31.5908,0.3744,4.3046,0min5sec
960 | 9590,-31.5580,0.3741,4.3044,0min5sec
961 | 9600,-31.5252,0.3738,4.3042,0min5sec
962 | 9610,-31.4924,0.3735,4.3040,0min5sec
963 | 9620,-31.4597,0.3732,4.3037,0min5sec
964 | 9630,-31.4271,0.3729,4.3035,0min5sec
965 | 9640,-31.3945,0.3726,4.3032,0min5sec
966 | 9650,-31.3620,0.3723,4.3030,0min5sec
967 | 9660,-31.3296,0.3721,4.3027,0min5sec
968 | 9670,-31.2972,0.3718,4.3025,0min5sec
969 | 9680,-31.2649,0.3715,4.3022,0min5sec
970 | 9690,-31.2327,0.3712,4.3020,0min5sec
971 | 9700,-31.2005,0.3709,4.3017,0min5sec
972 | 9710,-31.1684,0.3706,4.3015,0min5sec
973 | 9720,-31.1364,0.3703,4.3012,0min5sec
974 | 9730,-31.1044,0.3700,4.3010,0min5sec
975 | 9740,-31.0726,0.3697,4.3008,0min5sec
976 | 9750,-31.0407,0.3694,4.3005,0min5sec
977 | 9760,-31.0090,0.3691,4.3003,0min5sec
978 | 9770,-30.9773,0.3688,4.3000,0min5sec
979 | 9780,-30.9456,0.3686,4.2998,0min5sec
980 | 9790,-30.9141,0.3683,4.2996,0min5sec
981 | 9800,-30.8826,0.3680,4.2993,0min5sec
982 | 9810,-30.8511,0.3677,4.2990,0min5sec
983 | 9820,-30.8197,0.3674,4.2988,0min5sec
984 | 9830,-30.7884,0.3671,4.2985,0min5sec
985 | 9840,-30.7571,0.3668,4.2983,0min5sec
986 | 9850,-30.7260,0.3665,4.2980,0min5sec
987 | 9860,-30.6948,0.3662,4.2978,0min5sec
988 | 9870,-30.6638,0.3660,4.2976,0min5sec
989 | 9880,-30.6328,0.3657,4.2973,0min5sec
990 | 9890,-30.6019,0.3654,4.2971,0min5sec
991 | 9900,-30.5710,0.3651,4.2968,0min5sec
992 | 9910,-30.5402,0.3648,4.2966,0min5sec
993 | 9920,-30.5094,0.3645,4.2964,0min5sec
994 | 9930,-30.4788,0.3642,4.2961,0min5sec
995 | 9940,-30.4481,0.3640,4.2959,0min5sec
996 | 9950,-30.4176,0.3637,4.2956,0min5sec
997 | 9960,-30.3871,0.3634,4.2954,0min5sec
998 | 9970,-30.3566,0.3631,4.2952,0min5sec
999 | 9980,-30.3263,0.3628,4.2949,0min5sec
1000 | 9990,-30.2959,0.3625,4.2947,0min5sec
1001 | 10000,-30.2657,0.3623,4.2944,0min5sec
1002 | 10010,-30.2355,0.3620,4.2942,0min5sec
1003 | 10020,-30.2054,0.3617,4.2940,0min5sec
1004 | 10030,-30.1753,0.3614,4.2937,0min5sec
1005 | 10040,-30.1453,0.3611,4.2935,0min5sec
1006 | 10050,-30.1154,0.3609,4.2933,0min5sec
1007 | 10060,-30.0855,0.3606,4.2930,0min5sec
1008 | 10070,-30.0556,0.3603,4.2928,0min5sec
1009 | 10080,-30.0259,0.3600,4.2925,0min5sec
1010 | 10090,-29.9961,0.3597,4.2923,0min5sec
1011 | 10100,-29.9665,0.3595,4.2921,0min5sec
1012 | 10110,-29.9369,0.3592,4.2918,0min5sec
1013 | 10120,-29.9073,0.3589,4.2916,0min5sec
1014 | 10130,-29.8778,0.3587,4.2914,0min5sec
1015 | 10140,-29.8484,0.3584,4.2911,0min5sec
1016 | 10150,-29.8190,0.3581,4.2909,0min5sec
1017 | 10160,-29.7897,0.3578,4.2906,0min5sec
1018 | 10170,-29.7604,0.3576,4.2904,0min5sec
1019 | 10180,-29.7312,0.3573,4.2901,0min5sec
1020 | 10190,-29.7021,0.3570,4.2899,0min5sec
1021 | 10200,-29.6730,0.3568,4.2897,0min5sec
1022 | 10210,-29.6440,0.3565,4.2895,0min5sec
1023 | 10220,-29.6150,0.3562,4.2892,0min5sec
1024 | 10230,-29.5861,0.3560,4.2890,0min5sec
1025 | 10240,-29.5572,0.3557,4.2888,0min5sec
1026 | 10250,-29.5285,0.3554,4.2886,0min5sec
1027 | 10260,-29.4997,0.3551,4.2884,0min5sec
1028 | 10270,-29.4710,0.3549,4.2881,0min5sec
1029 | 10280,-29.4424,0.3546,4.2879,0min5sec
1030 | 10290,-29.4138,0.3543,4.2877,0min5sec
1031 | 10300,-29.3853,0.3541,4.2874,0min5sec
1032 | 10310,-29.3569,0.3538,4.2872,0min5sec
1033 | 10320,-29.3284,0.3535,4.2870,0min5sec
1034 | 10330,-29.3000,0.3533,4.2867,0min5sec
1035 | 10340,-29.2718,0.3530,4.2865,0min5sec
1036 | 10350,-29.2436,0.3528,4.2863,0min5sec
1037 | 10360,-29.2154,0.3525,4.2861,0min5sec
1038 | 10370,-29.1872,0.3522,4.2858,0min5sec
1039 | 10380,-29.1592,0.3520,4.2856,0min5sec
1040 | 10390,-29.1311,0.3517,4.2854,0min5sec
1041 | 10400,-29.1032,0.3515,4.2851,0min5sec
1042 | 10410,-29.0752,0.3512,4.2849,0min5sec
1043 | 10420,-29.0474,0.3509,4.2847,0min5sec
1044 | 10430,-29.0196,0.3506,4.2844,0min5sec
1045 | 10440,-28.9918,0.3504,4.2842,0min5sec
1046 | 10450,-28.9641,0.3501,4.2840,0min5sec
1047 | 10460,-28.9365,0.3499,4.2838,0min5sec
1048 | 10470,-28.9088,0.3496,4.2835,0min5sec
1049 | 10480,-28.8812,0.3494,4.2833,0min5sec
1050 | 10490,-28.8537,0.3491,4.2831,0min5sec
1051 | 10500,-28.8263,0.3489,4.2829,0min5sec
1052 | 10510,-28.7989,0.3486,4.2826,0min5sec
1053 | 10520,-28.7716,0.3484,4.2824,0min5sec
1054 | 10530,-28.7443,0.3481,4.2822,0min5sec
1055 | 10540,-28.7171,0.3478,4.2820,0min5sec
1056 | 10550,-28.6899,0.3476,4.2818,0min5sec
1057 | 10560,-28.6628,0.3473,4.2815,0min5sec
1058 | 10570,-28.6357,0.3471,4.2813,0min5sec
1059 | 10580,-28.6087,0.3468,4.2811,0min5sec
1060 | 10590,-28.5817,0.3465,4.2809,0min5sec
1061 | 10600,-28.5548,0.3463,4.2807,0min5sec
1062 | 10610,-28.5279,0.3460,4.2804,0min5sec
1063 | 10620,-28.5010,0.3458,4.2802,0min5sec
1064 | 10630,-28.4742,0.3455,4.2800,0min5sec
1065 | 10640,-28.4475,0.3453,4.2798,0min5sec
1066 | 10650,-28.4208,0.3450,4.2795,0min5sec
1067 | 10660,-28.3942,0.3448,4.2793,0min5sec
1068 | 10670,-28.3676,0.3445,4.2791,0min5sec
1069 | 10680,-28.3411,0.3443,4.2789,0min5sec
1070 | 10690,-28.3146,0.3440,4.2787,0min5sec
1071 | 10700,-28.2882,0.3438,4.2785,0min5sec
1072 | 10710,-28.2618,0.3435,4.2783,0min5sec
1073 | 10720,-28.2355,0.3433,4.2781,0min5sec
1074 | 10730,-28.2092,0.3430,4.2779,0min5sec
1075 | 10740,-28.1830,0.3428,4.2777,0min5sec
1076 | 10750,-28.1568,0.3425,4.2775,0min5sec
1077 | 10760,-28.1307,0.3423,4.2772,0min5sec
1078 | 10770,-28.1046,0.3420,4.2770,0min5sec
1079 | 10780,-28.0786,0.3418,4.2768,0min5sec
1080 | 10790,-28.0526,0.3416,4.2766,0min5sec
1081 | 10800,-28.0267,0.3413,4.2764,0min5sec
1082 | 10810,-28.0008,0.3411,4.2762,0min5sec
1083 | 10820,-27.9749,0.3408,4.2760,0min5sec
1084 | 10830,-27.9491,0.3406,4.2757,0min5sec
1085 | 10840,-27.9234,0.3403,4.2755,0min5sec
1086 | 10850,-27.8977,0.3401,4.2753,0min5sec
1087 | 10860,-27.8720,0.3398,4.2751,0min5sec
1088 | 10870,-27.8464,0.3396,4.2749,0min5sec
1089 | 10880,-27.8209,0.3393,4.2747,0min5sec
1090 | 10890,-27.7954,0.3391,4.2744,0min5sec
1091 | 10900,-27.7699,0.3389,4.2742,0min5sec
1092 | 10910,-27.7445,0.3386,4.2740,0min5sec
1093 | 10920,-27.7192,0.3384,4.2738,0min5sec
1094 | 10930,-27.6938,0.3382,4.2736,0min5sec
1095 | 10940,-27.6685,0.3379,4.2734,0min5sec
1096 | 10950,-27.6433,0.3377,4.2732,0min5sec
1097 | 10960,-27.6181,0.3374,4.2730,0min5sec
1098 | 10970,-27.5929,0.3372,4.2728,0min5sec
1099 | 10980,-27.5678,0.3369,4.2725,0min5sec
1100 | 10990,-27.5428,0.3367,4.2723,0min5sec
1101 | 11000,-27.5178,0.3365,4.2721,0min5sec
1102 | 11010,-27.4928,0.3362,4.2719,0min5sec
1103 | 11020,-27.4679,0.3360,4.2717,0min5sec
1104 | 11030,-27.4430,0.3357,4.2715,0min5sec
1105 | 11040,-27.4182,0.3355,4.2713,0min5sec
1106 | 11050,-27.3934,0.3353,4.2711,0min5sec
1107 | 11060,-27.3687,0.3350,4.2709,0min5sec
1108 | 11070,-27.3440,0.3348,4.2707,0min5sec
1109 | 11080,-27.3194,0.3346,4.2705,0min5sec
1110 | 11090,-27.2948,0.3343,4.2703,0min5sec
1111 | 11100,-27.2702,0.3341,4.2701,0min5sec
1112 | 11110,-27.2457,0.3339,4.2699,0min5sec
1113 | 11120,-27.2212,0.3336,4.2698,0min5sec
1114 | 11130,-27.1968,0.3334,4.2696,0min5sec
1115 | 11140,-27.1724,0.3332,4.2694,0min5sec
1116 | 11150,-27.1481,0.3329,4.2692,0min5sec
1117 | 11160,-27.1238,0.3327,4.2690,0min5sec
1118 | 11170,-27.0996,0.3325,4.2688,0min5sec
1119 | 11180,-27.0754,0.3323,4.2686,0min5sec
1120 | 11190,-27.0512,0.3320,4.2684,0min5sec
1121 | 11200,-27.0271,0.3318,4.2682,0min5sec
1122 | 11210,-27.0030,0.3316,4.2680,0min5sec
1123 | 11220,-26.9790,0.3313,4.2678,0min5sec
1124 | 11230,-26.9550,0.3311,4.2676,0min5sec
1125 | 11240,-26.9310,0.3309,4.2674,0min5sec
1126 | 11250,-26.9071,0.3306,4.2672,0min5sec
1127 | 11260,-26.8833,0.3304,4.2670,0min5sec
1128 | 11270,-26.8594,0.3302,4.2667,0min5sec
1129 | 11280,-26.8357,0.3299,4.2666,0min5sec
1130 | 11290,-26.8119,0.3297,4.2663,0min5sec
1131 | 11300,-26.7882,0.3295,4.2662,0min5sec
1132 | 11310,-26.7646,0.3293,4.2660,0min5sec
1133 | 11320,-26.7410,0.3290,4.2658,0min5sec
1134 | 11330,-26.7174,0.3288,4.2656,0min5sec
1135 | 11340,-26.6938,0.3286,4.2654,0min5sec
1136 | 11350,-26.6704,0.3284,4.2652,0min5sec
1137 | 11360,-26.6469,0.3281,4.2650,0min5sec
1138 | 11370,-26.6235,0.3279,4.2648,0min5sec
1139 | 11380,-26.6001,0.3277,4.2646,0min5sec
1140 | 11390,-26.5768,0.3274,4.2644,0min5sec
1141 | 11400,-26.5535,0.3272,4.2642,0min5sec
1142 | 11410,-26.5303,0.3270,4.2640,0min5sec
1143 | 11420,-26.5071,0.3267,4.2638,0min5sec
1144 | 11430,-26.4839,0.3265,4.2636,0min5sec
1145 | 11440,-26.4608,0.3263,4.2634,0min5sec
1146 | 11450,-26.4377,0.3261,4.2632,0min5sec
1147 | 11460,-26.4147,0.3259,4.2630,0min5sec
1148 | 11470,-26.3917,0.3256,4.2628,0min5sec
1149 | 11480,-26.3687,0.3254,4.2626,0min5sec
1150 | 11490,-26.3458,0.3252,4.2624,0min5sec
1151 | 11500,-26.3229,0.3250,4.2623,0min5sec
1152 | 11510,-26.3001,0.3247,4.2621,0min5sec
1153 | 11520,-26.2773,0.3245,4.2619,0min5sec
1154 | 11530,-26.2546,0.3243,4.2617,0min5sec
1155 | 11540,-26.2319,0.3241,4.2615,0min5sec
1156 | 11550,-26.2092,0.3239,4.2613,0min5sec
1157 | 11560,-26.1865,0.3236,4.2611,0min5sec
1158 | 11570,-26.1640,0.3234,4.2610,0min5sec
1159 | 11580,-26.1414,0.3232,4.2608,0min5sec
1160 | 11590,-26.1189,0.3230,4.2606,0min5sec
1161 | 11600,-26.0964,0.3228,4.2604,0min5sec
1162 | 11610,-26.0739,0.3225,4.2602,0min5sec
1163 | 11620,-26.0516,0.3223,4.2600,0min5sec
1164 | 11630,-26.0292,0.3221,4.2598,0min5sec
1165 | 11640,-26.0068,0.3219,4.2597,0min5sec
1166 | 11650,-25.9845,0.3217,4.2595,0min5sec
1167 | 11660,-25.9623,0.3215,4.2593,0min5sec
1168 | 11670,-25.9401,0.3213,4.2591,0min5sec
1169 | 11680,-25.9179,0.3211,4.2589,0min5sec
1170 | 11690,-25.8958,0.3209,4.2587,0min5sec
1171 | 11700,-25.8737,0.3206,4.2586,0min5sec
1172 | 11710,-25.8516,0.3204,4.2584,0min5sec
1173 | 11720,-25.8296,0.3202,4.2582,0min5sec
1174 | 11730,-25.8076,0.3200,4.2580,0min5sec
1175 | 11740,-25.7856,0.3198,4.2578,0min5sec
1176 | 11750,-25.7637,0.3196,4.2576,0min5sec
1177 | 11760,-25.7418,0.3194,4.2574,0min5sec
1178 | 11770,-25.7200,0.3191,4.2572,0min5sec
1179 | 11780,-25.6982,0.3189,4.2571,0min5sec
1180 | 11790,-25.6764,0.3187,4.2569,0min5sec
1181 | 11800,-25.6547,0.3185,4.2567,0min5sec
1182 | 11810,-25.6330,0.3183,4.2565,0min5sec
1183 | 11820,-25.6114,0.3181,4.2563,0min5sec
1184 | 11830,-25.5897,0.3179,4.2562,0min5sec
1185 | 11840,-25.5682,0.3177,4.2560,0min5sec
1186 | 11850,-25.5466,0.3175,4.2558,0min5sec
1187 | 11860,-25.5251,0.3173,4.2556,0min5sec
1188 | 11870,-25.5036,0.3171,4.2555,0min5sec
1189 | 11880,-25.4822,0.3169,4.2553,0min5sec
1190 | 11890,-25.4608,0.3167,4.2551,0min5sec
1191 | 11900,-25.4394,0.3165,4.2549,0min5sec
1192 | 11910,-25.4181,0.3163,4.2548,0min5sec
1193 | 11920,-25.3968,0.3160,4.2546,0min5sec
1194 | 11930,-25.3755,0.3158,4.2544,0min5sec
1195 | 11940,-25.3544,0.3156,4.2542,0min5sec
1196 | 11950,-25.3332,0.3154,4.2540,0min5sec
1197 | 11960,-25.3120,0.3152,4.2539,0min5sec
1198 | 11970,-25.2909,0.3150,4.2537,0min5sec
1199 | 11980,-25.2698,0.3148,4.2535,0min5sec
1200 | 11990,-25.2488,0.3146,4.2533,0min5sec
1201 | 12000,-25.2278,0.3144,4.2532,0min5sec
1202 | 12010,-25.2068,0.3142,4.2530,0min5sec
1203 | 12020,-25.1858,0.3140,4.2528,0min5sec
1204 | 12030,-25.1650,0.3138,4.2526,0min5sec
1205 | 12040,-25.1441,0.3136,4.2525,0min5sec
1206 | 12050,-25.1232,0.3134,4.2523,0min5sec
1207 | 12060,-25.1024,0.3132,4.2521,0min5sec
1208 | 12070,-25.0817,0.3130,4.2519,0min5sec
1209 | 12080,-25.0609,0.3128,4.2517,0min5sec
1210 | 12090,-25.0402,0.3126,4.2516,0min5sec
1211 | 12100,-25.0196,0.3123,4.2514,0min5sec
1212 | 12110,-24.9989,0.3121,4.2512,0min5sec
1213 | 12120,-24.9783,0.3119,4.2510,0min5sec
1214 | 12130,-24.9578,0.3117,4.2508,0min5sec
1215 | 12140,-24.9372,0.3115,4.2507,0min5sec
1216 | 12150,-24.9167,0.3113,4.2505,0min5sec
1217 | 12160,-24.8963,0.3111,4.2503,0min5sec
1218 | 12170,-24.8759,0.3109,4.2502,0min5sec
1219 | 12180,-24.8554,0.3107,4.2500,0min5sec
1220 | 12190,-24.8351,0.3105,4.2498,0min5sec
1221 | 12200,-24.8148,0.3103,4.2496,0min5sec
1222 | 12210,-24.7945,0.3101,4.2495,0min5sec
1223 | 12220,-24.7742,0.3099,4.2493,0min5sec
1224 | 12230,-24.7540,0.3097,4.2491,0min5sec
1225 | 12240,-24.7338,0.3095,4.2489,0min5sec
1226 | 12250,-24.7137,0.3093,4.2488,0min5sec
1227 | 12260,-24.6935,0.3091,4.2486,0min5sec
1228 | 12270,-24.6734,0.3089,4.2484,0min5sec
1229 | 12280,-24.6534,0.3087,4.2483,0min5sec
1230 | 12290,-24.6333,0.3085,4.2481,0min5sec
1231 | 12300,-24.6133,0.3083,4.2479,0min5sec
1232 | 12310,-24.5934,0.3081,4.2477,0min5sec
1233 | 12320,-24.5734,0.3079,4.2476,0min5sec
1234 | 12330,-24.5535,0.3077,4.2474,0min5sec
1235 | 12340,-24.5337,0.3075,4.2472,0min5sec
1236 | 12350,-24.5138,0.3073,4.2471,0min5sec
1237 | 12360,-24.4940,0.3071,4.2469,0min5sec
1238 | 12370,-24.4742,0.3070,4.2467,0min5sec
1239 | 12380,-24.4545,0.3068,4.2465,0min5sec
1240 | 12390,-24.4348,0.3066,4.2464,0min5sec
1241 | 12400,-24.4152,0.3064,4.2462,0min5sec
1242 | 12410,-24.3955,0.3062,4.2460,0min5sec
1243 | 12420,-24.3759,0.3060,4.2459,0min5sec
1244 | 12430,-24.3563,0.3058,4.2457,0min5sec
1245 | 12440,-24.3368,0.3056,4.2455,0min5sec
1246 | 12450,-24.3172,0.3054,4.2454,0min5sec
1247 | 12460,-24.2978,0.3052,4.2452,0min5sec
1248 | 12470,-24.2783,0.3050,4.2450,0min5sec
1249 | 12480,-24.2589,0.3048,4.2449,0min5sec
1250 | 12490,-24.2395,0.3046,4.2447,0min5sec
1251 | 12500,-24.2201,0.3044,4.2446,0min5sec
1252 | 12510,-24.2008,0.3043,4.2444,0min5sec
1253 | 12520,-24.1815,0.3041,4.2442,0min5sec
1254 | 12530,-24.1622,0.3039,4.2441,0min5sec
1255 | 12540,-24.1430,0.3037,4.2439,0min5sec
1256 | 12550,-24.1238,0.3035,4.2437,0min5sec
1257 | 12560,-24.1046,0.3033,4.2436,0min5sec
1258 | 12570,-24.0855,0.3031,4.2434,0min5sec
1259 | 12580,-24.0663,0.3029,4.2432,0min5sec
1260 | 12590,-24.0472,0.3027,4.2431,0min5sec
1261 | 12600,-24.0282,0.3025,4.2429,0min5sec
1262 | 12610,-24.0092,0.3023,4.2428,0min5sec
1263 | 12620,-23.9902,0.3021,4.2426,0min5sec
1264 | 12630,-23.9712,0.3020,4.2425,0min5sec
1265 | 12640,-23.9523,0.3018,4.2423,0min5sec
1266 | 12650,-23.9334,0.3016,4.2422,0min5sec
1267 | 12660,-23.9145,0.3014,4.2420,0min5sec
1268 | 12670,-23.8956,0.3012,4.2418,0min5sec
1269 | 12680,-23.8768,0.3010,4.2417,0min5sec
1270 | 12690,-23.8580,0.3008,4.2415,0min5sec
1271 | 12700,-23.8393,0.3006,4.2413,0min5sec
1272 | 12710,-23.8206,0.3004,4.2412,0min5sec
1273 | 12720,-23.8019,0.3003,4.2410,0min5sec
1274 | 12730,-23.7832,0.3001,4.2409,0min5sec
1275 | 12740,-23.7646,0.2999,4.2407,0min5sec
1276 | 12750,-23.7460,0.2997,4.2405,0min5sec
1277 | 12760,-23.7274,0.2995,4.2404,0min5sec
1278 | 12770,-23.7089,0.2993,4.2402,0min5sec
1279 | 12780,-23.6903,0.2992,4.2400,0min5sec
1280 | 12790,-23.6719,0.2990,4.2399,0min5sec
1281 | 12800,-23.6534,0.2988,4.2397,0min5sec
1282 | 12810,-23.6350,0.2986,4.2396,0min5sec
1283 | 12820,-23.6165,0.2984,4.2394,0min5sec
1284 | 12830,-23.5982,0.2982,4.2392,0min5sec
1285 | 12840,-23.5798,0.2980,4.2391,0min5sec
1286 | 12850,-23.5615,0.2979,4.2389,0min5sec
1287 | 12860,-23.5432,0.2977,4.2388,0min5sec
1288 | 12870,-23.5249,0.2975,4.2386,0min5sec
1289 | 12880,-23.5067,0.2973,4.2385,0min5sec
1290 | 12890,-23.4885,0.2971,4.2383,0min5sec
1291 | 12900,-23.4703,0.2969,4.2382,0min5sec
1292 | 12910,-23.4522,0.2968,4.2380,0min5sec
1293 | 12920,-23.4340,0.2966,4.2379,0min5sec
1294 | 12930,-23.4160,0.2964,4.2378,0min5sec
1295 | 12940,-23.3979,0.2962,4.2376,0min5sec
1296 | 12950,-23.3799,0.2961,4.2374,0min5sec
1297 | 12960,-23.3618,0.2959,4.2373,0min5sec
1298 | 12970,-23.3438,0.2957,4.2371,0min5sec
1299 | 12980,-23.3259,0.2955,4.2370,0min5sec
1300 | 12990,-23.3080,0.2953,4.2368,0min5sec
1301 | 13000,-23.2901,0.2952,4.2367,0min5sec
1302 | 13010,-23.2722,0.2950,4.2365,0min5sec
1303 | 13020,-23.2543,0.2948,4.2364,0min5sec
1304 | 13030,-23.2365,0.2946,4.2362,0min5sec
1305 | 13040,-23.2188,0.2944,4.2361,0min5sec
1306 | 13050,-23.2010,0.2943,4.2359,0min5sec
1307 | 13060,-23.1832,0.2941,4.2358,0min5sec
1308 | 13070,-23.1655,0.2939,4.2356,0min5sec
1309 | 13080,-23.1478,0.2937,4.2355,0min5sec
1310 | 13090,-23.1302,0.2935,4.2353,0min5sec
1311 | 13100,-23.1126,0.2934,4.2352,0min5sec
1312 | 13110,-23.0950,0.2932,4.2350,0min5sec
1313 | 13120,-23.0774,0.2930,4.2349,0min5sec
1314 | 13130,-23.0598,0.2928,4.2347,0min5sec
1315 | 13140,-23.0423,0.2927,4.2346,0min5sec
1316 | 13150,-23.0248,0.2925,4.2344,0min5sec
1317 | 13160,-23.0074,0.2923,4.2343,0min5sec
1318 | 13170,-22.9899,0.2921,4.2341,0min5sec
1319 | 13180,-22.9725,0.2920,4.2340,0min5sec
1320 | 13190,-22.9551,0.2918,4.2338,0min5sec
1321 | 13200,-22.9378,0.2916,4.2337,0min5sec
1322 | 13210,-22.9205,0.2914,4.2335,0min5sec
1323 | 13220,-22.9031,0.2912,4.2334,0min5sec
1324 | 13230,-22.8859,0.2911,4.2332,0min5sec
1325 | 13240,-22.8686,0.2909,4.2331,0min5sec
1326 | 13250,-22.8514,0.2907,4.2329,0min5sec
1327 | 13260,-22.8342,0.2905,4.2328,0min5sec
1328 | 13270,-22.8170,0.2904,4.2326,0min5sec
1329 | 13280,-22.7998,0.2902,4.2325,0min5sec
1330 | 13290,-22.7827,0.2900,4.2323,0min5sec
1331 | 13300,-22.7656,0.2899,4.2322,0min5sec
1332 | 13310,-22.7486,0.2897,4.2320,0min5sec
1333 | 13320,-22.7315,0.2895,4.2319,0min5sec
1334 | 13330,-22.7145,0.2893,4.2317,0min5sec
1335 | 13340,-22.6975,0.2892,4.2316,0min5sec
1336 | 13350,-22.6805,0.2890,4.2315,0min5sec
1337 | 13360,-22.6636,0.2888,4.2313,0min5sec
1338 | 13370,-22.6466,0.2887,4.2311,0min5sec
1339 | 13380,-22.6298,0.2885,4.2310,0min5sec
1340 | 13390,-22.6129,0.2883,4.2308,0min5sec
1341 | 13400,-22.5960,0.2882,4.2307,0min5sec
1342 | 13410,-22.5792,0.2880,4.2305,0min5sec
1343 | 13420,-22.5624,0.2878,4.2304,0min5sec
1344 | 13430,-22.5456,0.2876,4.2302,0min5sec
1345 | 13440,-22.5289,0.2875,4.2301,0min5sec
1346 | 13450,-22.5122,0.2873,4.2300,0min5sec
1347 | 13460,-22.4954,0.2871,4.2298,0min5sec
1348 | 13470,-22.4788,0.2870,4.2297,0min5sec
1349 | 13480,-22.4621,0.2868,4.2295,0min5sec
1350 | 13490,-22.4455,0.2866,4.2294,0min5sec
1351 | 13500,-22.4289,0.2865,4.2292,0min5sec
1352 | 13510,-22.4123,0.2863,4.2291,0min5sec
1353 | 13520,-22.3958,0.2861,4.2290,0min5sec
1354 | 13530,-22.3793,0.2860,4.2288,0min5sec
1355 | 13540,-22.3627,0.2858,4.2287,0min5sec
1356 | 13550,-22.3463,0.2856,4.2285,0min5sec
1357 | 13560,-22.3298,0.2854,4.2284,0min5sec
1358 | 13570,-22.3134,0.2853,4.2282,0min5sec
1359 | 13580,-22.2970,0.2851,4.2281,0min5sec
1360 | 13590,-22.2806,0.2849,4.2279,0min5sec
1361 | 13600,-22.2643,0.2848,4.2278,0min5sec
1362 | 13610,-22.2479,0.2846,4.2276,0min5sec
1363 | 13620,-22.2316,0.2844,4.2275,0min5sec
1364 | 13630,-22.2154,0.2843,4.2273,0min5sec
1365 | 13640,-22.1991,0.2841,4.2272,0min5sec
1366 | 13650,-22.1829,0.2840,4.2271,0min5sec
1367 | 13660,-22.1667,0.2838,4.2269,0min5sec
1368 | 13670,-22.1505,0.2836,4.2268,0min5sec
1369 | 13680,-22.1343,0.2835,4.2266,0min5sec
1370 | 13690,-22.1182,0.2833,4.2265,0min5sec
1371 | 13700,-22.1021,0.2831,4.2263,0min5sec
1372 | 13710,-22.0860,0.2830,4.2262,0min5sec
1373 | 13720,-22.0699,0.2828,4.2260,0min5sec
1374 | 13730,-22.0539,0.2826,4.2259,0min5sec
1375 | 13740,-22.0378,0.2825,4.2258,0min5sec
1376 | 13750,-22.0218,0.2823,4.2256,0min5sec
1377 | 13760,-22.0059,0.2822,4.2255,0min5sec
1378 | 13770,-21.9899,0.2820,4.2253,0min5sec
1379 | 13780,-21.9740,0.2818,4.2252,0min5sec
1380 | 13790,-21.9581,0.2817,4.2250,0min5sec
1381 | 13800,-21.9422,0.2815,4.2249,0min5sec
1382 | 13810,-21.9263,0.2813,4.2248,0min5sec
1383 | 13820,-21.9105,0.2812,4.2246,0min5sec
1384 | 13830,-21.8947,0.2810,4.2245,0min5sec
1385 | 13840,-21.8789,0.2809,4.2243,0min5sec
1386 | 13850,-21.8631,0.2807,4.2242,0min5sec
1387 | 13860,-21.8473,0.2806,4.2240,0min5sec
1388 | 13870,-21.8316,0.2804,4.2239,0min5sec
1389 | 13880,-21.8159,0.2802,4.2238,0min5sec
1390 | 13890,-21.8002,0.2801,4.2236,0min5sec
1391 | 13900,-21.7846,0.2799,4.2235,0min5sec
1392 | 13910,-21.7689,0.2798,4.2234,0min5sec
1393 | 13920,-21.7533,0.2796,4.2232,0min5sec
1394 | 13930,-21.7377,0.2794,4.2231,0min5sec
1395 | 13940,-21.7221,0.2793,4.2229,0min5sec
1396 | 13950,-21.7066,0.2791,4.2228,0min5sec
1397 | 13960,-21.6911,0.2790,4.2227,0min5sec
1398 | 13970,-21.6756,0.2788,4.2225,0min5sec
1399 | 13980,-21.6601,0.2786,4.2224,0min5sec
1400 | 13990,-21.6446,0.2785,4.2223,0min5sec
1401 | 14000,-21.6292,0.2783,4.2221,0min5sec
1402 | 14010,-21.6138,0.2782,4.2220,0min5sec
1403 | 14020,-21.5984,0.2780,4.2219,0min5sec
1404 | 14030,-21.5830,0.2779,4.2217,0min5sec
1405 | 14040,-21.5677,0.2777,4.2216,0min5sec
1406 | 14050,-21.5523,0.2776,4.2214,0min5sec
1407 | 14060,-21.5370,0.2774,4.2213,0min5sec
1408 | 14070,-21.5217,0.2772,4.2212,0min5sec
1409 | 14080,-21.5065,0.2771,4.2210,0min5sec
1410 | 14090,-21.4912,0.2769,4.2209,0min5sec
1411 | 14100,-21.4760,0.2768,4.2208,0min5sec
1412 | 14110,-21.4608,0.2766,4.2206,0min5sec
1413 | 14120,-21.4457,0.2765,4.2205,0min5sec
1414 | 14130,-21.4305,0.2763,4.2204,0min5sec
1415 | 14140,-21.4154,0.2761,4.2202,0min5sec
1416 | 14150,-21.4003,0.2760,4.2201,0min5sec
1417 | 14160,-21.3852,0.2758,4.2200,0min5sec
1418 | 14170,-21.3701,0.2757,4.2198,0min5sec
1419 | 14180,-21.3550,0.2755,4.2197,0min5sec
1420 | 14190,-21.3400,0.2754,4.2195,0min5sec
1421 | 14200,-21.3250,0.2752,4.2194,0min5sec
1422 | 14210,-21.3100,0.2750,4.2193,0min5sec
1423 | 14220,-21.2951,0.2749,4.2191,0min5sec
1424 | 14230,-21.2801,0.2747,4.2190,0min5sec
1425 | 14240,-21.2652,0.2746,4.2189,0min5sec
1426 | 14250,-21.2503,0.2744,4.2188,0min5sec
1427 | 14260,-21.2355,0.2743,4.2186,0min5sec
1428 | 14270,-21.2206,0.2741,4.2185,0min5sec
1429 | 14280,-21.2058,0.2740,4.2184,0min5sec
1430 | 14290,-21.1910,0.2739,4.2182,0min5sec
1431 | 14300,-21.1762,0.2737,4.2181,0min5sec
1432 | 14310,-21.1614,0.2736,4.2180,0min5sec
1433 | 14320,-21.1466,0.2734,4.2179,0min5sec
1434 | 14330,-21.1319,0.2733,4.2177,0min5sec
1435 | 14340,-21.1172,0.2731,4.2176,0min5sec
1436 | 14350,-21.1025,0.2730,4.2175,0min5sec
1437 | 14360,-21.0878,0.2728,4.2174,0min5sec
1438 | 14370,-21.0731,0.2727,4.2172,0min5sec
1439 | 14380,-21.0585,0.2725,4.2171,0min5sec
1440 | 14390,-21.0439,0.2724,4.2170,0min5sec
1441 | 14400,-21.0293,0.2722,4.2169,0min5sec
1442 | 14410,-21.0148,0.2721,4.2167,0min5sec
1443 | 14420,-21.0002,0.2719,4.2166,0min5sec
1444 | 14430,-20.9857,0.2718,4.2165,0min5sec
1445 | 14440,-20.9712,0.2716,4.2164,0min5sec
1446 | 14450,-20.9567,0.2715,4.2162,0min5sec
1447 | 14460,-20.9422,0.2713,4.2161,0min5sec
1448 | 14470,-20.9278,0.2712,4.2160,0min5sec
1449 | 14480,-20.9134,0.2710,4.2158,0min5sec
1450 | 14490,-20.8990,0.2709,4.2157,0min5sec
1451 | 14500,-20.8846,0.2707,4.2156,0min5sec
1452 | 14510,-20.8702,0.2706,4.2154,0min5sec
1453 | 14520,-20.8558,0.2704,4.2153,0min5sec
1454 | 14530,-20.8415,0.2703,4.2152,0min5sec
1455 | 14540,-20.8272,0.2701,4.2150,0min5sec
1456 | 14550,-20.8129,0.2700,4.2149,0min5sec
1457 | 14560,-20.7986,0.2698,4.2148,0min5sec
1458 | 14570,-20.7844,0.2697,4.2146,0min5sec
1459 | 14580,-20.7701,0.2695,4.2145,0min5sec
1460 | 14590,-20.7559,0.2694,4.2144,0min5sec
1461 | 14600,-20.7417,0.2692,4.2142,0min5sec
1462 | 14610,-20.7275,0.2691,4.2141,0min5sec
1463 | 14620,-20.7134,0.2689,4.2140,0min5sec
1464 | 14630,-20.6992,0.2688,4.2138,0min5sec
1465 | 14640,-20.6851,0.2687,4.2137,0min5sec
1466 | 14650,-20.6710,0.2685,4.2136,0min5sec
1467 | 14660,-20.6569,0.2684,4.2135,0min5sec
1468 | 14670,-20.6429,0.2682,4.2133,0min5sec
1469 | 14680,-20.6288,0.2681,4.2132,0min5sec
1470 | 14690,-20.6148,0.2679,4.2131,0min5sec
1471 | 14700,-20.6008,0.2678,4.2129,0min5sec
1472 | 14710,-20.5868,0.2676,4.2128,0min5sec
1473 | 14720,-20.5729,0.2675,4.2127,0min5sec
1474 | 14730,-20.5589,0.2673,4.2126,0min5sec
1475 | 14740,-20.5450,0.2672,4.2124,0min5sec
1476 | 14750,-20.5311,0.2671,4.2123,0min5sec
1477 | 14760,-20.5172,0.2669,4.2122,0min5sec
1478 | 14770,-20.5034,0.2668,4.2121,0min5sec
1479 | 14780,-20.4895,0.2666,4.2119,0min5sec
1480 | 14790,-20.4757,0.2665,4.2118,0min5sec
1481 | 14800,-20.4619,0.2663,4.2117,0min5sec
1482 | 14810,-20.4481,0.2662,4.2115,0min5sec
1483 | 14820,-20.4343,0.2660,4.2114,0min5sec
1484 | 14830,-20.4205,0.2659,4.2113,0min5sec
1485 | 14840,-20.4068,0.2658,4.2112,0min5sec
1486 | 14850,-20.3931,0.2656,4.2111,0min5sec
1487 | 14860,-20.3794,0.2655,4.2109,0min5sec
1488 | 14870,-20.3657,0.2653,4.2108,0min5sec
1489 | 14880,-20.3520,0.2652,4.2107,0min5sec
1490 | 14890,-20.3384,0.2650,4.2106,0min5sec
1491 | 14900,-20.3248,0.2649,4.2104,0min5sec
1492 | 14910,-20.3111,0.2648,4.2103,0min5sec
1493 | 14920,-20.2975,0.2646,4.2102,0min5sec
1494 | 14930,-20.2840,0.2645,4.2101,0min5sec
1495 | 14940,-20.2704,0.2644,4.2100,0min5sec
1496 | 14950,-20.2569,0.2642,4.2099,0min5sec
1497 | 14960,-20.2434,0.2641,4.2097,0min5sec
1498 | 14970,-20.2299,0.2639,4.2096,0min5sec
1499 | 14980,-20.2164,0.2638,4.2095,0min5sec
1500 | 14990,-20.2029,0.2637,4.2094,0min5sec
1501 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04
2 | # https://github.com/pytorch/pytorch/blob/master/docker/pytorch/Dockerfile
3 | ARG PYTHON_VERSION=3.6.10
4 | ARG CUDA_TOOLKIT_VERSION=10.1
5 | ARG PYTORCH_VERSION=1.6.0
6 |
7 | # Install some basic utilities
8 | RUN apt-get update && apt-get install -y --no-install-recommends \
9 | curl \
10 | ca-certificates \
11 | sudo \
12 | git \
13 | vim \
14 | bzip2 \
15 | libx11-6 \
16 | && rm -rf /var/lib/apt/lists/*
17 |
18 | # Create a working directory
19 | RUN mkdir /app
20 | WORKDIR /app
21 |
22 | # Create a non-root user and switch to it
23 | RUN adduser --disabled-password --gecos '' --shell /bin/bash user \
24 | && chown -R user:user /app
25 | RUN echo "user ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/90-user
26 | USER user
27 |
28 | # All users can use /home/user as their home directory
29 | ENV HOME=/home/user
30 | RUN chmod 777 /home/user
31 |
32 | # Install Miniconda and Python 3.x
33 | ENV CONDA_AUTO_UPDATE_CONDA=false
34 | ENV PATH=/home/user/miniconda/bin:$PATH
35 | RUN curl -sLo ~/miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
36 | && chmod +x ~/miniconda.sh \
37 | && ~/miniconda.sh -b -p ~/miniconda \
38 | && rm ~/miniconda.sh \
39 | && conda install -y python==$PYTHON_VERSION numpy scipy pandas matplotlib tqdm \
40 | && conda clean -ya
41 |
42 | # Install PyTorch 1.x
43 | # https://pytorch.org/get-started/previous-versions/
44 | # https://github.com/anibali/docker-pytorch/blob/master/dockerfiles/
45 | RUN conda install -y -c pytorch \
46 | cudatoolkit=$CUDA_TOOLKIT_VERSION \
47 | "pytorch=1.6.0=py3.6_cuda10.1.243_cudnn7.6.3_0" \
48 | && conda clean -ya
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Rintarooo
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Pkl/test20.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rintarooo/TSP_DRL_PtrNet/33636039c37da190cadb5bc136162c1e3f4daf46/Pkl/test20.pkl
--------------------------------------------------------------------------------
/Pkl/train20.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rintarooo/TSP_DRL_PtrNet/33636039c37da190cadb5bc136162c1e3f4daf46/Pkl/train20.pkl
--------------------------------------------------------------------------------
/Pt/train20_1113_12_12_step14999_act.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Rintarooo/TSP_DRL_PtrNet/33636039c37da190cadb5bc136162c1e3f4daf46/Pt/train20_1113_12_12_step14999_act.pt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TSP Solver with Deep RL
2 | This is a PyTorch implementation of "Neural Combinatorial Optimization with Reinforcement Learning" (Bello et al., 2016).
3 | [https://arxiv.org/abs/1611.09940]
4 |
5 | The Pointer Network is the model architecture proposed by Vinyals et al., 2015.
6 | [https://arxiv.org/abs/1506.03134]
7 |
8 | This model uses an attention mechanism to output a permutation of the input indices; the pointing step is sketched below.
9 |
10 | 
11 |
12 |
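The pointing step itself is a small additive-attention computation. Below is a minimal sketch mirroring `pointer()` in `actor.py`; the standalone function and its parameter names are illustrative, not part of the repo:

```python
import torch

def pointer_logits(query, ref, mask, W_q, W_ref, v, clip=10.0, inf=1e8):
    # query: (batch, hidden) decoder state; ref: (batch, city_t, hidden) encoder states;
    # mask: (batch, city_t), 1 for cities already visited.
    u1 = W_q(query).unsqueeze(-1)                 # (batch, hidden, 1), broadcast over cities
    u2 = W_ref(ref.permute(0, 2, 1))              # (batch, hidden, city_t)
    u = torch.einsum('h,bht->bt', v, clip * torch.tanh(u1 + u2))  # (batch, city_t)
    return u - inf * mask                         # visited cities get very negative logits
```

A softmax over these logits gives the probability of visiting each remaining city next.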
13 | In this work, we tackle the Traveling Salesman Problem (TSP), a combinatorial optimization problem known to be NP-hard. TSP seeks the shortest tour in which a salesman visits each city exactly once.
14 |
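For concreteness, the objective being minimized is the closed-loop Euclidean length of the visiting order. A minimal sketch on a hypothetical 4-city instance, mirroring `stack_l_fast` in `env.py`:

```python
import torch

# Hypothetical instance: 4 cities on the unit square and one candidate tour.
nodes = torch.tensor([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
tour = torch.tensor([0, 1, 2, 3])

d = nodes[tour]                                # city coordinates in tour order
legs = (d[1:] - d[:-1]).norm(dim=1).sum()      # consecutive legs of the tour
closing = (d[0] - d[-1]).norm()                # return from the last city to the first
print(legs + closing)                          # tensor(4.) for this square
```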
15 | ## Training without supervised solution
16 |
17 | In the training phase, this TSP solver optimizes two Pointer Network models: an actor and a critic.
18 |
19 | Given a graph of cities, where the cities are the nodes, the critic model predicts the expected tour length, which is generally called the state value. The critic's parameters are optimized so that its estimated tour length tracks the actual length of the tour (city permutation) predicted by the actor model. The actor model updates its policy parameters using the advantage, i.e. the actual tour length minus the state value.
20 |
21 | ### Actor-Critic
22 | ```
23 | Actor: Defines the agent's behavior, its policy
24 | Critic: Estimates the state-value
25 | ```
26 |
27 |
28 |
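One training step is short. The following condensed sketch is taken from `train.py` (gradient clipping and the learning-rate schedulers are omitted); it assumes `act_model`, `cri_model`, `env`, `mse_loss`, and the two optimizers are set up as in this repo:

```python
pred_tour, ll = act_model(inputs, device)      # tour (batch, city_t), log-likelihood (batch)
real_l = env.stack_l_fast(inputs, pred_tour)   # actual tour length per sample

pred_l = cri_model(inputs, device)             # state value: estimated tour length
cri_loss = mse_loss(pred_l, real_l.detach())   # critic chases the actual length
cri_optim.zero_grad(); cri_loss.backward(); cri_optim.step()

adv = real_l.detach() - pred_l.detach()        # advantage = actual - estimated length
act_loss = (adv * ll).mean()                   # REINFORCE with the critic as baseline
act_optim.zero_grad(); act_loss.backward(); act_optim.step()
```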
29 | ## Inference
30 | ### Active Search and Sampling
31 | The paper proposes two approaches for finding the best tour at inference time: Sampling and Active Search.
32 |
33 | Active Search takes the trained actor model and keeps updating its parameters with policy gradients in order to find a shorter tour. Sampling simply selects the shortest tour out of one batch of stochastic rollouts, as sketched below.
34 |
35 | 
36 |
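A condensed sketch of Sampling as implemented in `search.py`: replicate one test instance across the batch, decode stochastic tours, and keep the shortest one.

```python
test_inputs = test_input.repeat(cfg.batch, 1, 1)     # copy one instance batch-wise
pred_tours, _ = act_model(test_inputs, device)       # decode_type 'sampling' gives stochastic rollouts
l_batch = env.stack_l_fast(test_inputs, pred_tours)  # tour length per rollout
best_tour = pred_tours[torch.argmin(l_batch)]        # shortest tour out of the batch
```

Active Search wraps the same rollout in a loop and additionally applies a policy-gradient update against a moving baseline at every step (see `active_search()` in `search.py`).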
37 | ## Usage
38 |
39 | ### Training
40 |
41 | First, generate the pickle file containing the hyperparameter values by running the following command
42 |
43 | (in this example, train mode, batch size 512, 20 city nodes, 13000 steps).
44 |
45 | ```bash
46 | python config.py -m train -b 512 -t 20 -s 13000
47 | ```
48 | `-m train` can be replaced with `-m train_emv`. Here `emv` stands for 'exponential moving average', a baseline that removes the need for a critic model (see the sketch after the following command). Then start training.
49 | ```bash
50 | python train.py -p Pkl/train20.pkl
51 | ```
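For reference, the `train_emv` mode replaces the critic's state value with a running average of observed tour lengths. A minimal sketch of that branch as it appears in `train.py` (`i` is the step index, `real_l` the batch of actual tour lengths, `ll` the log-likelihoods):

```python
if i == 0:
    L = real_l.detach().mean()                  # initialize the running average
else:
    L = 0.9 * L + 0.1 * real_l.detach().mean()  # exponential moving average of tour length
adv = real_l.detach() - L                       # the average replaces the critic's state value
act_loss = (adv * ll).mean()
```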
52 |
53 |
54 | ### Inference
55 | Once training is done, set the configuration for inference.
56 | You can check how the training process went from the csv files in the `Csv` dir.
57 | You may use my pre-trained weight `Pt/train20_1113_12_12_step14999_act.pt`, which I trained for 20 nodes.
58 | ```bash
59 | python config.py -m test -t 20 -s 10 -ap Pt/train20_1113_12_12_step14999_act.pt --islogger --seed 123
60 | ```
61 | ```bash
62 | python test.py -p Pkl/test20.pkl
63 | ```
64 |
65 |
66 | ## Environment
67 | My environment is listed below. I tested on a single GPU.
68 | * OS:
69 | * Linux(Ubuntu 18.04.5 LTS)
70 | * GPU:
71 | * NVIDIA® GeForce® RTX 2080 Ti VENTUS 11GB OC
72 | * CPU:
73 | * Intel® Xeon® CPU E5640 @ 2.67GHz
74 | * NVIDIA® Driver = 455.45.01
75 | * Docker = 20.10.3
76 | * [nvidia-docker2](https://github.com/NVIDIA/nvidia-docker)(for GPU)
77 |
78 | ### Dependencies
79 | * Python = 3.6.10
80 | * PyTorch = 1.6.0
81 | * numpy
82 | * tqdm (optional)
83 | * matplotlib (only for plotting)
84 |
85 | ### Docker (optional)
86 | Make sure you've already installed `Docker`
87 | ```bash
88 | docker version
89 | ```
90 | the latest `NVIDIA® Driver`
91 | ```bash
92 | nvidia-smi
93 | ```
94 | and `nvidia-docker2` (for GPU).
95 |
96 | #### Usage
97 |
98 | 1. Build or pull the docker image
99 |
100 | Build the image (this might take some time)
101 | ```bash
102 | ./docker.sh build
103 | ```
104 | or pull the image from [dockerhub](https://hub.docker.com/repository/docker/docker4rintarooo/tspdrl/tags?page=1&ordering=last_updated)
105 | ```bash
106 | docker pull docker4rintarooo/tspdrl:latest
107 | ```
108 |
109 | 2. Run a container from the docker image (the `-v` option mounts the current directory)
110 | ```bash
111 | ./docker.sh run
112 | ```
113 | If you don't have a GPU, you can run
114 | ```bash
115 | ./docker.sh run_cpu
116 | ```
117 |
118 |
119 | ## Reference
120 | * https://github.com/higgsfield/np-hard-deep-reinforcement-learning
121 | * https://github.com/zhengsr3/Reinforcement_Learning_Pointer_Networks_TSP_Pytorch
122 | * https://github.com/pemami4911/neural-combinatorial-rl-pytorch
123 | * https://github.com/MichelDeudon/neural-combinatorial-optimization-rl-tensorflow
124 | * https://github.com/jingw2/neural-combinatorial-optimization-rl
125 | * https://github.com/dave-yxw/rl_tsp
126 | * https://github.com/shirgur/PointerNet
127 | * https://github.com/MichelDeudon/encode-attend-navigate
128 | * https://github.com/qiang-ma/HRL-for-combinatorial-optimization
129 | * https://www.youtube.com/watch?v=mxCVgVrUw50&ab_channel=%D0%9A%D0%BE%D0%BC%D0%BF%D1%8C%D1%8E%D1%82%D0%B5%D1%80%D0%BD%D1%8B%D0%B5%D0%BD%D0%B0%D1%83%D0%BA%D0%B8
130 |
--------------------------------------------------------------------------------
/actor.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 |
5 | from config import Config, load_pkl, pkl_parser
6 | from env import Env_tsp
7 |
8 | class Greedy(nn.Module):
9 | def __init__(self):
10 | super().__init__()
11 |
12 | def forward(self, log_p):
13 | return torch.argmax(log_p, dim = 1).long()
14 |
15 | class Categorical(nn.Module):
16 | def __init__(self):
17 | super().__init__()
18 |
19 | def forward(self, log_p):
20 | return torch.multinomial(log_p.exp(), 1).long().squeeze(1)
21 |
22 | # https://github.com/higgsfield/np-hard-deep-reinforcement-learning/blob/master/Neural%20Combinatorial%20Optimization.ipynb
23 | class PtrNet1(nn.Module):
24 | def __init__(self, cfg):
25 | super().__init__()
26 | self.Embedding = nn.Linear(2, cfg.embed, bias = False)
27 | self.Encoder = nn.LSTM(input_size = cfg.embed, hidden_size = cfg.hidden, batch_first = True)
28 | self.Decoder = nn.LSTM(input_size = cfg.embed, hidden_size = cfg.hidden, batch_first = True)
29 | if torch.cuda.is_available():
30 | self.Vec = nn.Parameter(torch.cuda.FloatTensor(cfg.embed))
31 | self.Vec2 = nn.Parameter(torch.cuda.FloatTensor(cfg.embed))
32 | else:
33 | self.Vec = nn.Parameter(torch.FloatTensor(cfg.embed))
34 | self.Vec2 = nn.Parameter(torch.FloatTensor(cfg.embed))
35 | self.W_q = nn.Linear(cfg.hidden, cfg.hidden, bias = True)
36 | self.W_ref = nn.Conv1d(cfg.hidden, cfg.hidden, 1, 1)
37 | self.W_q2 = nn.Linear(cfg.hidden, cfg.hidden, bias = True)
38 | self.W_ref2 = nn.Conv1d(cfg.hidden, cfg.hidden, 1, 1)
39 | self.dec_input = nn.Parameter(torch.FloatTensor(cfg.embed))
40 | self._initialize_weights(cfg.init_min, cfg.init_max)
41 | self.clip_logits = cfg.clip_logits
42 | self.softmax_T = cfg.softmax_T
43 | self.n_glimpse = cfg.n_glimpse
44 | self.city_selecter = {'greedy': Greedy(), 'sampling': Categorical()}.get(cfg.decode_type, None)
45 |
46 | def _initialize_weights(self, init_min = -0.08, init_max = 0.08):
47 | for param in self.parameters():
48 | nn.init.uniform_(param.data, init_min, init_max)
49 |
50 | def forward(self, x, device):
51 | ''' x: (batch, city_t, 2)
52 | enc_h: (batch, city_t, embed)
53 | dec_input: (batch, 1, embed)
54 | h: (1, batch, embed)
55 | return: pi: (batch, city_t), ll: (batch)
56 | '''
57 | x = x.to(device)
58 | batch, city_t, _ = x.size()
59 | embed_enc_inputs = self.Embedding(x)
60 | embed = embed_enc_inputs.size(2)
61 | mask = torch.zeros((batch, city_t), device = device)
62 | enc_h, (h, c) = self.Encoder(embed_enc_inputs, None)
63 | ref = enc_h
64 | pi_list, log_ps = [], []
65 | dec_input = self.dec_input.unsqueeze(0).repeat(batch,1).unsqueeze(1).to(device)
66 | for i in range(city_t):
67 | _, (h, c) = self.Decoder(dec_input, (h, c))
68 | query = h.squeeze(0)
69 | 			for _ in range(self.n_glimpse):
70 | query = self.glimpse(query, ref, mask)
71 | logits = self.pointer(query, ref, mask)
72 | log_p = torch.log_softmax(logits, dim = -1)
73 | next_node = self.city_selecter(log_p)
74 | dec_input = torch.gather(input = embed_enc_inputs, dim = 1, index = next_node.unsqueeze(-1).unsqueeze(-1).repeat(1, 1, embed))
75 |
76 | pi_list.append(next_node)
77 | log_ps.append(log_p)
78 | mask += torch.zeros((batch,city_t), device = device).scatter_(dim = 1, index = next_node.unsqueeze(1), value = 1)
79 |
80 | pi = torch.stack(pi_list, dim = 1)
81 | ll = self.get_log_likelihood(torch.stack(log_ps, 1), pi)
82 | return pi, ll
83 |
84 | def glimpse(self, query, ref, mask, inf = 1e8):
85 | """ -ref about torch.bmm, torch.matmul and so on
86 | https://qiita.com/tand826/items/9e1b6a4de785097fe6a5
87 | https://qiita.com/shinochin/items/aa420e50d847453cc296
88 |
89 | Args:
90 | 			query: the hidden state of the decoder at the current time step
91 | (batch, 128)
92 | ref: the set of hidden states from the encoder.
93 | (batch, city_t, 128)
94 | 			mask: ensures the model only points at cities that have yet to be visited, preventing visited cities from being reselected
95 | (batch, city_t)
96 | """
97 | u1 = self.W_q(query).unsqueeze(-1).repeat(1,1,ref.size(1))# u1: (batch, 128, city_t)
98 | u2 = self.W_ref(ref.permute(0,2,1))# u2: (batch, 128, city_t)
99 | V = self.Vec.unsqueeze(0).unsqueeze(0).repeat(ref.size(0), 1, 1)
100 | u = torch.bmm(V, torch.tanh(u1 + u2)).squeeze(1)
101 | # V: (batch, 1, 128) * u1+u2: (batch, 128, city_t) => u: (batch, 1, city_t) => (batch, city_t)
102 | u = u - inf * mask
103 | a = F.softmax(u / self.softmax_T, dim = 1)
104 | d = torch.bmm(u2, a.unsqueeze(2)).squeeze(2)
105 | # u2: (batch, 128, city_t) * a: (batch, city_t, 1) => d: (batch, 128)
106 | return d
107 |
108 | def pointer(self, query, ref, mask, inf = 1e8):
109 | """ Args:
110 | 			query: the hidden state of the decoder at the current time step
111 | (batch, 128)
112 | ref: the set of hidden states from the encoder.
113 | (batch, city_t, 128)
114 | 			mask: ensures the model only points at cities that have yet to be visited, preventing visited cities from being reselected
115 | (batch, city_t)
116 | """
117 | u1 = self.W_q2(query).unsqueeze(-1).repeat(1,1,ref.size(1))# u1: (batch, 128, city_t)
118 | u2 = self.W_ref2(ref.permute(0,2,1))# u2: (batch, 128, city_t)
119 | V = self.Vec2.unsqueeze(0).unsqueeze(0).repeat(ref.size(0), 1, 1)
120 | u = torch.bmm(V, self.clip_logits * torch.tanh(u1 + u2)).squeeze(1)
121 | # V: (batch, 1, 128) * u1+u2: (batch, 128, city_t) => u: (batch, 1, city_t) => (batch, city_t)
122 | u = u - inf * mask
123 | return u
124 |
125 | def get_log_likelihood(self, _log_p, pi):
126 | """ args:
127 | _log_p: (batch, city_t, city_t)
128 | pi: (batch, city_t), predicted tour
129 | return: (batch)
130 | """
131 | log_p = torch.gather(input = _log_p, dim = 2, index = pi[:,:,None])
132 | return torch.sum(log_p.squeeze(-1), 1)
133 |
134 | if __name__ == '__main__':
135 | cfg = load_pkl(pkl_parser().path)
136 | model = PtrNet1(cfg)
137 | inputs = torch.randn(3,20,2)
138 | pi, ll = model(inputs, device = 'cpu')
139 | print('pi:', pi.size(), pi)
140 | print('log_likelihood:', ll.size(), ll)
141 |
142 | cnt = 0
143 | for i, k in model.state_dict().items():
144 | print(i, k.size(), torch.numel(k))
145 | cnt += torch.numel(k)
146 | print('total parameters:', cnt)
147 |
148 | # ll.mean().backward()
149 | # print(model.W_q.weight.grad)
150 |
151 | cfg.batch = 3
152 | env = Env_tsp(cfg)
153 | cost = env.stack_l(inputs, pi)
154 | print('cost:', cost.size(), cost)
155 | cost = env.stack_l_fast(inputs, pi)
156 | print('cost:', cost.size(), cost)
157 |
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import os
3 | import argparse
4 | import torch
5 | from datetime import datetime
6 |
7 | def argparser():
8 | parser = argparse.ArgumentParser()
9 | # main parts
10 | parser.add_argument('-m', '--mode', metavar = 'M', type = str, required = True, choices = ['train', 'train_emv', 'test'], help = 'train or train_emv or test')
11 | parser.add_argument('-b', '--batch', metavar = 'B', type = int, default = 512, help = 'batch size, default: 512')
12 | parser.add_argument('-t', '--city_t', metavar = 'T', type = int, default = 20, help = 'number of cities(nodes), time sequence, default: 20')
13 | parser.add_argument('-s', '--steps', metavar = 'S', type = int, default = 15000, help = 'training steps(epochs), default: 15000')
14 |
15 | # details
16 | parser.add_argument('-e', '--embed', metavar = 'EM', type = int, default = 128, help = 'embedding size')
17 | parser.add_argument('-hi', '--hidden', metavar = 'HI', type = int, default = 128, help = 'hidden size')
18 | parser.add_argument('-c', '--clip_logits', metavar = 'C', type = int, default = 10, help = 'improve exploration; clipping logits')
19 | parser.add_argument('-st', '--softmax_T', metavar = 'ST', type = float, default = 1.0, help = 'might improve exploration; softmax temperature default 1.0 but 2.0, 2.2 and 1.5 might yield better results')
20 | parser.add_argument('-o', '--optim', metavar = 'O', type = str, default = 'Adam', help = 'torch optimizer')
21 | 	parser.add_argument('-minv', '--init_min', metavar = 'MINV', type = float, default = -0.08, help = 'minimum value for uniform weight initialization, default -0.08')
22 | 	parser.add_argument('-maxv', '--init_max', metavar = 'MAXV', type = float, default = 0.08, help = 'maximum value for uniform weight initialization, default 0.08')
23 | 	parser.add_argument('-ng', '--n_glimpse', metavar = 'NG', type = int, default = 1, help = 'number of glimpse functions')
24 | 	parser.add_argument('-np', '--n_process', metavar = 'NP', type = int, default = 3, help = 'number of process steps in the critic; each process step applies a glimpse')
25 | parser.add_argument('-dt', '--decode_type', metavar = 'DT', type = str, default = 'sampling', choices = ['greedy', 'sampling'], help = 'how to choose next city in actor model')
26 |
27 | # train, learning rate
28 | parser.add_argument('--lr', metavar = 'LR', type = float, default = 1e-3, help = 'initial learning rate')
29 | parser.add_argument('--is_lr_decay', action = 'store_false', help = 'flag learning rate scheduler default true')
30 | parser.add_argument('--lr_decay', metavar = 'LRD', type = float, default = 0.96, help = 'learning rate scheduler, decay by a factor of 0.96 ')
31 | parser.add_argument('--lr_decay_step', metavar = 'LRDS', type = int, default = 5e3, help = 'learning rate scheduler, decay every 5000 steps')
32 |
33 | # inference
34 | parser.add_argument('-ap', '--act_model_path', metavar = 'AMP', type = str, help = 'load actor model path')
35 | parser.add_argument('--seed', metavar = 'SEED', type = int, default = 1, help = 'random seed number for inference, reproducibility')
36 | parser.add_argument('-al', '--alpha', metavar = 'ALP', type = float, default = 0.99, help = 'alpha decay in active search')
37 |
38 | # path
39 | parser.add_argument('--islogger', action = 'store_false', help = 'flag csv logger default true')
40 | parser.add_argument('--issaver', action = 'store_false', help = 'flag model saver default true')
41 | parser.add_argument('-ls', '--log_step', metavar = 'LOGS', type = int, default = 10, help = 'logger timing')
42 | parser.add_argument('-ld', '--log_dir', metavar = 'LD', type = str, default = './Csv/', help = 'csv logger dir')
43 | parser.add_argument('-md', '--model_dir', metavar = 'MD', type = str, default = './Pt/', help = 'model save dir')
44 | parser.add_argument('-pd', '--pkl_dir', metavar = 'PD', type = str, default = './Pkl/', help = 'pkl save dir')
45 |
46 | # GPU
47 | parser.add_argument('-cd', '--cuda_dv', metavar = 'CD', type = str, default = '0', help = 'os CUDA_VISIBLE_DEVICE, default single GPU')
48 | args = parser.parse_args()
49 | return args
50 |
51 | class Config():
52 | def __init__(self, **kwargs):
53 | for k, v in kwargs.items():
54 | self.__dict__[k] = v
55 | self.dump_date = datetime.now().strftime('%m%d_%H_%M')
56 | self.task = '%s%d'%(self.mode, self.city_t)
57 | self.pkl_path = self.pkl_dir + '%s.pkl'%(self.task)
58 | self.n_samples = self.batch * self.steps
59 | for x in [self.log_dir, self.model_dir, self.pkl_dir]:
60 | os.makedirs(x, exist_ok = True)
61 |
62 | def print_cfg(cfg):
63 | print(''.join('%s: %s\n'%item for item in vars(cfg).items()))
64 |
65 | def dump_pkl(args, verbose = True, override = None):
66 | cfg = Config(**vars(args))
67 | if os.path.exists(cfg.pkl_path):
68 | override = input(f'found the same name pkl file "{cfg.pkl_path}".\noverride this file? [y/n]:')
69 | with open(cfg.pkl_path, 'wb') as f:
70 | if override == 'n':
71 | raise RuntimeError('modify cfg.pkl_path in config.py as you like')
72 | pickle.dump(cfg, f)
73 | print('--- save pickle file in %s ---\n'%cfg.pkl_path)
74 | if verbose:
75 | print_cfg(cfg)
76 |
77 | def load_pkl(pkl_path, verbose = True):
78 | if not os.path.isfile(pkl_path):
79 | raise FileNotFoundError('pkl_path')
80 | with open(pkl_path, 'rb') as f:
81 | cfg = pickle.load(f)
82 | if verbose:
83 | print_cfg(cfg)
84 | 	os.environ['CUDA_VISIBLE_DEVICES'] = cfg.cuda_dv
85 | return cfg
86 |
87 | def pkl_parser():
88 | parser = argparse.ArgumentParser()
89 | parser.add_argument('-p', '--path', metavar = 'P', type = str,
90 | default = 'Pkl/test20.pkl', help = 'pkl file name')
91 | args = parser.parse_args()
92 | return args
93 |
94 | if __name__ == '__main__':
95 | args = argparser()
96 | dump_pkl(args)
97 | # cfg = load_pkl('./Pkl/test.pkl')
98 | # for k, v in vars(cfg).items():
99 | # print(k, v)
100 | # print(vars(cfg)[k])#==v
101 |
--------------------------------------------------------------------------------
/critic.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 | from config import Config, load_pkl, pkl_parser
5 |
6 | class PtrNet2(nn.Module):
7 | def __init__(self, cfg):
8 | super().__init__()
9 | self.Embedding = nn.Linear(2, cfg.embed, bias = False)
10 | self.Encoder = nn.LSTM(input_size = cfg.embed, hidden_size = cfg.hidden, batch_first = True)
11 | self.Decoder = nn.LSTM(input_size = cfg.embed, hidden_size = cfg.hidden, batch_first = True)
12 | if torch.cuda.is_available():
13 | self.Vec = nn.Parameter(torch.cuda.FloatTensor(cfg.embed))
14 | else:
15 | self.Vec = nn.Parameter(torch.FloatTensor(cfg.embed))
16 | self.W_q = nn.Linear(cfg.hidden, cfg.hidden, bias = True)
17 | self.W_ref = nn.Conv1d(cfg.hidden, cfg.hidden, 1, 1)
18 | # self.dec_input = nn.Parameter(torch.FloatTensor(cfg.embed))
19 | self.final2FC = nn.Sequential(
20 | nn.Linear(cfg.hidden, cfg.hidden, bias = False),
21 | nn.ReLU(inplace = False),
22 | nn.Linear(cfg.hidden, 1, bias = False))
23 | self._initialize_weights(cfg.init_min, cfg.init_max)
24 | self.n_glimpse = cfg.n_glimpse
25 | self.n_process = cfg.n_process
26 |
27 | def _initialize_weights(self, init_min = -0.08, init_max = 0.08):
28 | for param in self.parameters():
29 | nn.init.uniform_(param.data, init_min, init_max)
30 |
31 | def forward(self, x, device):
32 | ''' x: (batch, city_t, 2)
33 | enc_h: (batch, city_t, embed)
34 | query(Decoder input): (batch, 1, embed)
35 | h: (1, batch, embed)
36 | return: pred_l: (batch)
37 | '''
38 | x = x.to(device)
39 | batch, city_t, xy = x.size()
40 | embed_enc_inputs = self.Embedding(x)
41 | embed = embed_enc_inputs.size(2)
42 | enc_h, (h, c) = self.Encoder(embed_enc_inputs, None)
43 | ref = enc_h
44 | # ~ query = h.permute(1,0,2).to(device)# query = self.dec_input.unsqueeze(0).repeat(batch,1).unsqueeze(1).to(device)
45 | query = h[-1]
46 | # ~ process_h, process_c = [torch.zeros((1, batch, embed), device = device) for _ in range(2)]
47 | for i in range(self.n_process):
48 | # ~ _, (process_h, process_c) = self.Decoder(query, (process_h, process_c))
49 | # ~ _, (h, c) = self.Decoder(query, (h, c))
50 | # ~ query = query.squeeze(1)
51 | 			for _ in range(self.n_glimpse):
52 | query = self.glimpse(query, ref)
53 | # ~ query = query.unsqueeze(1)
54 | '''
55 | - page 5/15 in paper
56 | critic model architecture detail is out there, "Critic’s architecture for TSP"
57 | - page 14/15 in paper
58 | glimpsing more than once with the same parameters
59 | made the model less likely to learn and barely improved the results
60 |
61 | query(batch,hidden)*FC(hidden,hidden)*FC(hidden,1) -> pred_l(batch,1) ->pred_l(batch)
62 | '''
63 | pred_l = self.final2FC(query).squeeze(-1).squeeze(-1)
64 | return pred_l
65 |
66 | def glimpse(self, query, ref, infinity = 1e8):
67 | """ Args:
68 | 			query: the hidden state of the decoder at the current time step
69 | (batch, 128)
70 | ref: the set of hidden states from the encoder.
71 | (batch, city_t, 128)
72 | """
73 | u1 = self.W_q(query).unsqueeze(-1).repeat(1,1,ref.size(1))# u1: (batch, 128, city_t)
74 | u2 = self.W_ref(ref.permute(0,2,1))# u2: (batch, 128, city_t)
75 | V = self.Vec.unsqueeze(0).unsqueeze(0).repeat(ref.size(0), 1, 1)
76 | u = torch.bmm(V, torch.tanh(u1 + u2)).squeeze(1)
77 | # V: (batch, 1, 128) * u1+u2: (batch, 128, city_t) => u: (batch, 1, city_t) => (batch, city_t)
78 | a = F.softmax(u, dim = 1)
79 | d = torch.bmm(u2, a.unsqueeze(2)).squeeze(2)
80 | # u2: (batch, 128, city_t) * a: (batch, city_t, 1) => d: (batch, 128)
81 | return d
82 |
83 | if __name__ == '__main__':
84 | cfg = load_pkl(pkl_parser().path)
85 | model = PtrNet2(cfg)
86 | inputs = torch.randn(3,20,2)
87 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
88 | model = model.to(device)
89 | pred_l = model(inputs, device)
90 | print('pred_length:', pred_l.size(), pred_l)
91 |
92 | cnt = 0
93 | for i, k in model.state_dict().items():
94 | print(i, k.size(), torch.numel(k))
95 | cnt += torch.numel(k)
96 | print('total parameters:', cnt)
97 |
98 | # pred_l.mean().backward()
99 | # print(model.W_q.weight.grad)
100 |
--------------------------------------------------------------------------------
/data.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from torch.utils.data.dataset import Dataset
3 | from torch.utils.data import DataLoader
4 |
5 | from env import Env_tsp
6 | from config import Config, load_pkl, pkl_parser
7 |
8 | class Generator(Dataset):
9 | def __init__(self, cfg, env):
10 | self.data = env.get_batch_nodes(cfg.n_samples)
11 |
12 | def __getitem__(self, idx):
13 | return self.data[idx]
14 |
15 | def __len__(self):
16 | return self.data.size(0)
17 |
18 | if __name__ == '__main__':
19 | cfg = load_pkl(pkl_parser().path)
20 | env = Env_tsp(cfg)
21 | dataset = Generator(cfg, env)
22 | data = next(iter(dataset))
23 | print(data.size())
24 |
25 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
26 | dataloader = DataLoader(dataset, batch_size = cfg.batch, shuffle = True)
27 | for i, data in enumerate(dataloader):
28 | print(data.size())
29 | if i == 0:
30 | break
31 |
--------------------------------------------------------------------------------
/docker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # if $2 is not given,
4 | # image:tag defaults to docker4rintarooo/tspdrl:latest
5 | IMAGE_NAME=${2:-docker4rintarooo/tspdrl:latest}
6 |
7 | if [ "$1" = "build" ]; then
8 | docker build -t $IMAGE_NAME .
9 | echo -e "\n\n\ndocker images | head"
10 | docker images | head
11 | elif [ "$1" = "run" ]; then
12 | docker run -it --rm \
13 | --gpus=all \
14 | -v ${PWD}:/app \
15 | $IMAGE_NAME
16 | elif [ "$1" = "run_cpu" ]; then
17 | docker run -it --rm \
18 | -v ${PWD}:/app \
19 | $IMAGE_NAME
20 | else
21 |   echo "command should be one of:
22 |   ${0} build
23 |   or
24 |   ${0} run (or ${0} run_cpu)"
25 | fi
--------------------------------------------------------------------------------
/env.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | import math
4 | import itertools
5 | import matplotlib.pyplot as plt
6 |
7 | def get_2city_distance(n1, n2):
8 | x1,y1,x2,y2 = n1[0],n1[1],n2[0],n2[1]
9 | if isinstance(n1, torch.Tensor):
10 | return torch.sqrt((x2-x1).pow(2)+(y2-y1).pow(2))
11 | elif isinstance(n1, (list, np.ndarray)):
12 | return math.sqrt(pow(x2-x1,2)+pow(y2-y1,2))
13 | else:
14 | raise TypeError
15 |
16 | class Env_tsp():
17 | def __init__(self, cfg):
18 | '''
19 | nodes(cities) : contains nodes and their 2 dimensional coordinates
20 | [city_t, 2] = [3,2] dimension array e.g. [[0.5,0.7],[0.2,0.3],[0.4,0.1]]
21 | '''
22 | self.batch = cfg.batch
23 | self.city_t = cfg.city_t
24 |
25 | def get_nodes(self, seed = None):
26 | '''
27 | return nodes:(city_t,2)
28 | '''
29 | if seed is not None:
30 | torch.manual_seed(seed)
31 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
32 | return torch.rand((self.city_t, 2), device = device)
33 |
34 | def stack_nodes(self):
35 | '''
36 | nodes:(city_t,2)
37 | return inputs:(batch,city_t,2)
38 | '''
39 | 		node_list = [self.get_nodes() for i in range(self.batch)]
40 | 		inputs = torch.stack(node_list, dim = 0)
41 | return inputs
42 |
43 | def get_batch_nodes(self, n_samples, seed = None):
44 | '''
45 | return nodes:(batch,city_t,2)
46 | '''
47 | if seed is not None:
48 | torch.manual_seed(seed)
49 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
50 | return torch.rand((n_samples, self.city_t, 2), device = device)
51 |
52 | def stack_random_tours(self):
53 | '''
54 | tour:(city_t)
55 | return tours:(batch,city_t)
56 | '''
57 | 		tour_list = [self.get_random_tour() for i in range(self.batch)]
58 | 		tours = torch.stack(tour_list, dim = 0)
59 | return tours
60 |
61 | def stack_l(self, inputs, tours):
62 | '''
63 | inputs:(batch,city_t,2)
64 | tours:(batch,city_t)
65 | return l_batch:(batch)
66 | '''
67 | 		l_list = [self.get_tour_distance(inputs[i], tours[i]) for i in range(self.batch)]
68 | 		l_batch = torch.stack(l_list, dim = 0)
69 | return l_batch
70 |
71 | def stack_l_fast(self, inputs, tours):
72 | """
73 | *** this function is faster version of stack_l! ***
74 | inputs: (batch, city_t, 2), Coordinates of nodes
75 | tours: (batch, city_t), predicted tour
76 | d: (batch, city_t, 2)
77 | """
78 | d = torch.gather(input = inputs, dim = 1, index = tours[:,:,None].repeat(1,1,2))
79 | return (torch.sum((d[:, 1:] - d[:, :-1]).norm(p = 2, dim = 2), dim = 1)
80 | + (d[:, 0] - d[:, -1]).norm(p = 2, dim = 1))# distance from last node to first selected node)
81 |
82 | def show(self, nodes, tour):
83 | nodes = nodes.cpu().detach()
84 | print('distance:{:.3f}'.format(self.get_tour_distance(nodes, tour)))
85 | print(tour)
86 | plt.figure()
87 | plt.plot(nodes[:,0], nodes[:,1], 'yo', markersize = 16)
88 | np_tour = tour[:].cpu().detach()
89 | np_fin_tour = [tour[-1].item(), tour[0].item()]
90 | plt.plot(nodes[np_tour, 0], nodes[np_tour, 1], 'k-', linewidth = 0.7)
91 | plt.plot(nodes[np_fin_tour, 0], nodes[np_fin_tour, 1], 'k-', linewidth = 0.7)
92 | for i in range(self.city_t):
93 | plt.text(nodes[i,0], nodes[i,1], str(i), size = 10, color = 'b')
94 | plt.show()
95 |
96 | def shuffle(self, inputs):
97 | '''
98 | shuffle nodes order with a set of xy coordinate
99 | inputs:(batch,city_t,2)
100 | return shuffle_inputs:(batch,city_t,2)
101 | '''
102 | shuffle_inputs = torch.zeros(inputs.size())
103 | for i in range(self.batch):
104 | perm = torch.randperm(self.city_t)
105 | shuffle_inputs[i,:,:] = inputs[i,perm,:]
106 | return shuffle_inputs
107 |
108 | def back_tours(self, pred_shuffle_tours, shuffle_inputs, test_inputs, device):
109 | '''
110 | pred_shuffle_tours:(batch,city_t)
111 | 		shuffle_inputs:(batch,city_t,2)
112 | test_inputs:(batch,city_t,2)
113 | return pred_tours:(batch,city_t)
114 | '''
115 | pred_tours = []
116 | for i in range(self.batch):
117 | pred_tour = []
118 | for j in range(self.city_t):
119 | xy_temp = shuffle_inputs[i, pred_shuffle_tours[i, j]].to(device)
120 | for k in range(self.city_t):
121 | if torch.all(torch.eq(xy_temp, test_inputs[i,k])):
122 | pred_tour.append(torch.tensor(k))
123 | if len(pred_tour) == self.city_t:
124 | pred_tours.append(torch.stack(pred_tour, dim = 0))
125 | break
126 | pred_tours = torch.stack(pred_tours, dim = 0)
127 | return pred_tours
128 |
129 | def get_tour_distance(self, nodes, tour):
130 | '''
131 | nodes:(city_t,2), tour:(city_t)
132 | l(= total distance) = l(0-1) + l(1-2) + l(2-3) + ... + l(18-19) + l(19-0) @20%20->0
133 | return l:(1)
134 | '''
135 | l = 0
136 | for i in range(self.city_t):
137 | l += get_2city_distance(nodes[tour[i]], nodes[tour[(i+1)%self.city_t]])
138 | return l
139 |
140 | def get_random_tour(self):
141 | '''
142 | return tour:(city_t)
143 | '''
144 | tour = []
145 | while set(tour) != set(range(self.city_t)):
146 | city = np.random.randint(self.city_t)
147 | if city not in tour:
148 | tour.append(city)
149 | tour = torch.from_numpy(np.array(tour))
150 | return tour
151 |
152 | def get_optimal_tour(self, nodes):
153 | 		# Held-Karp dynamic programming: first calculate lengths between all nodes
154 | points = nodes.numpy()
155 | all_distances = [[get_2city_distance(x, y) for y in points] for x in points]
156 | # initial value - just distance from 0 to every other point + keep the track of edges
157 | A = {(frozenset([0, idx + 1]), idx + 1): (dist, [0, idx + 1]) for idx, dist in enumerate(all_distances[0][1:])}
158 | cnt = len(points)
159 | for m in range(2, cnt):
160 | B = {}
161 | for S in [frozenset(C) | {0} for C in itertools.combinations(range(1, cnt), m)]:
162 | for j in S - {0}:
163 | B[(S, j)] = min([(A[(S - {j}, k)][0] + all_distances[k][j], A[(S - {j}, k)][1] + [j]) for k in S if
164 | k != 0 and k != j]) # this will use 0th index of tuple for ordering, the same as if key=itemgetter(0) used
165 | A = B
166 | res = min([(A[d][0] + all_distances[0][d[1]], A[d][1]) for d in iter(A)])
167 | tour = torch.from_numpy(np.array(res[1]))
168 | return tour
169 |
--------------------------------------------------------------------------------
/search.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.optim as optim
4 | import os
5 | from tqdm import tqdm
6 | from datetime import datetime
7 | from actor import PtrNet1
8 |
9 | def sampling(cfg, env, test_input):
10 | test_inputs = test_input.repeat(cfg.batch,1,1)
11 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
12 | act_model = PtrNet1(cfg)
13 | if os.path.exists(cfg.act_model_path):
14 | act_model.load_state_dict(torch.load(cfg.act_model_path, map_location = device))
15 | else:
16 | print('specify pretrained model path')
17 | act_model = act_model.to(device)
18 | pred_tours, _ = act_model(test_inputs, device)
19 | l_batch = env.stack_l_fast(test_inputs, pred_tours)
20 | index_lmin = torch.argmin(l_batch)
21 | best_tour = pred_tours[index_lmin]
22 | return best_tour
23 |
24 | def active_search(cfg, env, test_input, log_path = None):
25 | '''
26 | active search updates model parameters even during inference on a single input
27 | test input:(city_t,xy)
28 | '''
29 | date = datetime.now().strftime('%m%d_%H_%M')
30 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
31 | test_inputs = test_input.repeat(cfg.batch,1,1).to(device)
32 | random_tours = env.stack_random_tours().to(device)
33 | baseline = env.stack_l_fast(test_inputs, random_tours)
34 | 	l_min, best_tour = baseline[0], random_tours[0]# fallback tour in case no sampled tour improves on the baseline
35 |
36 | act_model = PtrNet1(cfg)
37 | if os.path.exists(cfg.act_model_path):
38 | act_model.load_state_dict(torch.load(cfg.act_model_path, map_location = device))
39 |
40 | if cfg.optim == 'Adam':
41 | act_optim = optim.Adam(act_model.parameters(), lr = cfg.lr)
42 |
43 | act_model = act_model.to(device)
44 | baseline = baseline.to(device)
45 |
46 | for i in tqdm(range(cfg.steps)):
47 | '''
48 | 	- page 6/15 in paper
49 | we randomly shuffle the input sequence before feeding it to our pointer network.
50 | This increases the stochasticity of the sampling procedure and leads to large improvements in Active Search.
51 | '''
52 | test_inputs = test_inputs.to(device)
53 | shuffle_inputs = env.shuffle(test_inputs)
54 | pred_shuffle_tours, neg_log = act_model(shuffle_inputs, device)
55 | pred_tours = env.back_tours(pred_shuffle_tours, shuffle_inputs, test_inputs, device).to(device)
56 |
57 | l_batch = env.stack_l_fast(test_inputs, pred_tours)
58 |
59 | index_lmin = torch.argmin(l_batch)
60 | if torch.min(l_batch) != l_batch[index_lmin]:
61 | raise RuntimeError
62 | if l_batch[index_lmin] < l_min:
63 | best_tour = pred_tours[index_lmin]
64 | print('update best tour, min l(%1.3f -> %1.3f)'%(l_min,l_batch[index_lmin]))
65 | l_min = l_batch[index_lmin]
66 |
67 | adv = l_batch - baseline
68 | act_optim.zero_grad()
69 | act_loss = torch.mean(adv * neg_log)
70 | '''
71 | adv(batch) = l_batch(batch) - baseline(batch)
72 | mean(adv(batch) * neg_log(batch)) -> act_loss(scalar)
73 | '''
74 | act_loss.backward()
75 | nn.utils.clip_grad_norm_(act_model.parameters(), max_norm = 1., norm_type = 2)
76 | act_optim.step()
77 | baseline = baseline*cfg.alpha + (1-cfg.alpha)*torch.mean(l_batch, dim = 0)
78 | 		print('step:%d/%d, actor loss:%1.3f'%(i, cfg.steps, act_loss.data))
79 |
80 | if cfg.islogger:
81 | if i % cfg.log_step == 0:
82 | if log_path is None:
83 | log_path = cfg.log_dir + 'active_search_%s.csv'%(date)#cfg.log_dir = ./Csv/
84 | with open(log_path, 'w') as f:
85 | 					f.write('step,actor loss,minimum distance\n')
86 | else:
87 | with open(log_path, 'a') as f:
88 | f.write('%d,%1.4f,%1.4f\n'%(i, act_loss, l_min))
89 | return best_tour
90 |
91 |
--------------------------------------------------------------------------------
/test.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from time import time
3 | from env import Env_tsp
4 | from config import Config, load_pkl, pkl_parser
5 | from search import sampling, active_search
6 |
7 | def search_tour(cfg, env):
8 | test_input = env.get_nodes(cfg.seed)
9 |
10 | # random
11 | print('generate random tour...')
12 | random_tour = env.get_random_tour()
13 | env.show(test_input, random_tour)
14 |
15 | # simplest way
16 | print('sampling ...')
17 | t1 = time()
18 | pred_tour = sampling(cfg, env, test_input)
19 | t2 = time()
20 | print('%dmin %1.2fsec\n'%((t2-t1)//60, (t2-t1)%60))
21 | env.show(test_input, pred_tour)
22 |
23 | # active search, update parameters during test
24 | print('active search ...')
25 | t1 = time()
26 | pred_tour = active_search(cfg, env, test_input)
27 | t2 = time()
28 | print('%dmin %1.2fsec\n'%((t2-t1)//60, (t2-t1)%60))
29 | env.show(test_input, pred_tour)
30 |
31 | """
32 | # optimal solution, it takes time
33 | print('generate optimal solution ...')
34 | t1 = time()
35 | optimal_tour = env.get_optimal_tour(test_input)
36 | env.show(test_input, optimal_tour)
37 | t2 = time()
38 | print('%dmin %1.2fsec\n'%((t2-t1)//60, (t2-t1)%60))
39 | """
40 |
41 | if __name__ == '__main__':
42 | cfg = load_pkl(pkl_parser().path)
43 | env = Env_tsp(cfg)
44 |
45 | # inputs = env.stack_nodes()
46 | # ~ tours = env.stack_random_tours()
47 | # ~ l = env.stack_l(inputs, tours)
48 |
49 | # ~ nodes = env.get_nodes(cfg.seed)
50 | # random_tour = env.get_random_tour()
51 | # ~ env.show(nodes, random_tour)
52 |
53 | # ~ env.show(inputs[0], random_tour)
54 | # ~ inputs = env.shuffle_index(inputs)
55 | # env.show(inputs[0], random_tour)
56 |
57 | # inputs = env.stack_nodes()
58 | # random_tour = env.get_random_tour()
59 | # env.show(inputs[0], random_tour)
60 |
61 |
62 | if cfg.mode == 'test':
63 | search_tour(cfg, env)
64 |
65 | else:
66 | raise NotImplementedError('test only, specify test pkl file')
67 |
68 |
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.optim as optim
4 | from torch.utils.data import DataLoader
5 | from tqdm import tqdm
6 | from time import time
7 | from datetime import datetime
8 |
9 | from actor import PtrNet1
10 | from critic import PtrNet2
11 | from env import Env_tsp
12 | from config import Config, load_pkl, pkl_parser
13 | from data import Generator
14 |
15 | # torch.autograd.set_detect_anomaly(True)
16 | torch.backends.cudnn.benchmark = True
17 |
18 | def train_model(cfg, env, log_path = None):
19 | date = datetime.now().strftime('%m%d_%H_%M')
20 | if cfg.islogger:
21 | param_path = cfg.log_dir + '%s_%s_param.csv'%(date, cfg.task)# cfg.log_dir = ./Csv/
22 | print(f'generate {param_path}')
23 | with open(param_path, 'w') as f:
24 | f.write(''.join('%s,%s\n'%item for item in vars(cfg).items()))
25 |
26 | act_model = PtrNet1(cfg)
27 | if cfg.optim == 'Adam':
28 | act_optim = optim.Adam(act_model.parameters(), lr = cfg.lr)
29 | if cfg.is_lr_decay:
30 | act_lr_scheduler = optim.lr_scheduler.StepLR(act_optim,
31 | step_size=cfg.lr_decay_step, gamma=cfg.lr_decay)
32 | device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
33 | act_model = act_model.to(device)
34 |
35 | if cfg.mode == 'train':
36 | cri_model = PtrNet2(cfg)
37 | if cfg.optim == 'Adam':
38 | cri_optim = optim.Adam(cri_model.parameters(), lr = cfg.lr)
39 | if cfg.is_lr_decay:
40 | cri_lr_scheduler = optim.lr_scheduler.StepLR(cri_optim,
41 | step_size = cfg.lr_decay_step, gamma = cfg.lr_decay)
42 | cri_model = cri_model.to(device)
43 | ave_cri_loss = 0.
44 |
45 | mse_loss = nn.MSELoss()
46 | dataset = Generator(cfg, env)
47 | dataloader = DataLoader(dataset, batch_size = cfg.batch, shuffle = True)
48 |
49 | ave_act_loss, ave_L = 0., 0.
50 | min_L, cnt = 1e7, 0
51 | t1 = time()
52 | # for i, inputs in tqdm(enumerate(dataloader)):
53 | for i, inputs in enumerate(dataloader):
54 | inputs = inputs.to(device)
55 | pred_tour, ll = act_model(inputs, device)
56 | real_l = env.stack_l_fast(inputs, pred_tour)
57 | if cfg.mode == 'train':
58 | pred_l = cri_model(inputs, device)
59 | cri_loss = mse_loss(pred_l, real_l.detach())
60 | cri_optim.zero_grad()
61 | cri_loss.backward()
62 | nn.utils.clip_grad_norm_(cri_model.parameters(), max_norm = 1., norm_type = 2)
63 | cri_optim.step()
64 | if cfg.is_lr_decay:
65 | cri_lr_scheduler.step()
66 | elif cfg.mode == 'train_emv':
67 | if i == 0:
68 | L = real_l.detach().mean()
69 | else:
70 | L = (L * 0.9) + (0.1 * real_l.detach().mean())
71 | pred_l = L
72 |
73 | adv = real_l.detach() - pred_l.detach()
74 | act_loss = (adv * ll).mean()
75 | act_optim.zero_grad()
76 | act_loss.backward()
77 | nn.utils.clip_grad_norm_(act_model.parameters(), max_norm = 1., norm_type = 2)
78 | act_optim.step()
79 | if cfg.is_lr_decay:
80 | act_lr_scheduler.step()
81 |
82 | ave_act_loss += act_loss.item()
83 | if cfg.mode == 'train':
84 | ave_cri_loss += cri_loss.item()
85 | ave_L += real_l.mean().item()
86 |
87 | if i % cfg.log_step == 0:
88 | t2 = time()
89 | if cfg.mode == 'train':
90 | 			print('step:%d/%d, actor loss:%1.3f, critic loss:%1.3f, L:%1.3f, %dmin%dsec'%(i, cfg.steps, ave_act_loss/(i+1), ave_cri_loss/(i+1), ave_L/(i+1), (t2-t1)//60, (t2-t1)%60))
91 | if cfg.islogger:
92 | if log_path is None:
93 | log_path = cfg.log_dir + '%s_%s_train.csv'%(date, cfg.task)#cfg.log_dir = ./Csv/
94 | with open(log_path, 'w') as f:
95 | 						f.write('step,actor loss,critic loss,average distance,time\n')
96 | else:
97 | with open(log_path, 'a') as f:
98 | f.write('%d,%1.4f,%1.4f,%1.4f,%dmin%dsec\n'%(i, ave_act_loss/(i+1), ave_cri_loss/(i+1), ave_L/(i+1), (t2-t1)//60, (t2-t1)%60))
99 |
100 | elif cfg.mode == 'train_emv':
101 | 			print('step:%d/%d, actor loss:%1.3f, L:%1.3f, %dmin%dsec'%(i, cfg.steps, ave_act_loss/(i+1), ave_L/(i+1), (t2-t1)//60, (t2-t1)%60))
102 | if cfg.islogger:
103 | if log_path is None:
104 | log_path = cfg.log_dir + '%s_%s_train_emv.csv'%(date, cfg.task)#cfg.log_dir = ./Csv/
105 | with open(log_path, 'w') as f:
106 | 						f.write('step,actor loss,average distance,time\n')
107 | else:
108 | with open(log_path, 'a') as f:
109 | f.write('%d,%1.4f,%1.4f,%dmin%dsec\n'%(i, ave_act_loss/(i+1), ave_L/(i+1), (t2-t1)//60, (t2-t1)%60))
110 | if(ave_L/(i+1) < min_L):
111 | min_L = ave_L/(i+1)
112 |
113 | else:
114 | cnt += 1
115 | print(f'cnt: {cnt}/20')
116 | if(cnt >= 20):
117 | 					print('early stop, average cost has stopped decreasing')
118 | if log_path is not None:
119 | with open(log_path, 'a') as f:
120 | f.write('\nearly stop')
121 | break
122 | t1 = time()
123 | if cfg.issaver:
124 | torch.save(act_model.state_dict(), cfg.model_dir + '%s_%s_step%d_act.pt'%(cfg.task, date, i))#'cfg.model_dir = ./Pt/'
125 | print('save model...')
126 |
127 | if __name__ == '__main__':
128 | cfg = load_pkl(pkl_parser().path)
129 | env = Env_tsp(cfg)
130 |
131 | if cfg.mode in ['train', 'train_emv']:
132 | 	# train_emv --> exponential moving average baseline, does not use the critic model
133 | train_model(cfg, env)
134 | else:
135 | raise NotImplementedError('train and train_emv only, specify train pkl file')
136 |
137 |
--------------------------------------------------------------------------------