├── .gitignore
├── datasets
│   ├── homography
│   │   ├── eth.txt
│   │   ├── hotel.txt
│   │   ├── univ.txt
│   │   └── zara.txt
│   ├── navMap
│   │   ├── eth.npy
│   │   ├── hotel.npy
│   │   ├── univ1.npy
│   │   ├── univ2.npy
│   │   ├── univ3.npy
│   │   ├── zara1.npy
│   │   ├── zara2.npy
│   │   └── zara3.npy
│   ├── test
│   │   ├── biwi_eth.txt
│   │   ├── biwi_hotel.txt
│   │   ├── crowds_zara01.txt
│   │   ├── crowds_zara02.txt
│   │   ├── students001.txt
│   │   └── students003.txt
│   ├── train
│   │   ├── biwi_eth_train.txt
│   │   ├── biwi_hotel_train.txt
│   │   ├── crowds_zara01_train.txt
│   │   ├── crowds_zara02_train.txt
│   │   ├── crowds_zara03_train.txt
│   │   ├── students001_train.txt
│   │   ├── students003_train.txt
│   │   └── uni_examples_train.txt
│   └── val
│       ├── biwi_eth_val.txt
│       ├── biwi_hotel_val.txt
│       ├── crowds_zara01_val.txt
│       ├── crowds_zara02_val.txt
│       ├── crowds_zara03_val.txt
│       ├── students001_val.txt
│       ├── students003_val.txt
│       └── uni_examples_val.txt
├── experiments
│   ├── eth.yaml
│   ├── hotel.yaml
│   ├── univ.yaml
│   ├── zara1.yaml
│   └── zara2.yaml
├── requirements.txt
├── scripts
│   ├── logger.py
│   ├── makeNavMap.py
│   ├── sample.py
│   └── train.py
└── social-lstm
    ├── coordinates_helpers.py
    ├── losses.py
    ├── model.py
    ├── pooling_layers.py
    ├── position_estimates.py
    └── utils
        ├── __init__.py
        ├── dataset.py
        ├── evaluation.py
        ├── loader.py
        └── yparams.py

/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled
__pycache__/
datasets/processed
logs/
models
--------------------------------------------------------------------------------

/datasets/homography/eth.txt:
--------------------------------------------------------------------------------
3.215240936583170139e+01 -3.610659347222200832e+00 2.850658422262313820e+02
-5.627508844324188964e+00 3.878869378943068114e+01 3.675861142998107312e+02
-2.289419902647419486e-02 -5.060500348103997778e-03 1.875435156256752833e+00
--------------------------------------------------------------------------------

/datasets/homography/hotel.txt:
--------------------------------------------------------------------------------
8.499303514235288048e+01 -5.439458294827234752e+00 4.673104455193656008e+02
3.497403433330066314e+00 8.475412351033250502e+01 8.639175474678052069e+02
-1.761153264056391360e-02 -1.004884871363295263e-03 1.724390214498285223e+00
--------------------------------------------------------------------------------

/datasets/homography/univ.txt:
--------------------------------------------------------------------------------
4.751381365677470114e+01 0.000000000000000000e+00 4.760000000000000000e+02
0.000000000000000000e+00 4.190064691249489925e+01 1.170000000000000853e+02
0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00
--------------------------------------------------------------------------------

/datasets/homography/zara.txt:
--------------------------------------------------------------------------------
4.751381365677470114e+01 0.000000000000000000e+00 4.760000000000000000e+02
0.000000000000000000e+00 4.190064691249489925e+01 1.170000000000000853e+02
0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00
--------------------------------------------------------------------------------
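Each homography file above is a plain-text 3x3 matrix. A minimal sketch of applying one via homogeneous coordinates follows; whether a given matrix maps world metres to pixels or the reverse is an assumption to verify per scene (the univ/zara matrices look like a scale-plus-offset from metres to pixels, and the repository's coordinates_helpers.py is the authority on the convention actually used):

```python
import numpy as np

def apply_homography(H, points):
    """Map 2-D points through a 3x3 homography via homogeneous coordinates."""
    pts_h = np.hstack([points, np.ones((len(points), 1))])  # (N, 3) homogeneous
    mapped = pts_h @ H.T                                     # each row transformed by H
    return mapped[:, :2] / mapped[:, 2:3]                    # perspective divide

H = np.loadtxt("datasets/homography/zara.txt")    # 3x3 matrix, as listed above
px = apply_homography(H, np.array([[5.0, 4.0]]))  # one point through H
# np.linalg.inv(H) gives the opposite direction of the mapping.
```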
/datasets/navMap/eth.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/eth.npy
--------------------------------------------------------------------------------

/datasets/navMap/hotel.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/hotel.npy
--------------------------------------------------------------------------------

/datasets/navMap/univ1.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/univ1.npy
--------------------------------------------------------------------------------

/datasets/navMap/univ2.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/univ2.npy
--------------------------------------------------------------------------------

/datasets/navMap/univ3.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/univ3.npy
--------------------------------------------------------------------------------

/datasets/navMap/zara1.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/zara1.npy
--------------------------------------------------------------------------------

/datasets/navMap/zara2.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/zara2.npy
--------------------------------------------------------------------------------

/datasets/navMap/zara3.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Oghma/sns-lstm/f98d80ac1b55f48cbdfc13095d0a8d457b225082/datasets/navMap/zara3.npy
--------------------------------------------------------------------------------
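The navigation maps are binary NumPy arrays, hence the raw URLs above instead of inline content; they are presumably generated by scripts/makeNavMap.py. A minimal sketch of loading one; the array's shape and interpretation (e.g. a per-cell occupancy grid) are assumptions to check against that script:

```python
import numpy as np

# Load one of the pre-built per-scene navigation maps.
nav_map = np.load("datasets/navMap/eth.npy")

# Shape and dtype depend on how scripts/makeNavMap.py builds the grid,
# so inspect rather than assume:
print(nav_map.shape, nav_map.dtype, nav_map.min(), nav_map.max())
```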
/datasets/val/biwi_hotel_val.txt:
--------------------------------------------------------------------------------
14400.0 333.0 -0.31 -3.56
14400.0 334.0 0.94 -2.25
14400.0 335.0 1.32 2.34
14400.0 336.0 3.15 -5.23
14410.0 333.0 -0.53 -3.49
14410.0 334.0 1.1 -2.94
14410.0 335.0 1.23 1.72
14410.0 336.0 3.15 -4.58
[... 1,597 records in total: one whitespace-separated `frame_id pedestrian_id x y` tuple per line, frames 14400.0 through 18060.0, pedestrian IDs in the range 333-420 ...]
--------------------------------------------------------------------------------

/datasets/val/crowds_zara01_val.txt:
--------------------------------------------------------------------------------
7110.0 120.0 14.404653033 4.57176712331
7110.0 121.0 14.9851158053 3.62762895564
7110.0 122.0 7.02721954529 4.18060371158
7110.0 123.0 6.97776024452 5.36626559656
7110.0 124.0 5.59563586961 4.78345836566
7110.0 125.0 5.58742773034 3.995403707
[... continues in the same four-column format (frames 7110.0-7660.0, pedestrian IDs 120-132 in the surviving portion); the dump breaks off mid-record at line 260 ...]
128.0 2.07434412047 5.89728365092 261 | 7660.0 129.0 2.17073714068 6.49154655221 262 | 7660.0 130.0 1.35413251533 4.63381867124 263 | 7660.0 131.0 6.77466141374 4.82355321201 264 | 7660.0 132.0 1.5852232057 3.35531812417 265 | 7670.0 126.0 1.74728134011 7.17530687838 266 | 7670.0 127.0 1.77401040903 5.4097016801 267 | 7670.0 128.0 1.72013134097 6.06601612932 268 | 7670.0 129.0 1.8651418015 6.61374036966 269 | 7670.0 130.0 1.06242787339 4.84527125378 270 | 7670.0 131.0 6.3497323574 4.8280877482 271 | 7670.0 132.0 2.10296737538 3.37369492875 272 | 7670.0 133.0 15.1311785914 4.62093104205 273 | 7680.0 126.0 1.51724297529 7.26957749927 274 | 7680.0 127.0 1.53681623049 5.55313621973 275 | 7680.0 128.0 1.40001390923 6.25193211329 276 | 7680.0 129.0 1.48756739483 6.78223418826 277 | 7680.0 130.0 0.777458114957 5.05338259913 278 | 7680.0 131.0 5.92480330107 4.8326222844 279 | 7680.0 132.0 2.65859526479 3.40233410471 280 | 7680.0 133.0 14.6443727929 4.65076351702 281 | 7690.0 126.0 1.29457088931 7.47649554562 282 | 7690.0 127.0 1.32992902772 5.71494756394 283 | 7690.0 128.0 1.10262670933 6.44906510786 284 | 7690.0 129.0 1.10999298817 6.95072800685 285 | 7690.0 130.0 0.553312773206 5.23046817052 286 | 7690.0 131.0 5.49987424473 4.83715682059 287 | 7690.0 132.0 3.2142231542 3.43097328068 288 | 7690.0 133.0 14.1577774594 4.68059599198 289 | 7700.0 126.0 1.07274066376 7.69582390156 290 | 7700.0 127.0 1.12304182496 5.87675890814 291 | 7700.0 128.0 0.805239509427 6.64619810242 292 | 7700.0 129.0 0.732208116387 7.11898316565 293 | 7700.0 130.0 0.329377896564 5.40755374191 294 | 7700.0 131.0 5.0749451884 4.84169135679 295 | 7700.0 132.0 3.76985104361 3.45961245665 296 | 7700.0 133.0 13.6709716608 4.71042846694 297 | 7700.0 134.0 14.9324995279 3.51880008698 298 | 7710.0 126.0 0.850910438216 7.9151522575 299 | 7710.0 127.0 0.9161546222 6.03857025235 300 | 7710.0 128.0 0.507852309526 6.84333109698 301 | 7710.0 129.0 0.602351143748 7.31778677881 302 | 7710.0 130.0 0.105232554813 5.58463931329 303 | 7710.0 131.0 4.63633589994 4.85314702717 304 | 7710.0 132.0 4.32547893302 3.48825163261 305 | 7710.0 133.0 13.1812193507 4.69133568297 306 | 7710.0 134.0 14.3537204765 3.57082792331 307 | 7720.0 126.0 0.665280211526 8.13042339684 308 | 7720.0 127.0 0.709056954328 6.20038159656 309 | 7720.0 128.0 0.210465109626 7.04046409155 310 | 7720.0 129.0 0.499644170251 7.51969296937 311 | 7720.0 131.0 4.16594637993 4.88130888354 312 | 7720.0 132.0 4.84848473044 3.51712946838 313 | 7720.0 133.0 12.6880995989 4.62307898025 314 | 7720.0 134.0 13.774941425 3.62309441945 315 | 7730.0 126.0 0.624449980259 8.32922701 316 | 7730.0 127.0 0.502169751567 6.36219294076 317 | 7730.0 129.0 0.397147661863 7.72159915993 318 | 7730.0 131.0 3.69555685991 4.90947073991 319 | 7730.0 132.0 5.29509169307 3.54624596394 320 | 7730.0 133.0 12.194979847 4.55506093733 321 | 7730.0 134.0 13.1961623735 3.67536091559 322 | 7740.0 126.0 0.583409283882 8.52803062316 323 | 7740.0 127.0 0.295282548805 6.52400428497 324 | 7740.0 129.0 0.294651153476 7.92350535049 325 | 7740.0 131.0 3.2251673399 4.93739393647 326 | 7740.0 132.0 5.7419091208 3.57560111931 327 | 7740.0 133.0 11.7018600952 4.48680423461 328 | 7740.0 134.0 12.713355412 3.73144596852 329 | 7750.0 126.0 0.542579052615 8.72707289612 330 | 7750.0 131.0 2.75456735478 4.96555579284 331 | 7750.0 132.0 6.18851608343 3.60471761487 332 | 7750.0 133.0 11.2283135985 4.53310423576 333 | 7750.0 134.0 12.2305484506 3.78776968125 334 | 7760.0 131.0 2.28438829988 4.9949109482 335 | 7760.0 132.0 6.63533351116 
3.63407277024 336 | 7760.0 133.0 10.7547671019 4.5796428967 337 | 7760.0 134.0 11.7477414891 3.84385473418 338 | 7760.0 135.0 0.140801158339 2.85651914277 339 | 7770.0 131.0 1.81526157052 5.02975527896 340 | 7770.0 132.0 7.1061439614 3.61020679027 341 | 7770.0 133.0 10.2812206052 4.62594289785 342 | 7770.0 134.0 11.2352589471 3.89421195192 343 | 7770.0 135.0 0.48891044966 2.89327275193 344 | 7770.0 136.0 -0.0492488356524 3.81044236222 345 | 7780.0 131.0 1.34613484116 5.06459960972 346 | 7780.0 132.0 7.61315441048 3.50710575679 347 | 7780.0 133.0 9.84871480493 4.68107331158 348 | 7780.0 134.0 10.7034136151 3.94051195307 349 | 7780.0 135.0 0.837019740981 2.93002636108 350 | 7780.0 136.0 0.389570917917 3.8307284452 351 | 7790.0 131.0 0.87700811181 5.09944394048 352 | 7790.0 132.0 8.12016485957 3.40424338311 353 | 7790.0 133.0 9.47787528177 4.7493300143 354 | 7790.0 134.0 10.171357818 3.98705061401 355 | 7790.0 135.0 1.1851290323 2.96677997024 356 | 7790.0 136.0 0.828601136596 3.85125318798 357 | 7800.0 131.0 0.407670917345 5.13404961144 358 | 7800.0 132.0 8.62717530866 3.30114234964 359 | 7800.0 133.0 9.10703575861 4.81758671702 360 | 7800.0 134.0 9.63930202085 4.03335061516 361 | 7800.0 135.0 1.53323832362 3.00353357939 362 | 7800.0 136.0 1.26763135527 3.87177793075 363 | 7810.0 131.0 -0.0614558120107 5.16889394219 364 | 7810.0 132.0 9.13418575775 3.19804131616 365 | 7810.0 133.0 8.73640670056 4.88560475993 366 | 7810.0 134.0 9.14491947834 4.09993669928 367 | 7810.0 135.0 1.88113714983 3.04004852875 368 | 7810.0 136.0 1.70350459731 3.89110937453 369 | 7820.0 132.0 9.65677062495 3.22262327553 370 | 7820.0 133.0 8.36914508426 4.95911197825 371 | 7820.0 134.0 8.65053693583 4.1665227834 372 | 7820.0 135.0 2.27302318396 3.06152791072 373 | 7820.0 136.0 2.11117551465 3.90113308612 374 | 7830.0 132.0 10.1793554921 3.2474438947 375 | 7830.0 133.0 8.01682649075 5.05338259913 376 | 7830.0 134.0 8.15615439332 4.23310886752 377 | 7830.0 135.0 2.69395340321 3.07250626151 378 | 7830.0 136.0 2.51905689711 3.9109181379 379 | 7840.0 132.0 10.7004671036 3.27679905007 380 | 7840.0 133.0 7.66429743212 5.14789187982 381 | 7840.0 134.0 7.66198231592 4.29969495164 382 | 7840.0 135.0 3.11488362246 3.0837232721 383 | 7840.0 136.0 2.92672781445 3.92094184949 384 | 7850.0 132.0 11.2097926689 3.34839698998 385 | 7850.0 133.0 7.31197883861 5.24216250071 386 | 7850.0 134.0 7.2155858184 4.43119650128 387 | 7850.0 135.0 3.53581384171 3.09470162288 388 | 7850.0 136.0 3.33460919691 3.93096556108 389 | 7860.0 132.0 11.7191182342 3.41999492989 390 | 7860.0 133.0 6.95944977999 5.33643312159 391 | 7860.0 134.0 6.79002536674 4.59014392789 392 | 7860.0 135.0 3.95948010739 3.10854389127 393 | 7860.0 136.0 3.74627895133 3.94242123147 394 | 7870.0 132.0 12.2284437995 3.49159286981 395 | 7870.0 133.0 6.52778584015 5.48702745521 396 | 7870.0 134.0 6.36446491508 4.7493300143 397 | 7870.0 135.0 4.38924986124 3.12882997424 398 | 7870.0 136.0 4.16720917059 3.95769545865 399 | 7880.0 132.0 12.7377693648 3.56319080972 400 | 7880.0 133.0 6.08707190059 5.64358828382 401 | 7880.0 134.0 5.93890446341 4.90851610071 402 | 7880.0 135.0 4.8190196151 3.14887739742 403 | 7880.0 136.0 4.58813938984 3.97296968583 404 | 7890.0 132.0 13.2483577207 3.63001555364 405 | 7890.0 133.0 5.64635796103 5.80014911244 406 | 7890.0 134.0 5.51187075598 5.05171198053 407 | 7890.0 135.0 5.24878936895 3.16916348039 408 | 7890.0 136.0 5.00906960909 3.98824391301 409 | 7900.0 132.0 13.771153053 3.65292689441 410 | 7900.0 133.0 5.22753239288 5.95551664205 411 | 7900.0 134.0 
5.07915449059 5.13118569384 412 | 7900.0 135.0 5.67876958792 3.18921090357 413 | 7900.0 136.0 5.42999982834 4.00327948039 414 | 7910.0 132.0 14.2939483853 3.67607689499 415 | 7910.0 133.0 4.81796728955 6.11040685206 416 | 7910.0 134.0 4.64664869031 5.21065940714 417 | 7910.0 135.0 6.10853934177 3.20949698655 418 | 7910.0 136.0 5.84882539649 4.02929339856 419 | 7920.0 132.0 14.8167437176 3.69922689556 420 | 7920.0 133.0 4.40840218622 6.26505840228 421 | 7920.0 134.0 4.21393242492 5.29037178025 422 | 7920.0 135.0 6.53830909563 3.22954440972 423 | 7920.0 136.0 6.26596724377 4.06222845092 424 | 7930.0 133.0 3.99883708289 6.41994861229 425 | 7930.0 134.0 3.79489639166 5.43237436108 426 | 7930.0 135.0 6.98049629095 3.2457732761 427 | 7930.0 136.0 6.68331955616 4.09516350328 428 | 7940.0 133.0 3.61200221139 6.51660583118 429 | 7940.0 134.0 3.384699893 5.61590374706 430 | 7940.0 135.0 7.47151139171 3.2457732761 431 | 7940.0 136.0 7.10067186855 4.12809855564 432 | 7950.0 133.0 3.2251673399 6.61350170986 433 | 7950.0 134.0 2.97450339434 5.79943313304 434 | 7950.0 135.0 7.96252649246 3.2457732761 435 | 7950.0 136.0 7.51802418093 4.1607949482 436 | 7960.0 133.0 2.83833246841 6.71015892874 437 | 7960.0 134.0 2.56451736079 5.98296251902 438 | 7960.0 135.0 8.45375205833 3.2457732761 439 | 7960.0 136.0 7.93516602821 4.19373000056 440 | 7970.0 133.0 2.46559875926 6.8306821276 441 | 7970.0 134.0 2.16400225717 6.14620582202 442 | 7970.0 135.0 8.94476715908 3.2457732761 443 | 7970.0 136.0 8.42533926853 4.22929031072 444 | 7980.0 133.0 2.12506621189 7.00609708039 445 | 7980.0 134.0 1.76790692085 6.30061871244 446 | 7980.0 135.0 9.43578225984 3.2457732761 447 | 7980.0 136.0 8.93361250827 4.26532794048 448 | 7990.0 133.0 1.78453366451 7.18151203318 449 | 7990.0 134.0 1.37181158454 6.45503160285 450 | 7990.0 135.0 9.9202729422 3.23407894592 451 | 7990.0 136.0 9.44188574802 4.30136557024 452 | 8000.0 133.0 1.44421158225 7.35692698597 453 | 8000.0 134.0 0.997815084735 6.67507593819 454 | 8000.0 135.0 10.3782450207 3.17536863519 455 | 8000.0 136.0 9.95015898777 4.33740319999 456 | 8010.0 133.0 1.26279065775 7.54761616594 457 | 8010.0 134.0 0.675382536788 7.04786254534 458 | 8010.0 135.0 10.8360066342 3.11689698426 459 | 8010.0 136.0 10.4584322275 4.37344082975 460 | 8010.0 137.0 8.84837413888 -0.159186086409 461 | 8010.0 138.0 8.11700788293 -0.276845367668 462 | 8020.0 133.0 1.09904880246 7.73997596451 463 | 8020.0 134.0 0.353160453952 7.42088781229 464 | 8020.0 135.0 11.2937682476 3.05818667353 465 | 8020.0 136.0 10.9488159329 4.36222381916 466 | 8020.0 137.0 9.08809389874 0.0264912377682 467 | 8020.0 138.0 8.36241020075 0.0334123719599 468 | 8030.0 133.0 0.935306947176 7.93209710328 469 | 8030.0 134.0 0.030938371115 7.79367441944 470 | 8030.0 135.0 11.7517403262 2.9994763628 471 | 8030.0 136.0 11.421310104 4.30375216823 472 | 8030.0 137.0 9.32760319349 0.212168561945 473 | 8030.0 138.0 8.60802298368 0.343670111587 474 | 8040.0 133.0 0.789244161096 8.12851411844 475 | 8040.0 135.0 12.2095019396 2.94100471187 476 | 8040.0 136.0 11.8938042752 4.2452805173 477 | 8040.0 137.0 9.56732295336 0.397845886122 478 | 8040.0 138.0 8.85363576662 0.653927851215 479 | 8050.0 133.0 0.684011606283 8.33471618539 480 | 8050.0 135.0 12.6832589014 2.92071862889 481 | 8050.0 136.0 12.3662984463 4.18680886637 482 | 8050.0 137.0 9.86029038596 0.791873215449 483 | 8050.0 138.0 9.14597180389 1.15439745121 484 | 8060.0 133.0 0.57877905147 8.54067959255 485 | 8060.0 135.0 13.1730112115 2.93909543347 486 | 8060.0 136.0 12.8387926174 
4.12833721544 487 | 8060.0 137.0 10.1759880504 1.27515930987 488 | 8060.0 138.0 9.44988342219 1.70236035136 489 | 8070.0 133.0 0.473546496657 8.7468816595 490 | 8070.0 135.0 13.6627635216 2.95747223805 491 | 8070.0 136.0 13.298869347 4.11902948325 492 | 8070.0 137.0 10.4916857148 1.75844540429 493 | 8070.0 138.0 9.7607403891 2.2455500555 494 | 8080.0 133.0 0.368313941845 8.95308372645 495 | 8080.0 135.0 14.1525158317 2.97584904263 496 | 8080.0 136.0 13.7505274723 4.14217948383 497 | 8080.0 137.0 10.8073833793 2.24173149871 498 | 8080.0 138.0 10.1338950285 2.7445876967 499 | 8090.0 135.0 14.6422681418 2.9942258472 500 | 8090.0 136.0 14.2021855975 4.1655681442 501 | 8090.0 137.0 11.139076392 2.55365985693 502 | 8090.0 138.0 10.5070496678 3.24362533791 503 | 8100.0 135.0 15.1320204519 3.01260265178 504 | 8100.0 136.0 14.6540541879 4.18871814477 505 | 8100.0 137.0 11.4777147534 2.79231965665 506 | 8100.0 138.0 10.8818880281 3.47082946723 507 | 8110.0 136.0 15.1057123132 4.21210680514 508 | 8110.0 137.0 11.8232984634 3.02358100257 509 | 8110.0 138.0 11.2571473185 3.6682011216 510 | 8120.0 137.0 12.1794054289 3.24386399771 511 | 8120.0 138.0 11.7292205594 3.86867535336 512 | 8130.0 137.0 12.5357228595 3.46414699284 513 | 8130.0 138.0 12.265906589 4.07153618311 514 | 8140.0 137.0 12.9534961021 3.50829905579 515 | 8140.0 138.0 12.7480821551 4.19468463976 516 | 8150.0 137.0 13.3956832974 3.50829905579 517 | 8150.0 138.0 13.1936367922 4.26508928068 518 | 8160.0 137.0 13.9182681646 3.50829905579 519 | 8160.0 138.0 13.6657100331 4.31496917882 520 | 8170.0 137.0 14.4408530318 3.50829905579 521 | 8170.0 138.0 14.1409402506 4.36270113876 522 | 8180.0 137.0 14.963437899 3.50829905579 523 | 8180.0 138.0 14.6159600031 4.4104330987 524 | 8190.0 138.0 15.0909797555 4.45816505865 525 | 8360.0 139.0 0.244560457385 3.58825008869 526 | 8370.0 139.0 0.796399974823 3.67201967839 527 | 8380.0 139.0 1.34802902715 3.75602792789 528 | 8390.0 139.0 1.89986854459 3.83979751759 529 | 8400.0 139.0 2.44749875984 3.89707586952 530 | 8400.0 140.0 15.2246251001 6.5788960389 531 | 8410.0 139.0 2.98860455668 3.9140207153 532 | 8410.0 140.0 14.6367960489 6.44715582946 533 | 8420.0 139.0 3.52971035353 3.93096556108 534 | 8420.0 140.0 14.0489669977 6.31541562002 535 | 8430.0 139.0 4.07102661549 3.94814906666 536 | 8430.0 140.0 13.4611379465 6.18367541058 537 | 8440.0 139.0 4.61086962168 3.96294597424 538 | 8440.0 140.0 12.8499472682 6.06530014992 539 | 8450.0 139.0 5.14503006991 3.96915112903 540 | 8450.0 140.0 12.2330740319 5.95002746666 541 | 8460.0 139.0 5.67940098324 3.97511762403 542 | 8460.0 140.0 11.6162007956 5.83475478339 543 | 8470.0 139.0 6.21356143147 3.98132277882 544 | 8470.0 140.0 11.0090089543 5.72019807953 545 | 8480.0 139.0 6.67174397513 3.98991453161 546 | 8480.0 140.0 10.4028694386 5.60564137567 547 | 8490.0 139.0 6.95229396626 4.00447277939 548 | 8490.0 140.0 9.821985736 5.60778931387 549 | 8500.0 139.0 7.2330544225 4.01879236737 550 | 8500.0 140.0 9.27877528806 5.78487488525 551 | 8510.0 139.0 7.51360441363 4.03335061516 552 | 8510.0 140.0 8.73556484012 5.96196045664 553 | 8520.0 139.0 7.78994510257 4.0779799977 554 | 8520.0 140.0 8.20014160123 6.10133777967 555 | 8530.0 139.0 8.12395323155 4.12165474105 556 | 8530.0 140.0 7.68260789666 6.15288829641 557 | 8540.0 139.0 8.5448834508 4.1629428864 558 | 8540.0 140.0 7.16486372698 6.20443881315 559 | 8550.0 139.0 8.97507413487 4.20446969155 560 | 8550.0 140.0 6.64733002241 6.25622798969 561 | 8550.0 148.0 0.461549985409 8.76525846408 562 | 8560.0 139.0 
9.48966132791 4.2440872183 563 | 8560.0 140.0 6.12306143434 6.3292578884 564 | 8560.0 148.0 0.573938353949 8.61394815106 565 | 8570.0 139.0 10.0040380558 4.28394340486 566 | 8570.0 140.0 5.59416261385 6.4163687153 567 | 8570.0 148.0 0.686116257379 8.46287649784 568 | 8580.0 139.0 10.5186252489 4.32379959141 569 | 8580.0 140.0 5.06547425847 6.50371820199 570 | 8580.0 141.0 7.18317419152 -0.37469588555 571 | 8580.0 148.0 0.79829416081 8.31180484462 572 | 8590.0 139.0 10.9660740719 4.34862021058 573 | 8590.0 140.0 4.53678590309 6.59106768869 574 | 8590.0 141.0 6.94113931545 0.305484543633 575 | 8590.0 148.0 0.846280205804 8.17338216079 576 | 8600.0 139.0 11.3468054552 4.35888258197 577 | 8600.0 140.0 4.04850684876 6.63474243203 578 | 8600.0 141.0 6.69910443938 0.985664972817 579 | 8600.0 142.0 7.72617417435 -0.289732996852 580 | 8600.0 148.0 0.878060437358 8.03830071415 581 | 8610.0 139.0 11.7273263734 4.36890629355 582 | 8610.0 140.0 3.60063709547 6.63474243203 583 | 8610.0 141.0 6.3672009615 1.86536499456 584 | 8610.0 142.0 7.61020789895 -0.0346056709584 585 | 8610.0 148.0 0.909840668911 7.90321926751 586 | 8620.0 139.0 12.1080577568 4.37916866494 587 | 8620.0 140.0 3.15255687708 6.63474243203 588 | 8620.0 141.0 6.01256725178 2.79494491444 589 | 8620.0 142.0 7.49403115844 0.220521654935 590 | 8620.0 143.0 15.186109985 4.79586867524 591 | 8620.0 148.0 0.941620900465 7.76813782087 592 | 8630.0 139.0 12.488578675 4.38919237653 593 | 8630.0 140.0 2.7046871238 6.63474243203 594 | 8630.0 141.0 5.73159633043 3.1662995628 595 | 8630.0 142.0 7.37785441792 0.475648980829 596 | 8630.0 143.0 14.7268751158 4.95863465865 597 | 8630.0 148.0 0.973401132018 7.63305637424 598 | 8640.0 139.0 12.7952263397 4.35124546838 599 | 8640.0 140.0 2.24503132438 6.64524346322 600 | 8640.0 141.0 5.45883354836 3.47560266323 601 | 8640.0 142.0 7.05310675377 0.856311361372 602 | 8640.0 143.0 14.2676402466 5.12140064205 603 | 8640.0 148.0 1.05001043192 7.52780740256 604 | 8650.0 139.0 13.0831426097 4.30088825064 605 | 8650.0 140.0 1.77716738568 6.6631429482 606 | 8650.0 141.0 5.23510913683 3.76676761888 607 | 8650.0 142.0 6.70499746245 1.2510546701 608 | 8650.0 143.0 13.7985135173 5.22330837653 609 | 8650.0 144.0 14.9263960398 4.59706506208 610 | 8650.0 148.0 1.17186973039 7.45239090585 611 | 8660.0 139.0 13.3874751582 4.30566144663 612 | 8660.0 140.0 1.30951391209 6.68080377338 613 | 8660.0 141.0 5.20753820746 3.98466401602 614 | 8660.0 142.0 6.35141607828 1.64150210243 615 | 8660.0 143.0 13.3194949278 5.26459652188 616 | 8660.0 144.0 14.5271437268 4.67104959999 617 | 8660.0 145.0 14.8632565069 2.38063150214 618 | 8660.0 146.0 15.0272088273 3.15030935622 619 | 8660.0 147.0 15.0602518495 5.43237436108 620 | 8660.0 148.0 1.29372902887 7.37721306894 621 | 8670.0 139.0 13.6937018927 4.31640113762 622 | 8670.0 140.0 0.98371392239 6.87459553075 623 | 8670.0 141.0 5.18017774321 4.20256041315 624 | 8670.0 142.0 5.98499632242 2.02144850357 625 | 8670.0 143.0 12.8404763383 5.30564600743 626 | 8670.0 144.0 14.1281018789 4.7452727977 627 | 8670.0 145.0 14.2470146659 2.44029645207 628 | 8670.0 146.0 14.4435890783 3.19374543977 629 | 8670.0 147.0 14.5191460526 5.52330374477 630 | 8670.0 148.0 1.41558832734 7.30179657223 631 | 8680.0 139.0 14.0853774617 4.26007742489 632 | 8680.0 140.0 0.673488350802 7.08819605149 633 | 8680.0 141.0 5.01264751595 4.28967124005 634 | 8680.0 142.0 5.61857656656 2.40163356452 635 | 8680.0 143.0 12.3711391438 5.24192384091 636 | 8680.0 144.0 13.7071716597 4.79205011845 637 | 8680.0 145.0 13.6305623598 
2.499961402 638 | 8680.0 146.0 13.8601797944 3.23718152331 639 | 8680.0 147.0 13.9778297907 5.61423312846 640 | 8680.0 148.0 1.46736274431 7.18127337338 641 | 8690.0 139.0 14.5138844249 4.17487587639 642 | 8690.0 140.0 0.431453474732 7.50585070099 643 | 8690.0 141.0 4.82975333569 4.36222381916 644 | 8690.0 142.0 5.18059867343 2.59781191988 645 | 8690.0 143.0 11.9081159026 5.10827435307 646 | 8690.0 144.0 13.2353088839 4.77558259227 647 | 8690.0 145.0 13.0206344721 2.52358872217 648 | 8690.0 146.0 13.2334146979 3.28944801945 649 | 8690.0 147.0 13.5268030607 5.67318209899 650 | 8690.0 148.0 1.48925111571 7.04118007095 651 | 8700.0 139.0 14.942180923 4.08967432789 652 | 8700.0 140.0 0.189418598663 7.92350535049 653 | 8700.0 141.0 4.5369963682 4.36031454076 654 | 8700.0 142.0 4.69505566553 2.67108047839 655 | 8700.0 143.0 11.4560368471 4.98417125722 656 | 8700.0 144.0 12.7636565732 4.75911506609 657 | 8700.0 145.0 12.4168100726 2.51093975279 658 | 8700.0 146.0 12.596126346 3.34386245379 659 | 8700.0 147.0 13.0985065627 5.72449395593 660 | 8700.0 148.0 1.510929022 6.90132542831 661 | 8710.0 140.0 0.15174534404 8.13662855163 662 | 8710.0 141.0 4.17078707745 4.30828670443 663 | 8710.0 142.0 4.20930219251 2.7445876967 664 | 8710.0 143.0 11.1014031374 4.94646300886 665 | 8710.0 144.0 12.3267310057 4.73286248812 666 | 8710.0 145.0 11.8131961382 2.4982907834 667 | 8710.0 146.0 11.9586275289 3.39851554792 668 | 8710.0 147.0 12.6699995995 5.77556715307 669 | 8710.0 148.0 1.50503599893 6.86648109756 670 | 8720.0 140.0 0.114282554527 8.34975175277 671 | 8720.0 141.0 3.78184755486 4.26222536308 672 | 8720.0 142.0 3.8001580194 2.75795264549 673 | 8720.0 143.0 10.7448752417 4.90875476051 674 | 8720.0 144.0 11.9420007853 4.69181300257 675 | 8720.0 145.0 11.2095822038 2.48588047382 676 | 8720.0 146.0 11.3244961536 3.44720214706 677 | 8720.0 147.0 12.2069763583 5.7438253997 678 | 8720.0 148.0 1.49198716214 6.85788934477 679 | 8730.0 140.0 0.0766092999037 8.56287495392 680 | 8730.0 141.0 3.33923942932 4.22929031072 681 | 8730.0 142.0 3.37438710263 2.76368048068 682 | 8730.0 143.0 10.3792973463 4.87128517195 683 | 8730.0 144.0 11.55748103 4.65052485722 684 | 8730.0 145.0 10.6032322229 2.51070109299 685 | 8730.0 146.0 10.6983624525 3.48204647782 686 | 8730.0 147.0 11.7439531171 5.71184498654 687 | 8730.0 148.0 1.47893832534 6.84905893218 688 | 8740.0 141.0 2.89642083867 4.19635525836 689 | 8740.0 142.0 2.88337200187 2.73981450071 690 | 8740.0 143.0 10.0162450322 4.89157125493 691 | 8740.0 144.0 11.14475895 4.65601403261 692 | 8740.0 145.0 9.99582991655 2.55151191874 693 | 8740.0 146.0 10.0720182862 3.51712946838 694 | 8740.0 147.0 11.2809298759 5.68010323318 695 | 8740.0 148.0 1.46588948854 6.84046717939 696 | 8750.0 141.0 2.44791969005 4.14361144262 697 | 8750.0 142.0 2.39235690111 2.71594852074 698 | 8750.0 143.0 9.65950667137 5.04717744434 699 | 8750.0 144.0 10.7040450104 4.70828052875 700 | 8750.0 145.0 9.38863807529 2.59256140429 701 | 8750.0 146.0 9.44567411999 3.55197379914 702 | 8750.0 147.0 10.8665240751 5.64287230442 703 | 8750.0 148.0 1.45284065175 6.8316367668 704 | 8760.0 141.0 1.99078947195 4.06127381172 705 | 8760.0 142.0 1.91838947424 2.70258357196 706 | 8760.0 143.0 9.30255784545 5.20278363375 707 | 8760.0 144.0 10.263541536 4.76054702488 708 | 8760.0 145.0 8.78123576891 2.63337223004 709 | 8760.0 146.0 8.82985320923 3.58992070729 710 | 8760.0 147.0 10.4523287393 5.60540271587 711 | 8760.0 148.0 1.43979181495 6.82304501401 712 | 8770.0 141.0 1.53344878873 3.97893618082 713 | 8770.0 142.0 
1.51282320799 2.73170006752 714 | 8770.0 143.0 8.96939157691 5.27247229527 715 | 8770.0 144.0 9.83419271236 4.80732434563 716 | 8770.0 145.0 8.17383346253 2.67442171559 717 | 8770.0 146.0 8.25507299484 3.64051658483 718 | 8770.0 147.0 10.0379229385 5.56817178711 719 | 8770.0 148.0 1.42695344326 6.81445326122 720 | 8780.0 141.0 1.08221159369 3.90566762231 721 | 8780.0 142.0 1.10725694174 2.76081656309 722 | 8780.0 143.0 8.63601484326 5.34192229699 723 | 8780.0 144.0 9.43115202743 4.84145269699 724 | 8780.0 145.0 7.56643115615 2.71523254134 725 | 8780.0 146.0 7.68029278045 3.69087380257 726 | 8780.0 147.0 9.64309039282 5.56650116852 727 | 8780.0 148.0 1.41390460646 6.80562284863 728 | 8790.0 141.0 0.637077886836 3.84146813619 729 | 8790.0 142.0 0.701690675492 2.78993305865 730 | 8790.0 143.0 8.28706369151 5.36984549355 731 | 8790.0 144.0 9.02832180761 4.87558104835 732 | 8790.0 145.0 6.95923931488 2.75628202689 733 | 8790.0 146.0 7.10572303118 3.74146968011 734 | 8790.0 147.0 9.25309854469 5.57342230271 735 | 8790.0 148.0 1.40085576967 6.79703109584 736 | 8800.0 141.0 0.191944179978 3.77726865006 737 | 8800.0 142.0 0.296124409243 2.81904955422 738 | 8800.0 143.0 7.92758928426 5.36984549355 739 | 8800.0 144.0 8.62549158778 4.90947073991 740 | 8800.0 145.0 6.38235444939 2.79399027525 741 | 8800.0 146.0 6.53094281679 3.79182689785 742 | 8800.0 147.0 8.86310669655 5.5803434369 743 | 8800.0 148.0 1.38780693287 6.78843934305 744 | 8810.0 143.0 7.57148231878 5.37581198855 745 | 8810.0 144.0 8.22076718197 4.93071146208 746 | 8810.0 145.0 5.81872888582 2.8302665648 747 | 8810.0 146.0 5.9561626024 3.84242277539 748 | 8810.0 147.0 8.46511717425 5.60659601487 749 | 8810.0 148.0 1.37475809607 6.77960893046 750 | 8820.0 143.0 7.22947651564 5.40564446351 751 | 8820.0 144.0 7.81562184594 4.94861094706 752 | 8820.0 145.0 5.25510332224 2.86678151416 753 | 8820.0 146.0 5.40369168964 3.86032226037 754 | 8820.0 147.0 8.05492067559 5.6617264286 755 | 8820.0 148.0 1.36191972439 6.77101717767 756 | 8830.0 143.0 6.8874707125 5.43547693847 757 | 8830.0 144.0 7.41047650991 4.96651043204 758 | 8830.0 145.0 4.69168822377 2.90305780371 759 | 8830.0 146.0 4.85122077687 3.87822174534 760 | 8830.0 147.0 7.64472417693 5.71685684234 761 | 8830.0 148.0 1.34887088759 6.76242542488 762 | 8840.0 143.0 6.54546490935 5.46530941344 763 | 8840.0 144.0 7.00533117389 4.98440991702 764 | 8840.0 145.0 4.1280626602 2.93957275307 765 | 8840.0 146.0 4.2987498641 3.89612123032 766 | 8840.0 147.0 7.23452767827 5.77198725607 767 | 8840.0 148.0 1.33582205079 6.75359501229 768 | 8850.0 143.0 6.18199166503 5.55218158053 769 | 8850.0 144.0 6.58755793128 5.04479084634 770 | 8850.0 145.0 3.56443709662 2.97584904263 771 | 8850.0 146.0 3.74627895133 3.9140207153 772 | 8850.0 147.0 6.84621955101 5.8302202472 773 | 8850.0 148.0 1.322773214 6.7450032595 774 | 8860.0 143.0 5.8183079556 5.63881508783 775 | 8860.0 144.0 6.16662771203 5.11567280686 776 | 8860.0 145.0 3.01554409071 2.98635007381 777 | 8860.0 146.0 3.18686268995 3.96437793304 778 | 8860.0 147.0 6.47937886493 5.89155581573 779 | 8860.0 148.0 1.3097243772 6.73617284691 780 | 8870.0 143.0 5.45483471127 5.72568725493 781 | 8870.0 144.0 5.74569749278 5.18655476737 782 | 8870.0 145.0 2.47043945678 2.99040729041 783 | 8870.0 146.0 2.62744642857 4.01449649098 784 | 8870.0 147.0 6.11253817885 5.95289138425 785 | 8870.0 148.0 1.2966755404 6.72758109412 786 | 8880.0 143.0 5.07957542081 5.84859705178 787 | 8880.0 144.0 5.32371494798 5.2469356967 788 | 8880.0 145.0 1.92533482285 2.9942258472 789 | 8880.0 
146.0 2.06803016718 4.06485370872 790 | 8880.0 147.0 5.74569749278 6.01422695278 791 | 8880.0 148.0 1.28383716872 6.71898934133 792 | 8890.0 143.0 4.69947543283 5.98725839541 793 | 8890.0 144.0 4.89205100814 5.21018208754 794 | 8890.0 145.0 1.38001972381 2.998044404 795 | 8890.0 146.0 1.51134995222 4.09969803948 796 | 8890.0 147.0 5.27341378678 5.96768829183 797 | 8890.0 148.0 1.27078833192 6.71015892874 798 | 8900.0 143.0 4.31937544484 6.12591973904 799 | 8900.0 144.0 4.46017660318 5.17342847839 800 | 8900.0 145.0 0.834915089884 3.0021016206 801 | 8900.0 146.0 0.965824388072 4.07320680171 802 | 8900.0 147.0 4.80113008078 5.92114963089 803 | 8900.0 148.0 1.25773949512 6.70156717595 804 | 8910.0 143.0 3.95379754943 6.19107386437 805 | 8910.0 144.0 4.02851266334 5.13667486923 806 | 8910.0 145.0 0.289810455954 3.00592017739 807 | 8910.0 146.0 0.420088358813 4.04671556394 808 | 8910.0 147.0 4.32884637478 5.87461096995 809 | 8910.0 148.0 1.24469065833 6.69297542316 810 | 8920.0 143.0 3.59832197927 6.20754139055 811 | 8920.0 144.0 3.60905569986 5.10039857968 812 | 8920.0 146.0 -0.125437205337 4.01998566637 813 | 8920.0 147.0 3.85656266878 5.828072309 814 | 8920.0 148.0 1.23164182153 6.68414501057 815 | 8930.0 143.0 3.242635944 6.22400891673 816 | 8930.0 144.0 3.2382161767 5.06650888812 817 | 8930.0 147.0 3.44468244924 5.7438253997 818 | 8930.0 148.0 1.21880344984 6.67555325778 819 | 8940.0 143.0 2.78508479567 6.165298606 820 | 8940.0 144.0 2.86737665354 5.03238053676 821 | 8940.0 147.0 3.0393266481 5.65528261401 822 | 8940.0 148.0 1.20575461304 6.666961505 823 | 8950.0 143.0 2.28396736966 6.07436922231 824 | 8950.0 144.0 2.49653713038 4.9982521854 825 | 8950.0 147.0 2.62765689367 5.56817178711 826 | 8950.0 148.0 1.19270577625 6.65813109241 827 | 8960.0 143.0 1.78579645517 5.97938262202 828 | 8960.0 144.0 2.12569760722 4.96412383404 829 | 8960.0 147.0 2.19052086098 5.48559549641 830 | 8960.0 148.0 1.17965693945 6.64953933962 831 | 8970.0 143.0 1.28930926157 5.88153210414 832 | 8970.0 144.0 1.67467087729 4.78226506666 833 | 8970.0 147.0 1.75338482829 5.40278054591 834 | 8970.0 148.0 1.16660810265 6.64070892703 835 | 8980.0 143.0 0.792822067959 5.78344292646 836 | 8980.0 144.0 1.22364414736 4.60040629928 837 | 8980.0 147.0 1.31246042362 5.27772281086 838 | 8980.0 148.0 1.07231973354 6.5767481007 839 | 8990.0 143.0 0.296334874353 5.68559240857 840 | 8990.0 144.0 0.772617417435 4.41854753189 841 | 8990.0 147.0 0.869010437644 5.12426455965 842 | 8990.0 148.0 0.787770905328 6.38319500314 843 | 9000.0 144.0 0.321801152617 4.23668876451 844 | 9000.0 147.0 0.425349986553 4.97104496823 845 | 9000.0 148.0 0.503432542224 6.18964190557 846 | 9010.0 148.0 0.21909417912 5.996088808 847 | -------------------------------------------------------------------------------- /datasets/val/uni_examples_val.txt: -------------------------------------------------------------------------------- 1 | 5940.0 95.0 14.6694181409 7.59129090929 2 | 5940.0 96.0 14.6927797681 6.77030119827 3 | 5950.0 95.0 15.195580915 7.38007698654 4 | 5950.0 96.0 15.268612308 6.60085274047 5 | 5970.0 97.0 14.796328602 8.11896772645 6 | 5970.0 98.0 14.748342557 8.76859970127 7 | 5980.0 97.0 14.2503821076 8.22326205893 8 | 5980.0 98.0 14.2747960604 8.83566310499 9 | 5990.0 97.0 13.7046460784 8.3277950512 10 | 5990.0 98.0 13.8012495637 8.90272650871 11 | 6000.0 97.0 13.1595414444 8.42612288868 12 | 6000.0 98.0 13.3041309748 8.98768939741 13 | 6010.0 97.0 12.6163309965 8.51084711758 14 | 6010.0 98.0 12.751660062 9.11465641086 15 | 6020.0 97.0 
12.0733310137 8.59557134648 16 | 6020.0 98.0 12.1991891492 9.2413847645 17 | 6030.0 97.0 11.5204391707 8.69771774076 18 | 6030.0 98.0 11.6502961433 9.36859043775 19 | 6040.0 97.0 10.9284008173 8.86931413675 20 | 6040.0 98.0 11.1336042992 9.4986600286 21 | 6040.0 99.0 14.9234495282 8.73160743232 22 | 6050.0 97.0 10.336572929 9.04067187295 23 | 6050.0 98.0 10.6169124551 9.62872961944 24 | 6050.0 99.0 14.2996309433 8.83375382659 25 | 6060.0 97.0 9.74453457566 9.21226826894 26 | 6060.0 98.0 10.100431076 9.75903787008 27 | 6060.0 99.0 13.6758123584 8.93613888067 28 | 6070.0 97.0 9.19290552333 9.36047600456 29 | 6070.0 98.0 9.54817062838 9.87192395535 30 | 6070.0 99.0 13.0551507501 9.02229506837 31 | 6070.0 100.0 14.6401634907 9.11465641086 32 | 6080.0 97.0 8.64148693611 9.50844508038 33 | 6080.0 98.0 8.98686018101 9.98027550442 34 | 6080.0 99.0 12.4393298393 9.08410795649 35 | 6080.0 100.0 13.9081658394 9.06286723432 36 | 6090.0 97.0 8.08964741868 9.65164096021 37 | 6090.0 98.0 8.42554973364 10.0888657133 38 | 6090.0 99.0 11.8235089285 9.14592084462 39 | 6090.0 100.0 13.1601728398 8.9986677482 40 | 6100.0 97.0 7.53612418036 9.74877549869 41 | 6100.0 98.0 7.86844858846 10.1907734478 42 | 6100.0 99.0 11.1923240648 9.23541826951 43 | 6100.0 100.0 12.3481984468 8.88506568353 44 | 6110.0 97.0 6.98281140716 9.84614869698 45 | 6110.0 98.0 7.31513581525 10.2862373676 46 | 6110.0 99.0 10.5544043175 9.33661002459 47 | 6110.0 100.0 11.5358031237 8.77862341286 48 | 6110.0 101.0 15.1280216148 8.40249556851 49 | 6120.0 97.0 6.42928816884 9.95044302945 50 | 6120.0 98.0 6.76203350716 10.3817012875 51 | 6120.0 99.0 9.91669503534 9.43780177967 52 | 6120.0 100.0 10.7173043124 8.73900588611 53 | 6120.0 101.0 14.5191460526 8.51323371558 54 | 6130.0 97.0 5.87597539564 10.0652383931 55 | 6130.0 98.0 6.20872073395 10.4771652074 56 | 6130.0 99.0 9.28761482266 9.50558116279 57 | 6130.0 100.0 9.88028457137 8.72086774133 58 | 6130.0 101.0 13.9102704905 8.62421052245 59 | 6140.0 97.0 5.32245215732 10.1802724166 60 | 6140.0 98.0 5.66277423958 10.5890966535 61 | 6140.0 99.0 8.67116251657 9.52252600857 62 | 6140.0 100.0 9.00032994803 8.75332547409 63 | 6140.0 101.0 13.3013949283 8.73494866951 64 | 6150.0 97.0 4.79565798793 10.2721564395 65 | 6150.0 98.0 5.11682774522 10.7010280996 66 | 6150.0 99.0 8.05471021048 9.53947085435 67 | 6150.0 100.0 8.12142765023 8.78649918625 68 | 6150.0 101.0 12.6474798327 8.9139435193 69 | 6160.0 97.0 4.28043939956 10.3544940704 70 | 6160.0 98.0 4.57109171596 10.8127208858 71 | 6160.0 99.0 7.44562418322 9.61560333045 72 | 6160.0 100.0 7.25431139857 8.8246847542 73 | 6160.0 101.0 11.9935647371 9.09293836908 74 | 6170.0 97.0 3.7652208112 10.4368317013 75 | 6170.0 98.0 4.01798940786 10.9392105797 76 | 6170.0 99.0 6.84137885349 9.73111467352 77 | 6170.0 100.0 6.38719514692 8.86287032216 78 | 6170.0 101.0 11.3531194085 9.26334146608 79 | 6180.0 97.0 3.22011617727 10.5647533539 80 | 6180.0 98.0 3.46320337889 11.0695188303 81 | 6180.0 99.0 6.23713352375 9.84662601658 82 | 6180.0 100.0 5.48114284998 8.88840692073 83 | 6180.0 101.0 10.7667636131 9.39985487151 84 | 6190.0 97.0 2.6676452645 10.7038920171 85 | 6190.0 98.0 2.90820688481 11.1995884212 86 | 6190.0 99.0 5.63983354264 9.95450024605 87 | 6190.0 100.0 4.54899287945 8.90535176651 88 | 6190.0 101.0 10.1804078177 9.53612961715 89 | 6200.0 97.0 2.12464528167 10.836348206 90 | 6200.0 98.0 2.3593138789 11.3456482186 91 | 6200.0 99.0 5.04905797992 10.0544987021 92 | 6200.0 100.0 3.61705337402 8.92253527209 93 | 6200.0 101.0 9.57616248796 9.67526828038 94 
| 6210.0 97.0 1.5961673914 10.9583033636 95 | 6210.0 98.0 1.81610343096 11.507220903 96 | 6210.0 99.0 4.45849288231 10.1544971582 97 | 6210.0 100.0 2.68048363619 8.98673475821 98 | 6210.0 101.0 8.94476715908 9.81846416021 99 | 6220.0 97.0 1.06747903602 11.0804971811 100 | 6220.0 98.0 1.27289298302 11.6690322472 101 | 6220.0 99.0 3.83804173913 10.2413693253 102 | 6220.0 100.0 1.74307203792 9.06262857452 103 | 6220.0 101.0 8.31337183021 9.96166004004 104 | 6230.0 97.0 0.538790680641 11.2026909985 105 | 6230.0 98.0 0.752202301802 11.8263090552 106 | 6230.0 99.0 3.17297199271 10.3081940692 107 | 6230.0 100.0 0.831337183021 9.22730383632 108 | 6230.0 101.0 7.71102068646 10.1222780852 109 | 6240.0 97.0 0.0103127903717 11.3246461562 110 | 6240.0 98.0 0.264765107909 11.9769033888 111 | 6240.0 99.0 2.5079022463 10.3750188131 112 | 6240.0 100.0 -0.0631395328877 9.45092806865 113 | 6240.0 101.0 7.15181489019 10.3086713888 114 | 6250.0 98.0 -0.222672085984 12.1277363822 115 | 6250.0 99.0 1.90155226547 10.4838476818 116 | 6250.0 101.0 6.59281955902 10.4950646924 117 | 6260.0 99.0 1.30172670303 10.5972110867 118 | 6260.0 101.0 6.0260370188 10.6805033568 119 | 6270.0 99.0 0.701901140601 10.7105744915 120 | 6270.0 101.0 5.42831610746 10.8618848046 121 | 6280.0 99.0 0.102075578168 10.8239378964 122 | 6280.0 101.0 4.83059519613 11.0432662523 123 | 6290.0 102.0 15.0482553382 8.80654660943 124 | 6290.0 103.0 3.81699522817 9.61608065005 125 | 6290.0 104.0 0.542368587505 13.7658972475 126 | 6300.0 102.0 14.2617472236 8.96978991243 127 | 6300.0 103.0 4.41471613951 9.4919775542 128 | 6300.0 104.0 1.19775693888 13.6704333276 129 | 6310.0 102.0 13.469767016 9.12396414305 130 | 6310.0 103.0 5.01243705084 9.36787445835 131 | 6310.0 104.0 1.85335575536 13.5749694077 132 | 6320.0 102.0 12.6279065775 9.19556208296 133 | 6320.0 103.0 5.604054474 9.28100229126 134 | 6320.0 104.0 2.50874410674 13.4795054878 135 | 6330.0 102.0 11.7900449761 9.27360383747 136 | 6330.0 103.0 6.19504050183 9.19842600056 137 | 6330.0 104.0 3.16118594658 13.3864281659 138 | 6340.0 102.0 10.9883833735 9.4101172429 139 | 6340.0 103.0 6.79549745959 9.11036053446 140 | 6340.0 104.0 3.8066824378 13.2988400194 141 | 6350.0 102.0 10.1865113059 9.54639198854 142 | 6350.0 103.0 7.4188951143 9.00940743918 143 | 6350.0 104.0 4.45196846391 13.2114905327 144 | 6360.0 102.0 9.36464505279 9.73970642631 145 | 6360.0 103.0 8.04229276901 8.9084543439 146 | 6360.0 104.0 5.11977425675 13.1200838294 147 | 6360.0 105.0 -0.0631395328877 13.025335889 148 | 6370.0 102.0 8.5427787997 9.93278220427 149 | 6370.0 103.0 8.66021833087 8.81012650642 150 | 6370.0 104.0 5.83935446656 13.0200853734 151 | 6370.0 105.0 0.357790686363 12.9642389802 152 | 6380.0 102.0 7.75458696415 10.0301554026 153 | 6380.0 103.0 9.27603924164 8.71275330814 154 | 6380.0 104.0 6.55914514148 12.9198482575 155 | 6380.0 105.0 0.778720905614 12.9029034117 156 | 6390.0 102.0 6.97018350058 10.1170275697 157 | 6390.0 103.0 9.8918601524 8.61561876966 158 | 6390.0 104.0 7.27788349085 12.795267842 159 | 6390.0 105.0 1.19965112487 12.841806503 160 | 6400.0 102.0 6.13379515493 10.2031837573 161 | 6400.0 103.0 10.5232554813 8.37695896994 162 | 6400.0 104.0 7.99346486358 12.5725982489 163 | 6400.0 105.0 1.62058134412 12.7804709345 164 | 6410.0 102.0 5.27509750765 10.2891012852 165 | 6410.0 103.0 11.1546508102 8.13829917023 166 | 6410.0 104.0 8.7090462363 12.3499286558 167 | 6410.0 105.0 2.07602784135 12.7193740257 168 | 6420.0 102.0 4.42292427878 10.3614152046 169 | 6420.0 103.0 11.7786798602 7.89963937052 170 | 
6420.0 104.0 9.40779040026 12.1050637013 171 | 6420.0 105.0 2.54010340807 12.6580384572 172 | 6430.0 102.0 3.59726965372 10.3797920091 173 | 6430.0 103.0 12.37345426 7.6609795708 174 | 6430.0 104.0 10.0391857291 11.7728492601 175 | 6430.0 105.0 3.00417897479 12.5969415485 176 | 6430.0 106.0 14.9851158053 8.75881464948 177 | 6430.0 107.0 15.2561948665 8.16813164519 178 | 6440.0 102.0 2.77161502866 10.3981688137 179 | 6440.0 103.0 12.9682286598 7.42231977109 180 | 6440.0 104.0 10.670581058 11.4403961591 181 | 6440.0 105.0 3.46825454152 12.5358446397 182 | 6440.0 106.0 14.4042321028 8.76836104147 183 | 6440.0 107.0 14.6334286072 8.22779659512 184 | 6450.0 102.0 1.94069877586 10.4463780933 185 | 6450.0 103.0 13.6198286392 7.23735842631 186 | 6450.0 104.0 11.3051333635 11.0719054283 187 | 6450.0 105.0 4.00262545486 12.4904992778 188 | 6450.0 106.0 13.8233484002 8.77790743346 189 | 6450.0 107.0 14.0108728129 8.28746154505 190 | 6460.0 102.0 1.1064150813 10.514634796 191 | 6460.0 103.0 14.2775321068 7.05836357653 192 | 6460.0 104.0 11.9447368317 10.6497162426 193 | 6460.0 105.0 4.54478357725 12.4473018541 194 | 6460.0 106.0 13.237413535 8.84688011558 195 | 6460.0 107.0 13.3876856233 8.35380896937 196 | 6470.0 102.0 0.272131386746 10.5828914987 197 | 6470.0 103.0 14.9352355744 6.87936872674 198 | 6470.0 104.0 12.5841298347 10.2275270569 199 | 6470.0 105.0 5.08694169965 12.4038657705 200 | 6470.0 106.0 12.6464275072 8.97551774762 201 | 6470.0 107.0 12.7636565732 8.43614660027 202 | 6480.0 104.0 13.3192844627 9.90008581171 203 | 6480.0 105.0 5.60679052042 12.3434848412 204 | 6480.0 106.0 12.0556519445 9.10391671987 205 | 6480.0 107.0 12.1394170581 8.51848423117 206 | 6490.0 104.0 14.078221648 9.59627188668 207 | 6490.0 105.0 6.11190678353 12.2718869013 208 | 6490.0 106.0 11.5147566127 9.28243425005 209 | 6490.0 107.0 11.5257007984 8.64545124462 210 | 6500.0 104.0 14.8371588333 9.29245796164 211 | 6500.0 105.0 6.61702304663 12.2002889614 212 | 6500.0 106.0 10.9797543041 9.46667961543 213 | 6500.0 107.0 10.9271380266 8.83948166179 214 | 6510.0 104.0 15.5960960186 8.98888269641 215 | 6510.0 105.0 7.12213930973 12.1525570014 216 | 6510.0 106.0 10.4445415303 9.65068632101 217 | 6510.0 107.0 10.3285752549 9.03327341915 218 | 6520.0 105.0 7.62725557283 12.120815248 219 | 6520.0 106.0 9.88701945488 9.81965745921 220 | 6520.0 107.0 9.71948922762 9.21370022774 221 | 6530.0 105.0 8.13237183593 12.0888348349 222 | 6530.0 106.0 9.29645435727 9.96595591643 223 | 6530.0 107.0 9.06852064354 9.34066724118 224 | 6540.0 105.0 8.61728344851 12.0074518432 225 | 6540.0 106.0 8.70588925966 10.1122543737 226 | 6540.0 107.0 8.41734159436 9.46739559483 227 | 6550.0 105.0 9.08872529407 11.8928951393 228 | 6550.0 106.0 8.1186916038 10.2392213871 229 | 6550.0 107.0 7.76616254518 9.59412394848 230 | 6560.0 105.0 9.56227179073 11.7823956521 231 | 6560.0 106.0 7.53991255233 10.3213203582 232 | 6560.0 107.0 7.17812302889 9.67837085778 233 | 6570.0 105.0 10.0558124728 11.7084111141 234 | 6570.0 106.0 6.96113350086 10.4034193293 235 | 6570.0 107.0 6.59008351259 9.76261776708 236 | 6580.0 105.0 10.5493531549 11.6341879164 237 | 6580.0 106.0 6.37709282165 10.4833703622 238 | 6580.0 107.0 6.0020439963 9.84686467638 239 | 6580.0 108.0 0.110073252334 11.8107961682 240 | 6590.0 105.0 11.0494182553 11.5640219353 241 | 6590.0 106.0 5.74569749278 10.5449445905 242 | 6590.0 107.0 5.39548355036 9.88481158453 243 | 6590.0 108.0 0.790086021534 11.7833502913 244 | 6600.0 105.0 11.5755810294 11.5088915216 245 | 6600.0 106.0 5.1143021639 
10.6065188189 246 | 6600.0 107.0 4.77692659317 9.89197137852 247 | 6600.0 108.0 1.47009879073 11.7559044143 248 | 6600.0 109.0 10.1970345614 8.43662391987 249 | 6610.0 105.0 12.1017438035 11.4537611078 250 | 6610.0 106.0 4.48290683502 10.6680930472 251 | 6610.0 107.0 4.15836963598 9.89936983231 252 | 6610.0 108.0 2.13516853715 11.7096044131 253 | 6610.0 109.0 10.0497089846 8.7468816595 254 | 6620.0 105.0 12.6051763457 11.4432600767 255 | 6620.0 106.0 3.81909987926 10.7139157287 256 | 6620.0 107.0 3.52992081864 9.90223374991 257 | 6620.0 108.0 2.785716191 11.6444502878 258 | 6620.0 109.0 9.90238340788 9.05713939913 259 | 6630.0 105.0 13.0934554 11.4623528606 260 | 6630.0 106.0 3.15529292351 10.7599770701 261 | 6630.0 107.0 2.87874176946 9.89459663632 262 | 6630.0 108.0 3.42089989185 11.5630672961 263 | 6630.0 109.0 9.68034271723 9.36954507695 264 | 6640.0 105.0 13.5865751519 11.4740471908 265 | 6640.0 106.0 2.52137201331 10.816062123 266 | 6640.0 107.0 2.22756272028 9.88719818253 267 | 6640.0 108.0 4.01988359385 11.4437373963 268 | 6640.0 109.0 9.38379737776 9.68409869297 269 | 6650.0 105.0 14.1228402512 11.4203487359 270 | 6650.0 106.0 1.93206970636 10.8876600629 271 | 6650.0 107.0 1.58374994993 9.89674457452 272 | 6650.0 108.0 4.61886729584 11.3244074964 273 | 6650.0 109.0 9.06809971333 10.0031868452 274 | 6660.0 105.0 14.6593158156 11.3664116212 275 | 6660.0 106.0 1.34276739941 10.9592580028 276 | 6660.0 107.0 1.00497089846 10.0585559187 277 | 6660.0 108.0 5.2214289047 11.2377739891 278 | 6660.0 109.0 8.70715205032 10.3327760286 279 | 6670.0 105.0 15.195580915 11.3124745064 280 | 6670.0 106.0 0.766303464147 11.0470848091 281 | 6670.0 107.0 0.426191846992 10.2206059227 282 | 6670.0 108.0 5.82525330422 11.16522141 283 | 6670.0 109.0 8.34346834088 10.6544894386 284 | 6680.0 106.0 0.209202318968 11.1594935748 285 | 6680.0 107.0 -0.152587204479 10.3826559267 286 | 6680.0 108.0 6.39582421641 11.0110471794 287 | 6680.0 109.0 7.96757765509 10.9442224355 288 | 6690.0 108.0 6.91651489762 10.7346791313 289 | 6690.0 109.0 7.61883696844 11.4556703862 290 | 6700.0 108.0 7.48603348427 10.5031791256 291 | 6700.0 109.0 7.21284977198 11.6091286375 292 | 6710.0 108.0 8.10416951124 10.316785822 293 | 6710.0 109.0 6.76013932117 11.7850209099 294 | 6720.0 108.0 8.72251600332 10.1303925184 295 | 6720.0 109.0 6.27606956903 11.9759487496 296 | 6730.0 108.0 9.34170435584 9.94185127666 297 | 6730.0 109.0 5.78168702652 12.0045879256 298 | 6740.0 108.0 9.96299735945 9.74877549869 299 | 6740.0 109.0 5.2805696005 11.9251142123 300 | 6750.0 108.0 10.5842903631 9.55546106093 301 | 6750.0 109.0 4.7796626396 11.8454018392 302 | 6750.0 110.0 10.9652322115 9.40319610871 303 | 6750.0 111.0 0.778720905614 9.23613424891 304 | 6760.0 110.0 10.3338368826 9.54472136994 305 | 6760.0 111.0 1.0910511283 9.33613270499 306 | 6770.0 110.0 9.70244155374 9.68600797137 307 | 6770.0 111.0 1.40338135098 9.43636982087 308 | 6780.0 110.0 9.098196224 9.83875024319 309 | 6780.0 111.0 1.71571157367 9.53636827695 310 | 6790.0 110.0 8.55687996205 10.017745093 311 | 6790.0 111.0 2.1187522586 9.72132962173 312 | 6800.0 110.0 8.0157741652 10.1967399428 313 | 6800.0 111.0 2.5318952688 9.91583735849 314 | 6810.0 110.0 7.43720557884 10.3833719061 315 | 6810.0 111.0 2.95198362761 10.1108224149 316 | 6820.0 110.0 6.82138466807 10.5778796429 317 | 6820.0 111.0 3.40048477622 10.3079554094 318 | 6830.0 110.0 6.20556375731 10.7723873797 319 | 6830.0 111.0 3.84877545972 10.505088404 320 | 6840.0 110.0 5.60131842757 10.9750095496 321 | 6840.0 111.0 
4.33221381653 10.5248971674 322 | 6850.0 110.0 5.00001960937 11.179540998 323 | 6850.0 111.0 4.81965101042 10.5248971674 324 | 6860.0 110.0 4.4298696274 11.3270327542 325 | 6860.0 111.0 5.37359517896 10.3993621127 326 | 6870.0 110.0 3.87318941244 11.4501812109 327 | 6870.0 111.0 5.92206725464 10.2714404601 328 | 6880.0 110.0 3.31650919748 11.5735683273 329 | 6880.0 111.0 6.42318468066 10.123710044 330 | 6890.0 110.0 2.71499991417 11.670464206 331 | 6890.0 111.0 6.92430210668 9.97597962802 332 | 6890.0 112.0 14.8220053454 7.43186616308 333 | 6890.0 113.0 15.0080565023 8.0774409213 334 | 6890.0 114.0 11.259883365 6.49154655221 335 | 6900.0 112.0 14.1379937391 7.5750620429 336 | 6900.0 113.0 14.3575088484 8.20774917195 337 | 6900.0 114.0 11.6808135842 6.73020635192 338 | 6910.0 112.0 13.4539821328 7.71825792273 339 | 6910.0 113.0 13.7069611946 8.33781876279 340 | 6910.0 114.0 12.1017438035 6.96886615163 341 | 6920.0 112.0 12.8078542463 7.88293318454 342 | 6920.0 113.0 13.0982960975 8.47337752903 343 | 6920.0 114.0 12.5003647211 7.00538110099 344 | 6930.0 112.0 12.1659356619 8.04999504433 345 | 6930.0 113.0 12.5073100697 8.61108423346 346 | 6930.0 114.0 12.8575240121 7.02972440056 347 | 6940.0 112.0 11.4989717295 8.20345329555 348 | 6940.0 113.0 11.9167449721 8.7478362987 349 | 6940.0 114.0 13.0583077267 7.09463986608 350 | 6950.0 112.0 10.8294822158 8.35524092817 351 | 6950.0 113.0 11.329126386 8.87718991014 352 | 6950.0 114.0 13.2593019064 7.1597939914 353 | 6960.0 112.0 10.209872933 8.50631258139 354 | 6960.0 113.0 10.7417182651 9.00654352159 355 | 6960.0 114.0 13.2955019053 7.22972131272 356 | 6970.0 112.0 9.61152062638 8.65714557481 357 | 6970.0 113.0 10.123371773 9.10988321486 358 | 6970.0 114.0 13.3319123692 7.29964863404 359 | 6980.0 112.0 8.98686018101 8.76072392788 360 | 6980.0 113.0 9.49197644411 9.20224455735 361 | 6980.0 114.0 13.3681123681 7.36957595535 362 | 6990.0 112.0 8.35546485213 8.85236929097 363 | 6990.0 113.0 8.86058111523 9.29460589984 364 | 6990.0 114.0 13.404522832 7.43950327667 365 | 7000.0 112.0 7.7141776631 8.95141310785 366 | 7000.0 113.0 8.24728578579 9.39818425292 367 | 7000.0 114.0 13.4407228309 7.50966925778 368 | 7010.0 112.0 7.03374396369 9.0786187811 369 | 7010.0 113.0 7.64598696759 9.50892239998 370 | 7010.0 114.0 13.5036518987 7.58389245549 371 | 7020.0 112.0 6.35331026427 9.20582445435 372 | 7020.0 113.0 7.04447768428 9.61966054705 373 | 7020.0 114.0 13.6734972422 7.67649245778 374 | 7030.0 112.0 5.68403121566 9.33637136479 375 | 7030.0 113.0 6.37098933347 9.77144817967 376 | 7030.0 114.0 13.8431321205 7.76885380027 377 | 7040.0 112.0 5.0595812354 9.47956724462 378 | 7040.0 113.0 5.67961144835 9.93325952387 379 | 7040.0 114.0 14.0285518821 7.84952081258 380 | 7050.0 112.0 4.43513125514 9.62276312445 381 | 7050.0 113.0 4.98802309813 10.0953095279 382 | 7050.0 114.0 14.3543518718 7.8266094718 383 | 7060.0 112.0 3.80352546115 9.75211673589 384 | 7060.0 113.0 4.38461962883 10.1907734478 385 | 7060.0 114.0 14.6803623266 7.80345947123 386 | 7070.0 112.0 3.14308594715 9.8261012738 387 | 7070.0 113.0 3.78142662464 10.2862373676 388 | 7070.0 114.0 15.0061623163 7.78030947066 389 | 7080.0 112.0 2.48285689825 9.90032447151 390 | 7080.0 113.0 3.17802315535 10.3817012875 391 | 7090.0 112.0 1.83862319769 9.94137395706 392 | 7090.0 113.0 2.59166735993 10.5862327359 393 | 7100.0 112.0 1.23290461219 9.90509766751 394 | 7100.0 113.0 2.00552202962 10.7907641842 395 | 7100.0 115.0 14.8678867393 8.4931862924 396 | 7110.0 112.0 0.626975561574 9.86906003775 397 | 7110.0 113.0 
1.43053135012 11.0022167668 398 | 7110.0 115.0 14.0719076947 8.59700330528 399 | 7120.0 112.0 0.0210465109626 9.83278374819 400 | 7120.0 113.0 0.900580204088 11.2408765665 401 | 7120.0 115.0 13.275718185 8.70082031815 402 | 7130.0 113.0 0.37083952316 11.4795363662 403 | 7130.0 115.0 12.4384879789 8.75953062888 404 | 7140.0 113.0 -0.158901157767 11.7181961659 405 | 7140.0 115.0 11.5966275404 8.81346774362 406 | 7150.0 115.0 10.7547671019 8.86740485835 407 | 7160.0 115.0 9.99141014925 8.9098863027 408 | 7170.0 115.0 9.24783691695 8.94974248925 409 | 7170.0 116.0 0.0841860438502 11.0499487267 410 | 7180.0 115.0 8.50426368464 8.98959867581 411 | 7180.0 116.0 0.602351143748 10.967372436 412 | 7180.0 117.0 0.255083712866 10.3996007725 413 | 7190.0 115.0 7.72406952326 8.99055331501 414 | 7190.0 116.0 1.12030577854 10.8847961453 415 | 7190.0 117.0 0.754938348227 10.300079636 416 | 7200.0 115.0 6.93482536216 8.98220022202 417 | 7200.0 116.0 1.60963715842 10.794582741 418 | 7200.0 117.0 1.25479298359 10.2007971594 419 | 7210.0 115.0 6.14558120107 8.97360846923 420 | 7210.0 116.0 2.05624412104 10.693390986 421 | 7210.0 117.0 1.76496040932 10.1062878787 422 | 7220.0 115.0 5.30372076256 8.93685486007 423 | 7220.0 116.0 2.50264061856 10.5921992309 424 | 7220.0 117.0 2.27596969549 10.0127332372 425 | 7230.0 115.0 4.46186032406 8.90010125091 426 | 7230.0 116.0 2.95577199958 10.489575517 427 | 7230.0 117.0 2.79160921407 9.9139280801 428 | 7240.0 115.0 3.60379407212 8.92587650928 429 | 7240.0 116.0 3.47035919262 10.3745414935 430 | 7240.0 117.0 3.34723710348 9.77073220027 431 | 7250.0 115.0 2.72152433257 9.04520640914 432 | 7250.0 116.0 3.98473592054 10.2597461299 433 | 7250.0 117.0 3.9028649929 9.62753632044 434 | 7260.0 115.0 1.83904412791 9.164536309 435 | 7260.0 116.0 4.50521613664 10.149962622 436 | 7260.0 117.0 4.41050683731 9.50653580199 437 | 7270.0 115.0 0.997604619625 9.39842291272 438 | 7270.0 116.0 5.03432542224 10.0475775679 439 | 7270.0 117.0 4.88594751996 9.40057085091 440 | 7280.0 115.0 0.166267436604 9.6609486924 441 | 7280.0 116.0 5.56343470784 9.94519251386 442 | 7280.0 117.0 5.3613882026 9.29436724004 443 | 7290.0 116.0 6.09275445855 9.84304611958 444 | 7290.0 117.0 5.90438818543 9.22611053732 445 | 7290.0 118.0 15.1713774274 8.90559042631 446 | 7300.0 116.0 6.57850793157 9.7468662203 447 | 7300.0 117.0 6.45475444711 9.1616723914 448 | 7300.0 118.0 14.5555565166 8.88792960113 449 | 7310.0 116.0 7.05963117217 9.65140230041 450 | 7310.0 117.0 6.97902303518 9.09771156508 451 | 7310.0 118.0 13.9397356058 8.87026877595 452 | 7320.0 116.0 7.54075441277 9.55593838053 453 | 7320.0 117.0 7.44204627636 9.03398939855 454 | 7320.0 118.0 13.3207577184 8.86000640456 455 | 7330.0 116.0 8.0397671877 9.45665590385 456 | 7330.0 117.0 7.90506951753 8.97050589183 457 | 7330.0 118.0 12.6725251808 8.91728475649 458 | 7340.0 116.0 8.56592996176 9.35116827237 459 | 7340.0 117.0 8.38282531638 8.88220176594 460 | 7340.0 118.0 12.0242926431 8.97456310842 461 | 7350.0 116.0 9.09209273582 9.2459193007 462 | 7350.0 117.0 8.89530785832 8.73709660771 463 | 7350.0 118.0 11.4206787087 9.14067032903 464 | 7360.0 116.0 9.61825550989 9.14067032903 465 | 7360.0 117.0 9.40779040026 8.59175278968 466 | 7360.0 118.0 10.8465298897 9.37933012874 467 | 7370.0 116.0 10.1334740982 9.00893011958 468 | 7370.0 117.0 9.95352642952 8.43256670328 469 | 7370.0 118.0 10.2456520017 9.61798992845 470 | 7380.0 116.0 10.6489031517 8.87742856994 471 | 7380.0 117.0 10.4990519937 8.27361927667 472 | 7380.0 118.0 9.60415434754 9.85664972817 473 | 
7390.0 116.0 11.1679101121 8.74998423689 474 | 7390.0 117.0 11.0559426737 8.12087700485 475 | 7390.0 118.0 8.98159855327 10.0666703519 476 | 7400.0 116.0 11.7203810248 8.660486812 477 | 7400.0 117.0 11.6387205623 7.98317030041 478 | 7400.0 118.0 8.4028195018 10.2098662317 479 | 7410.0 116.0 12.2728519376 8.57098938711 480 | 7410.0 117.0 12.2214984508 7.84546359598 481 | 7410.0 118.0 7.78720905614 10.3459023176 482 | -------------------------------------------------------------------------------- /experiments/eth.yaml: -------------------------------------------------------------------------------- 1 | # Name of the experiment 2 | name: "eth" 3 | 4 | # ----------------------------------------------------------------------------- 5 | # Optimization settings | 6 | # ----------------------------------------------------------------------------- 7 | 8 | # Buffer size of the tf.data.Dataset.prefetch option 9 | prefetchSize: 100 10 | # Shuffle the dataset 11 | shuffle: false 12 | # Number of epochs 13 | epochs: 50 14 | # Use the centered option in RMSProp 15 | centered: true 16 | # Learning rate 17 | learningRate: 0.003 18 | # Learning rate decay 19 | learningRateDecay: 0.95 20 | # Clipping ratio 21 | clippingRatio: 5 22 | # Optimizer decay rate 23 | optimizerDecay: 0.95 24 | # Optimizer momentum rate 25 | optimizerMomentum: 0.2 26 | # Weight regularization rate 27 | l2Rate: 0.005 28 | 29 | # ----------------------------------------------------------------------------- 30 | # Dataset parameters | 31 | # ----------------------------------------------------------------------------- 32 | 33 | # Folder of the datasets 34 | dataPath: "datasets" 35 | # Name of the training files in the dataPath 36 | trainDatasets: 37 | - "train/biwi_hotel_train.txt" 38 | - "train/crowds_zara01_train.txt" 39 | - "train/crowds_zara02_train.txt" 40 | - "train/crowds_zara03_train.txt" 41 | - "train/students001_train.txt" 42 | - "train/students003_train.txt" 43 | - "train/uni_examples_train.txt" 44 | # Name of the validation files in the dataPath 45 | validationDatasets: 46 | - "val/biwi_hotel_val.txt" 47 | - "val/crowds_zara01_val.txt" 48 | - "val/crowds_zara02_val.txt" 49 | - "val/crowds_zara03_val.txt" 50 | - "val/students001_val.txt" 51 | - "val/students003_val.txt" 52 | - "val/uni_examples_val.txt" 53 | # Name of the test files in the dataPath 54 | testDatasets: 55 | - "test/biwi_eth.txt" 56 | # Name of the training navigation maps in dataPath 57 | trainMaps: 58 | - "navMap/hotel.npy" 59 | - "navMap/zara1.npy" 60 | - "navMap/zara2.npy" 61 | - "navMap/zara3.npy" 62 | - "navMap/univ1.npy" 63 | - "navMap/univ2.npy" 64 | - "navMap/univ3.npy" 65 | # Name of the validation navigation maps in dataPath 66 | validationMaps: 67 | - "navMap/hotel.npy" 68 | - "navMap/zara1.npy" 69 | - "navMap/zara2.npy" 70 | - "navMap/zara3.npy" 71 | - "navMap/univ1.npy" 72 | - "navMap/univ2.npy" 73 | - "navMap/univ3.npy" 74 | # Name of the test navigation maps in dataPath 75 | testMaps: 76 | - "navMap/eth.npy" 77 | # Number of labels of the semantic map 78 | numLabels: 8 79 | # List of semantic maps 80 | semanticMaps: 81 | - "semMap/eth.npy" 82 | - "semMap/hotel.npy" 83 | - "semMap/univ.npy" 84 | - "semMap/zara.npy" 85 | # List of homography matrices 86 | homography: 87 | - "homography/eth.txt" 88 | - "homography/hotel.txt" 89 | - "homography/univ.txt" 90 | - "homography/zara.txt" 91 | # Name of the training semantic mapping 92 | trainMapping: 93 | - "hotel" 94 | - "zara" 95 | - "zara" 96 | - "zara" 97 | - "univ" 98 | - "univ" 99 | - "univ" 100 | # Name of the validation semantic mapping 101 | validationMapping: 102 | - "hotel" 103 | - "zara" 104 | - "zara" 105 | - "zara" 106 | - "univ" 107 | - "univ" 108 | - "univ" 109 | # Name of the test semantic mapping 110 | testMapping: 111 | - "eth" 112 | # Delimiter used in the files of the dataset 113 | delimiter: "tab" 114 | # Number of frames observed 115 | obsLen: 8 116 | # Number of frames that the model has to predict 117 | predLen: 12 118 | # Number of frames to skip while making the dataset. If true, the number of 119 | # frames to skip is random, between 1 and (obsLen + predLen). For example, if 120 | # sequence_1 in the dataset spans Frame_1 - Frame_N and skip = 5, then 121 | # sequence_2 will start from Frame_N+5 122 | skip: 1 123 | 124 | # ----------------------------------------------------------------------------- 125 | # Model parameters | 126 | # ----------------------------------------------------------------------------- 127 | 128 | # Maximum number of pedestrians per frame 129 | maxNumPed: 93 130 | # Size of the sum pooling window 131 | gridSize: 8 132 | # Size of the neighborhood 133 | neighborhoodSize: 2 134 | # Number of hidden units 135 | rnnSize: 128 136 | # Dimension of the embedding layer 137 | embeddingSize: 64 138 | # Type of pooling module to use. Options supported: social, occupancy, 139 | # navigation, semantic, combined or null. Combined pooling is a list of the 140 | # other pooling layers 141 | poolingModule: 142 | - "social" 143 | - "semantic" 144 | # Image width plus padding 145 | imageWidth: 25 146 | # Image height plus padding 147 | imageHeight: 20 148 | # Width of the navigation grid 149 | navigationWidth: 400 150 | # Height of the navigation grid 151 | navigationHeight: 320 152 | # Kernel size for the average pooling 153 | kernelSize: 2 154 | # Size of the neighborhood navigation grid 155 | navigationGrid: 33 156 | # Size of the neighborhood semantic grid 157 | semanticGridSize: 50 158 | 159 | # ----------------------------------------------------------------------------- 160 | # Logger settings | 161 | # ----------------------------------------------------------------------------- 162 | 163 | # Folder where to save the logs 164 | logFolder: "logs" 165 | # Logger level 166 | logLevel: "INFO" 167 | # Folder where to save the model 168 | modelFolder: "models" 169 | # Save the sampled coordinates 170 | saveCoordinates: true 171 | --------------------------------------------------------------------------------
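The trajectory files above all share one row format: frame_id, pedestrian_id, x, y, split by the delimiter named in the experiment file ("tab" here). As a minimal sketch of what obsLen, predLen and skip mean in practice, the snippet below groups such a file into (observation, prediction) windows of obsLen + predLen frames per pedestrian; the helper name load_sequences is hypothetical, and this is an illustration rather than the repository's own utils/loader.py.

import pandas as pd

OBS_LEN, PRED_LEN, SKIP = 8, 12, 1  # obsLen, predLen, skip from experiments/eth.yaml

def load_sequences(path, obs_len=OBS_LEN, pred_len=PRED_LEN, skip=SKIP):
    # Each row holds: frame_id, pedestrian_id, x, y (tab-delimited per the config)
    df = pd.read_csv(path, sep="\t", names=["frame", "ped", "x", "y"])
    seq_len = obs_len + pred_len
    # Reading the `skip` comment literally: the next sequence starts `skip`
    # frames after the previous one ends, so window starts are seq_len + skip apart
    step = seq_len + skip
    sequences = []
    for _, traj in df.groupby("ped"):
        coords = traj.sort_values("frame")[["x", "y"]].to_numpy()
        for start in range(0, len(coords) - seq_len + 1, step):
            window = coords[start:start + seq_len]
            sequences.append((window[:obs_len], window[obs_len:]))
    return sequences

obs_pred = load_sequences("datasets/val/biwi_hotel_val.txt")

With obsLen 8 and predLen 12, each window covers 20 consecutive observations of one pedestrian: the model observes the first 8 positions and is evaluated on the remaining 12.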
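Each file listed under homography: holds a single 3x3 matrix, one row per line. Applied to a point in homogeneous coordinates, it converts between the two frames the datasets use, world metres and image pixels; which direction a given matrix encodes depends on how it was exported, so the world-to-pixel direction below is an assumption, and apply_homography is a hypothetical helper.

import numpy as np

H = np.loadtxt("datasets/homography/eth.txt")  # parses the 3x3 matrix

def apply_homography(H, points):
    # points: (N, 2) array; lift to homogeneous coordinates, map, renormalize
    pts = np.hstack([points, np.ones((len(points), 1))])
    mapped = pts @ H.T
    return mapped[:, :2] / mapped[:, 2:3]

pixels = apply_homography(H, np.array([[1.0, -2.0]]))  # assumed world -> pixel
world = apply_homography(np.linalg.inv(H), pixels)     # inverse gives the way back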
/experiments/hotel.yaml: -------------------------------------------------------------------------------- 1 | # Name of the experiment 2 | name: "hotel" 3 | 4 | # ----------------------------------------------------------------------------- 5 | # Optimization settings | 6 | # ----------------------------------------------------------------------------- 7 | 8 | # Buffer size of the tf.data.Dataset.prefetch option 9 | prefetchSize: 100 10 | # Shuffle the dataset 11 | shuffle: False 12 | # Number of epochs 13 | epochs: 50 14 | # Use centered option in RMSProp 15 | centered: true 16 | # Learning rate 17 | learningRate: 0.003 18 | # Learning rate decay 19 | learningRateDecay: 0.95 20 | # Clipping ratio 21 | clippingRatio: 5 22 | # Optimizer decay rate 23 | optimizerDecay: 0.95 24 | # Optimizer momentum rate 25 | optimizerMomentum: 0.2 26 | # Weight regularization rate 27 | l2Rate: 0.005 28 | 29 | # ----------------------------------------------------------------------------- 30 | # Dataset parameters | 31 | #
----------------------------------------------------------------------------- 32 | 33 | # Folder of the datasets 34 | dataPath: "datasets" 35 | # Name of the training files in the dataPath 36 | trainDatasets: 37 | - "train/biwi_eth_train.txt" 38 | - "train/crowds_zara01_train.txt" 39 | - "train/crowds_zara02_train.txt" 40 | - "train/crowds_zara03_train.txt" 41 | - "train/students001_train.txt" 42 | - "train/students003_train.txt" 43 | - "train/uni_examples_train.txt" 44 | # Name of the validation files in the dataPath 45 | validationDatasets: 46 | - "val/biwi_eth_val.txt" 47 | - "val/crowds_zara01_val.txt" 48 | - "val/crowds_zara02_val.txt" 49 | - "val/crowds_zara03_val.txt" 50 | - "val/students001_val.txt" 51 | - "val/students003_val.txt" 52 | - "val/uni_examples_val.txt" 53 | # Name of the test files in the dataPath 54 | testDatasets: 55 | - "test/biwi_hotel.txt" 56 | # Name of the training navigation maps in the dataPath 57 | trainMaps: 58 | - "navMap/eth.npy" 59 | - "navMap/zara1.npy" 60 | - "navMap/zara2.npy" 61 | - "navMap/zara3.npy" 62 | - "navMap/univ1.npy" 63 | - "navMap/univ2.npy" 64 | - "navMap/univ3.npy" 65 | # Name of the validation navigation maps in the dataPath 66 | validationMaps: 67 | - "navMap/eth.npy" 68 | - "navMap/zara1.npy" 69 | - "navMap/zara2.npy" 70 | - "navMap/zara3.npy" 71 | - "navMap/univ1.npy" 72 | - "navMap/univ2.npy" 73 | - "navMap/univ3.npy" 74 | # Name of the test navigation maps in the dataPath 75 | testMaps: 76 | - "navMap/hotel.npy" 77 | # Number of labels of the semantic map 78 | numLabels: 8 79 | # List of semantic maps 80 | semanticMaps: 81 | - "semMap/eth.npy" 82 | - "semMap/hotel.npy" 83 | - "semMap/univ.npy" 84 | - "semMap/zara.npy" 85 | # List of homography matrices 86 | homography: 87 | - "homography/eth.txt" 88 | - "homography/hotel.txt" 89 | - "homography/univ.txt" 90 | - "homography/zara.txt" 91 | # Name of the training semantic mapping 92 | trainMapping: 93 | - "eth" 94 | - "zara" 95 | - "zara" 96 | - "zara" 97 | - "univ" 98 | - "univ" 99 | - "univ" 100 | # Name of the validation semantic mapping 101 | validationMapping: 102 | - "eth" 103 | - "zara" 104 | - "zara" 105 | - "zara" 106 | - "univ" 107 | - "univ" 108 | - "univ" 109 | # Name of the test semantic mapping 110 | testMapping: 111 | - "hotel" 112 | # Delimiter used in the files of the dataset 113 | delimiter: "tab" 114 | # Number of frames observed 115 | obsLen: 8 116 | # Number of frames that the model has to predict 117 | predLen: 12 118 | # Number of frames to skip while making the dataset. If True, the number of 119 | # frames to skip is random, between 1 and (obsLen + predLen). For example, if 120 | # sequence_1 in the dataset spans Frame_1 - Frame_N and skip = 5, then 121 | # sequence_2 will start from Frame_N+5 122 | skip: 1 123 | 124 | # ----------------------------------------------------------------------------- 125 | # Model parameters | 126 | # ----------------------------------------------------------------------------- 127 | 128 | # Maximum number of pedestrians per frame 129 | maxNumPed: 93 130 | # Size of the sum pooling window 131 | gridSize: 8 132 | # Size of the neighborhood 133 | neighborhoodSize: 2 134 | # Number of hidden units 135 | rnnSize: 128 136 | # Dimension of the embedding layer 137 | embeddingSize: 64 138 | # Type of pooling module to use. Options supported: social, occupancy, 139 | # navigation, semantic, combined or null.
Combined pooling is a list of the 140 | # other pooling layers 141 | poolingModule: 142 | - "social" 143 | - "semantic" 144 | # Image width plus padding 145 | imageWidth: 25 146 | # Image height plus padding 147 | imageHeight: 20 148 | # Width of the navigation grid 149 | navigationWidth: 400 150 | # Height of the navigation grid 151 | navigationHeight: 320 152 | # Kernel size for the average pooling 153 | kernelSize: 2 154 | # Size of the neighborhood navigation grid 155 | navigationGrid: 33 156 | # Size of the neighborhood semantic grid 157 | semanticGridSize: 50 158 | 159 | # ----------------------------------------------------------------------------- 160 | # Logger settings | 161 | # ----------------------------------------------------------------------------- 162 | 163 | # Folder where to save the logs 164 | logFolder: "logs" 165 | # Logger level 166 | logLevel: "INFO" 167 | # Folder where to save the model 168 | modelFolder: "models" 169 | # Save the sampled coordinates 170 | saveCoordinates: true 171 | --------------------------------------------------------------------------------
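Each homography file listed above is a plain 3x3 matrix of whitespace-separated floats; SemanticPooling receives it as H (see social-lstm/model.py further down). A generic sketch of applying such a matrix to one 2-D point in homogeneous coordinates; whether the stored matrices map world coordinates to pixels or the reverse is not stated in this listing, so treat the direction as an assumption:

import numpy as np

# Load one of the 3x3 homography matrices shipped with the datasets.
H = np.loadtxt("datasets/homography/eth.txt")

point = np.array([1.0, 2.0, 1.0])   # (x, y, 1): homogeneous coordinates
mapped = H @ point
mapped = mapped[:2] / mapped[2]     # perspective divide back to 2-D
print(mapped)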
/experiments/univ.yaml: -------------------------------------------------------------------------------- 1 | # Name of the experiment 2 | name: "univ" 3 | 4 | # ----------------------------------------------------------------------------- 5 | # Optimization settings | 6 | # ----------------------------------------------------------------------------- 7 | 8 | # Buffer size of the tf.data.Dataset.prefetch option 9 | prefetchSize: 100 10 | # Shuffle the dataset 11 | shuffle: False 12 | # Number of epochs 13 | epochs: 50 14 | # Use centered option in RMSProp 15 | centered: true 16 | # Learning rate 17 | learningRate: 0.003 18 | # Learning rate decay 19 | learningRateDecay: 0.95 20 | # Clipping ratio 21 | clippingRatio: 5 22 | # Optimizer decay rate 23 | optimizerDecay: 0.95 24 | # Optimizer momentum rate 25 | optimizerMomentum: 0.2 26 | # Weight regularization rate 27 | l2Rate: 0.005 28 | 29 | # ----------------------------------------------------------------------------- 30 | # Dataset parameters | 31 | # ----------------------------------------------------------------------------- 32 | 33 | # Folder of the datasets 34 | dataPath: "datasets" 35 | # Name of the training files in the dataPath 36 | trainDatasets: 37 | - "train/biwi_eth_train.txt" 38 | - "train/biwi_hotel_train.txt" 39 | - "train/crowds_zara01_train.txt" 40 | - "train/crowds_zara02_train.txt" 41 | - "train/crowds_zara03_train.txt" 42 | - "train/uni_examples_train.txt" 43 | # Name of the validation files in the dataPath 44 | validationDatasets: 45 | - "val/biwi_eth_val.txt" 46 | - "val/biwi_hotel_val.txt" 47 | - "val/crowds_zara01_val.txt" 48 | - "val/crowds_zara02_val.txt" 49 | - "val/crowds_zara03_val.txt" 50 | - "val/uni_examples_val.txt" 51 | # Name of the test files in the dataPath 52 | testDatasets: 53 | - "test/students001.txt" 54 | - "test/students003.txt" 55 | # Name of the training navigation maps in the dataPath 56 | trainMaps: 57 | - "navMap/eth.npy" 58 | - "navMap/hotel.npy" 59 | - "navMap/zara1.npy" 60 | - "navMap/zara2.npy" 61 | - "navMap/zara3.npy" 62 | - "navMap/univ3.npy" 63 | # Name of the validation navigation maps in the dataPath 64 | validationMaps: 65 | - "navMap/eth.npy" 66 | - "navMap/hotel.npy" 67 | - "navMap/zara1.npy" 68 | - "navMap/zara2.npy" 69 | - "navMap/zara3.npy" 70 | - "navMap/univ3.npy" 71 | # Name of the test navigation maps in the dataPath 72 | testMaps: 73 | - "navMap/univ1.npy" 74 | - "navMap/univ2.npy" 75 | # Number of labels of the semantic map 76 | numLabels: 8 77 | # List of semantic maps 78 | semanticMaps: 79 | - "semMap/eth.npy" 80 | - "semMap/hotel.npy" 81 | - "semMap/univ.npy" 82 | - "semMap/zara.npy" 83 | # List of homography matrices 84 | homography: 85 | - "homography/eth.txt" 86 | - "homography/hotel.txt" 87 | - "homography/univ.txt" 88 | - "homography/zara.txt" 89 | # Name of the training semantic mapping 90 | trainMapping: 91 | - "eth" 92 | - "hotel" 93 | - "zara" 94 | - "zara" 95 | - "zara" 96 | - "univ" 97 | # Name of the validation semantic mapping 98 | validationMapping: 99 | - "eth" 100 | - "hotel" 101 | - "zara" 102 | - "zara" 103 | - "zara" 104 | - "univ" 105 | # Name of the test semantic mapping 106 | testMapping: 107 | - "univ" 108 | - "univ" 109 | # Delimiter used in the files of the dataset 110 | delimiter: "tab" 111 | # Number of frames observed 112 | obsLen: 8 113 | # Number of frames that the model has to predict 114 | predLen: 12 115 | # Number of frames to skip while making the dataset. If True, the number of 116 | # frames to skip is random, between 1 and (obsLen + predLen). For example, if 117 | # sequence_1 in the dataset spans Frame_1 - Frame_N and skip = 5, then 118 | # sequence_2 will start from Frame_N+5 119 | skip: 1 120 | 121 | # ----------------------------------------------------------------------------- 122 | # Model parameters | 123 | # ----------------------------------------------------------------------------- 124 | 125 | # Maximum number of pedestrians per frame 126 | maxNumPed: 93 127 | # Size of the sum pooling window 128 | gridSize: 8 129 | # Size of the neighborhood 130 | neighborhoodSize: 2 131 | # Number of hidden units 132 | rnnSize: 128 133 | # Dimension of the embedding layer 134 | embeddingSize: 64 135 | # Type of pooling module to use. Options supported: social, occupancy, 136 | # navigation, semantic, combined or null.
Combined pooling is a list of the 137 | # other pooling layers 138 | poolingModule: 139 | - "social" 140 | - "semantic" 141 | # Image width plus padding 142 | imageWidth: 25 143 | # Image height plus padding 144 | imageHeight: 20 145 | # Width of the navigation grid 146 | navigationWidth: 400 147 | # Height of the navigation grid 148 | navigationHeight: 320 149 | # Kernel size for the average pooling 150 | kernelSize: 2 151 | # Size of the neighborhood navigation grid 152 | navigationGrid: 33 153 | # Size of the neighborhood semantic grid 154 | semanticGridSize: 50 155 | 156 | # ----------------------------------------------------------------------------- 157 | # Logger settings | 158 | # ----------------------------------------------------------------------------- 159 | 160 | # Folder where to save the logs 161 | logFolder: "logs" 162 | # Logger level 163 | logLevel: "INFO" 164 | # Folder where to save the model 165 | modelFolder: "models" 166 | # Save the sampled coordinates 167 | saveCoordinates: true 168 | -------------------------------------------------------------------------------- /experiments/zara1.yaml: -------------------------------------------------------------------------------- 1 | # Name of the experiment 2 | name: "zara1" 3 | 4 | # ----------------------------------------------------------------------------- 5 | # Optimization settings | 6 | # ----------------------------------------------------------------------------- 7 | 8 | # Buffer size of the tf.data.Dataset.prefetch option 9 | prefetchSize: 100 10 | # Shuffle the dataset 11 | shuffle: False 12 | # Number of epochs 13 | epochs: 50 14 | # Use centered option in RMSProp 15 | centered: true 16 | # Learning rate 17 | learningRate: 0.003 18 | # Learning rate decay 19 | learningRateDecay: 0.95 20 | # Clipping ratio 21 | clippingRatio: 5 22 | # Optimizer decay rate 23 | optimizerDecay: 0.95 24 | # Optimizer momentum rate 25 | optimizerMomentum: 0.2 26 | # Weight regularization rate 27 | l2Rate: 0.005 28 | 29 | # ----------------------------------------------------------------------------- 30 | # Dataset parameters | 31 | # ----------------------------------------------------------------------------- 32 | 33 | # Folder of the datasets 34 | dataPath: "datasets" 35 | # Name of the training files in the dataPath 36 | trainDatasets: 37 | - "train/biwi_eth_train.txt" 38 | - "train/biwi_hotel_train.txt" 39 | - "train/crowds_zara02_train.txt" 40 | - "train/crowds_zara03_train.txt" 41 | - "train/students001_train.txt" 42 | - "train/students003_train.txt" 43 | - "train/uni_examples_train.txt" 44 | # Name of the validation files in the dataPath 45 | validationDatasets: 46 | - "val/biwi_eth_val.txt" 47 | - "val/biwi_hotel_val.txt" 48 | - "val/crowds_zara02_val.txt" 49 | - "val/crowds_zara03_val.txt" 50 | - "val/students001_val.txt" 51 | - "val/students003_val.txt" 52 | - "val/uni_examples_val.txt" 53 | # Name of the test files in the dataPath 54 | testDatasets: 55 | - "test/crowds_zara01.txt" 56 | # Name of the training navigation maps in the dataPath 57 | trainMaps: 58 | - "navMap/eth.npy" 59 | - "navMap/hotel.npy" 60 | - "navMap/zara2.npy" 61 | - "navMap/zara3.npy" 62 | - "navMap/univ1.npy" 63 | - "navMap/univ2.npy" 64 | - "navMap/univ3.npy" 65 | # Name of the validation navigation maps in the dataPath 66 | validationMaps: 67 | - "navMap/eth.npy" 68 | - "navMap/hotel.npy" 69 | - "navMap/zara2.npy" 70 | - "navMap/zara3.npy" 71 | - "navMap/univ1.npy" 72 | - "navMap/univ2.npy" 73 | - "navMap/univ3.npy" 74 | # Name of the test
navigation maps in the dataPath 75 | testMaps: 76 | - "navMap/zara1.npy" 77 | # Number of labels of the semantic map 78 | numLabels: 8 79 | # List of semantic maps 80 | semanticMaps: 81 | - "semMap/eth.npy" 82 | - "semMap/hotel.npy" 83 | - "semMap/univ.npy" 84 | - "semMap/zara.npy" 85 | # List of homography matrices 86 | homography: 87 | - "homography/eth.txt" 88 | - "homography/hotel.txt" 89 | - "homography/univ.txt" 90 | - "homography/zara.txt" 91 | # Name of the training semantic mapping 92 | trainMapping: 93 | - "eth" 94 | - "hotel" 95 | - "zara" 96 | - "zara" 97 | - "univ" 98 | - "univ" 99 | - "univ" 100 | # Name of the validation semantic mapping 101 | validationMapping: 102 | - "eth" 103 | - "hotel" 104 | - "zara" 105 | - "zara" 106 | - "univ" 107 | - "univ" 108 | - "univ" 109 | # Name of the test semantic mapping 110 | testMapping: 111 | - "zara" 112 | # Delimiter used in the files of the dataset 113 | delimiter: "tab" 114 | # Number of frames observed 115 | obsLen: 8 116 | # Number of frames that the model has to predict 117 | predLen: 12 118 | # Number of frames to skip while making the dataset. If True, the number of 119 | # frames to skip is random, between 1 and (obsLen + predLen). For example, if 120 | # sequence_1 in the dataset spans Frame_1 - Frame_N and skip = 5, then 121 | # sequence_2 will start from Frame_N+5 122 | skip: 1 123 | 124 | # ----------------------------------------------------------------------------- 125 | # Model parameters | 126 | # ----------------------------------------------------------------------------- 127 | 128 | # Maximum number of pedestrians per frame 129 | maxNumPed: 93 130 | # Size of the sum pooling window 131 | gridSize: 8 132 | # Size of the neighborhood 133 | neighborhoodSize: 2 134 | # Number of hidden units 135 | rnnSize: 128 136 | # Dimension of the embedding layer 137 | embeddingSize: 64 138 | # Type of pooling module to use. Options supported: social, occupancy, 139 | # navigation, semantic, combined or null.
Combined pooling is a list of the 140 | # other pooling layers 141 | poolingModule: 142 | - "social" 143 | - "semantic" 144 | # Image width plus padding 145 | imageWidth: 25 146 | # Image height plus padding 147 | imageHeight: 20 148 | # Width of the navigation grid 149 | navigationWidth: 400 150 | # Height of the navigation grid 151 | navigationHeight: 320 152 | # Kernel size for the average pooling 153 | kernelSize: 2 154 | # Size of the neighborhood navigation grid 155 | navigationGrid: 33 156 | # Size of the neighborhood semantic grid 157 | semanticGridSize: 50 158 | 159 | # ----------------------------------------------------------------------------- 160 | # Logger settings | 161 | # ----------------------------------------------------------------------------- 162 | 163 | # Folder where to save the logs 164 | logFolder: "logs" 165 | # Logger level 166 | logLevel: "INFO" 167 | # Folder where to save the model 168 | modelFolder: "models" 169 | # Save the sampled coordinates 170 | saveCoordinates: true 171 | -------------------------------------------------------------------------------- /experiments/zara2.yaml: -------------------------------------------------------------------------------- 1 | # Name of the experiment 2 | name: "zara2" 3 | 4 | # ----------------------------------------------------------------------------- 5 | # Optimization settings | 6 | # ----------------------------------------------------------------------------- 7 | 8 | # Buffer size of the tf.data.Dataset.prefetch option 9 | prefetchSize: 100 10 | # Shuffle the dataset 11 | shuffle: False 12 | # Number of epochs 13 | epochs: 50 14 | # Use centered option in RMSProp 15 | centered: true 16 | # Learning rate 17 | learningRate: 0.003 18 | # Learning rate decay 19 | learningRateDecay: 0.95 20 | # Clipping ratio 21 | clippingRatio: 5 22 | # Optimizer decay rate 23 | optimizerDecay: 0.95 24 | # Optimizer momentum rate 25 | optimizerMomentum: 0.2 26 | # Weight regularization rate 27 | l2Rate: 0.005 28 | 29 | # ----------------------------------------------------------------------------- 30 | # Dataset parameters | 31 | # ----------------------------------------------------------------------------- 32 | 33 | # Folder of the datasets 34 | dataPath: "datasets" 35 | # Name of the training files in the dataPath 36 | trainDatasets: 37 | - "train/biwi_eth_train.txt" 38 | - "train/biwi_hotel_train.txt" 39 | - "train/crowds_zara01_train.txt" 40 | - "train/crowds_zara03_train.txt" 41 | - "train/students001_train.txt" 42 | - "train/students003_train.txt" 43 | - "train/uni_examples_train.txt" 44 | # Name of the validation files in the dataPath 45 | validationDatasets: 46 | - "val/biwi_eth_val.txt" 47 | - "val/biwi_hotel_val.txt" 48 | - "val/crowds_zara01_val.txt" 49 | - "val/crowds_zara03_val.txt" 50 | - "val/students001_val.txt" 51 | - "val/students003_val.txt" 52 | - "val/uni_examples_val.txt" 53 | # Name of the test files in the dataPath 54 | testDatasets: 55 | - "test/crowds_zara02.txt" 56 | # Name of the training navigation maps in the dataPath 57 | trainMaps: 58 | - "navMap/eth.npy" 59 | - "navMap/hotel.npy" 60 | - "navMap/zara1.npy" 61 | - "navMap/zara3.npy" 62 | - "navMap/univ1.npy" 63 | - "navMap/univ2.npy" 64 | - "navMap/univ3.npy" 65 | # Name of the validation navigation maps in the dataPath 66 | validationMaps: 67 | - "navMap/eth.npy" 68 | - "navMap/hotel.npy" 69 | - "navMap/zara1.npy" 70 | - "navMap/zara3.npy" 71 | - "navMap/univ1.npy" 72 | - "navMap/univ2.npy" 73 | - "navMap/univ3.npy" 74 | # Name of the test
navigation maps in the dataPath 75 | testMaps: 76 | - "navMap/zara2.npy" 77 | # Number of labels of the semantic map 78 | numLabels: 8 79 | # List of semantic maps 80 | semanticMaps: 81 | - "semMap/eth.npy" 82 | - "semMap/hotel.npy" 83 | - "semMap/univ.npy" 84 | - "semMap/zara.npy" 85 | # List of homography matrices 86 | homography: 87 | - "homography/eth.txt" 88 | - "homography/hotel.txt" 89 | - "homography/univ.txt" 90 | - "homography/zara.txt" 91 | # Name of the training semantic mapping 92 | trainMapping: 93 | - "eth" 94 | - "hotel" 95 | - "zara" 96 | - "zara" 97 | - "univ" 98 | - "univ" 99 | - "univ" 100 | # Name of the validation semantic mapping 101 | validationMapping: 102 | - "eth" 103 | - "hotel" 104 | - "zara" 105 | - "zara" 106 | - "univ" 107 | - "univ" 108 | - "univ" 109 | # Name of the test semantic mapping 110 | testMapping: 111 | - "zara" 112 | # Delimiter used in the files of the dataset 113 | delimiter: "tab" 114 | # Number of frames observed 115 | obsLen: 8 116 | # Number of frames that the model has to predict 117 | predLen: 12 118 | # Number of frames to skip while making the dataset. If True, the number of 119 | # frames to skip is random, between 1 and (obsLen + predLen). For example, if 120 | # sequence_1 in the dataset spans Frame_1 - Frame_N and skip = 5, then 121 | # sequence_2 will start from Frame_N+5 122 | skip: 1 123 | 124 | # ----------------------------------------------------------------------------- 125 | # Model parameters | 126 | # ----------------------------------------------------------------------------- 127 | 128 | # Maximum number of pedestrians per frame 129 | maxNumPed: 93 130 | # Size of the sum pooling window 131 | gridSize: 8 132 | # Size of the neighborhood 133 | neighborhoodSize: 2 134 | # Number of hidden units 135 | rnnSize: 128 136 | # Dimension of the embedding layer 137 | embeddingSize: 64 138 | # Type of pooling module to use. Options supported: social, occupancy, 139 | # navigation, semantic, combined or null.
Combined pooling is a list of the 140 | # other pooling layers 141 | poolingModule: 142 | - "social" 143 | - "semantic" 144 | # Image width plus padding 145 | imageWidth: 25 146 | # Image height plus padding 147 | imageHeight: 20 148 | # Width of the navigation grid 149 | navigationWidth: 400 150 | # Height of the navigation grid 151 | navigationHeight: 320 152 | # Kernel size for the average pooling 153 | kernelSize: 2 154 | # Size of the neighborhood navigation grid 155 | navigationGrid: 33 156 | # Size of the neighborhood semantic grid 157 | semanticGridSize: 50 158 | 159 | # ----------------------------------------------------------------------------- 160 | # Logger settings | 161 | # ----------------------------------------------------------------------------- 162 | 163 | # Folder where to save the logs 164 | logFolder: "logs" 165 | # Logger level 166 | logLevel: "INFO" 167 | # Folder where to save the model 168 | modelFolder: "models" 169 | # Save the sampled coordinates 170 | saveCoordinates: true 171 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy==1.15.4 2 | PyYAML==3.13 3 | tensorflow-gpu==1.12.0 4 | beautifultable==0.5.3 5 | -------------------------------------------------------------------------------- /scripts/logger.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | 4 | 5 | def setLogger(hparams, args, phase): 6 | log_file = hparams.name + "-" + phase.lower() + ".log" 7 | log_folder = None 8 | level = "INFO" 9 | formatter = logging.Formatter( 10 | "[%(asctime)s %(filename)s] %(levelname)s: %(message)s" 11 | ) 12 | 13 | # Check whether a FileHandler has to be added 14 | if args.logFolder is not None: 15 | log_folder = args.logFolder 16 | elif hparams.logFolder is not None: 17 | log_folder = hparams.logFolder 18 | 19 | if log_folder is not None: 20 | log_file = os.path.join(log_folder, log_file) 21 | if not os.path.exists(log_folder): 22 | os.makedirs(log_folder) 23 | 24 | # Set the level 25 | if args.logLevel is not None: 26 | level = args.logLevel.upper() 27 | elif hparams.logLevel is not None: 28 | level = hparams.logLevel.upper() 29 | 30 | # Get the logger 31 | logger = logging.getLogger() 32 | # Remove handlers added previously 33 | for handler in logger.handlers[:]: 34 | if isinstance(handler, logging.FileHandler): 35 | handler.close() 36 | logger.removeHandler(handler) 37 | if log_folder is not None: 38 | # Add a FileHandler 39 | file_handler = logging.FileHandler(log_file) 40 | file_handler.setFormatter(formatter) 41 | logger.addHandler(file_handler) 42 | # Add a StreamHandler that displays on sys.stderr 43 | stderr_handler = logging.StreamHandler() 44 | stderr_handler.setFormatter(formatter) 45 | logger.addHandler(stderr_handler) 46 | 47 | # Set the level 48 | logger.setLevel(level) 49 | --------------------------------------------------------------------------------
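setLogger resolves the log folder and level by giving the command-line arguments precedence over the experiment file. A minimal sketch of driving it by hand, with SimpleNamespace standing in for the argparse Namespace built by scripts/train.py (the attribute names mirror that parser; everything else here is illustrative and assumes scripts/ is on the import path):

from types import SimpleNamespace

from logger import setLogger

# CLI values win over the experiment file: logLevel comes from args,
# logFolder falls back to hparams, so logs go to logs/eth-train.log at DEBUG.
args = SimpleNamespace(logFolder=None, logLevel="DEBUG")
hparams = SimpleNamespace(name="eth", logFolder="logs", logLevel="INFO")
setLogger(hparams, args, phase="TRAIN")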
/scripts/makeNavMap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import numpy as np 5 | 6 | 7 | def main(): 8 | parser = argparse.ArgumentParser( 9 | description="Build the navigation map for the given datasets" 10 | ) 11 | parser.add_argument( 12 | "datasets", nargs="+", help="Dataset paths. One or more datasets can be passed" 13 | ) 14 | parser.add_argument("imageWidth", type=int, help="Image width") 15 | parser.add_argument("imageHeight", type=int, help="Image height") 16 | parser.add_argument("navigationWidth", type=int, help="Navigation width") 17 | parser.add_argument("navigationHeight", type=int, help="Navigation height") 18 | parser.add_argument("neighborhood", type=int, help="Neighborhood") 19 | parser.add_argument( 20 | "destination", type=str, help="Filename where to save the navigation map" 21 | ) 22 | args = parser.parse_args() 23 | 24 | image_size = [args.imageHeight, args.imageWidth] 25 | navigation_size = [args.navigationHeight, args.navigationWidth] 26 | 27 | navigation_map = make_navigation_map( 28 | args.datasets, image_size, navigation_size, args.neighborhood 29 | ) 30 | 31 | np.save(args.destination, navigation_map) 32 | 33 | 34 | def make_navigation_map(datasets, image_size, navigation_size, neighborhood): 35 | """Build the navigation map for the given datasets. 36 | 37 | Args: 38 | datasets: list. Datasets. 39 | image_size: list. Height and width of the image. 40 | navigation_size: list. Height and width of the navigation map. 41 | neighborhood: int. Neighborhood size. 42 | 43 | Returns: 44 | ndarray containing the navigation map. 45 | 46 | """ 47 | navigation_map = np.zeros(navigation_size[0] * navigation_size[1]) 48 | 49 | for dataset_path in datasets: 50 | dataset = np.loadtxt(dataset_path, delimiter="\t") 51 | 52 | top_left = [ 53 | np.floor(min(dataset[:, 2]) - neighborhood / 2), 54 | np.ceil(max(dataset[:, 3]) + neighborhood / 2), 55 | ] 56 | cell_x = np.floor( 57 | ((dataset[:, 2] - top_left[0]) / image_size[1]) * navigation_size[1] 58 | ) 59 | cell_y = np.floor( 60 | ((top_left[1] - dataset[:, 3]) / image_size[0]) * navigation_size[0] 61 | ) 62 | grid_pos = cell_x + cell_y * navigation_size[1] 63 | # For each cell, count each pedestrian only once 64 | grid_pos = np.stack([dataset[:, 1], grid_pos], axis=1) 65 | grid_pos = np.unique(grid_pos, axis=0) 66 | grid_pos = grid_pos[:, 1].astype(int) 67 | np.add.at(navigation_map, grid_pos, 1) 68 | 69 | # Normalize to [0, 1] 70 | max_norm = max(navigation_map) 71 | navigation_map = navigation_map / max_norm 72 | navigation_map = np.reshape( 73 | navigation_map, [navigation_size[0], navigation_size[1]] 74 | ) 75 | 76 | return navigation_map 77 | 78 | 79 | if __name__ == "__main__": 80 | main() 81 | --------------------------------------------------------------------------------
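Given the positional arguments defined above, a plausible invocation to rebuild datasets/navMap/eth.npy would be the following; the width, height and neighborhood values are taken from experiments/eth.yaml, and whether exactly these arguments produced the shipped .npy files is an assumption:

python scripts/makeNavMap.py datasets/train/biwi_eth_train.txt 25 20 400 320 2 datasets/navMap/eth.npy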
/scripts/sample.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import time 5 | import pickle 6 | import logging 7 | import argparse 8 | import numpy as np 9 | import tensorflow as tf 10 | from beautifultable import BeautifulTable 11 | 12 | import utils 13 | from logger import setLogger 14 | from model import SocialModel 15 | 16 | 17 | PHASE = "SAMPLE" 18 | 19 | 20 | def main(): 21 | parser = argparse.ArgumentParser( 22 | description="Sample new trajectories with a social LSTM" 23 | ) 24 | parser.add_argument( 25 | "modelParams", 26 | type=str, 27 | help="Path to the file or folder with the parameters of the experiments", 28 | ) 29 | parser.add_argument( 30 | "-l", 31 | "--logLevel", 32 | help="logging level of the logger. Default is INFO", 33 | metavar="level", 34 | type=str, 35 | ) 36 | parser.add_argument( 37 | "-f", 38 | "--logFolder", 39 | help="path to the folder where to save the logs. If None, logs are only printed to stderr", 40 | metavar="path", 41 | type=str, 42 | ) 43 | parser.add_argument( 44 | "-ns", 45 | "--noSaveCoordinates", 46 | help="Flag to not save the predicted and ground truth coordinates", 47 | action="store_true", 48 | ) 49 | args = parser.parse_args() 50 | 51 | if os.path.isdir(args.modelParams): 52 | names_experiments = os.listdir(args.modelParams) 53 | experiments = [ 54 | os.path.join(args.modelParams, experiment) 55 | for experiment in names_experiments 56 | ] 57 | else: 58 | experiments = [args.modelParams] 59 | 60 | # Table showing the metrics of each experiment 61 | results = BeautifulTable() 62 | results.column_headers = ["Name experiment", "ADE", "FDE"] 63 | 64 | for experiment in experiments: 65 | # Load the parameters 66 | hparams = utils.YParams(experiment) 67 | # Define the logger 68 | setLogger(hparams, args, PHASE) 69 | 70 | remainSpaces = 29 - len(hparams.name) 71 | logging.info( 72 | "\n" 73 | + "--------------------------------------------------------------------------------\n" 74 | + "| Sampling experiment: " 75 | + hparams.name 76 | + " " * remainSpaces 77 | + "|\n" 78 | + "--------------------------------------------------------------------------------\n" 79 | ) 80 | 81 | trajectory_size = hparams.obsLen + hparams.predLen 82 | 83 | saveCoordinates = False 84 | if args.noSaveCoordinates: 85 | saveCoordinates = False 86 | elif hparams.saveCoordinates: 87 | saveCoordinates = hparams.saveCoordinates 88 | 89 | if saveCoordinates: 90 | coordinates_path = os.path.join("coordinates", hparams.name) 91 | if not os.path.exists("coordinates"): 92 | os.makedirs("coordinates") 93 | 94 | logging.info("Loading the test datasets...") 95 | test_loader = utils.DataLoader( 96 | hparams.dataPath, 97 | hparams.testDatasets, 98 | hparams.testMaps, 99 | hparams.semanticMaps, 100 | hparams.testMapping, 101 | hparams.homography, 102 | num_labels=hparams.numLabels, 103 | delimiter=hparams.delimiter, 104 | skip=hparams.skip, 105 | max_num_ped=hparams.maxNumPed, 106 | trajectory_size=trajectory_size, 107 | neighborood_size=hparams.neighborhoodSize, 108 | ) 109 | 110 | logging.info("Creating the test dataset pipeline...") 111 | dataset = utils.TrajectoriesDataset( 112 | test_loader, 113 | val_loader=None, 114 | batch=False, 115 | shuffle=hparams.shuffle, 116 | prefetch_size=hparams.prefetchSize, 117 | ) 118 | 119 | logging.info("Creating the model...") 120 | start = time.time() 121 | model = SocialModel(dataset, hparams, phase=PHASE) 122 | end = time.time() - start 123 | logging.debug("Model created in {:.2f}s".format(end)) 124 | 125 | # Define the path to the file that contains the variables of the model 126 | model_folder = os.path.join(hparams.modelFolder, hparams.name) 127 | model_path = os.path.join(model_folder, hparams.name) 128 | 129 | # Create a saver 130 | saver = tf.train.Saver() 131 | 132 | # Add to the computation graph the evaluation functions 133 | ade_sequence = utils.average_displacement_error( 134 | model.new_pedestrians_coordinates[-hparams.predLen :], 135 | model.pedestrians_coordinates[-hparams.predLen :], 136 | model.num_peds_frame, 137 | ) 138 | 139 | fde_sequence = utils.final_displacement_error( 140 | model.new_pedestrians_coordinates[-1], 141 | model.pedestrians_coordinates[-1], 142 | model.num_peds_frame, 143 | ) 144 | 145 | ade = 0 146 | fde = 0 147 | coordinates_predicted = [] 148 | coordinates_gt = [] 149 | peds_in_sequence = [] 150 | 151 | # Zero padding 152 | padding = len(str(test_loader.num_sequences)) 153 | 154 | #
============================ START SAMPLING ============================ 155 | 156 | with tf.Session() as sess: 157 | # Restore the trained model 158 | saver.restore(sess, model_path) 159 | 160 | # Initialize the iterator of the sample dataset 161 | sess.run(dataset.init_train) 162 | 163 | logging.info( 164 | "\n" 165 | + "--------------------------------------------------------------------------------\n" 166 | + "| Start sampling |\n" 167 | + "--------------------------------------------------------------------------------\n" 168 | ) 169 | 170 | for seq in range(test_loader.num_sequences): 171 | logging.info( 172 | "Sample trajectory number {:{width}d}/{}".format( 173 | seq + 1, test_loader.num_sequences, width=padding 174 | ) 175 | ) 176 | 177 | ade_value, fde_value, coordinates_pred_value, coordinates_gt_value, num_peds = sess.run( 178 | [ 179 | ade_sequence, 180 | fde_sequence, 181 | model.new_pedestrians_coordinates, 182 | model.pedestrians_coordinates, 183 | model.num_peds_frame, 184 | ] 185 | ) 186 | ade += ade_value 187 | fde += fde_value 188 | coordinates_predicted.append(coordinates_pred_value) 189 | coordinates_gt.append(coordinates_gt_value) 190 | peds_in_sequence.append(num_peds) 191 | 192 | ade = ade / test_loader.num_sequences 193 | fde = fde / test_loader.num_sequences 194 | logging.info("Sampling finished. ADE: {:.4f} FDE: {:.4f}".format(ade, fde)) 195 | results.append_row([hparams.name, ade, fde]) 196 | 197 | if saveCoordinates: 198 | coordinates_predicted = np.array(coordinates_predicted) 199 | coordinates_gt = np.array(coordinates_gt) 200 | saveCoords( 201 | coordinates_predicted, 202 | coordinates_gt, 203 | peds_in_sequence, 204 | hparams.predLen, 205 | coordinates_path, 206 | ) 207 | tf.reset_default_graph() 208 | logging.info("\n{}".format(results)) 209 | 210 | 211 | def saveCoords(pred, coordinates_gt, peds_in_sequence, pred_len, coordinates_path): 212 | """Save a pickle file with a dictionary containing the predicted coordinates, 213 | the ground truth coordinates and the number of pedestrians in each sequence. 214 | 215 | Args: 216 | pred: numpy array [num_sequence, trajectory_size, max_num_ped, 2]. The 217 | predicted coordinates. 218 | coordinates_gt: numpy array [num_sequence, trajectory_size, max_num_ped, 219 | 2]. The ground truth coordinates. 220 | peds_in_sequence: numpy array [num_sequence]. The number of pedestrians in 221 | each sequence. 222 | pred_len: int. Number of prediction time-steps. 223 | coordinates_path: string. Path to where to save the coordinates.
224 | 225 | """ 226 | coordinates = {"groundTruth": coordinates_gt, "pedsInSequence": peds_in_sequence} 227 | coordinates_pred = coordinates_gt.copy() 228 | 229 | for index, sequence in enumerate(pred): 230 | coords = sequence[-pred_len:, : peds_in_sequence[index]] 231 | coordinates_pred[index, -pred_len:, : peds_in_sequence[index]] = coords 232 | 233 | coordinates["predicted"] = coordinates_pred 234 | with open(coordinates_path + ".pkl", "wb") as fp: 235 | pickle.dump(coordinates, fp) 236 | 237 | 238 | if __name__ == "__main__": 239 | main() 240 | --------------------------------------------------------------------------------
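The pickle written by saveCoords is a plain dictionary, so the sampled trajectories can be inspected offline. A sketch of reading it back and recomputing an ADE-style error with NumPy; the file name follows the coordinates_path + ".pkl" convention above, and whether this exactly matches utils.average_displacement_error (utils/evaluation.py appears further down in the repository and is not shown here) is an assumption:

import pickle

import numpy as np

# "coordinates/eth.pkl" assumes the experiment was named "eth"; the keys are
# the ones written by saveCoords above.
with open("coordinates/eth.pkl", "rb") as fp:
    coords = pickle.load(fp)

pred_len = 12  # predLen in the experiment files
errors = []
for seq_pred, seq_gt, num_peds in zip(
    coords["predicted"], coords["groundTruth"], coords["pedsInSequence"]
):
    # Euclidean error over the predicted window, real pedestrians only
    diff = seq_pred[-pred_len:, :num_peds] - seq_gt[-pred_len:, :num_peds]
    errors.append(np.linalg.norm(diff, axis=-1).mean())
print("ADE:", np.mean(errors))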
pipeline...") 101 | dataset = utils.TrajectoriesDataset( 102 | train_loader, 103 | val_loader=val_loader, 104 | batch=False, 105 | shuffle=hparams.shuffle, 106 | prefetch_size=hparams.prefetchSize, 107 | ) 108 | 109 | hparams.add_hparam("learningRateSteps", train_loader.num_sequences) 110 | 111 | logging.info("Creating the model...") 112 | start = time.time() 113 | model = SocialModel(dataset, hparams, phase=PHASE) 114 | end = time.time() - start 115 | logging.debug("Model created in {:.2f}s".format(end)) 116 | 117 | # Define the path to where save the model and the checkpoints 118 | if hparams.modelFolder: 119 | save_model = True 120 | model_folder = os.path.join(hparams.modelFolder, hparams.name) 121 | if not os.path.exists(model_folder): 122 | os.makedirs(model_folder) 123 | os.makedirs(os.path.join(model_folder, "checkpoints")) 124 | model_path = os.path.join(model_folder, hparams.name) 125 | checkpoints_path = os.path.join(model_folder, "checkpoints", hparams.name) 126 | # Create the saver 127 | saver = tf.train.Saver() 128 | 129 | # Zero padding 130 | padding = len(str(train_loader.num_sequences)) 131 | 132 | # ============================ START TRAINING ============================ 133 | 134 | with tf.Session() as sess: 135 | logging.info( 136 | "\n" 137 | + "--------------------------------------------------------------------------------\n" 138 | + "| Start training |\n" 139 | + "--------------------------------------------------------------------------------\n" 140 | ) 141 | # Initialize all the variables in the graph 142 | sess.run(tf.global_variables_initializer()) 143 | 144 | for epoch in range(hparams.epochs): 145 | logging.info("Starting epoch {}".format(epoch + 1)) 146 | 147 | # ==================== TRAINING PHASE ==================== 148 | 149 | # Initialize the iterator of the training dataset 150 | sess.run(dataset.init_train) 151 | 152 | for sequence in range(train_loader.num_sequences): 153 | start = time.time() 154 | loss, _ = sess.run([model.loss, model.train_optimizer]) 155 | end = time.time() - start 156 | 157 | logging.info( 158 | "{:{width}d}/{} epoch: {} time/Batch = {:.2f}s. Loss = {:.4f}".format( 159 | sequence + 1, 160 | train_loader.num_sequences, 161 | epoch + 1, 162 | end, 163 | loss, 164 | width=padding, 165 | ) 166 | ) 167 | 168 | # ==================== VALIDATION PHASE ==================== 169 | 170 | logging.info(" ========== Validation ==========") 171 | # Initialize the iterator of the validation dataset 172 | sess.run(dataset.init_val) 173 | loss_val = 0 174 | 175 | for _ in range(val_loader.num_sequences): 176 | loss = sess.run(model.loss) 177 | loss_val += loss 178 | 179 | mean_val = loss_val / val_loader.num_sequences 180 | 181 | logging.info( 182 | "Epoch: {}. Validation loss = {:.4f}".format(epoch + 1, mean_val) 183 | ) 184 | 185 | # Save the model 186 | if save_model: 187 | logging.info("Saving model...") 188 | saver.save( 189 | sess, 190 | checkpoints_path, 191 | global_step=epoch + 1, 192 | write_meta_graph=False, 193 | ) 194 | logging.info("Model saved...") 195 | # Save the final model 196 | if save_model: 197 | saver.save(sess, model_path) 198 | tf.reset_default_graph() 199 | 200 | 201 | if __name__ == "__main__": 202 | main() 203 | -------------------------------------------------------------------------------- /social-lstm/coordinates_helpers.py: -------------------------------------------------------------------------------- 1 | """Module that defines the coordinates helper. 
/social-lstm/coordinates_helpers.py: -------------------------------------------------------------------------------- 1 | """Module that defines the coordinates helpers. A helper provides the correct 2 | coordinates according to the training or sampling phase and the time-step.""" 3 | import tensorflow as tf 4 | 5 | 6 | def train_helper(step, coordinates_gt, coordinates_gt_rel, *args): 7 | """Helper used in training phase. Returns the ground truth coordinates. 8 | 9 | Args: 10 | step: int. The current time-step. 11 | coordinates_gt: tensor of shape [max_num_ped, 2]. The ground truth 12 | coordinates. 13 | coordinates_gt_rel: tensor of shape [max_num_ped, 2]. The ground truth 14 | relative coordinates. 15 | 16 | Returns: 17 | In training phase always returns the ground truth coordinates. 18 | 19 | """ 20 | return coordinates_gt, coordinates_gt_rel 21 | 22 | 23 | def sample_helper(obs_len): 24 | """Helper used in sampling phase. Returns a function that returns the ground 25 | truth coordinates or the predicted coordinates based on the value of step. 26 | 27 | In sampling phase, if the step variable is less than obs_len, the helper 28 | returns the ground truth coordinates. Otherwise it returns the predicted 29 | coordinates. 30 | 31 | Args: 32 | obs_len: int. The number of time-steps to be observed. 33 | 34 | Returns: 35 | A function that receives as input the time-step, the ground truth 36 | coordinates and the predicted coordinates and returns the right 37 | coordinates according to the time-step. 38 | 39 | """ 40 | 41 | def helper( 42 | step, 43 | coordinates_gt, 44 | coordinates_gt_rel, 45 | coordinates_predicted, 46 | coordinates_predicted_rel, 47 | ): 48 | return tf.cond( 49 | step >= obs_len, 50 | lambda: (coordinates_predicted, coordinates_predicted_rel), 51 | lambda: (coordinates_gt, coordinates_gt_rel), 52 | ) 53 | 54 | return helper 55 | -------------------------------------------------------------------------------- /social-lstm/losses.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | 4 | def social_loss_function(coordinates): 5 | """Calculate the negative log-likelihood loss. 6 | 7 | Args: 8 | coordinates: tensor of shape [trajectory_size]. The values calculated from 9 | the social_train_position_estimate function. 10 | 11 | Returns: 12 | The negative log-likelihood loss. 13 | 14 | """ 15 | # For numerical stability 16 | epsilon = 1e-20 17 | 18 | return tf.reduce_sum(-tf.log(coordinates + epsilon)) 19 | --------------------------------------------------------------------------------
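social_loss_function above sums -log over per-step likelihood values; in the Social LSTM formulation those values are bivariate Gaussian densities produced by position_estimates.py (listed in the tree but not shown in this part of the listing), evaluated at the ground truth offsets. An illustrative NumPy rendering of the same quantity, with invented pdf values:

import numpy as np

# Same numerical-stability constant as losses.py; the pdf values are made up.
epsilon = 1e-20
pdf_values = np.array([0.8, 0.5, 0.05])  # one likelihood per predicted step
loss = np.sum(-np.log(pdf_values + epsilon))
print(loss)  # ~3.91; the low-likelihood step dominates the loss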
/social-lstm/model.py: -------------------------------------------------------------------------------- 1 | """Module that defines the SocialLSTM model.""" 2 | import logging 3 | import tensorflow as tf 4 | 5 | import losses 6 | import pooling_layers 7 | import position_estimates 8 | import coordinates_helpers 9 | 10 | TRAIN = "TRAIN" 11 | SAMPLE = "SAMPLE" 12 | 13 | 14 | class SocialModel: 15 | """SocialModel defines the model described in the Social LSTM paper.""" 16 | 17 | def __init__(self, dataset, hparams, phase=TRAIN): 18 | """Constructor of the SocialModel class. 19 | 20 | Args: 21 | dataset: A TrajectoriesDataset instance. 22 | hparams: An HParams instance. 23 | phase: string. Train or sample phase. 24 | 25 | """ 26 | # Create the tensor for the pedestrians of shape 27 | # [trajectory_size, max_num_ped, 2] 28 | self.pedestrians_coordinates = dataset.tensors[0] 29 | # Create the tensor for the pedestrian relative coordinates of shape 30 | # [trajectory_size, max_num_ped, 2] 31 | pedestrians_coordinates_rel = dataset.tensors[1] 32 | # Create the tensor for the ped mask of shape 33 | # [trajectory_size, max_num_ped, max_num_ped] 34 | pedestrians_mask = dataset.tensors[2] 35 | # Create the tensor for num_peds_frame 36 | self.num_peds_frame = dataset.tensors[3] 37 | # Create the tensor for the loss mask of shape 38 | # [trajectory_size, max_num_ped] 39 | loss_mask = dataset.tensors[4] 40 | # Create the tensor for the navigation map of shape 41 | # [navigation_width, navigation_height] 42 | navigation_map = dataset.tensors[5] 43 | # Create the tensor for the upper left-most point in the dataset 44 | top_left_dataset = dataset.tensors[6] 45 | # Create the tensor for the semantic map of shape 46 | # [num_points, 2 + num_labels] 47 | semantic_map = dataset.tensors[7] 48 | # Create the tensor for the homography matrix of shape [3,3] 49 | homography = dataset.tensors[8] 50 | # Create the tensor for the pedestrian relative coordinates of shape 51 | # [max_num_ped, 2] 52 | new_pedestrians_coordinates_rel = tf.zeros([hparams.maxNumPed, 2]) 53 | 54 | # Store the parameters 55 | # Output size of the linear layer 56 | output_size = 5 57 | trajectory_size = hparams.obsLen + hparams.predLen 58 | # Contains the predicted coordinates or the pdf of the last frame computed 59 | new_pedestrians_coordinates = tf.TensorArray( 60 | dtype=tf.float32, size=trajectory_size, clear_after_read=False 61 | ) 62 | 63 | # Counter for the adaptive learning rate. Counts the number of batches 64 | # processed.
65 | global_step = tf.Variable(0, trainable=False, name="Global_step") 66 | 67 | # ============================ BUILD MODEL ============================ 68 | 69 | # Create the helper class 70 | logging.info("Creating the social helper") 71 | if phase == TRAIN: 72 | helper = coordinates_helpers.train_helper 73 | elif phase == SAMPLE: 74 | helper = coordinates_helpers.sample_helper(hparams.obsLen) 75 | 76 | # Create the pooling layer 77 | pooling_module = None 78 | if isinstance(hparams.poolingModule, list): 79 | logging.info( 80 | "Creating the combined pooling: {}".format(hparams.poolingModule) 81 | ) 82 | pooling_module = pooling_layers.CombinedPooling(hparams).pooling 83 | 84 | elif hparams.poolingModule == "social": 85 | logging.info("Creating the {} pooling".format(hparams.poolingModule)) 86 | pooling_module = pooling_layers.SocialPooling(hparams).pooling 87 | 88 | elif hparams.poolingModule == "occupancy": 89 | logging.info("Creating the {} pooling".format(hparams.poolingModule)) 90 | pooling_module = pooling_layers.OccupancyPooling(hparams).pooling 91 | 92 | elif hparams.poolingModule == "navigation": 93 | logging.info("Creating the {} pooling".format(hparams.poolingModule)) 94 | pooling_module = pooling_layers.NavigationPooling(hparams).pooling 95 | elif hparams.poolingModule == "semantic": 96 | logging.info("Creating the {} pooling".format(hparams.poolingModule)) 97 | pooling_module = pooling_layers.SemanticPooling(hparams).pooling 98 | 99 | # Create the position estimates functions 100 | logging.info("Creating the social position estimate function") 101 | if phase == TRAIN: 102 | position_estimate = position_estimates.social_train_position_estimate 103 | elif phase == SAMPLE: 104 | position_estimate = position_estimates.social_sample_position_estimate 105 | 106 | # Create the loss function 107 | logging.info("Creating the social loss function") 108 | loss_function = losses.social_loss_function 109 | 110 | # ============================ MODEL LAYERS ============================ 111 | 112 | # Define the LSTM with dimension rnn_size 113 | with tf.variable_scope("LSTM"): 114 | cell = tf.nn.rnn_cell.LSTMCell(hparams.rnnSize, name="Cell") 115 | # Output (or hidden states) of the LSTMs 116 | cell_output = tf.zeros( 117 | [hparams.maxNumPed, hparams.rnnSize], tf.float32, name="Output" 118 | ) 119 | # Define the states of the LSTMs. zero_state returns a named tuple 120 | # with two tensors of shape [max_num_ped, state_size] 121 | with tf.name_scope("States"): 122 | cell_states = cell.zero_state(hparams.maxNumPed, tf.float32) 123 | 124 | # Define the layer with ReLU used for processing the coordinates 125 | coordinates_layer = tf.layers.Dense( 126 | hparams.embeddingSize, 127 | activation=tf.nn.relu, 128 | kernel_initializer=tf.contrib.layers.xavier_initializer(), 129 | name="Coordinates/Layer", 130 | ) 131 | 132 | # Define the linear layer used as output_layer for the decoder 133 | output_layer = tf.layers.Dense( 134 | output_size, 135 | kernel_initializer=tf.contrib.layers.xavier_initializer(), 136 | name="Position_estimation/Layer", 137 | ) 138 | 139 | # ============================ LOOP FUNCTIONS =========================== 140 | 141 | frame = tf.constant(0) 142 | 143 | # If phase is TRAIN, new_pedestrians_coordinates contains the pdf and it 144 | # has shape [trajectory_size, max_num_ped].
If phase is SAMPLE, 145 | # new_pedestrians_coordinates contains the predicted coordinates and it 146 | # has shape [trajectory_size, max_num_ped, 2] 147 | if phase == TRAIN: 148 | new_pedestrians_coordinates = new_pedestrians_coordinates.write( 149 | 0, tf.zeros(hparams.maxNumPed) 150 | ) 151 | elif phase == SAMPLE: 152 | new_pedestrians_coordinates = new_pedestrians_coordinates.write( 153 | 0, self.pedestrians_coordinates[0] 154 | ) 155 | 156 | def cond(frame, *args): 157 | return frame < (trajectory_size - 1) 158 | 159 | def body( 160 | frame, 161 | new_pedestrians_coordinates, 162 | new_pedestrians_coordinates_rel, 163 | cell_output, 164 | cell_states, 165 | ): 166 | # Process the coordinates: apply the linear layer with ReLU 167 | current_coordinates, current_coordinates_rel = helper( 168 | frame, 169 | self.pedestrians_coordinates[frame], 170 | pedestrians_coordinates_rel[frame], 171 | new_pedestrians_coordinates.read(frame), 172 | new_pedestrians_coordinates_rel, 173 | ) 174 | pedestrians_coordinates_preprocessed = coordinates_layer( 175 | current_coordinates_rel 176 | ) 177 | 178 | # If pooling_module is not None, add the pooling layer 179 | if pooling_module is not None: 180 | pooling_output = pooling_module( 181 | current_coordinates, 182 | states=cell_output, 183 | peds_mask=pedestrians_mask[frame], 184 | navigation_map=navigation_map, 185 | top_left_dataset=top_left_dataset, 186 | semantic_map=semantic_map, 187 | H=homography, 188 | ) 189 | cell_input = tf.concat( 190 | [pedestrians_coordinates_preprocessed, pooling_output], 191 | 1, 192 | name="Rnn_input", 193 | ) 194 | else: 195 | cell_input = pedestrians_coordinates_preprocessed 196 | 197 | # Compute one step of the LSTM cell 198 | cell_output, cell_states = cell(cell_input, cell_states) 199 | 200 | # Apply the linear layer to the cell output 201 | layered_output = output_layer(cell_output) 202 | 203 | # Compute the new coordinates or the pdf 204 | if phase == TRAIN: 205 | coordinates_predicted = position_estimate( 206 | layered_output, output_size, pedestrians_coordinates_rel[frame + 1] 207 | ) 208 | elif phase == SAMPLE: 209 | new_pedestrians_coordinates_rel = position_estimate( 210 | layered_output, output_size 211 | ) 212 | coordinates_predicted = ( 213 | new_pedestrians_coordinates.read(frame) 214 | + new_pedestrians_coordinates_rel 215 | ) 216 | 217 | # Append new_coordinates 218 | new_pedestrians_coordinates = new_pedestrians_coordinates.write( 219 | frame + 1, coordinates_predicted 220 | ) 221 | return ( 222 | frame + 1, 223 | new_pedestrians_coordinates, 224 | new_pedestrians_coordinates_rel, 225 | cell_output, 226 | cell_states, 227 | ) 228 | 229 | # Decode the coordinates 230 | _, new_pedestrians_coordinates, _, _, _ = tf.while_loop( 231 | cond, 232 | body, 233 | loop_vars=[ 234 | frame, 235 | new_pedestrians_coordinates, 236 | new_pedestrians_coordinates_rel, 237 | cell_output, 238 | cell_states, 239 | ], 240 | ) 241 | 242 | # In training phase the list contains the values to minimize. In 243 | # sampling phase it has the predicted coordinates, with shape 244 | # [trajectory_size, max_num_ped, 2] 245 | self.new_pedestrians_coordinates = new_pedestrians_coordinates.stack( 246 | "new_coordinates" 247 | ) 248 | 249 | if phase == TRAIN: 250 | with tf.variable_scope("Calculate_loss"): 251 | # boolean_mask produces a warning because a sparse IndexedSlices 252 | # is implicitly converted to a dense tensor.
This typically 253 | # happens when one operation (usually tf.gather()) 254 | # backpropagates a sparse gradient, but the op that receives it 255 | # does not have a specialized gradient function that can handle 256 | # sparse gradients. As a result, TensorFlow automatically 257 | # densifies the tf.IndexedSlices, which can have a devastating 258 | # effect on performance if the tensor is large. To fix the 259 | # problem we use dynamic_partition. The values for the loss 260 | # function are in the second list 261 | loss_values = tf.dynamic_partition( 262 | self.new_pedestrians_coordinates[-hparams.predLen :], 263 | loss_mask[-hparams.predLen :], 264 | 2, 265 | ) 266 | self.loss = loss_function(loss_values[1]) 267 | self.loss = tf.div(self.loss, tf.cast(self.num_peds_frame, tf.float32)) 268 | 269 | # Add weights regularization 270 | tvars = tf.trainable_variables() 271 | l2_loss = ( 272 | tf.add_n([tf.nn.l2_loss(v) for v in tvars if "bias" not in v.name]) 273 | * hparams.l2Rate 274 | ) 275 | self.loss = self.loss + l2_loss 276 | 277 | # Step decay of the learning rate, once per epoch 278 | learning_rate = tf.train.exponential_decay( 279 | hparams.learningRate, 280 | global_step, 281 | hparams.learningRateSteps, 282 | hparams.learningRateDecay, 283 | ) 284 | 285 | # Define the RMSProp optimizer 286 | optimizer = tf.train.RMSPropOptimizer( 287 | learning_rate, 288 | decay=hparams.optimizerDecay, 289 | momentum=hparams.optimizerMomentum, 290 | centered=hparams.centered, 291 | ) 292 | # Global norm clipping 293 | gradients, variables = zip(*optimizer.compute_gradients(self.loss)) 294 | clipped, _ = tf.clip_by_global_norm(gradients, hparams.clippingRatio) 295 | self.train_optimizer = optimizer.apply_gradients( 296 | zip(clipped, variables), global_step=global_step 297 | ) 298 | --------------------------------------------------------------------------------
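The Calculate_loss block above sidesteps tf.boolean_mask for the reason spelled out in its comment and masks with tf.dynamic_partition instead. A standalone toy version of that pattern, with invented shapes and values:

import tensorflow as tf

# Partition 2 ways by the 0/1 loss mask; partition 1 collects the entries
# that should enter the loss, as in model.py.
values = tf.constant([0.9, 0.2, 0.7, 0.4])
loss_mask = tf.constant([1, 0, 1, 0])
parts = tf.dynamic_partition(values, loss_mask, 2)

with tf.Session() as sess:
    print(sess.run(parts[1]))  # [0.9 0.7]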
53 | 54 | """ 55 | top_left_x = coordinates[:, 0] - (self.neighborhood_size / 2) 56 | top_left_y = coordinates[:, 1] + (self.neighborhood_size / 2) 57 | bottom_right_x = coordinates[:, 0] + (self.neighborhood_size / 2) 58 | bottom_right_y = coordinates[:, 1] - (self.neighborhood_size / 2) 59 | 60 | top_left = tf.stack([top_left_x, top_left_y], axis=1) 61 | bottom_right = tf.stack([bottom_right_x, bottom_right_y], axis=1) 62 | 63 | return top_left, bottom_right 64 | 65 | def _grid_pos(self, top_left, coordinates): 66 | """Calculate the position in the grid layer of the neighbours. 67 | 68 | Args: 69 | top_left: tensor of shape [max_num_ped * max_num_ped, 2]. Top left 70 | bound. 71 | coordinates: tensor of shape [max_num_ped * max_num_ped, 2]. 72 | Coordinates. 73 | 74 | Returns: 75 | Tensor of shape [max_num_ped * max_num_ped] that is the position in 76 | the grid layer of the neighbours. 77 | 78 | """ 79 | cell_x = tf.floor( 80 | ((coordinates[:, 0] - top_left[:, 0]) / self.neighborhood_size) 81 | * self.grid_size 82 | ) 83 | cell_y = tf.floor( 84 | ((top_left[:, 1] - coordinates[:, 1]) / self.neighborhood_size) 85 | * self.grid_size 86 | ) 87 | grid_pos = cell_x + cell_y * self.grid_size 88 | return tf.cast(grid_pos, tf.int32) 89 | 90 | def _repeat(self, tensor): 91 | """Repeat each row of the input tensor max_num_ped times. 92 | 93 | Args: 94 | tensor: tensor of shape [max_num_ped, n]. 95 | Returns: 96 | tensor of shape [max_num_ped * max_num_ped, n]. Repeat each row of the 97 | input tensor in order to have row1, row1, row1, row2, row2, row2, 98 | etc. 99 | 100 | """ 101 | col_len = tensor.shape[1] 102 | tensor = tf.expand_dims(tensor, 1) 103 | # Tensor has now shape [max_num_ped, 1, n]. Now repeat each row 104 | tensor = tf.tile(tensor, (1, self.max_num_ped, 1)) 105 | tensor = tf.reshape(tensor, (-1, col_len)) 106 | 107 | return tensor 108 | 109 | 110 | class SocialPooling(Pooling): 111 | """Implement the Social pooling layer defined in the Social LSTM paper.""" 112 | 113 | def __init__(self, hparams): 114 | """Constructor of the Social pooling class. 115 | 116 | Args: 117 | hparams: An HParams instance. hparams must contain the gridSize, 118 | neighborhoodSize, maxNumPed, embeddingSize and rnnSize values. 119 | 120 | """ 121 | super().__init__(hparams) 122 | self.rnn_size = hparams.rnnSize 123 | 124 | def pooling(self, coordinates, states=None, peds_mask=None, **kwargs): 125 | """Compute the social pooling. 126 | 127 | Args: 128 | coordinates: tensor of shape [max_num_ped, 2]. Coordinates. 129 | states: tensor of shape [max_num_ped, rnn_size]. Cell states of the 130 | LSTM. 131 | peds_mask: tensor of shape [max_num_ped, max_num_ped]. Mask for the grid layer. 132 | 133 | Returns: 134 | The social pooling layer.
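The returned tensor has shape [max_num_ped, embedding_size]. As a worked example (illustrative values): with neighborhood_size = 4 and grid_size = 2, a pedestrian at (0, 0) has top left bound (-2, 2), so a neighbour at (1, 1) falls in cell_x = floor((1 - (-2)) / 4 * 2) = 1 and cell_y = floor((2 - 1) / 4 * 2) = 0, i.e. grid position 1 + 0 * 2 = 1.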
135 | 136 | """ 137 | top_left, bottom_right = self._get_bounds(coordinates) 138 | 139 | # Repeat the coordinates in order to have P1, P2, P3, P1, P2, P3 140 | coordinates = tf.tile(coordinates, (self.max_num_ped, 1)) 141 | # Repeat the hidden states in order to have S1, S2, S3, S1, S2, S3 142 | states = tf.tile(states, (self.max_num_ped, 1)) 143 | # Repeat the bounds in order to have B1, B1, B1, B2, B2, B2 144 | top_left = self._repeat(top_left) 145 | bottom_right = self._repeat(bottom_right) 146 | 147 | grid_layout = self._grid_pos(top_left, coordinates) 148 | 149 | # Find which pedestrians to include 150 | x_bound = tf.logical_and( 151 | (coordinates[:, 0] < bottom_right[:, 0]), 152 | (coordinates[:, 0] > top_left[:, 0]), 153 | ) 154 | y_bound = tf.logical_and( 155 | (coordinates[:, 1] < top_left[:, 1]), 156 | (coordinates[:, 1] > bottom_right[:, 1]), 157 | ) 158 | 159 | peds_mask = tf.reshape(peds_mask, [self.max_num_ped * self.max_num_ped]) 160 | mask = tf.logical_and(tf.logical_and(x_bound, y_bound), peds_mask) 161 | 162 | # The scatter works on a flattened (1D) tensor. The values in grid_layout 163 | # are in [0, grid_size * grid_size), so each pedestrian needs an offset 164 | total_grid = self.grid_size * self.grid_size 165 | offset = tf.range(0, total_grid * self.max_num_ped, total_grid) 166 | offset = tf.reshape(self._repeat(tf.reshape(offset, [-1, 1])), [-1]) 167 | grid_layout = grid_layout + offset 168 | 169 | indices = tf.boolean_mask(grid_layout, mask) 170 | 171 | scattered = tf.reshape( 172 | tf.scatter_nd( 173 | tf.expand_dims(indices, 1), 174 | tf.boolean_mask(states, mask), 175 | shape=[ 176 | self.max_num_ped * self.grid_size * self.grid_size, 177 | self.rnn_size, 178 | ], 179 | ), 180 | (self.max_num_ped, -1), 181 | ) 182 | 183 | return self.pooling_layer(scattered) 184 | 185 | 186 | class OccupancyPooling(Pooling): 187 | """Implement the Occupancy pooling layer defined in the Social LSTM paper.""" 188 | 189 | def __init__(self, hparams): 190 | """Constructor of the Occupancy pooling class. 191 | 192 | Args: 193 | hparams: An HParams instance. hparams must contain the gridSize, 194 | neighborhoodSize, maxNumPed, embeddingSize and rnnSize values. 195 | 196 | """ 197 | super().__init__(hparams) 198 | 199 | def pooling(self, coordinates, states=None, peds_mask=None, **kwargs): 200 | """Compute the occupancy pooling. 201 | 202 | Args: 203 | coordinates: tensor of shape [max_num_ped, 2]. Coordinates. 204 | states: tensor of shape [max_num_ped, rnn_size]. Cell states of the 205 | LSTM. Unused by the occupancy layer. 206 | peds_mask: tensor of shape [max_num_ped, max_num_ped]. Mask for the grid layer. 207 | 208 | Returns: 209 | The occupancy pooling layer.
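Before the dense layer the occupancy grid has shape [max_num_ped, grid_size * grid_size], so the returned tensor has shape [max_num_ped, embedding_size].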
210 | 211 | """ 212 | top_left, bottom_right = self._get_bounds(coordinates) 213 | 214 | # Repeat the coordinates in order to have P1, P2, P3, P1, P2, P3 215 | coordinates = tf.tile(coordinates, (self.max_num_ped, 1)) 216 | # Repeat the bounds in order to have B1, B1, B1, B2, B2, B2 217 | top_left = self._repeat(top_left) 218 | bottom_right = self._repeat(bottom_right) 219 | 220 | grid_layout = self._grid_pos(top_left, coordinates) 221 | 222 | # Find which pedestrians to include 223 | x_bound = tf.logical_and( 224 | (coordinates[:, 0] < bottom_right[:, 0]), 225 | (coordinates[:, 0] > top_left[:, 0]), 226 | ) 227 | y_bound = tf.logical_and( 228 | (coordinates[:, 1] < top_left[:, 1]), 229 | (coordinates[:, 1] > bottom_right[:, 1]), 230 | ) 231 | 232 | peds_mask = tf.reshape(peds_mask, [self.max_num_ped * self.max_num_ped]) 233 | mask = tf.logical_and(tf.logical_and(x_bound, y_bound), peds_mask) 234 | 235 | # The scatter works on a flattened (1D) tensor. The values in grid_layout 236 | # are in [0, grid_size * grid_size), so each pedestrian needs an offset 237 | total_grid = self.grid_size * self.grid_size 238 | offset = tf.range(0, total_grid * self.max_num_ped, total_grid) 239 | offset = tf.reshape(self._repeat(tf.reshape(offset, [-1, 1])), [-1]) 240 | grid_layout = grid_layout + offset 241 | 242 | indices = tf.boolean_mask(grid_layout, mask) 243 | 244 | scattered = tf.reshape( 245 | tf.scatter_nd( 246 | tf.expand_dims(indices, 1), 247 | tf.ones([tf.shape(indices)[0], 1]), # occupancy counts neighbours: scatter a 1 per masked position 248 | shape=[self.max_num_ped * self.grid_size * self.grid_size, 1], 249 | ), 250 | (self.max_num_ped, -1), 251 | ) 252 | 253 | return self.pooling_layer(scattered) 254 | 255 | 256 | class CombinedPooling: 257 | """Combined pooling class. Defines multiple pooling layers combined with each 258 | other.""" 259 | 260 | def __init__(self, hparams): 261 | """Constructor of the CombinedPooling class. 262 | 263 | Args: 264 | hparams: An HParams instance. hparams must contain the gridSize, 265 | neighborhoodSize, maxNumPed, embeddingSize, rnnSize and poolingModule 266 | values. 267 | 268 | """ 269 | self.pooling_layer = tf.layers.Dense( 270 | hparams.embeddingSize, 271 | activation=tf.nn.relu, 272 | kernel_initializer=tf.contrib.layers.xavier_initializer(), 273 | name="Combined/Layer", 274 | ) 275 | 276 | self.__layers = [] 277 | 278 | for layer in hparams.poolingModule: 279 | if layer == "social": 280 | self.__layers.append(SocialPooling(hparams)) 281 | elif layer == "occupancy": 282 | self.__layers.append(OccupancyPooling(hparams)) 283 | elif layer == "navigation": 284 | self.__layers.append(NavigationPooling(hparams)) 285 | elif layer == "semantic": 286 | self.__layers.append(SemanticPooling(hparams)) 287 | 288 | def pooling( 289 | self, 290 | coordinates, 291 | states=None, 292 | peds_mask=None, 293 | navigation_map=None, 294 | top_left_dataset=None, 295 | semantic_map=None, 296 | H=None, 297 | ): 298 | """Compute the combined pooling. 299 | 300 | Args: 301 | coordinates: tensor of shape [max_num_ped, 2]. Coordinates. 302 | states: tensor of shape [max_num_ped, rnn_size]. Cell states of the 303 | LSTM. 304 | peds_mask: tensor of shape [max_num_ped, max_num_ped]. Mask for the grid layer. 305 | navigation_map: tensor of shape [navigation_height, navigation_width]. 306 | Navigation map. 307 | top_left_dataset: tensor of shape [2]. Coordinates for the upper 308 | left-most point in the dataset. 309 | semantic_map: tensor of shape [num_points, num_labels + 2]. Semantic 310 | map. 311 | H: tensor of shape [3, 3]. Homography matrix. 312 | Returns: 313 | The pooling layer.
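Each sub-layer returns a tensor of shape [max_num_ped, embedding_size]; the outputs are concatenated along axis 1 and passed through a further dense layer, so the combined output is again [max_num_ped, embedding_size].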
314 | 315 | """ 316 | pooled = [] 317 | for layer in self.__layers: 318 | pooled.append( 319 | layer.pooling( 320 | coordinates, 321 | states=states, 322 | peds_mask=peds_mask, 323 | navigation_map=navigation_map, 324 | top_left_dataset=top_left_dataset, 325 | semantic_map=semantic_map, 326 | H=H, 327 | ) 328 | ) 329 | concatenated = tf.concat(pooled, 1) 330 | return self.pooling_layer(concatenated) 331 | 332 | 333 | class NavigationPooling(Pooling): 334 | """Implement the Navigation pooling layer.""" 335 | 336 | def __init__(self, hparams): 337 | """Constructor of the Navigation pooling class. 338 | 339 | Args: 340 | hparams: An HParams instance. hparams must contain the gridSize, 341 | neighborhoodSize, maxNumPed, embeddingSize, imageWidth, imageHeight, 342 | navigationWidth, navigationHeight, kernelSize and navigationGrid values. 343 | 344 | """ 345 | super().__init__(hparams) 346 | 347 | self.image_size = [hparams.imageWidth, hparams.imageHeight] 348 | self.navigation_size = [hparams.navigationWidth, hparams.navigationHeight] 349 | self.kernel_size = hparams.kernelSize 350 | self.navigation_grid = hparams.navigationGrid 351 | 352 | def pooling( 353 | self, coordinates, navigation_map=None, top_left_dataset=None, **kwargs 354 | ): 355 | """Compute the navigation pooling. 356 | 357 | Args: 358 | coordinates: tensor of shape [max_num_ped, 2]. Coordinates. 359 | navigation_map: tensor of shape [navigation_height, navigation_width]. 360 | Navigation map. 361 | top_left_dataset: tensor of shape [2]. Coordinates for the upper 362 | left-most point in the dataset. 363 | 364 | Returns: 365 | The navigation pooling layer. 366 | 367 | """ 368 | 369 | top_left, _ = self._get_bounds(coordinates) 370 | 371 | # Get the top_left cell position of each pedestrian inside the 372 | # navigation map 373 | top_left_cell = self._grid_pos(top_left_dataset, top_left) 374 | 375 | # For each pedestrian get the grid from the navigation map 376 | indices_x = tf.tile( 377 | (tf.range(self.navigation_grid) + top_left_cell[:, 0, tf.newaxis])[ 378 | ..., tf.newaxis 379 | ], 380 | [1, 1, self.navigation_grid], 381 | ) 382 | indices_y = tf.tile( 383 | (tf.range(self.navigation_grid) + top_left_cell[:, 1, tf.newaxis])[ 384 | :, tf.newaxis 385 | ], 386 | [1, self.navigation_grid, 1], 387 | ) 388 | indices = tf.stack([indices_x, indices_y], axis=3) 389 | indices = tf.reshape(indices, [self.max_num_ped, -1, 2]) 390 | grid = tf.gather_nd(navigation_map, indices, name="navGrid") 391 | grid = tf.reshape( 392 | grid, [self.max_num_ped, self.navigation_grid, self.navigation_grid, 1] 393 | ) 394 | grid = tf.nn.avg_pool( 395 | grid, [1, self.kernel_size, self.kernel_size, 1], [1, 1, 1, 1], "SAME" 396 | ) 397 | grid = tf.reshape(grid, [self.max_num_ped, -1]) 398 | return self.pooling_layer(grid) 399 | 400 | def _grid_pos(self, top_left, coordinates): 401 | """Calculate the position of each pedestrian inside the navigation map. 402 | 403 | Args: 404 | top_left: tensor of shape [2]. Top left point of the 405 | dataset. 406 | coordinates: tensor of shape [max_num_ped, 2]. 407 | Coordinates. 408 | 409 | Returns: 410 | Tensor of shape [max_num_ped, 2] that is the position in 411 | the navigation map, as (row, column).
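As a worked example (illustrative values): with image_size = [100, 100], navigation_size = [10, 10] and top_left = (-5, 20), a pedestrian at (15, 10) falls in cell_x = floor((15 - (-5)) / 100 * 10) = 2 and cell_y = floor((20 - 10) / 100 * 10) = 1, so its entry is (1, 2).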
412 | 413 | """ 414 | cell_x = tf.floor( 415 | ((coordinates[:, 0] - top_left[0]) / self.image_size[0]) 416 | * self.navigation_size[0] 417 | ) 418 | cell_y = tf.floor( 419 | ((top_left[1] - coordinates[:, 1]) / self.image_size[1]) 420 | * self.navigation_size[1] 421 | ) 422 | grid_pos = tf.stack([cell_y, cell_x], axis=1) 423 | return tf.cast(grid_pos, tf.int32) 424 | 425 | 426 | class SemanticPooling(Pooling): 427 | """Implement the Semantic pooling layer.""" 428 | 429 | def __init__(self, hparams): 430 | """Constructor of the Semantic pooling class. 431 | 432 | Args: 433 | hparams: An HParams instance. hparams must contain the 434 | semanticGridSize, neighborhoodSize, maxNumPed, embeddingSize, 435 | kernelSize and numLabels values. 436 | 437 | """ 438 | super().__init__(hparams) 439 | self.num_labels = hparams.numLabels 440 | self.grid_size = hparams.semanticGridSize 441 | self.kernel_size = hparams.kernelSize 442 | self.ones = tf.ones([self.max_num_ped, 1]) 443 | 444 | def pooling(self, coordinates, semantic_map=None, H=None, **kwargs): 445 | """Compute the semantic pooling. 446 | 447 | Args: 448 | coordinates: tensor of shape [max_num_ped, 2]. Coordinates. 449 | semantic_map: tensor of shape [num_points, num_labels + 2]. Semantic 450 | map. 451 | H: tensor of shape [3, 3]. Homography matrix. 452 | Returns: 453 | The semantic pooling layer. 454 | 455 | """ 456 | top_left, _ = self._get_bounds(coordinates) 457 | # Transform the top_left coordinates into pixel coordinates 458 | top_left = tf.concat([top_left, self.ones], axis=1) 459 | pixel_coordinates = tf.matmul(H, tf.transpose(top_left)) 460 | pixel_coordinates = pixel_coordinates / pixel_coordinates[2] 461 | pixel_coordinates = tf.transpose(pixel_coordinates[:2]) 462 | pixel_coordinates = tf.cast(pixel_coordinates, tf.int32) 463 | 464 | # For each pedestrian get the grid from the semantic map 465 | indices_x = tf.tile( 466 | (tf.range(self.grid_size) + pixel_coordinates[:, 0, tf.newaxis])[ 467 | ..., tf.newaxis 468 | ], 469 | [1, 1, self.grid_size], 470 | ) 471 | indices_y = tf.tile( 472 | (tf.range(self.grid_size) + pixel_coordinates[:, 1, tf.newaxis])[ 473 | :, tf.newaxis 474 | ], 475 | [1, self.grid_size, 1], 476 | ) 477 | indices = tf.stack([indices_x, indices_y], axis=3) 478 | indices = tf.reshape(indices, [self.max_num_ped, -1, 2]) 479 | grid = tf.gather_nd(semantic_map, indices, name="semGrid") 480 | grid = tf.reshape( 481 | grid, [self.max_num_ped, self.grid_size, self.grid_size, self.num_labels] 482 | ) 483 | 484 | # Count the occurrences of each label and normalize them 485 | grid = tf.reduce_sum(grid, [1, 2]) / (self.grid_size * self.grid_size) 486 | 487 | return self.pooling_layer(grid) 488 | 489 | def _grid_pos(self, top_left, coordinates): 490 | """Calculate the position of each pedestrian inside the navigation map. 491 | 492 | Args: 493 | top_left: tensor of shape [2]. Top left point of the 494 | dataset. 495 | coordinates: tensor of shape [max_num_ped, 2]. 496 | Coordinates. 497 | 498 | Returns: 499 | Tensor of shape [max_num_ped, 2] that is the position in 500 | the navigation map.
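Note: this method mirrors NavigationPooling._grid_pos, is not called by pooling(), and relies on image_size and navigation_size attributes that SemanticPooling does not define.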
501 | 502 | """ 503 | cell_x = tf.floor( 504 | ((coordinates[:, 0] - top_left[0]) / self.image_size[0]) 505 | * self.navigation_size[0] 506 | ) 507 | cell_y = tf.floor( 508 | ((top_left[1] - coordinates[:, 1]) / self.image_size[1]) 509 | * self.navigation_size[1] 510 | ) 511 | grid_pos = tf.stack([cell_y, cell_x], axis=1) 512 | return tf.cast(grid_pos, tf.int32) 513 | -------------------------------------------------------------------------------- /social-lstm/position_estimates.py: -------------------------------------------------------------------------------- 1 | """Module that defines the functions for the position estimation.""" 2 | import math 3 | import tensorflow as tf 4 | 5 | 6 | def social_train_position_estimate(cell_output, output_size, coordinates_gt): 7 | """Calculate the probability density function in training phase. 8 | 9 | Args: 10 | cell_output: tensor of shape [max_num_ped, output_size]. The output of the 11 | LSTM after applying a linear layer. 12 | output_size: int. Dimension of the output size. 13 | coordinates_gt: tensor of shape [max_num_ped, 2]. Ground truth 14 | coordinates. 15 | 16 | Returns: 17 | tensor of shape [max_num_ped] that contains the pdf values. 18 | 19 | """ 20 | # Calculate the probability density function based on Graves (2013) equations. 21 | # Assume a bivariate Gaussian distribution. 22 | with tf.name_scope("Calculate_coordinates"): 23 | # Equations 20 - 22 24 | # Split and squeeze to have shape [max_num_ped] 25 | mu_x, mu_y, std_x, std_y, rho = list( 26 | map(lambda x: tf.squeeze(x, 1), tf.split(cell_output, output_size, 1)) 27 | ) 28 | std_x = tf.exp(std_x) 29 | std_y = tf.exp(std_y) 30 | rho = tf.tanh(rho) 31 | 32 | # Equations 24 & 25 33 | stds = tf.multiply(std_x, std_y) 34 | rho_neg = tf.subtract(1.0, tf.square(rho)) 35 | 36 | # Calculate Z 37 | z_num1 = tf.subtract(coordinates_gt[:, 0], mu_x) 38 | z_num2 = tf.subtract(coordinates_gt[:, 1], mu_y) 39 | z_num3 = tf.multiply(2.0, tf.multiply(rho, tf.multiply(z_num1, z_num2))) 40 | z = ( 41 | tf.square(tf.div(z_num1, std_x)) 42 | + tf.square(tf.div(z_num2, std_y)) 43 | - tf.div(z_num3, stds) 44 | ) 45 | 46 | # Calculate N 47 | n_num = tf.exp(tf.div(-z, 2 * rho_neg)) 48 | n_den = tf.multiply( 49 | 2.0, tf.multiply(math.pi, tf.multiply(stds, tf.sqrt(rho_neg))) 50 | ) 51 | return tf.div(n_num, n_den) 52 | 53 | 54 | def social_sample_position_estimate(cell_output, output_size): 55 | """Calculate the new coordinates in sampling phase. 56 | 57 | Args: 58 | cell_output: tensor of shape [max_num_ped, output_size]. The output of the 59 | LSTM after the linear layer. 60 | output_size: int. Dimension of the output size. 61 | 62 | Returns: 63 | tensor of shape [max_num_ped, 2] that contains the sampled coordinates. 64 | 65 | """ 66 | 67 | # Calculate the new coordinates based on Graves (2013) equations. Assume a 68 | # bivariate Gaussian distribution. 69 | with tf.name_scope("Calculate_coordinates"): 70 | # Equations 20 - 22 from Graves 71 | # Split and squeeze to have shape [max_num_ped] 72 | mu_x, mu_y, std_x, std_y, rho = list( 73 | map(lambda x: tf.squeeze(x, 1), tf.split(cell_output, output_size, 1)) 74 | ) 75 | std_x = tf.exp(std_x) 76 | std_y = tf.exp(std_y) 77 | rho = tf.tanh(rho) 78 | 79 | # Kaiser-Dickman algorithm (Kaiser & Dickman, 1962) 80 | # Generate two samples X1, X2 from the standard normal distribution 81 | # (mu = 0, sigma = 1) 82 | normal_coords = tf.random.normal(tf.TensorShape([mu_x.shape[0], 2])) 83 | # Generate the correlation.
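# (Given two independent standard normal samples X1 and X2, the pair (X1, rho * X1 + sqrt(1 - rho^2) * X2) has standard normal marginals and correlation rho, which is what the next lines compute.)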
84 | # correlation = rho * X1 + sqrt(1 - rho^2) * X2 85 | correlation = ( 86 | rho * normal_coords[:, 0] 87 | + tf.sqrt(1 - tf.square(rho)) * normal_coords[:, 1] 88 | ) 89 | 90 | # Define the two correlated coordinates 91 | # Y1 = mu_x + sigma_x * X1 92 | # Y2 = mu_y + sigma_y * correlation 93 | coords_x = mu_x + std_x * normal_coords[:, 0] 94 | coords_y = mu_y + std_y * correlation 95 | 96 | coordinates = tf.stack([coords_x, coords_y], 1) 97 | return coordinates 98 | -------------------------------------------------------------------------------- /social-lstm/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .dataset import TrajectoriesDataset 2 | from .loader import DataLoader 3 | from .evaluation import average_displacement_error 4 | from .evaluation import final_displacement_error 5 | from .yparams import YParams 6 | -------------------------------------------------------------------------------- /social-lstm/utils/dataset.py: -------------------------------------------------------------------------------- 1 | """Module that defines the classes that provide the input tensors for the 2 | models. Each class defines at least 2 iterators: the iterator for the 3 | sequence/batch and the iterator for the number of pedestrians in the 4 | sequence/batch. Each class stores the iterators in the `tensors` variable. 5 | 6 | """ 7 | import tensorflow as tf 8 | 9 | 10 | class TrajectoriesDataset: 11 | """Class that defines the tensor iterators used as input for the models. 12 | 13 | TrajectoriesDataset defines the following iterators: the sequence/batch 14 | iterator, the number of pedestrians in the sequence/batch iterator, the mask 15 | for each frame of the sequence/batch, the sequence/batch iterator with all 16 | pedestrians in the frame and the mask for each frame of the all pedestrians 17 | sequence/batch. The iterators have shape and type defined by the 18 | train_loader class and they are stored inside the variable `tensors`. 19 | 20 | """ 21 | 22 | def __init__( 23 | self, 24 | train_loader, 25 | val_loader=None, 26 | batch=False, 27 | shuffle=True, 28 | batch_size=10, 29 | prefetch_size=1000, 30 | ): 31 | """Constructor of the TrajectoriesDataset class. 32 | 33 | Args: 34 | train_loader: Object that provides the sequences. The object must 35 | have the next_sequence method that returns a generator with the 36 | sequences. 37 | val_loader: Object that provides the sequences. The object must have 38 | the next_sequence method that returns a generator with the 39 | sequences. 40 | batch: boolean. If True, TrajectoriesDataset returns batches of 41 | sequences. 42 | shuffle: boolean. If True, shuffle the sequences. 43 | batch_size: int. Size of the batch. 44 | prefetch_size: int. Number of batches to prefetch. 45 | 46 | """ 47 | # Create the datasets with the tf.data API.
The dataset will use the CPU 48 | with tf.device("/cpu:0"): 49 | train_dataset = tf.data.Dataset.from_generator( 50 | train_loader.next_sequence, 51 | train_loader.output_types, 52 | train_loader.shape, 53 | ) 54 | if val_loader is not None: 55 | val_dataset = tf.data.Dataset.from_generator( 56 | val_loader.next_sequence, val_loader.output_types, val_loader.shape 57 | ) 58 | 59 | # If shuffle is True, add the shuffle option to the dataset 60 | if shuffle: 61 | train_dataset = train_dataset.shuffle(prefetch_size) 62 | if val_loader is not None: 63 | val_dataset = val_dataset.shuffle(prefetch_size) 64 | 65 | # If batch is True, self.tensors will contain a batch of sequences and 66 | # not a single sequence 67 | if batch: 68 | train_dataset = train_dataset.batch(batch_size, drop_remainder=True) 69 | # Prefetch the sequences, or the batches if batch is True 70 | train_dataset = train_dataset.prefetch(prefetch_size) 71 | 72 | if val_loader is not None: 73 | if batch: 74 | val_dataset = val_dataset.batch(batch_size, drop_remainder=True) 75 | val_dataset = val_dataset.prefetch(prefetch_size) 76 | 77 | # Create the iterators 78 | iterator = tf.data.Iterator.from_structure( 79 | train_dataset.output_types, train_dataset.output_shapes 80 | ) 81 | # Create the initialization ops of the iterators 82 | self.init_train = iterator.make_initializer(train_dataset) 83 | if val_loader is not None: 84 | self.init_val = iterator.make_initializer(val_dataset) 85 | 86 | # Tensors is a tuple that contains the output of the iterator 87 | self.tensors = iterator.get_next() 88 | -------------------------------------------------------------------------------- /social-lstm/utils/evaluation.py: -------------------------------------------------------------------------------- 1 | """Module that defines the metrics used for evaluation.""" 2 | 3 | import tensorflow as tf 4 | 5 | 6 | def average_displacement_error(coordinates_predicted, coordinates_gt, num_peds): 7 | """Function that calculates the average displacement error. 8 | 9 | The formula is the sum over the pedestrians of 10 | sqrt(sum((coordinates_gt - coordinates_predicted)^2)) / (num_peds * pred_length) 11 | 12 | Args: 13 | coordinates_predicted: tensor of shape [pred_length, max_num_ped, 2]. 14 | Tensor that contains the coordinates predicted by the model. 15 | coordinates_gt: tensor of shape [pred_length, max_num_ped, 2]. Tensor that 16 | contains the ground truth coordinates. 17 | num_peds: tensor with type tf.int32. Number of pedestrians that are in 18 | the sequence. 19 | 20 | Returns: 21 | tensor with type tf.float32 containing the average displacement error of 22 | the pedestrians that are in the sequence. 23 | 24 | """ 25 | i = tf.constant(0) 26 | ade = tf.constant(0, tf.float32) 27 | 28 | cond = lambda i, ade: tf.less(i, num_peds) 29 | 30 | def body(i, ade): 31 | ade_ped = coordinates_gt[:, i] - coordinates_predicted[:, i] 32 | ade_ped = tf.norm(ade_ped) 33 | return tf.add(i, 1), tf.add(ade, ade_ped) 34 | 35 | _, ade = tf.while_loop(cond, body, [i, ade]) 36 | return ade / tf.cast(num_peds * coordinates_gt.shape[0], tf.float32) 37 | 38 | 39 | def final_displacement_error(coordinates_predicted, coordinates_gt, num_peds): 40 | """Function that calculates the final displacement error. 41 | 42 | The formula is the sum over the pedestrians of 43 | sqrt(sum((coordinates_gt - coordinates_predicted)^2)) / num_peds 44 | 45 | Args: 46 | coordinates_predicted: tensor of shape [max_num_ped, 2]. Tensor that 47 | contains the coordinates predicted by the model. 48 | coordinates_gt: tensor of shape [max_num_ped, 2].
Tensor that contains the 49 | ground truth coordinates. 50 | num_peds: tensor with type tf.int32. Number of pedestrians that are in 51 | the sequence. 52 | 53 | Returns: 54 | tensor with type tf.float32 containing the final displacement error of the 55 | pedestrians that are in the sequence. 56 | 57 | """ 58 | i = tf.constant(0) 59 | fde = tf.constant(0, tf.float32) 60 | 61 | cond = lambda i, fde: tf.less(i, num_peds) 62 | 63 | def body(i, fde): 64 | fde_ped = coordinates_gt[i] - coordinates_predicted[i] 65 | fde_ped = tf.norm(fde_ped) 66 | return tf.add(i, 1), tf.add(fde, fde_ped) 67 | 68 | _, fde = tf.while_loop(cond, body, [i, fde]) 69 | return fde / tf.cast(num_peds, tf.float32) 70 | -------------------------------------------------------------------------------- /social-lstm/utils/loader.py: -------------------------------------------------------------------------------- 1 | """Module that defines the classes that provide the input for the classes 2 | defined in the dataset module. Each class loads datasets, preprocesses them and 3 | creates two generators that return sequences or batches of trajectories. 4 | 5 | """ 6 | import os 7 | import random 8 | import logging 9 | import numpy as np 10 | import tensorflow as tf 11 | 12 | 13 | class DataLoader: 14 | """Data loader class that loads the given datasets, preprocesses them and creates 15 | two generators that return sequences or batches of trajectories. 16 | 17 | """ 18 | 19 | def __init__( 20 | self, 21 | data_path, 22 | datasets, 23 | navigation_maps, 24 | semantic_maps, 25 | semantic_mapping, 26 | homography, 27 | num_labels=6, 28 | delimiter="\t", 29 | skip=1, 30 | max_num_ped=100, 31 | trajectory_size=20, 32 | neighborood_size=2, 33 | batch_size=10, 34 | ): 35 | """Constructor of the DataLoader class. 36 | 37 | Args: 38 | data_path: string. Path to the folder containing the datasets. 39 | datasets: list. List of datasets to use. 40 | navigation_maps: list. List of the navigation maps. 41 | semantic_maps: list. List of the semantic maps. 42 | semantic_mapping: list. Mapping between semantic_maps and datasets. 43 | homography: list. List of homography matrices. 44 | num_labels: int. Number of labels inside the semantic map. 45 | delimiter: string. Delimiter used to separate data inside the 46 | datasets. 47 | skip: int or True. If True, the number of frames to skip while making 48 | the dataset is random. If int, number of frames to skip while making 49 | the dataset. 50 | max_num_ped: int. Maximum number of pedestrians in a single frame. 51 | trajectory_size: int. Length of the trajectory (obs_length + 52 | pred_len). 53 | neighborood_size: int. Neighborhood size. 54 | batch_size: int. Batch size.
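Example of an illustrative call (the semantic map path is hypothetical; the other paths follow the repository layout): DataLoader("datasets", ["train/biwi_eth_train.txt"], ["navMap/eth.npy"], ["semantic/eth.npy"], ["eth"], ["homography/eth.txt"], delimiter="tab").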
55 | 56 | """ 57 | # Store the list of datasets to load 58 | self.__datasets = [os.path.join(data_path, dataset) for dataset in datasets] 59 | logging.debug( 60 | "Number of datasets to load: {}. List of datasets: {}".format( 61 | len(self.__datasets), self.__datasets 62 | ) 63 | ) 64 | 65 | # Store the list of the navigation maps 66 | self.__navigation = [ 67 | os.path.join(data_path, navigation) for navigation in navigation_maps 68 | ] 69 | # Store the list of the semantic maps 70 | self.__semantic = [ 71 | os.path.join(data_path, semantic) for semantic in semantic_maps 72 | ] 73 | # Store the list of the homography matrices 74 | self.__homography = [os.path.join(data_path, hg) for hg in homography] 75 | 76 | # Store the batch_size, trajectory_size, the maximum number of 77 | # pedestrians in a single frame and the skip value 78 | self.batch_size = batch_size 79 | self.trajectory_size = trajectory_size 80 | self.max_num_ped = max_num_ped 81 | self.skip = skip 82 | self.neighborood_size = neighborood_size 83 | self.num_labels = num_labels 84 | 85 | if delimiter == "tab": 86 | delimiter = "\t" 87 | elif delimiter == "space": 88 | delimiter = " " 89 | 90 | # Load the datasets and preprocess them 91 | self.__load_data(delimiter, semantic_mapping) 92 | self.__preprocess_data() 93 | self.__type_and_shape() 94 | 95 | def next_batch(self): 96 | """Generator method that returns an iterator pointing to the next batch. 97 | 98 | Returns: 99 | Generator object that has a list of trajectory sequences of size 100 | batch_size, a list of relative trajectory sequences of size 101 | batch_size, a list containing the mask for the grid layer of size 102 | batch_size, a list with the number of pedestrians in each sequence, a 103 | list containing the mask for the loss function, a list containing 104 | the navigation maps, the top_left coordinates for each dataset, a 105 | list containing the semantic maps and a list containing the homography matrices. 106 | 107 | """ 108 | it = self.next_sequence() 109 | for _ in range(self.num_batches): 110 | batch = [] 111 | batch_rel = [] 112 | mask_batch = [] 113 | peds_batch = [] 114 | loss_batch = [] 115 | navigation_map_batch = [] 116 | top_left_batch = [] 117 | semantic_map_batch = [] 118 | homography_matrix = [] 119 | 120 | for _ in range(self.batch_size): 121 | data = next(it) 122 | batch.append(data[0]) 123 | batch_rel.append(data[1]) 124 | mask_batch.append(data[2]) 125 | peds_batch.append(data[3]) 126 | loss_batch.append(data[4]) 127 | navigation_map_batch.append(data[5]) 128 | top_left_batch.append(data[6]) 129 | semantic_map_batch.append(data[7]) 130 | homography_matrix.append(data[8]) 131 | yield ( 132 | batch, 133 | batch_rel, 134 | mask_batch, 135 | peds_batch, 136 | loss_batch, 137 | navigation_map_batch, 138 | top_left_batch, 139 | semantic_map_batch, 140 | homography_matrix, 141 | ) 142 | 143 | def next_sequence(self): 144 | """Generator method that returns an iterator pointing to the next sequence. 145 | 146 | Returns: 147 | Generator object that contains a trajectory sequence, a relative 148 | trajectory sequence, the mask for the grid layer, the number of 149 | pedestrians in the sequence, the mask for the loss function, the 150 | navigation map, the top_left coordinates for the dataset, the 151 | semantic map and the homography matrix.
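The tuple is yielded in the same order as the output_types and shape attributes defined in __type_and_shape.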
152 | 153 | """ 154 | # Iterate through all sequences 155 | for idx_d, dataset in enumerate(self.__trajectories): 156 | # Every dataset 157 | for idx_s, trajectories in enumerate(dataset): 158 | sequence, mask, loss_mask = self.__get_sequence(trajectories) 159 | 160 | # Create the relative coordinates 161 | sequence_rel = np.zeros( 162 | [self.trajectory_size, self.max_num_ped, 2], float 163 | ) 164 | sequence_rel[1:] = sequence[1:] - sequence[:-1] 165 | num_peds = self.__num_peds[idx_d][idx_s] 166 | 167 | yield ( 168 | sequence, 169 | sequence_rel, 170 | mask, 171 | num_peds, 172 | loss_mask, 173 | self.__navigation_map[idx_d], 174 | self.__top_left[idx_d], 175 | self.__semantic_map[idx_d], 176 | self.__homography_matrix[idx_d], 177 | ) 178 | 179 | def __load_data(self, delimiter, semantic_mapping): 180 | """Load the datasets and define the list __frames. 181 | 182 | Load the datasets and define the list __frames which contains all the 183 | frames of the datasets and the list __navigation_map. __frames has shape 184 | [num_datasets, num_frames_dataset, num_peds_frame, 4] where 4 is 185 | frameID, pedID, x and y. 186 | 187 | Args: 188 | delimiter: string. Delimiter used to separate data inside the 189 | datasets. 190 | semantic_mapping: list. Mapping between semantic_maps and datasets. 191 | """ 192 | # List that contains all the frames of the datasets. Each dataset is a 193 | # list of frames of shape (num_peds, (frameID, pedID, x and y)) 194 | self.__frames = [] 195 | self.__navigation_map = [] 196 | self.__top_left = [] 197 | self.__semantic_map = [] 198 | self.__homography_matrix = [] 199 | semantic_map_labeled = {} 200 | homography_map = {} 201 | 202 | # Load the semantic maps and the corresponding homography matrices 203 | for i, smap in enumerate(self.__semantic): 204 | # Load the semantic map 205 | semantic_map = np.load(smap) 206 | homography = np.loadtxt(self.__homography[i], delimiter=delimiter) 207 | filename = os.path.splitext(os.path.basename(smap))[0] 208 | semantic_map_labeled[filename] = semantic_map 209 | homography_map[filename] = homography 210 | 211 | for i, dataset_path in enumerate(self.__datasets): 212 | # Load the dataset. Each line is formed by frameID, pedID, x, y 213 | dataset = np.loadtxt(dataset_path, delimiter=delimiter) 214 | # Get the frames in the dataset 215 | num_frames = np.unique(dataset[:, 0]) 216 | # Initialize the array of frames for the current dataset 217 | frames_dataset = [] 218 | # Load the navigation map 219 | navigation_map = np.load(self.__navigation[i]) 220 | 221 | # The image has padding, so we add padding to the top_left point. 222 | top_left = [ 223 | np.floor(min(dataset[:, 2]) - self.neighborood_size / 2), 224 | np.ceil(max(dataset[:, 3]) + self.neighborood_size / 2), 225 | ] 226 | 227 | # For each frame add to frames_dataset the pedestrians that appear 228 | # in the current frame 229 | for frame in num_frames: 230 | # Get the pedestrians 231 | frame = dataset[dataset[:, 0] == frame, :] 232 | frames_dataset.append(frame) 233 | 234 | self.__frames.append(frames_dataset) 235 | self.__navigation_map.append(navigation_map) 236 | self.__top_left.append(top_left) 237 | self.__semantic_map.append(semantic_map_labeled[semantic_mapping[i]]) 238 | self.__homography_matrix.append(homography_map[semantic_mapping[i]]) 239 | 240 | def __preprocess_data(self): 241 | """Preprocess the datasets and define the number of sequences and batches. 242 | 243 | The method iterates over __frames, saving in the list __trajectories only 244 | the trajectories with length trajectory_size.
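As a worked example (illustrative values): with 100 frames, trajectory_size = 20 and skip = 10, the sliding window starts at frames 0, 10, ..., 70, producing at most 8 candidate sequences.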
245 | 246 | """ 247 | # Keep only the trajectories that are trajectory_size long 248 | self.__trajectories = [] 249 | self.__num_peds = [] 250 | self.num_sequences = 0 251 | 252 | for dataset in self.__frames: 253 | # Initialize the array of trajectories for the current dataset. 254 | trajectories = [] 255 | num_peds = [] 256 | frame_size = len(dataset) 257 | i = 0 258 | 259 | # Each trajectory contains only frames of a dataset 260 | while i + self.trajectory_size < frame_size: 261 | sequence = dataset[i : i + self.trajectory_size] 262 | # Get the pedestrians in the first frame 263 | peds = np.unique(sequence[0][:, 1]) 264 | # Check if the trajectory of each pedestrian is long enough. 265 | sequence = np.concatenate(sequence, axis=0) 266 | traj_frame = [] 267 | for ped in peds: 268 | # Get the frames where ped appears 269 | frames = sequence[sequence[:, 1] == ped] 270 | # Check that the trajectory is long enough 271 | if frames.shape[0] == self.trajectory_size: 272 | traj_frame.append(frames) 273 | # If no trajectory is long enough, traj_frame is empty. Otherwise 274 | if traj_frame: 275 | trajectories_frame, peds_frame = self.__create_sequence( 276 | traj_frame, sequence 277 | ) 278 | trajectories.append(trajectories_frame) 279 | num_peds.append(peds_frame) 280 | self.num_sequences += 1 281 | # If skip is True, update the index with a random value 282 | if self.skip is True: 283 | i += random.randint(0, self.trajectory_size) 284 | else: 285 | i += self.skip 286 | 287 | self.__trajectories.append(trajectories) 288 | self.__num_peds.append(num_peds) 289 | 290 | # num_batches counts only full batches. It discards the remaining 291 | # sequences 292 | self.num_batches = int(self.num_sequences / self.batch_size) 293 | logging.info("There are {} sequences in the loader".format(self.num_sequences)) 294 | logging.info("There are {} batches in the loader".format(self.num_batches)) 295 | 296 | def __get_sequence(self, trajectories): 297 | """Returns a tuple containing a trajectory sequence, the mask for the grid layer 298 | and the mask for the loss function. 299 | 300 | Args: 301 | trajectories: list of numpy arrays. Each array is a trajectory. 302 | 303 | Returns: 304 | tuple containing a numpy array with shape [trajectory_size, 305 | max_num_ped, 2] that contains the trajectories, a numpy array with 306 | shape [trajectory_size, max_num_ped, max_num_ped] that is the mask 307 | for the grid layer and a numpy array with shape [trajectory_size, 308 | max_num_ped] that is the mask for the loss function. 309 | 310 | """ 311 | num_peds_sequence = len(trajectories) 312 | sequence = np.zeros((self.max_num_ped, self.trajectory_size, 2)) 313 | mask = np.zeros((self.max_num_ped, self.trajectory_size), dtype=bool) 314 | 315 | sequence[:num_peds_sequence] = trajectories[:, :, [2, 3]] 316 | 317 | # Create the mask for the grid layer. Set to True only the pedestrians 318 | # that are in the sequence. A pedestrian is in the sequence if its 319 | # frameID is not 0 320 | mask[:num_peds_sequence] = trajectories[:, :, 0] 321 | # Create the mask for the loss function 322 | loss_mask = mask 323 | # Create the mask for all the pedestrians 324 | mask = np.tile(mask, (self.max_num_ped, 1, 1)) 325 | # The mask ignores the pedestrian itself 326 | for ped in range(num_peds_sequence): 327 | mask[ped, ped] = False 328 | 329 | # Change the shape of the arrays.
From [max_num_ped, trajectory_size] to 330 | # [trajectory_size, max_num_ped] 331 | sequence_moved = np.moveaxis(sequence, 1, 0) 332 | mask_moved = np.moveaxis(mask, 2, 0) 333 | loss_moved = np.moveaxis(loss_mask, 1, 0) 334 | 335 | return sequence_moved, mask_moved, loss_moved 336 | 337 | def __create_sequence(self, trajectories_full, sequence): 338 | """Create an array with the trajectories contained in a dataset slice. 339 | 340 | Args: 341 | trajectories_full: list that contains the trajectories of length 342 | trajectory_size in the dataset slice. 343 | sequence: list that contains the remaining trajectories of the dataset 344 | slice. 345 | 346 | Returns: 347 | tuple containing the ndarray with the trajectories of the dataset 348 | slice and the number of pedestrians that are trajectory_size long 349 | in the dataset slice. The trajectories that are long enough occupy 350 | the first positions of the ndarray. The shape of the ndarray is 351 | [peds_sequence, trajectory_size, 4]. 352 | 353 | """ 354 | trajectories_full = np.array(trajectories_full) 355 | peds_sequence = np.unique(sequence[:, 1]) 356 | peds_trajectories = np.unique(trajectories_full[:, :, 1]) 357 | frames_id = np.unique(sequence[:, 0]) 358 | # Create the array that will contain the trajectories 359 | trajectories = np.zeros((len(peds_sequence), self.trajectory_size, 4)) 360 | 361 | # Copy trajectories_full in the first len(peds_trajectories) rows 362 | trajectories[: len(peds_trajectories)] = trajectories_full 363 | # Remove the peds that are in peds_trajectories 364 | peds_sequence = np.delete( 365 | peds_sequence, np.searchsorted(peds_sequence, peds_trajectories) 366 | ) 367 | # Create a lookup table with the frame ids and their position in the 368 | # sequence 369 | lookup_frames = {} 370 | for i, frame in enumerate(frames_id): 371 | lookup_frames[frame] = i 372 | 373 | # Add the remaining peds 374 | for i, ped in enumerate(peds_sequence, len(peds_trajectories)): 375 | # Get the indexes where the pedID is equal to ped 376 | positions = np.where(sequence[:, 1] == ped)[0] 377 | # Use the lookup table to find out where the pedestrian trajectory 378 | # begins and ends in the sequence 379 | start = lookup_frames[sequence[positions][0, 0]] 380 | end = lookup_frames[sequence[positions][-1, 0]] + 1 381 | # Copy the pedestrian trajectory inside the sequence 382 | trajectories[i, start:end] = sequence[positions] 383 | 384 | return trajectories, len(peds_trajectories) 385 | 386 | def __type_and_shape(self): 387 | """Define the type and the shape of the arrays that TensorFlow will use.""" 388 | navigation_h, navigation_w = self.__navigation_map[0].shape 389 | self.output_types = ( 390 | tf.float32, 391 | tf.float32, 392 | tf.bool, 393 | tf.int32, 394 | tf.int32, 395 | tf.float32, 396 | tf.float32, 397 | tf.float32, 398 | tf.float32, 399 | ) 400 | self.shape = ( 401 | tf.TensorShape([self.trajectory_size, self.max_num_ped, 2]), 402 | tf.TensorShape([self.trajectory_size, self.max_num_ped, 2]), 403 | tf.TensorShape([self.trajectory_size, self.max_num_ped, self.max_num_ped]), 404 | tf.TensorShape([]), 405 | tf.TensorShape([self.trajectory_size, self.max_num_ped]), 406 | tf.TensorShape([navigation_h, navigation_w]), 407 | tf.TensorShape([2]), 408 | tf.TensorShape([None, None, self.num_labels]), 409 | tf.TensorShape([3, 3]), 410 | ) 411 | -------------------------------------------------------------------------------- /social-lstm/utils/yparams.py: -------------------------------------------------------------------------------- 1 |
"""Module that defines the class that load the hyperparameters from a yaml 2 | file.""" 3 | import yaml 4 | from tensorflow.contrib.training import HParams 5 | 6 | 7 | class YParams(HParams): 8 | """Yparams load the parameters from a yaml file.""" 9 | 10 | def __init__(self, file_name, config_name=None): 11 | """Constructor of the YParams class. 12 | 13 | Args: 14 | file_name: string. Path to the file containing the parameters. 15 | config_name: string. Name of the set of parameters. If None, the file 16 | has not sets. 17 | 18 | """ 19 | super().__init__() 20 | 21 | with open(file_name) as fp: 22 | if config_name is not None: 23 | for k, v in yaml.load(fp)[config_name].items(): 24 | self.add_hparam(k, v) 25 | else: 26 | for k, v in yaml.load(fp).items(): 27 | self.add_hparam(k, v) 28 | --------------------------------------------------------------------------------