├── outcome-model.pdf ├── off_pol_eval_functions.pyc ├── README.md ├── off_pol_eval_functions.py ├── minimal_OPE.ipynb └── y.csv /outcome-model.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CausalML/continuous-policy-learning/HEAD/outcome-model.pdf -------------------------------------------------------------------------------- /off_pol_eval_functions.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CausalML/continuous-policy-learning/HEAD/off_pol_eval_functions.pyc -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # continuous-policy-learning 2 | 3 | Code for [Policy Evaluation and Optimization with Continuous Treatments](http://proceedings.mlr.press/v84/kallus18a/kallus18a.pdf), AISTATS 2018 4 | -------------------------------------------------------------------------------- /off_pol_eval_functions.py: -------------------------------------------------------------------------------- 1 | import scipy.integrate as integrate 2 | from math import exp 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from scipy import interpolate 6 | from scipy.optimize import minimize 7 | from sklearn.grid_search import GridSearchCV 8 | from sklearn.neighbors.kde import KernelDensity 9 | from sklearn.metrics.pairwise import rbf_kernel 10 | import datetime 11 | import pickle 12 | import sys 13 | # For bandwidth estimation 14 | from scipy.stats import norm 15 | from sklearn import linear_model 16 | # import numdifftools as nd 17 | from scipy.misc import derivative 18 | from sklearn.neighbors.kde import KernelDensity 19 | from scipy.stats import truncnorm 20 | 21 | 22 | 23 | # !FIXME Global offset value. 
# !FIXME
# Currently when changing data generation distributions, need also to change
# sampling method in evaluate_subsample to generate from the appropriate
# treatment distribution.

'''
Choices for output function.
'''
def oracle_evaluation(**params):
    """Oracle outcome model: 2 * |x - tau|^1.5, elementwise over the sample."""
    X = params['x_samp']
    tau = params['tau']
    return 2 * pow(np.abs(X - tau), 1.5)

'''
Different options for kernel function.
'''
def db_exp_kernel(x1, x2, variance=1):
    # Unnormalized exponential kernel in the distance between x1 and x2.
    return exp(-1 * (np.linalg.norm(x1 - x2)) / (2 * variance))

def gram_matrix(xs):
    # RBF Gram matrix of the rows of xs (rbf_kernel imported from sklearn at file top).
    return rbf_kernel(xs, gamma=0.5)

def gaussian_kernel(u):
    # Standard normal density evaluated at u.
    return np.exp(-0.5 * u ** 2) / (np.sqrt(2 * np.pi))

def gaussian_kernel_h(u, h_2):
    # Gaussian density with variance h_2 evaluated at u.
    return (1 / (np.sqrt(h_2) * np.sqrt(2 * np.pi))) * np.exp((-0.5) / h_2 * (1.0 * u) ** 2)

def gaussian_k_bar(u):
    # Self-convolution of the Gaussian kernel: the N(0, 2) density.
    # BUG FIX: the exponent must be negative; the original exp(+0.25*u^2)
    # grows without bound in |u| instead of decaying like a kernel.
    return (1 / (np.sqrt(4 * np.pi))) * np.exp(-0.25 * np.linalg.norm(1.0 * u) ** 2)

def epanechnikov_kernel(u):
    # Epanechnikov kernel, supported on [-1, 1].
    return 0.75 * (1 - u ** 2) * (1 if abs(u) <= 1 else 0)

def epanechnikov_int(lo, hi):
    '''
    :return: Definite integral of the Epanechnikov kernel between lo and hi.
    Assumes that both endpoints lie within [-1, 1].
    '''
    return 0.75 * (hi - hi ** 3 / 3.0) - 0.75 * (lo - lo ** 3 / 3.0)

'''
Different option for discrete policy functions
Policy functions take in an x vector and return a treatment assignment.
'''
def discrete_optimal_central_policy(**params):
    '''
    :param params: requires x_samp, T_samp, n_bins
    :return: bin index of x/2 on a uniform grid over the observed treatment range
    '''
    x = params['x_samp']
    T = params['T_samp']
    t_lo = min(T)
    t_hi = max(T)
    n_bins = params['n_bins']
    bins = np.linspace(t_lo, t_hi, n_bins)
    T_binned = np.digitize(T, bins).flatten()
    x_binned = np.digitize(x / 2.0, bins).flatten()
    # bin_means kept for reference/debugging; the policy returns bin indices.
    bin_means = [T[T_binned == i].mean() for i in range(1, n_bins)]
    return x_binned

def discretize_tau_policy(**params):
    '''
    Discretize the treatment vector 'tau' according to uniform binning over
    the observed treatment range.
    '''
    x = params['x_samp']
    T = params['T_samp']
    n_bins = params['n_bins']
    t_lo = min(T)
    t_hi = max(T)
    bins = np.linspace(t_lo, t_hi, n_bins)
    T_binned = np.digitize(T, bins).flatten()
    bin_means = [T[T_binned == i].mean() for i in range(1, n_bins)]
    tau_binned = np.digitize(params['tau'], bins).flatten()
    return tau_binned

'''
Different options for generating data
'''
def generate_data_uniform(m, n, d, t_lo, t_hi, x_scheme='unif'):
    """
    Generate random features with GP-distributed outcomes over (x, t).
    # n: number of instances
    # m: grid length of treatment
    # d: feature dimension
    # x_scheme: switch to determine dependency structure of x (unused here)
    """
    xs = np.array(np.random.uniform(0, 2, (n, d)))
    t_fullgrid = np.linspace(t_lo, t_hi, m)
    Z_list = [np.concatenate([xs, np.ones([n, 1]) * (t_lo + 1.0 * i * (t_hi - t_lo) / (m - 1))], axis=1) for i in np.arange(m)]
    Z = np.concatenate(Z_list, axis=0)
    K = np.array(gram_matrix(Z)).reshape([m * n, m * n])
    T = Z[:, d]
    mean_vec = np.ones([m * n, 1])
    F = np.random.multivariate_normal(mean_vec.flatten(), 7 * K)
    # Ensure outcomes are positive
    if min(F) < 0:
        F = F + abs(min(F))
    Y = F + 0.05 * np.random.randn(m * n)

    return {'y': Y, 'z': Z, 'f': F, 'K': K, 'x': xs}


def generate_data(m, n, d, t_lo, t_hi, mean_vec_f, x_scheme='unif'):
    """
    Generate random features with GP outcomes whose mean is mean_vec_f applied
    row-wise to Z = [x, t].
    # n: number of instances
    # m: grid length of treatment
    # d: feature dimension
    # x_scheme: switch to determine dependency structure of x (unused here)
    """
    xs = np.array(np.random.uniform(0, 1, (n, d)))
    t = np.array(np.random.uniform(0, t_hi, size=(n, 1)))
    t_fullgrid = np.linspace(t_lo, t_hi, m)
    Z_list = [np.concatenate((xs, np.ones([n, 1]) * (t_lo + 1.0 * i * (t_hi - t_lo) / (m - 1))), axis=1) for i in np.arange(m)]
    Z = np.concatenate(Z_list, axis=0)
    K = np.array(gram_matrix(Z)).reshape([m * n, m * n])
    T = Z[:, d]
    mean_vec = np.apply_along_axis(mean_vec_f, 1, Z)
    F = np.random.multivariate_normal(mean_vec, 2 * K)
    # Ensure outcomes are positive
    if min(F) < 0:
        F = F + abs(min(F))
    Y = F + 0.05 * np.random.randn(m * n)

    return {'y': Y, 'z': Z, 'f': F, 'K': K, 'x': xs}

def off_pol_estimator(**params):
    """
    Core kernelized inverse-propensity estimator.
    Returns [loss, norm_sum]: the unnormalized kernel-weighted loss sum and the
    mean of the (thresholded) kernel/propensity ratios used for self-normalization.
    If y_samp/T_samp/x_samp are present they override y/T/x.
    """
    THRESH = params['threshold']
    y_out = params['y']; x = params['x']; h = params['h']; Q = params['Q']
    n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi']
    kernel = params['kernel_func']; kernel_int = params['kernel_int_func']
    if ('y_samp' in params.keys()):
        y_out = params['y_samp']
    if ('T_samp' in params.keys()):
        T = params['T_samp']
    else:
        T = params['T']
    if ('x_samp' in params.keys()):
        x = params['x_samp']

    BMI_IND = params.get('BMI_IND')  # propensity score for warfarin data evaluations
    if (params.get('DATA_TYPE') == 'warfarin'):
        x = params['x'][:, BMI_IND]

    loss = 0
    tau = params['tau']
    clip_tau = np.clip(tau, t_lo, t_hi)
    Qs = np.zeros(n)
    for i in np.arange(n):
        Q_i = Q(x[i], T[i], t_lo, t_hi)
        # alpha corrects for kernel mass lost past the boundary of [t_lo, t_hi]
        if (abs(clip_tau[i] - t_lo) <= h):
            alpha = kernel_int((t_lo - clip_tau[i]) / h, 1)
        elif (abs(clip_tau[i] - t_hi) <= h):
            alpha = kernel_int(-1, (t_hi - clip_tau[i]) / h)
        else:
            alpha = 1
        Qs[i] = (1.0 / h) * kernel((clip_tau[i] - T[i]) / h) / max(Q_i, THRESH)
        loss += kernel((clip_tau[i] - T[i]) / h) * 1.0 * y_out[i] / max(Q_i, THRESH) * 1.0 / alpha
    norm_sum = np.mean(np.maximum(Qs, THRESH * np.ones(n)))
    return [loss, norm_sum]

def off_policy_variance(**params):
    """
    Takes in a choice of kernel and dictionary of parameters and data required
    for evaluation. tau is a vector of treatment values (assumed given).
    If y_samp, T_samp are present, they are used instead.
    """
    [loss, norm_sum] = off_pol_estimator(**params)
    h = params['h']; n = params['n']
    loss = loss / (norm_sum * 1.0 * n * h)
    loss_mean = np.mean(loss)
    return np.square(loss - loss_mean)

def off_policy_evaluation(**params):
    """
    Self-normalized kernelized off-policy point estimate.
    tau is a vector of treatment values (assumed given).
    If y_samp, T_samp are present, they are used instead.
    """
    [loss, norm_sum] = off_pol_estimator(**params)
    h = params['h']
    n = params['n']
    return loss / (norm_sum * 1.0 * h * n)

def off_pol_disc_evaluation(policy, **params):
    """
    Discretized off-policy evaluation: bin observed treatments, keep only units
    whose binned policy treatment matches their observed bin, and IPW-average
    their outcomes. `policy` maps **params to a vector of treatment bins.
    """
    THRESH = params['threshold']
    y_out = params['y']; x = params['x_samp']; h = params['h']; Q = params['Q']; n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi']
    n_bins = params['n_bins']
    if ('y_samp' in params.keys()):
        y_out = params['y_samp'].flatten()
    if ('T_samp' in params.keys()):
        T = params['T_samp'].flatten()
    else:
        T = params['T'].flatten()

    BMI_IND = params.get('BMI_IND')  # propensity score for warfarin data evaluations
    if (params.get('DATA_TYPE') == 'warfarin'):
        x = params['x'][:, BMI_IND]

    t_lo = min(T)
    t_hi = max(T)
    bin_width = t_hi - t_lo
    bins = np.linspace(t_lo, t_hi, n_bins)
    T_binned = np.digitize(T, bins, right=True).flatten()
    bin_means = [T[T_binned == i].mean() for i in range(1, len(bins))]

    loss = 0
    tau_vec = policy(**params).flatten()
    #! FIXME need to establish whether policy returns discrete bins or means
    treatment_overlap = np.where(np.equal(tau_vec.flatten(), T_binned))[0]

    for ind in treatment_overlap:
        # density evaluated at the bin mean, scaled by the bin width
        # (per the original "BUG FIX: this is going to have to be integrated against")
        Q_i = Q(x[ind], bin_means[T_binned[ind] - 1], t_lo, t_hi) * bin_width * 1.0 / n_bins
        loss += y_out[ind] / max(Q_i, THRESH)
    n_overlap = len(treatment_overlap)
    if n_overlap == 0:
        print("no overlap")  # was a Python-2 print statement; this form works in 2 and 3
        return 0
    return loss / (1.0 * n)

# Self normalize disc. off pol evaluation
off pol evaluation 243 | # doesn't work well 244 | # def off_pol_disc_evaluation(policy, **params): 245 | # THRESH = params['threshold'] 246 | # y_out = params['y']; x = params['x_samp']; h = params['h']; Q = params['Q']; n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi'] 247 | # n_bins = params['n_bins'] 248 | # if ('y_samp' in params.keys()): 249 | # y_out = params['y_samp'].flatten() 250 | # if ('T_samp' in params.keys()): 251 | # T = params['T_samp'].flatten() 252 | # else: 253 | # T = params['T'].flatten() 254 | 255 | # BMI_IND = params.get('BMI_IND') # propensity score for warfarin data evaluations 256 | # if (params.get('DATA_TYPE') == 'warfarin'): 257 | # x = params['x'][:,BMI_IND] 258 | 259 | # t_lo = min(T) 260 | # t_hi = max(T) 261 | # bin_width = t_hi-t_lo 262 | # bins = np.linspace(t_lo, t_hi, n_bins) 263 | # T_binned = np.digitize(T, bins, right = True).flatten() 264 | # bin_means = [T[T_binned == i].mean() for i in range(1, len(bins))] 265 | 266 | # loss = 0 267 | # tau_vec = policy(**params).flatten() 268 | # #! 
FIXME need to establish whether policy returns discrete bins or means 269 | # treatment_overlap = np.where(np.equal(tau_vec.flatten(), T_binned))[0] 270 | # n_overlap = len(treatment_overlap) 271 | # Qs = np.zeros(n_overlap) 272 | # i=0 273 | # for ind in treatment_overlap: 274 | # Q_i = Q(x[ind], bin_means[T_binned[ind]-1], t_lo, t_hi) * bin_width*1.0/n_bins # BUG FIX: this is going to have to be integrated against 275 | # Qs[i] = 1.0/max(Q_i,THRESH) 276 | # loss += y_out[ind]/max(Q_i,THRESH) 277 | # i+=1 278 | 279 | # norm_sum = np.mean(Qs) 280 | # if n_overlap == 0: 281 | # print "no overlap" 282 | # return 0 283 | # return loss/(1.0*n*norm_sum) 284 | 285 | 286 | 287 | def off_pol_gaus_lin_grad(beta, *args): 288 | """ 289 | Compute a gradient for special case of gaussian kernel and linear policy tau 290 | """ 291 | params = dict(args[0]) 292 | y_out = params['y'];x = params['x']; T = params['T']; h = params['h']; Q = params['Q'] 293 | n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi'] 294 | tau = np.dot(x,beta) 295 | clip_tau = np.clip(tau, t_lo, t_hi) 296 | d = len(beta) 297 | grad = np.zeros([d,1]) 298 | for i in np.arange(n): 299 | Q_i = Q(x[i], T[i],t_lo, t_hi) 300 | beta_x_i = np.dot(x[i], beta) 301 | grad += (gaussian_kernel((beta_x_i - T[i])/h) * y_out[i]/Q_i) * (-1.0*x[i]/h**2) * (beta_x_i - T[i]) 302 | return grad/(1.0*h*len(y_out)) 303 | 304 | 305 | def partial_g_n_hat_i(**params): 306 | ''' 307 | Compute normalization term 308 | ''' 309 | 310 | def f_g(**params): 311 | THRESH = params['threshold'] 312 | y_out = params['y']; x = params['x']; h = params['h'];Q = params['Q']; n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi'] 313 | kernel = params['kernel_func'];kernel_int = params['kernel_int_func'] 314 | if ('y_samp' in params.keys()): 315 | y_out = params['y_samp'] 316 | if ('T_samp' in params.keys()): 317 | T = params['T_samp'] 318 | else: 319 | T = params['T'] 320 | if ('x_samp' in params.keys()): 321 | x = params['x_samp'] 322 
| BMI_IND = params.get('BMI_IND') # propensity score for warfarin data evaluations 323 | 324 | loss = 0 325 | g = 0 # also keep track of normalized probability ratio quantity 326 | partial_f = 0 327 | partial_g = 0 328 | tau = params['tau'] 329 | clip_tau = np.clip(tau, t_lo, t_hi) 330 | Qs = np.zeros(n) 331 | for i in np.arange(n): 332 | if (params.get('DATA_TYPE') == 'warfarin'): 333 | Q_i = Q(x[i,BMI_IND], T[i], t_lo, t_hi) 334 | else: 335 | Q_i = Q(x[i], T[i], t_lo, t_hi) 336 | if (abs(clip_tau[i] - t_lo) <= h): 337 | alpha = kernel_int((t_lo-clip_tau[i])/h, 1) 338 | elif (abs(clip_tau[i] - t_hi) <= h): 339 | alpha = kernel_int(-1, (t_hi - clip_tau[i])/h ) 340 | else: 341 | alpha = 1 342 | Qs[i] = kernel( (clip_tau[i] - T[i])/h )/max(Q_i,THRESH) 343 | loss += kernel( (clip_tau[i] - T[i])/h )*1.0 * y_out[i]/max(Q_i,THRESH) * 1.0/alpha 344 | if abs((clip_tau[i] - T[i])/h) >= 1: 345 | partial_f += 0 # don't add anything to partial derivatives 346 | else: 347 | partial_g += -1.5 * ((clip_tau[i] - T[i])/h ) * 1.0/max(Q_i,THRESH) * x[i,:] 348 | partial_f += -1.5 * ((clip_tau[i] - T[i])/h ) * y_out[i]/max(Q_i,THRESH) * x[i,:] 349 | norm_sum = np.mean(Qs) 350 | return [loss/(1.0*h*n), 1.0*norm_sum/h, partial_f/(1.0*n*h**2) , partial_g/(1.0*n*h**2) ] 351 | 352 | 353 | def off_pol_epan_lin_grad(beta, *args): 354 | """ 355 | Compute a gradient for special case of Epanechnikov kernel and linear policy tau 356 | """ 357 | # THRESH = 0.001 358 | d = len(beta) 359 | params = dict(args[0]) 360 | #! 
FIXME x vs xsamp 361 | tau = np.dot(beta, params['x'].T) 362 | params['tau'] = tau 363 | params['beta'] = beta 364 | 365 | THRESH = params['threshold'] 366 | 367 | [f, g, nabla_f, nabla_g] = f_g(**params) 368 | # compute gradient vector via quotient rule 369 | if g < THRESH: 370 | g = THRESH 371 | return np.asarray((g*nabla_f - f*nabla_g) / g**2 ) 372 | 373 | def off_pol_var_lin_grad(beta, *args): 374 | """ 375 | Compute a gradient for special case of Epanechnikov kernel and linear policy tau 376 | """ 377 | # THRESH = 0.001 378 | d = len(beta) 379 | params = dict(args[0]) 380 | #! FIXME x vs xsamp 381 | tau = np.dot(beta, params['x'].T) 382 | params['tau'] = tau 383 | params['beta'] = beta 384 | 385 | THRESH = params['threshold'] 386 | 387 | [f, g, nabla_f, nabla_g] = f_g(**params) 388 | # compute gradient vector via quotient rule 389 | if g < THRESH: 390 | g = THRESH 391 | return np.asarray((g*nabla_f - f*nabla_g) / g**2 ) 392 | 393 | def off_pol_gaus_lin_grad_for_max(beta, *args): 394 | """Wrapper function which multiplies gradient by -1 395 | """ 396 | return off_pol_gaus_lin_grad(beta, *args) 397 | 398 | """ 399 | Options for treatment policies 400 | """ 401 | def tau_test(tau_test_value, x): 402 | return tau_test_value 403 | def linear_tau(x, beta): 404 | return np.dot(beta,x) 405 | def unif_Q(x, t, t_lo, t_hi): 406 | return 1.0/(t_hi-t_lo) 407 | def trunc_norm_Q(x, t, t_lo, t_hi): 408 | # Get pdf from truncated normally distributed propensity score (standard normal centered around (x-t) 409 | sc = 0.5 410 | mu = x 411 | a, b = (t_lo - mu) / sc, (t_hi - mu) / sc 412 | return truncnorm.pdf(t, a,b, loc = mu, scale = sc) 413 | def norm_Q(x, t, t_lo, t_hi): 414 | OFFSET = 0.1 415 | std = 0.5 416 | return 1.0/std *norm.pdf( (t-x - OFFSET)/ std) 417 | 418 | def exp_Q(x, t, t_lo, t_hi): 419 | # Sample from an exponential conditional distribution of T on X using Inverse CDF transform 420 | return x*np.exp(-t*x) 421 | def sample_exp_T(x): 422 | u = np.random.uniform() 
423 | return -np.log(1-u)/x 424 | 425 | def sample_norm_T(x): 426 | # ' Sample randomly from uniform normal distribution' 427 | sc = 0.5 428 | OFFSET = 0.1 429 | return np.random.normal(loc=x + OFFSET, scale = sc) 430 | 431 | def evaluate_oracle_outcomes(m,n,f,t_lo,t_hi,tau,X): 432 | """ 433 | Evaluate 'true' outcomes at closest grid point to given tau vector 434 | """ 435 | j_taus = np.array( [int(np.round(1.0*t*(m-1)/t_hi)) for t in tau] ) 436 | j_taus = np.clip(j_taus, 0, m-1) 437 | return np.array( [ f[j_taus[ind]*n+ind] for ind in np.arange(n)] ) 438 | 439 | def evaluate_oracle_interpolated_outcomes(**params): 440 | """ 441 | Function is given a spline curve with which to interpolate values at 'tau' 442 | """ 443 | spline_tck = params['spline']; tau = params['tau']; X = params['x_samp'] 444 | outcomes = [ interpolate.bisplev( X[i], tau[i], spline_tck ) for i in np.arange(len(X)) ] 445 | return np.array(outcomes) 446 | 447 | def sample_T_given_x(x, t_lo, t_hi, sampling = "uniform"): 448 | # Sample from propensity score 449 | # e.g. 
exponential distribution 450 | sc = 0.5 451 | if (sampling == "exp"): 452 | sample_exp_T_vec = np.vectorize(sample_exp_T) 453 | T_sub = sample_exp_T_vec(x / std) 454 | T_sub = np.clip(T_sub, t_lo, t_hi) 455 | elif (sampling == "normal"): 456 | # Unbounded normal sampling 457 | sample_norm_T_vec = np.vectorize(sample_norm_T) 458 | T_sub = sample_norm_T_vec(x ) 459 | elif (sampling == "truncated_normal"): 460 | # Unbounded normal sampling 461 | # sample_norm_T_vec = np.vectorize(sample_norm_T) 462 | # T_sub = sample_norm_T_vec(x ) 463 | T_sub = np.zeros([len(x), 1]) 464 | for i in np.arange(len(x)): 465 | a =(t_lo - x[i]) / sc 466 | b = (t_hi - x[i]) / sc 467 | T_sub[i] = truncnorm.rvs(a, b, loc = x[i], scale = sc, size=1)[0] 468 | else: 469 | T_sub = np.array( [ np.random.uniform(low = t_lo, high= t_hi) for x_samp in x ] ) 470 | return T_sub 471 | 472 | def evaluate_subsample( n_sub, verbose = False, evaluation=False, cross_val = True, **param_dict): 473 | """ 474 | Evaluate off policy evaluation given a subsample of data from full 475 | Or just subsample data and return subsampled_dictionary 476 | """ 477 | Z = param_dict['z']; X = param_dict['x']; t_lo = param_dict['t_lo']; t_hi = param_dict['t_hi']; m = param_dict['m'] 478 | n = param_dict['n']; Y = param_dict['y']; d = param_dict['d']; f = param_dict['f']; data_gen = param_dict['data_gen'] 479 | sampling = param_dict['sampling']; sub_params = param_dict.copy() 480 | # Subsample data 481 | if (data_gen == "grid"): 482 | X_sub = np.random.choice(n-1, n_sub) 483 | T_sub = sample_T_given_x(X[X_sub], t_lo, t_hi, sampling) 484 | # Round T to grid values 485 | j_s = np.array( [int(np.round(1.0*t*(m-1)/t_hi)) for t in T_sub] ).flatten() 486 | T_grid = np.array([ t_lo + 1.0*np.round(1.0*t*(m-1)/t_hi)*(t_hi-t_lo)/(m-1) for t in T_sub ]) 487 | Y_sub = np.array( [ Y[j_s[ind]*n+x] for (ind,x) in enumerate(X_sub)] ) 488 | sub_params['n'] = n_sub 489 | sub_params['y_samp'] = Y_sub.flatten() 490 | #! 
FIXME flattening possibly multidimensional data 491 | sub_params['x_samp'] = X[X_sub,:] 492 | sub_params['T_samp'] = T_grid.flatten() 493 | 494 | else: 495 | # Uniform sampling 496 | X_sub = np.random.choice(m*n-1, n_sub) 497 | sub_params['n'] = n_sub 498 | sub_params['x_samp'] = X[X_sub,:].reshape([n_sub,1]) 499 | # Toggle how sampling is drawn 500 | if sampling != "uniform": 501 | sub_params['T_samp'] = sample_T_given_x( X[X_sub,:], t_lo, t_hi, sampling ).reshape([n_sub,1]) 502 | else: # assume uniform otherwise 503 | sub_params['T_samp'] = Z[:,d][X_sub].reshape([n_sub,1]) 504 | # Toggle how oracle values are drawn 505 | if (sub_params['oracle_func']): 506 | # temporary setting of tau to 507 | sub_params['tau'] = sub_params['T_samp'] 508 | # adding noise to 'y' values 509 | sub_params['y_samp'] = oracle_evaluation(**sub_params) #+ np.random.randn(n_sub,1)*0.05 510 | sub_params['f_samp'] = oracle_evaluation(**sub_params) 511 | del sub_params['tau'] 512 | else: #Oracle fnc parameter not set 513 | sub_params['y_samp'] = Y[X_sub].reshape([n_sub,1]) 514 | sub_params['f_samp'] = f[X_sub].reshape([n_sub,1]) 515 | 516 | if 'tau' in param_dict.keys(): 517 | sub_params['tau'] = param_dict['tau'][X_sub] 518 | else: 519 | if verbose: 520 | print "No taus given" 521 | if cross_val: 522 | h_opt = find_best_h(cv_func, res, **sub_params) 523 | sub_params['h'] = h_opt 524 | 525 | return sub_params 526 | 527 | 528 | def plot_surface(plot_sample = False, **params): 529 | fig = plt.figure(figsize=plt.figaspect(.2)) 530 | ax = fig.add_subplot(1,3,1, projection='3d') 531 | 532 | if not plot_sample: 533 | x = params['z'][:,0] 534 | t = params['z'][:,1] 535 | y = params['y'] 536 | else: 537 | x = params['x_samp'] 538 | t = params['T_samp'] 539 | y = params['y_samp'] 540 | 541 | ax.scatter(x, t, y, s = 0.06) 542 | ax.set_xlabel('x Label') 543 | ax.set_ylabel('t Label') 544 | ax.set_zlabel('y Label') 545 | ax = fig.add_subplot(1, 3, 2, projection='3d') 546 | ax.scatter(x, t, y, s = 0.06) 
547 | # Add best beta vector 548 | # ax1.scatter(x[40:],y[40:], s=10, c='r', marker="o", label='second') 549 | ax.azim = 240 550 | ax.elev = 20 551 | ax.set_xlabel('x ') 552 | ax.set_ylabel('t ') 553 | ax.set_zlabel('y ') 554 | plt.show() 555 | 556 | def lin_off_policy_loss_evaluation(beta, *args): 557 | arg_dict = dict(args[0]) 558 | t_lo = arg_dict['t_lo'] 559 | t_hi = arg_dict['t_hi'] 560 | x = arg_dict['x_samp'] 561 | arg_dict['tau'] = np.clip(np.dot(x,beta), t_lo, t_hi) 562 | return off_policy_evaluation(**arg_dict) 563 | 564 | def constant_off_policy_loss_evaluation(const, *args): 565 | arg_dict = dict(args[0]) 566 | x = arg_dict['x_samp'] 567 | arg_dict['tau'] = const * np.ones(arg_dict['n']) 568 | return off_policy_evaluation(**arg_dict) 569 | 570 | def eval_interpolated_oracle_tau(beta, *args): 571 | params = dict(args[0]) 572 | t_lo = params['t_lo'] 573 | t_hi = params['t_hi'] 574 | spline_tck = params['spline'] 575 | tau_candidate = np.clip(np.dot(beta, params['x_samp'].T), t_lo, t_hi) 576 | params['tau'] = tau_candidate 577 | return np.mean(evaluate_oracle_interpolated_outcomes(**params)) 578 | 579 | 580 | def eval_const_interpolated_oracle_tau(const, *args): 581 | params = dict(args[0]) 582 | t_lo = params['t_lo'] 583 | t_hi = params['t_hi'] 584 | spline_tck = params['spline'] 585 | tau_candidate = const * np.ones(params['n']) 586 | params['tau'] = tau_candidate 587 | return np.mean(evaluate_oracle_interpolated_outcomes(**params)) 588 | 589 | def eval_oracle_tau(beta, *args): 590 | params = dict(args[0]) 591 | t_lo = params['t_lo'] 592 | t_hi = params['t_hi'] 593 | tau_candidate = np.clip(np.dot(beta, params['x'].T), t_lo, t_hi) 594 | #!FIXME graceful handling of loss function of y_i 595 | params['tau'] = tau_candidate 596 | return np.mean(evaluate_oracle_interpolated_outcomes(**params)) 597 | def eval_oracle_tau_evaluation(beta, *args): 598 | params = dict(args[0]) 599 | t_lo = params['t_lo'] 600 | t_hi = params['t_hi'] 601 | tau_candidate = 
np.clip(np.dot(beta, params['x'].T), t_lo, t_hi) 602 | #!FIXME graceful handling of loss function of y_i 603 | params['tau'] = tau_candidate 604 | return np.mean(oracle_evaluation(**params)) 605 | 606 | 607 | def pol_opt(verbose = True, samp_func = lin_off_policy_loss_evaluation, oracle_eval = eval_interpolated_oracle_tau, **samp_params): 608 | """ 609 | Run a policy optimization test, comparing performance of empirical minimizer against the true counterfactual outcomes. 610 | """ 611 | d = samp_params['d'] 612 | n = samp_params['n'] 613 | t_lo = samp_params['t_lo'] 614 | t_hi = samp_params['t_hi'] 615 | beta_d = [np.random.uniform() for i in np.arange(d)] 616 | if samp_params['kernel_func'] == gaussian_kernel: 617 | res = minimize(samp_func, x0 = beta_d, jac = off_pol_gaus_lin_grad_for_max, bounds = ((0, t_hi/max(samp_params['x']) ),) , args=samp_params.items() ) 618 | else: 619 | res = minimize(samp_func, x0 = beta_d, jac = off_pol_epan_lin_grad, bounds = ((t_lo/max(samp_params['x_samp']), t_hi/max(samp_params['x_samp']) ),) , args=samp_params.items() ) 620 | emp_best_tau = np.clip(np.dot(res.x, samp_params['x'].T), t_lo, t_hi) 621 | if verbose: 622 | print "Optimization results" 623 | print res 624 | print "Policy treatments:" 625 | print emp_best_tau 626 | print "Observed treatments: " 627 | print samp_params['T_samp'] 628 | # print "Deviation in treatment vector: " 629 | # print np.linalg.norm(emp_best_tau - samp_params['T_samp']) 630 | print 'x: ' + str( res.x ) 631 | print 'off pol evaluation value ' 632 | print res.fun 633 | """ 634 | Optimize a treatment policy over oracle outcomes f 635 | """ 636 | # spl_x = samp_params['z'][:,0] 637 | # spl_t = samp_params['z'][:,1] 638 | # # f is positive 639 | # splined_f_tck = interpolate.bisplrep(spl_x,spl_t, samp_params['f']) 640 | # samp_params['spline'] = splined_f_tck 641 | samp_params['tau'] = emp_best_tau 642 | oracle_outcomes = samp_params['oracle_func'](**samp_params) 643 | ## Evaluate the 'true' performance 
of this treatment vector 644 | print 'oracle mean of empirically best feature vector \n' 645 | print np.mean(oracle_outcomes) 646 | 647 | # print 'Computing oracle best-in-class linear policy via interpolation of true response surface: \n' 648 | beta_d = [np.random.uniform() for i in np.arange(d)] 649 | # print "initial condition: " + str(beta_d) 650 | # print 'val of initial condition: ' 651 | # print oracle_func(beta_d, samp_params.items()) 652 | 653 | oracle_res = minimize(oracle_eval, x0 = beta_d, bounds = ((0, 1.0/np.mean(samp_params['x']) ),) , args=samp_params.items() ) 654 | if verbose: 655 | print oracle_res 656 | print 'beta' 657 | print oracle_res.x 658 | print 'oracle best linear treatment policy value ' 659 | print oracle_res.fun 660 | 661 | return [res, oracle_res, splined_f_tck] 662 | 663 | def off_pol_opt_test(n_max, n_trials, n_spacing, n_0, t_lo_sub,t_hi_sub, **sub_params): 664 | n = sub_params['n']; m = sub_params['m']; t_lo = t_lo_sub; t_hi = t_hi_sub 665 | d = sub_params['d'] 666 | n_space = np.linspace(n_0, n_max, n_spacing) 667 | best_beta = np.zeros([len(n_space),n_trials]) 668 | best_oracle_beta = np.zeros([len(n_space),n_trials]) 669 | OOS_OPE = np.zeros([len(n_space),n_trials]) 670 | OOS_oracle = np.zeros([len(n_space),n_trials]) 671 | # discrete_off_pol_evals = np.zeros([n_treatments, n_spacing, n_trials]) 672 | oracle_func = sub_params['oracle_func'] 673 | h_orig = sub_params['h'] 674 | TEST_N = 250 675 | TEST_SET = evaluate_subsample( 250, evaluation = False, cross_val = False, **sub_params ) 676 | 677 | for i, n_sub in enumerate(np.linspace(n_0, n_max, n_spacing)): 678 | # sub_params['h'] = h_orig * (np.power(n_sub,0.2))/np.power(n_0,0.2) 679 | n_rnd = int(np.floor(n_sub)) 680 | print "testing with n = " + str(n_rnd) 681 | for k in np.arange(n_trials): 682 | subsamples_pm = evaluate_subsample( n_rnd, evaluation = False, cross_val = False, **sub_params ) 683 | # oracle_evals[t_ind, i, k] = 
np.mean(evaluate_oracle_interpolated_outcomes(splined_f_tck, m,n_rnd, subsamples_pm['f'], t_lo, t_hi, subsamples_pm['tau'], subsamples_pm['x_samp'])) 684 | ### Compute best betas with random restarts 685 | oracle_betas = np.zeros([n_restarts, d]);eval_vals = np.zeros([n_restarts, d]);emp_betas = np.zeros([n_restarts, d]);emp_eval_vals = np.zeros([n_restarts, d]) 686 | for i_restart in np.arange(n_restarts): 687 | beta_d = [np.random.uniform() for i in np.arange(d)] 688 | res = minimize(lin_off_policy_loss_evaluation, x0 = beta_d, jac = off_pol_epan_lin_grad, bounds = ((t_lo/max(samp_params['x_samp']), t_hi/max(samp_params['x_samp']) ),) , args=samp_params.items() ) 689 | emp_betas[i_restart] = res.x; emp_eval_vals[i_restart] = res.fun 690 | 691 | oracle_res = minimize(oracle_func, x0 = beta_d, bounds = ((0, 1.0/np.mean(samp_params['x']) ),) , args=samp_params.items() ) 692 | oracle_betas[i_restart] = oracle_res.x; eval_vals[i_restart] = oracle_res.fun 693 | 694 | emp_best_tau = np.clip(np.dot(res.x, samp_params['x_samp'].T), t_lo, t_hi) 695 | # get best beta value from random restarts 696 | best_ind = np.argmin(emp_eval_vals) 697 | best_beta[i,k] = emp_betas[best_ind,:] 698 | 699 | best_oracle_ind = np.argmin(eval_vals) 700 | best_oracle_beta[i,k] = oracle_betas[oracle_betas,:] 701 | TEST_SET['tau'] = best_beta[i,k] * TEST_SET['x_samp'] 702 | OOS_OPE[i,k] = off_policy_evaluation(**TEST_SET) 703 | OOS_oracle[i,k] = np.mean(oracle_func(**TEST_SET)) 704 | 705 | return [best_beta, best_oracle_beta, OOS_OPE, OOS_oracle] 706 | 707 | 708 | def off_pol_eval_cons_test(n_max, n_trials, n_treatments, n_spacing, n_0,t_lo_sub,t_hi_sub, **sub_params): 709 | n = sub_params['n']; m = sub_params['m']; t_lo = t_lo_sub; t_hi = t_hi_sub 710 | treatment_space = np.linspace(t_lo, t_hi, n_treatments) 711 | off_pol_evals = np.zeros([n_treatments, n_spacing, n_trials]) 712 | oracle_evals = np.zeros([n_treatments, n_spacing, n_trials]) 713 | discrete_off_pol_evals = np.zeros([n_treatments, 
n_spacing, n_trials]) 714 | oracle_func = sub_params['oracle_func'] 715 | splined_f_tck = sub_params['spline'] 716 | h_orig = sub_params['h'] 717 | for i, n_sub in enumerate(np.linspace(n_0, n_max, n_spacing)): 718 | # sub_params['h'] = h_orig * (np.power(n_sub,0.2))/np.power(n_0,0.2) 719 | n_rnd = int(np.floor(n_sub)) 720 | print "testing with n = " + str(n_rnd) 721 | for k in np.arange(n_trials): 722 | for t_ind, t in enumerate(treatment_space): 723 | subsamples_pm = evaluate_subsample( n_rnd, evaluation = False, cross_val = False, **sub_params ) 724 | subsamples_pm['tau'] = t * np.ones(n_sub) 725 | oracle_evals[t_ind, i, k] = np.mean(oracle_func(**subsamples_pm)) 726 | # oracle_evals[t_ind, i, k] = np.mean(evaluate_oracle_interpolated_outcomes(splined_f_tck, m,n_rnd, subsamples_pm['f'], t_lo, t_hi, subsamples_pm['tau'], subsamples_pm['x_samp'])) 727 | off_pol_evals[t_ind, i, k] = off_policy_evaluation(**subsamples_pm) 728 | discrete_off_pol_evals[t_ind, i, k] = off_pol_disc_evaluation(discretize_tau_policy , **subsamples_pm) 729 | 730 | off_pol_evals.dump( str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_vals.np') 731 | oracle_evals.dump(str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_oracles.np') 732 | return [oracle_evals, off_pol_evals, discrete_off_pol_evals] 733 | 734 | 735 | def off_pol_eval_linear_test( n_max, beta_0, beta_hi, n_trials, n_treatments, n_spacing, n_0, **sub_params): 736 | ''' 737 | Systematically evaluate over a treatment space defined by a linear treatment policy 738 | ''' 739 | treatment_space = np.linspace(beta_0, beta_hi, n_treatments) 740 | off_pol_evals = np.zeros([n_treatments, n_spacing, n_trials]) 741 | oracle_evals = np.zeros([n_treatments, n_spacing, n_trials]) 742 | discrete_off_pol_evals = np.zeros([n_treatments, n_spacing, n_trials]) 743 | t_lo = sub_params['t_lo']; t_hi = sub_params['t_hi']; spl_x = sub_params['z'][:,0]; spl_t = sub_params['z'][:,1] 744 | # f is positive 745 | 
splined_f_tck = interpolate.bisplrep(spl_x,spl_t, sub_params['f']) 746 | sub_params['spline'] = splined_f_tck 747 | oracle_func = sub_params['oracle_func'] 748 | n = sub_params['n']; m = sub_params['m'] 749 | 750 | for i, n_sub in enumerate(np.linspace(n_0, n_max, n_spacing)): 751 | n_rnd = int(np.floor(n_sub)) 752 | print "testing n = " + str(n_rnd) 753 | for k in np.arange(n_trials): 754 | for beta_ind, beta in enumerate(treatment_space): 755 | subsamples_pm = evaluate_subsample( n_rnd, evaluation = False, cross_val = False, **sub_params ) 756 | tau = np.clip(np.dot( subsamples_pm['x_samp'], beta ) , t_lo, t_hi) 757 | subsamples_pm['tau'] = tau 758 | oracle_evals[beta_ind, i, k] = np.mean(oracle_func(**subsamples_pm)) 759 | # oracle_evals[beta_ind, i, k] = np.mean(evaluate_oracle_interpolated_outcomes(splined_f_tck,m,n_rnd, subsamples_pm['f'], beta_0, beta_hi, tau, subsamples_pm['x_samp'])) 760 | # off_pol_evals[beta_ind, i, k] = off_policy_evaluation(**subsamples_pm) 761 | off_pol_evals[beta_ind, i, k] = off_policy_evaluation(**subsamples_pm) 762 | discrete_off_pol_evals[beta_ind, i, k] = off_pol_disc_evaluation(discretize_tau_policy , **subsamples_pm) 763 | 764 | off_pol_evals.dump( str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_linear_vals.np') 765 | oracle_evals.dump(str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_linear_oracles.np') 766 | return [oracle_evals, off_pol_evals, discrete_off_pol_evals] 767 | ''' 768 | Systematically evaluate over a treatment space defined by a linear treatment policy 769 | 770 | With DM 771 | ''' 772 | def off_pol_eval_linear_test( n_max, beta_0, beta_hi, n_trials, n_treatments, n_spacing, n_0, **sub_params): 773 | ''' 774 | ''' 775 | treatment_space = np.linspace(beta_0, beta_hi, n_treatments) 776 | off_pol_evals = np.zeros([n_treatments, n_spacing, n_trials]) 777 | oracle_evals = np.zeros([n_treatments, n_spacing, n_trials]) 778 | discrete_off_pol_evals = np.zeros([n_treatments, 
n_spacing, n_trials])
779 | t_lo = sub_params['t_lo']; t_hi = sub_params['t_hi']; spl_x = sub_params['z'][:,0]; spl_t = sub_params['z'][:,1]
780 | # f is positive
781 | splined_f_tck = interpolate.bisplrep(spl_x,spl_t, sub_params['f'])
782 | sub_params['spline'] = splined_f_tck
783 | oracle_func = sub_params['oracle_func']
784 | n = sub_params['n']; m = sub_params['m']
785 | 
786 | for i, n_sub in enumerate(np.linspace(n_0, n_max, n_spacing)):
787 | n_rnd = int(np.floor(n_sub))
788 | print "testing n = " + str(n_rnd)
789 | for k in np.arange(n_trials):
790 | for beta_ind, beta in enumerate(treatment_space):
791 | subsamples_pm = evaluate_subsample( n_rnd, evaluation = False, cross_val = False, **sub_params )
792 | tau = np.clip(np.dot( subsamples_pm['x_samp'], beta ) , t_lo, t_hi)
793 | subsamples_pm['tau'] = tau
794 | oracle_evals[beta_ind, i, k] = np.mean(oracle_func(**subsamples_pm))
795 | # oracle_evals[beta_ind, i, k] = np.mean(evaluate_oracle_interpolated_outcomes(splined_f_tck,m,n_rnd, subsamples_pm['f'], beta_0, beta_hi, tau, subsamples_pm['x_samp']))
796 | # off_pol_evals[beta_ind, i, k] = off_policy_evaluation(**subsamples_pm)
797 | off_pol_evals[beta_ind, i, k] = off_policy_evaluation(**subsamples_pm)
798 | discrete_off_pol_evals[beta_ind, i, k] = off_pol_disc_evaluation(discretize_tau_policy , **subsamples_pm)
799 | 
800 | off_pol_evals.dump( str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_linear_vals.np')
801 | oracle_evals.dump(str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M")) + 'off_pol_linear_oracles.np')
802 | return [oracle_evals, off_pol_evals, discrete_off_pol_evals]
803 | 
804 | def plot_off_pol_evals(off_pol_evals, oracle_evals, off_pol_disc_evals, n_0, n, n_trials, n_treatments, n_spacing, t_lo, t_hi, x_label, title_stem, truncate_y = False):  # one figure per subsample size: mean curves with confidence bands for oracle, continuous OPE, and discretized OPE
805 | mean_off_pol_vals = np.mean(off_pol_evals, axis = 2)
806 | mean_oracle_vals = np.mean(oracle_evals,axis=2)
807 | sds_off_pol = np.std(off_pol_evals, axis = 2)
808 | sds_oracle 
= np.std(oracle_evals, axis = 2)
809 | mean_off_pol_disc_evals = np.mean(off_pol_disc_evals,axis=2)
810 | sds_off_pol_disc = np.std(off_pol_disc_evals, axis = 2)
811 | 
812 | ts = np.linspace(t_lo, t_hi, n_treatments)
813 | 
814 | ns = np.linspace(n_0, n, n_spacing)
815 | for i in np.arange(n_spacing):
816 | plt.figure(i+1)
817 | error_1 = 1.96*sds_off_pol[:,i]/np.sqrt(n_trials)  # 95% CI half-width over the n_trials replications
818 | error_2 = 1.96*sds_oracle[:,i]/np.sqrt(n_trials)
819 | error_3 = 1.96*sds_off_pol_disc[:,i]/np.sqrt(n_trials)
820 | 
821 | plt.plot(ts, mean_oracle_vals[:,i], c = "blue")
822 | plt.fill_between(ts, mean_oracle_vals[:,i]-error_2, mean_oracle_vals[:,i]+error_2, alpha=0.5, edgecolor='blue', facecolor='blue')
823 | 
824 | plt.scatter(ts, mean_off_pol_disc_evals[:,i], c = "green")
825 | plt.fill_between(ts, mean_off_pol_disc_evals[:,i]-error_3, mean_off_pol_disc_evals[:,i]+error_3, alpha=0.4, edgecolor='g', facecolor='g')
826 | plt.scatter(ts, mean_off_pol_vals[:,i], c = "red")
827 | plt.fill_between(ts, mean_off_pol_vals[:,i]-error_1, mean_off_pol_vals[:,i]+error_1, alpha=0.5, edgecolor='#CC4F1B', facecolor='#FF9848')
828 | 
829 | # plt.ylim( (0, 10) )
830 | plt.title(title_stem+ " with n = " + str(ns[i]))
831 | plt.ylabel("outcome Y")
832 | plt.xlabel(x_label)
833 | if truncate_y:
834 | plt.ylim((0,truncate_y))
835 | plt.show()
836 | 
837 | '''
838 | Helper functions for (noisy) bandwidth estimation:
839 | '''
840 | 
841 | def build_linear_model( **samp_params):
842 | '''
843 | Fit a linear response model for use in estimation of bandwidth
844 | Test code for testing linear model of response
845 | # test_val = np.random.uniform()
846 | # samp_params['tau'] = test_val * np.ones([n,1])
847 | # test_data = np.concatenate( [samp_params['x'], samp_params['tau']], axis = 1 )
848 | # pred = regr.predict(test_data)
849 | pred_params = {'z' : test_data, 'y' : pred }
850 | plot_surface(**pred_params)
851 | plot_surface(**sub_params)
852 | '''
853 | n = samp_params['n']  # NOTE(review): n appears unused in this function body
854 | regr = 
linear_model.LinearRegression()
855 | samp_params['z_samp'] = np.concatenate( [samp_params['x_samp'], samp_params['T_samp']],axis = 1 )
856 | regr.fit(samp_params['z_samp'], samp_params['y_samp'])
857 | return regr
858 | 
859 | def scores_cond_f_y_given_tau_x(joint_f_t_x, joint_f_y_t_x, test_point):
860 | """
861 | Use the estimates of joint density of F_{T,X} and F_{Y,T,X} to estimate
862 | the conditional density F_{Y|T,X} at the given test point
863 | Test point: [y, t, x]
864 | """
865 | tp = test_point[1:]
866 | joint_f_tau_x = joint_f_t_x.score_samples( tp.reshape([1,2]) )  # KernelDensity.score_samples returns log-density
867 | joint_f_y_tau_x = joint_f_y_t_x.score_samples( test_point.reshape([1,3]) )
868 | return np.exp(joint_f_y_tau_x - joint_f_tau_x)  # exp(log f(y,t,x) - log f(t,x)) = f(y | t, x)
869 | 
870 | # def scores_cond_f_y_given_tau_x_caller(test_point):
871 | # #FIXME: will look in global scope
872 | # return scores_cond_f_y_given_tau_x(joint_f_t_x, joint_f_y_t_x, test_point)
873 | 
874 | def bias_integrand(y, tau, x, hessian):
875 | x0 = np.asarray([y, tau, x])  # NOTE(review): x0 is unused; the call below passes the equivalent plain list instead
876 | return y**2 * hessian([y, tau, x])[1][1] * 0.5
877 | 
878 | def empirical_exp_second_moment(regr, **params):
879 | x = params['x']
880 | tau = params['tau']
881 | y = params['y_samp']
882 | T = params['T']
883 | 
884 | y_pred = regr.predict(np.concatenate([params['x_samp'], params['tau']], axis = 1))
885 | Q = params['Q']
886 | Q_vec = np.asarray([Q(x[i], T[i], params['t_lo'], params['t_hi']) for i in range(params['n'])])  # per-observation propensity evaluated at the logged treatment
887 | return np.square(y_pred) / Q_vec
888 | 
889 | def est_h(h_sub, regr, hess, **samp_params):
890 | R_K = 1.0/(2*np.sqrt(np.pi))  # roughness constant of the Gaussian kernel
891 | kappa_two = 1.0
892 | C = R_K /(4.0 * samp_params['n'] * kappa_two**2)
893 | exp_second_moment = np.mean(empirical_exp_second_moment(regr, **samp_params))
894 | # Assume that tau doesn't change for x_i for now
895 | bias = 0
896 | ymin = min(samp_params['y_samp'])
897 | ymax = max(samp_params['y_samp'])
898 | 
899 | for i in range(h_sub):
900 | print i
901 | bias += integrate.quad(lambda u: bias_integrand(u, samp_params['tau'][i], 
samp_params['x_samp'][i], hess), ymin, ymax)[0]
902 | mean_bias_sqd = (bias/h_sub)**2
903 | h = np.power(C*exp_second_moment/(mean_bias_sqd*samp_params['n']), 0.2)  # exponent 0.2 -> plug-in bandwidth shrinking like n^(-1/5)
904 | 
905 | print "opt h for this treatment vector: " + str(h)
906 | return h
907 | 
908 | ''' variant of OPE with known propensities
909 | '''
910 | ## given Known propensities
911 | def off_policy_evaluation_known_Q(**params):
912 | """
913 | Takes in a choice of kernel and dictionary of parameters and data required for evaluation
914 | tau is a vector of treatment values (assumed given)
915 | If y_samp, T_samp is present, use that instead.
916 | """
917 | [loss, norm_sum] = off_pol_estimator_known_Q(**params)
918 | h = params['h']
919 | n = params['n']
920 | return loss/(norm_sum*1.0*h*n)  # self-normalized kernelized IPW estimate
921 | 
922 | def off_pol_estimator_known_Q(**params):
923 | THRESH = params['threshold']  # lower clip on propensities to bound the IPW weights
924 | y_out = params['y']; x = params['x']; h = params['h'];n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi']
925 | kernel = params['kernel_func'];kernel_int = params['kernel_int_func']
926 | Q = params['Q_known'];
927 | if ('y_samp' in params.keys()):
928 | y_out = params['y_samp']
929 | if ('T_samp' in params.keys()):
930 | T = params['T_samp']
931 | else:
932 | T = params['T']
933 | if ('x_samp' in params.keys()):
934 | x = params['x_samp']
935 | 
936 | BMI_IND = params.get('BMI_IND') # propensity score for warfarin data evaluations
937 | if (params.get('DATA_TYPE') == 'warfarin'):
938 | x = params['x'][:,BMI_IND]
939 | 
940 | loss = 0
941 | tau = params['tau']
942 | clip_tau = np.clip(tau, t_lo, t_hi)
943 | Qs = np.zeros(n)
944 | for i in np.arange(n):
945 | Q_i = Q[i]
946 | if (abs(clip_tau[i] - t_lo) <= h):  # boundary correction: alpha rescales for the kernel mass truncated at the edges of [t_lo, t_hi]
947 | alpha = kernel_int((t_lo-clip_tau[i])/h, 1)
948 | elif (abs(clip_tau[i] - t_hi) <= h):
949 | alpha = kernel_int(-1, (t_hi - clip_tau[i])/h )
950 | else:
951 | alpha = 1
952 | Qs[i] = (1.0/h)*kernel( (clip_tau[i] - T[i])/h )/max(Q_i,THRESH)
953 | loss += kernel( (clip_tau[i] - T[i])/h )*1.0 * 
y_out[i]/max(Q_i,THRESH) * 1.0/alpha
954 | 
955 | # if kernel( (clip_tau[i] - T[i])/h )>0.5:
956 | # print y_out[i]
957 | # print 'propensity: ' + str(Q_i)
958 | norm_sum = np.mean(np.maximum(Qs,THRESH*np.ones(n)))  # self-normalization denominator, thresholded to avoid division blow-ups
959 | return [loss, norm_sum]
960 | 
961 | def bandwidth_selection(n_samp,h_sub, **params):
962 | '''
963 | Top-level function for estimating bandwidth. Note that this scales incredibly poorly with the size of the sampled dataset.
964 | '''
965 | def scores_cond_f_y_given_tau_x_caller(test_point):  # closure over joint_f_t_x / joint_f_y_t_x, which are bound further down in this function (Python late binding)
966 | return scores_cond_f_y_given_tau_x(joint_f_t_x, joint_f_y_t_x, test_point)
967 | 
968 | n = params['n']
969 | 
970 | samp_params = evaluate_subsample(n_samp, cross_val = False, evaluation = False, **params)
971 | regr = build_linear_model(**samp_params)
972 | 
973 | samp_params['tau'] = 0.5 * np.ones([samp_params['n'], 1])
974 | 
975 | samp_params['z_samp'] = np.concatenate([samp_params['x_samp'], samp_params['T_samp']], axis = 1)
976 | bandwidths = {'bandwidth': np.logspace(-1,1,20)}  # cross-validate KDE bandwidth over a log grid
977 | grid = GridSearchCV(KernelDensity(), bandwidths)
978 | grid.fit(samp_params['z_samp'])
979 | 
980 | bandwidth_est = grid.best_estimator_.bandwidth
981 | joint_f_t_x = KernelDensity(kernel='gaussian', bandwidth = bandwidth_est).fit(samp_params['z_samp'] )
982 | joint_f_y_t_x = KernelDensity(kernel='gaussian', bandwidth = bandwidth_est).fit(
983 | np.concatenate([samp_params['y_samp'],samp_params['z_samp']],axis=1) )
984 | 
985 | cond_dens_hess = nd.Hessian(scores_cond_f_y_given_tau_x_caller)  # NOTE(review): `import numdifftools as nd` is commented out at the top of this module, so this line raises NameError -- restore that import
986 | h = est_h(h_sub, regr, cond_dens_hess, **samp_params)
987 | return h
--------------------------------------------------------------------------------
/minimal_OPE.ipynb:
--------------------------------------------------------------------------------
 1 | {
 2 | "cells": [
 3 | {
 4 | "cell_type": "code",
 5 | "execution_count": 1,
 6 | "metadata": {},
 7 | "outputs": [
 8 | {
 9 | "data": {
 10 | "application/javascript": [
 11 | "if (!(\"Notification\" in window)) {\n",
 12 | " alert(\"This 
browser does not support desktop notifications, so the %%notify magic will not work.\");\n", 13 | "} else if (Notification.permission !== 'granted' && Notification.permission !== 'denied') {\n", 14 | " Notification.requestPermission(function (permission) {\n", 15 | " if(!('permission' in Notification)) {\n", 16 | " Notification.permission = permission;\n", 17 | " }\n", 18 | " })\n", 19 | "}\n" 20 | ], 21 | "text/plain": [ 22 | "" 23 | ] 24 | }, 25 | "metadata": {}, 26 | "output_type": "display_data" 27 | }, 28 | { 29 | "name": "stderr", 30 | "output_type": "stream", 31 | "text": [ 32 | "/Users/az/anaconda/lib/python2.7/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n", 33 | " \"This module will be removed in 0.20.\", DeprecationWarning)\n", 34 | "/Users/az/anaconda/lib/python2.7/site-packages/sklearn/grid_search.py:43: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. 
This module will be removed in 0.20.\n", 35 | " DeprecationWarning)\n" 36 | ] 37 | } 38 | ], 39 | "source": [ 40 | "import jupyternotify\n", 41 | "ip = get_ipython()\n", 42 | "ip.register_magics(jupyternotify.JupyterNotifyMagics)\n", 43 | "from off_pol_eval_functions import * \n", 44 | "from sklearn.linear_model import LinearRegression\n" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "%matplotlib inline \n", 54 | "import matplotlib.pyplot as plt\n", 55 | "import numpy as np \n", 56 | "# from off_pol_eval_functions import * \n", 57 | "\n", 58 | "\n", 59 | "from scipy.optimize import minimize\n", 60 | "import datetime as datetime\n", 61 | "import pickle\n", 62 | "from matplotlib.backends.backend_pdf import PdfPages\n", 63 | "from matplotlib import collections as matcoll\n", 64 | "from sklearn import svm\n", 65 | "import sys\n", 66 | "from sklearn.linear_model import LinearRegression\n", 67 | "from scipy.stats import norm\n", 68 | "\n", 69 | "np.random.seed(2)\n", 70 | "\n", 71 | "\n", 72 | "d = 10 # dimension of x \n", 73 | "n = 1500; \n", 74 | "mu_x = np.zeros(d); \n", 75 | "sigma_x = np.random.normal(size = (d,1))\n", 76 | "sigma_x += np.abs(np.min(sigma_x))+0.5\n", 77 | "sigma_x = np.multiply(sigma_x, np.eye(d))\n", 78 | "sigma_x /= 2# normalize covariances a little bit \n", 79 | "\n", 80 | "W = 1.5 #treatment effect\n", 81 | "# interact_x = 2\n", 82 | "white_noise_coef = 0.1\n", 83 | "\n", 84 | "# # generate propensity model \n", 85 | "# def real_prop(x, beta_prop): \n", 86 | "# T_SIG = 5\n", 87 | "# if len(x.shape) > 1: \n", 88 | "# n= x.shape[1]\n", 89 | "# else:\n", 90 | "# n= len(x)\n", 91 | "# return np.dot(x, beta_prop) + np.random.normal(size = (n,1))*T_SIG\n", 92 | "# # T is normally distributed conditional on covariates \n", 93 | " \n", 94 | "# # coefficient of treatment effect\n", 95 | "# beta_cons = -5\n", 96 | "# beta_x = np.random.normal(size = (d,1))\n", 97 | 
"# # interaction term with treatment \n", 98 | "# beta_x_T = np.random.normal(size = (d,1))*1.5\n", 99 | "\n", 100 | "# # sparse interaction terms \n", 101 | "# sparse_entries = np.random.choice(range(d),size = int(round(0.7*d)),replace = False)\n", 102 | "# beta_x_T[sparse_entries] = 0 \n", 103 | "# sparse_entries = np.random.choice(range(d),size = int(round(0.35*d)),replace = False)\n", 104 | "# beta_x[sparse_entries] = 0 \n", 105 | "\n", 106 | "# FREQ = 20 \n", 107 | "# beta_x_quad_T = np.random.normal(size = (d,1))*0.2\n", 108 | "# sparse_entries = np.random.choice(range(d),size = int(round(0.6*d)),replace = False)\n", 109 | "# beta_x_quad_T[sparse_entries] = 0\n", 110 | "\n", 111 | "# TRUE_PROP_BETA = np.asarray(beta_x_quad_T + np.random.normal( loc= np.ones((d,1))*2, size = (d,1))).flatten()\n", 112 | "# print TRUE_PROP_BETA\n", 113 | "# def real_risk(T, beta_cons, beta_x, beta_x_T, beta_x_quad_T, x): \n", 114 | "# n = len(T); risk = np.zeros(n)\n", 115 | "# if np.isscalar(T):\n", 116 | "# risk = T*beta_cons + np.dot(beta_x.T, x) + np.dot(beta_x_T.T, x*T) + (T-np.dot(beta_x_quad_T.T,x))**2\n", 117 | "# else: \n", 118 | "# for i in range(len(T)): \n", 119 | "# risk[i] = T[i]*beta_cons + np.dot(beta_x.T, x[i,:]) + np.dot(beta_x_T.T, x[i,:]*T[i]) + (T[i]-np.dot(beta_x_quad_T.T,x[i,:]))**2#+ np.dot(beta_x_quad_T.T, (x[i,:]**2)*T[i]) + np.dot(beta_x_high_freq.T, np.sin(x[i,0:HIGH_FREQ_N]*FREQ)*T[i])\n", 120 | "# return risk\n", 121 | "\n", 122 | "# T_SIG = 4\n", 123 | "# def generate_data(mu_x, sigma_x_mat, n, beta_cons, beta_x, beta_x_T): \n", 124 | "# # x = np.random.normal(mu_x, sigma_x, size = n)\n", 125 | "# # generate n datapoints from the same multivariate normal distribution\n", 126 | "# x = np.random.multivariate_normal(mean = mu_x, cov= sigma_x_mat, size = n ) \n", 127 | "# print x.shape \n", 128 | "# print \"xshape\"\n", 129 | "# T = np.random.normal(0, T_SIG, n) + np.dot(x, TRUE_PROP_BETA) + 2*x[:,1] + 4*x[:,2] - 2*x[:,3]\n", 130 | "# true_resid = T - 
np.dot(x, TRUE_PROP_BETA)\n", 131 | "# true_Q = norm.pdf( T - np.dot(x, TRUE_PROP_BETA), loc = 0, scale = T_SIG )\n", 132 | "# y_sigma = 0.5\n", 133 | "# white_noise_coef = 5\n", 134 | " \n", 135 | "# clf = LinearRegression(); clf.fit(x, T)\n", 136 | "# y_hat = clf.predict(x)\n", 137 | "# Y = np.zeros(n)\n", 138 | "# for i in range(n): \n", 139 | "# Y[i] = T[i]*beta_cons + np.dot(beta_x.T, x[i,:]) + T[i]*np.dot(beta_x_T.T, x[i,:]) + (T[i] - np.dot(beta_x_quad_T.T,x[i,:]))**2 #+ np.dot(beta_x_quad_T.T, (x[i,:]**2)*T[i]) + np.dot(beta_x_high_freq.T, np.sin(x[i,0:HIGH_FREQ_N]*FREQ)*T[i])\n", 140 | "# Y += np.random.multivariate_normal(mean = np.zeros(n), cov=white_noise_coef * np.eye(n))\n", 141 | "# # get pdf from residuals \n", 142 | "# resid = Y - y_hat\n", 143 | "# # get norm pdf \n", 144 | "# Q = norm.pdf(resid, loc = np.mean(resid), scale=np.std(resid))\n", 145 | "# T = T.flatten()\n", 146 | "# return [x, T, Y, true_Q, clf]\n", 147 | "\n", 148 | "# [x_full, T_full, Y_full, true_Q_full, clf] = generate_data(mu_x, sigma_x, n, beta_cons, beta_x, beta_x_T)\n", 149 | "\n", 150 | "# #compute real risk \n", 151 | "# print np.mean( real_risk(T_full, beta_cons, beta_x, beta_x_T, beta_x_quad_T, x_full))\n", 152 | "\n", 153 | "# plt.hist(Y_full); plt.title('Y')\n", 154 | "# plt.figure()\n", 155 | "# plt.title('T')\n", 156 | "# plt.hist(T_full)\n", 157 | "# Q = true_Q_full\n", 158 | "# plt.figure()\n", 159 | "# plt.hist(true_Q_full)" 160 | ] 161 | }, 162 | { 163 | "cell_type": "markdown", 164 | "metadata": {}, 165 | "source": [ 166 | "## Simpler synthetic example" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 3, 172 | "metadata": {}, 173 | "outputs": [ 174 | { 175 | "name": "stdout", 176 | "output_type": "stream", 177 | "text": [ 178 | "[1.04262406 0.84193477]\n", 179 | "(700, 2)\n", 180 | "xshape\n", 181 | "(700, 2)\n", 182 | "xshape\n", 183 | "1.2423927788694011\n" 184 | ] 185 | }, 186 | { 187 | "data": { 188 | "text/plain": [ 189 | 
"(array([ 24., 36., 39., 42., 43., 43., 67., 76., 88., 242.]),\n", 190 | " array([1.94517234e-05, 9.99094057e-03, 1.99624294e-02, 2.99339182e-02,\n", 191 | " 3.99054071e-02, 4.98768959e-02, 5.98483848e-02, 6.98198736e-02,\n", 192 | " 7.97913625e-02, 8.97628513e-02, 9.97343401e-02]),\n", 193 | " )" 194 | ] 195 | }, 196 | "execution_count": 3, 197 | "metadata": {}, 198 | "output_type": "execute_result" 199 | }, 200 | { 201 | "data": { 202 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEMdJREFUeJzt3X+o3fV9x/HnqzHTshaq9S6kSdy1kP4RZY1wyQQLc0pn\nqmWxf0wirOQPIYVlxULHiN2g7SDDjf7Y/piFtDrD2tUFbGfQbiPNhK4wTG9cak00mNVIEmJy265U\n/8lIfO+P8xXPYpJ77j335MTPfT7gcL7fz/fzPd/3+aCv++Vzvt9vUlVIktr1rnEXIEkaLYNekhpn\n0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl86R5JtJ/v6ctt9J8vMky8dVlzRf8YYp6f9L8n7gAPDJ\nqtqd5CrgOeAvq+rRsRYnzYNBL51Hkj8A/hq4EfhzYG1VfWy8VUnzY9BLF5DkcWApcAu9oD865pKk\nebli3AVIl7E/Av4b+DNDXu9k/hgrXUBVnQR+Rm++XnrHMuglqXEGvSQ1zqCXpMZ51Y0kNc4zeklq\nnEEvSY0z6CWpcQa9JDXusrgz9tprr63JyclxlyFJ7yj79u37WVVNzNbvsgj6yclJpqenx12GJL2j\nJHllkH5O3UhS42YN+iRXJdmb5MdJDiT5Ytf+hSTHk+zvXnf27fNAksNJDiW5Y5RfQJJ0cYNM3ZwG\nbquq15MsBX6Y5F+6bV+tqi/1d06yBtgI3AB8APh+kg9V1dmFLFySNJhZz+ir5/VudWn3utjttBuA\nx6rqdFW9DBwG1g1dqSRpXgaao0+yJMl+4BSwu6qe6TZ9OslzSR5JcnXXtgLof3b3sa7t3M/cnGQ6\nyfTMzMwQX0GSdDEDBX1Vna2qtcBKYF2SG4GvAR8E1gIngC/P5cBVtb2qpqpqamJi1quDJEnzNKer\nbqrql8DTwPqqOtn9AXgD+DpvTc8cB1b17baya5MkjcEgV91MJHlft/xu4KPAi0mW93X7BPB8t7wL\n2JjkyiTXA6uBvQtbtiRpUINcdbMc2JFkCb0/DDur6skk/5BkLb0fZo8AnwKoqgNJdgIHgTPAFq+4\nkaTxuSyeRz81NVXD3Bk7ufWpBaxmcEcevGssx5UkgCT7qmpqtn7eGStJjTPoJalxBr0kNc6gl6TG\nGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxB\nL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekho3a9AnuSrJ3iQ/TnIgyRe79muS7E7yUvd+dd8+DyQ5\nnORQkjtG+QUkSRc3yBn9aeC2qvowsBZYn+RmYCuwp6pWA3u6dZKsATYCNwDrgYeSLBlF8ZKk2c0a\n9NXzere6tHsVsAHY0bXvAO7uljcAj1XV6ap6GTgMrFvQqiVJAxtojj7JkiT7gVPA7qp6BlhWVSe6\nLq8Cy7rlFcDRvt2PdW3nfubmJNNJpmdmZub9BSRJFzdQ0FfV2apaC6wE1iW58
ZztRe8sf2BVtb2q\npqpqamJiYi67SpLmYE5X3VTVL4Gn6c29n0yyHKB7P9V1Ow6s6tttZdcmSRqDQa66mUjyvm753cBH\ngReBXcCmrtsm4IlueRewMcmVSa4HVgN7F7pwSdJgrhigz3JgR3flzLuAnVX1ZJL/BHYmuQ94BbgH\noKoOJNkJHATOAFuq6uxoypckzWbWoK+q54CbztP+c+D2C+yzDdg2dHWSpKF5Z6wkNc6gl6TGGfSS\n1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mN\nM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS42YN+iSrkjyd5GCSA0nu79q/kOR4kv3d686+fR5I\ncjjJoSR3jPILSJIu7ooB+pwBPltVzyZ5L7Avye5u21er6kv9nZOsATYCNwAfAL6f5ENVdXYhC5ck\nDWbWM/qqOlFVz3bLrwEvACsusssG4LGqOl1VLwOHgXULUawkae7mNEefZBK4CXima/p0kueSPJLk\n6q5tBXC0b7djnOcPQ5LNSaaTTM/MzMy5cEnSYAYO+iTvAR4HPlNVvwK+BnwQWAucAL48lwNX1faq\nmqqqqYmJibnsKkmag4GCPslSeiH/rar6DkBVnayqs1X1BvB13pqeOQ6s6tt9ZdcmSRqDQa66CfAw\n8EJVfaWvfXlft08Az3fLu4CNSa5Mcj2wGti7cCVLkuZikKtubgE+Cfwkyf6u7XPAvUnWAgUcAT4F\nUFUHkuwEDtK7YmeLV9xI0vjMGvRV9UMg59n0vYvssw3YNkRdkqQF4p2xktQ4g16SGmfQS1LjDHpJ\napxBL0mNG+TySl3A5NanxnLcIw/eNZbjSnpn8oxekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6g\nl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxs0a9ElWJXk6ycEkB5Lc37Vf\nk2R3kpe696v79nkgyeEkh5LcMcovIEm6uEHO6M8An62qNcDNwJYka4CtwJ6qWg3s6dbptm0EbgDW\nAw8lWTKK4iVJs5s16KvqRFU92y2/BrwArAA2ADu6bjuAu7vlDcBjVXW6ql4GDgPrFrpwSdJg5jRH\nn2QSuAl4BlhWVSe6Ta8Cy7rlFcDRvt2OdW3nftbmJNNJpmdmZuZYtiRpUAMHfZL3AI8Dn6mqX/Vv\nq6oCai4HrqrtVTVVVVMTExNz2VWSNAcDBX2SpfRC/ltV9Z2u+WSS5d325cCprv04sKpv95VdmyRp\nDAa56ibAw8ALVfWVvk27gE3d8ibgib72jUmuTHI9sBrYu3AlS5Lm4ooB+twCfBL4SZL9XdvngAeB\nnUnuA14B7gGoqgNJdgIH6V2xs6Wqzi545ZKkgcwa9FX1QyAX2Hz7BfbZBmwboi5J0gLxzlhJapxB\nL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS\n1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxs0a9EkeSXIqyfN9bV9IcjzJ/u51Z9+2\nB5IcTnIoyR2jKlySNJhBzugfBdafp/2rVbW2e30PIMkaYCNwQ7fPQ0mWLFSxkqS5mzXoq+oHwC8G\n/LwNwGNVdbqqXgYOA+uGqE+SNKRh5ug/neS5bmrn6q5tBXC0r8+xru1tkmxOMp1kemZmZogyJEkX\nM9+g/xrwQWAtcAL48lw/oKq2V9VUVU1NTEzMswxJ0mzmFfRVdbKqzlbVG8DXeWt65jiwqq/ryq5N\nkjQm8wr6JMv7Vj8BvHlFzi5gY5Irk1wPrAb2DleiJGkYV8zWIcm3gVuBa5McAz4P3JpkLVDAEeBT\nAFV1IMlO4CBwBthSVWdHU7okaRCzBn1V3
Xue5ocv0n8bsG2YoiRJC8c7YyWpcQa9JDXOoJekxhn0\nktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9J\njTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaN2vQJ3kkyakkz/e1XZNkd5KXuver+7Y9kORwkkNJ7hhV\n4ZKkwQxyRv8osP6ctq3AnqpaDezp1kmyBtgI3NDt81CSJQtWrSRpzmYN+qr6AfCLc5o3ADu65R3A\n3X3tj1XV6ap6GTgMrFugWiVJ8zDfOfplVXWiW34VWNYtrwCO9vU71rW9TZLNSaaTTM/MzMyzDEnS\nbIb+MbaqCqh57Le9qqaqampiYmLYMiRJFzDfoD+ZZDlA936qaz8OrOrrt7JrkySNyXyDfhewqVve\nBDzR174xyZVJrgdWA3uHK1GSNIwrZuuQ5NvArcC1SY4BnwceBHYmuQ94BbgHoKoOJNkJHATOAFuq\n6uyIapckDWDWoK+qey+w6fYL9N8GbBumKEnSwvHOWElqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4\ng16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWrcrP/ClC4/\nk1ufGtuxjzx419iOLWl+PKOXpMYZ9JLUOINekho31Bx9kiPAa8BZ4ExVTSW5BvgnYBI4AtxTVf8z\nXJmSpPlaiDP6362qtVU11a1vBfZU1WpgT7cuSRqTUUzdbAB2dMs7gLtHcAxJ0oCGDfoCvp9kX5LN\nXduyqjrRLb8KLBvyGJKkIQx7Hf1Hqup4kt8Adid5sX9jVVWSOt+O3R+GzQDXXXfdkGVIki5kqDP6\nqjrevZ8CvgusA04mWQ7QvZ+6wL7bq2qqqqYmJiaGKUOSdBHzDvokv57kvW8uA78HPA/sAjZ13TYB\nTwxbpCRp/oaZulkGfDfJm5/zj1X1r0l+BOxMch/wCnDP8GVKkuZr3kFfVT8FPnye9p8Dtw9TlCRp\n4XhnrCQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGDfuYYi0yk1uf\nGstxjzx411iOK7XAM3pJapxBL0mNM+glqXEGvSQ1zh9j9Y4wrh+BwR+C9c7nGb0kNc6gl6TGGfSS\n1Djn6KXLlL9LaKF4Ri9JjfOMXtLb+KiLtows6JOsB/4WWAJ8o6oeHNWxpFEa5xSKtBBGEvRJlgB/\nB3wUOAb8KMmuqjo4iuNJ0jBa/z1kVHP064DDVfXTqvpf4DFgw4iOJUm6iFFN3awAjvatHwN+u79D\nks3A5m719SSHRlTLpXIt8LNxF3EZcBx6HIeeOY1D/mqElYzfecdiyO/8m4N0GtuPsVW1Hdg+ruMv\ntCTTVTU17jrGzXHocRx6HIe3jHMsRjV1cxxY1be+smuTJF1iowr6HwGrk1yf5NeAjcCuER1LknQR\nI5m6qaozSf4Y+Dd6l1c+UlUHRnGsy0gz01BDchx6HIcex+EtYxuLVNW4ji1JugR8BIIkNc6gl6TG\nGfRDSrI+yaEkh5NsHXc9l1KSR5KcSvJ8X9s1SXYneal7v3qcNY5aklVJnk5yMMmBJPd37YtqHACS\nXJVkb5Ifd2Pxxa590Y0F9J4QkOS/kjzZrY9tHAz6IfQ96uFjwBrg3iRrxlvVJfUosP6ctq3Anqpa\nDezp1lt2BvhsVa0Bbga2dP8NLLZxADgN3FZVHwbWAuuT3MziHAuA+4EX+tbHNg4G/XAW9aMequoH\nwC/Oad4A7OiWdwB3X9KiLrGqOlFVz3bLr9H7H3sFi2wcAKrn9W51afcqFuFYJFkJ3AV8o695bONg\n0A/nfI96WDGmWi4Xy6rqRLf8KrBsnMVcSkkmgZuAZ1ik49BNV+wHTgG7q2qxjsXfAH8KvNHXNrZx\nMOg1M
tW7dndRXL+b5D3A48BnqupX/dsW0zhU1dmqWkvvbvh1SW48Z3vzY5Hk48Cpqtp3oT6XehwM\n+uH4qIe3O5lkOUD3fmrM9YxckqX0Qv5bVfWdrnnRjUO/qvol8DS933AW21jcAvx+kiP0pnNvS/JN\nxjgOBv1wfNTD2+0CNnXLm4AnxljLyCUJ8DDwQlV9pW/TohoHgCQTSd7XLb+b3r9H8SKLbCyq6oGq\nWllVk/Qy4d+r6g8Z4zh4Z+yQktxJbz7uzUc9bBtzSZdMkm8Dt9J7/OpJ4PPAPwM7geuAV4B7qurc\nH2ybkeQjwH8AP+Gt+djP0ZunXzTjAJDkt+j9yLiE3knkzqr6iyTvZ5GNxZuS3Ar8SVV9fJzjYNBL\nUuOcupGkxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXH/B5U7zp76JDDMAAAAAElFTkSuQmCC\n", 203 | "text/plain": [ 204 | "" 205 | ] 206 | }, 207 | "metadata": {}, 208 | "output_type": "display_data" 209 | }, 210 | { 211 | "data": { 212 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAD7tJREFUeJzt3W+IHPd9x/H3p3LjB2lCnOoqhP/0JFACTmmvcLgPmhin\n+WMnLnEcqCsRgtO6lQ2uaUmhyGmpQ8DgpnH9oG0cZCLsQizbjWsiajetbUrcQtPkFIQjO3EjOTKR\nUKRLXNKkNWokffvgRnQj3+nudva0d7+8X7Dc7G9mbj4sex/mZmdmU1VIktr1U+MOIElaWRa9JDXO\nopekxln0ktQ4i16SGmfRS1LjLHpJapxFL50lyQ8HHqeTvDLw/IPjzictV7xgSlpYkkPA71TVU+PO\nIg3LPXpJapxFL0mNs+glqXEWvSQ1zqKXpMZZ9JLUOItekhrnefSS1Dj36CWpcRa9JDXOopekxln0\nktS4C8YdAGD9+vU1OTk57hiStKbs3bv3u1U1sdhyq6LoJycnmZmZGXcMSVpTkry0lOU8dCNJjbPo\nJalxFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklqnEUvSY1bFVfGSouZ3PH42LZ96K5rx7ZtaRTc\no5ekxln0ktQ4i16SGmfRS1LjLHpJapxFL0mNs+glqXEWvSQ1zqKXpMZZ9JLUuEWLPsmuJMeT7B8Y\nezjJvu5xKMm+bnwyySsD8z69kuElSYtbyr1u7gf+CvibMwNV9ZtnppPcDXx/YPmDVTU1qoCSpH4W\nLfqqeibJ5HzzkgS4Afi10caSJI1K37tXvg04VlXfHBjb1B3K+T7wJ1X1L/OtmGQ7sB3gsssu6xlD\nWjnjunOmd83UqPT9MHYbsHvg+VHgsu7QzUeAB5O8fr4Vq2pnVU1X1fTExETPGJKkhQxd9EkuAD4A\nPHxmrKpOVNX3uum9wEHgTX1DSpKG12eP/p3AN6rq8JmBJBNJ1nXTm4EtwIv9IkqS+ljK6ZW7gX8D\n3pzkcJKbullb+fHDNgBXAs92x+g/B9xSVS+PMrAkaXmWctbNtgXGPzzP2KPAo/1jSZJGxStjJalx\nFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklqnEUvSY2z6CWpcRa9JDXOopekxln0ktQ4i16SGmfR\nS1LjLHpJapxFL0mNs+glqXEWvSQ1binfGbsryfEk+wfGPpbkSJJ93eO9A/NuT3IgyQtJrl6p4JKk\npVnKHv39wDXzjN9TVVPd4wmAJJcz96Xhb+nW+VSSdaMKK0lavkWLvqqeAV5e4u+7Dnioqk5U1beA\nA8AVPfJJknrqc4z+tiTPdod2LurGLga+PbDM4W5MkjQmwxb9v
cBmYAo4Cty93F+QZHuSmSQzs7Oz\nQ8aQJC1mqKKvqmNVdaqqTgP38f+HZ44Alw4sekk3Nt/v2FlV01U1PTExMUwMSdISDFX0STYOPL0e\nOHNGzh5ga5ILk2wCtgBf7hdRktTHBYstkGQ3cBWwPslh4A7gqiRTQAGHgJsBquq5JI8AzwMngVur\n6tTKRJckLcWiRV9V2+YZ/sw5lr8TuLNPKEnS6HhlrCQ1zqKXpMZZ9JLUOItekhpn0UtS4yx6SWqc\nRS9JjbPoJalxFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklqnEUvSY2z6CWpcRa9JDXOopekxln0\nktS4RYs+ya4kx5PsHxj78yTfSPJskseSvKEbn0zySpJ93ePTKxlekrS4pezR3w9cc9bYk8AvVNUv\nAv8B3D4w72BVTXWPW0YTU5I0rEWLvqqeAV4+a+yfqupk9/RLwCUrkE2SNAKjOEb/28A/DDzf1B22\n+WKSty20UpLtSWaSzMzOzo4ghiRpPr2KPskfAyeBz3ZDR4HLqmoK+AjwYJLXz7duVe2squmqmp6Y\nmOgTQ5J0DkMXfZIPA78OfLCqCqCqTlTV97rpvcBB4E0jyClJGtJQRZ/kGuCPgPdV1f8MjE8kWddN\nbwa2AC+OIqgkaTgXLLZAkt3AVcD6JIeBO5g7y+ZC4MkkAF/qzrC5Evh4kh8Bp4FbqurleX+xJOm8\nWLToq2rbPMOfWWDZR4FH+4aSJI2OV8ZKUuMseklq3KKHbiSNx+SOx8e27UN3XTu2bWv03KOXpMZZ\n9JLUOItekhpn0UtS4yx6SWqcRS9JjbPoJalxFr0kNc4LprQs47yIR9Jw3KOXpMZZ9JLUOItekhpn\n0UtS4yx6SWqcRS9JjVu06JPsSnI8yf6BsTcmeTLJN7ufFw3Muz3JgSQvJLl6pYJLkpZmKXv09wPX\nnDW2A3i6qrYAT3fPSXI5sBV4S7fOp5KsG1laSdKyLVr0VfUM8PJZw9cBD3TTDwDvHxh/qKpOVNW3\ngAPAFSPKKkkawrDH6DdU1dFu+jvAhm76YuDbA8sd7sYkSWPS+8PYqiqglrteku1JZpLMzM7O9o0h\nSVrAsEV/LMlGgO7n8W78CHDpwHKXdGOvUlU7q2q6qqYnJiaGjCFJWsywRb8HuLGbvhH4/MD41iQX\nJtkEbAG+3C+iJKmPRe9emWQ3cBWwPslh4A7gLuCRJDcBLwE3AFTVc0keAZ4HTgK3VtWpFcouSVqC\nRYu+qrYtMOsdCyx/J3Bnn1CSpNHxylhJapxFL0mNs+glqXEWvSQ1zqKXpMZZ9JLUOItekhpn0UtS\n4yx6SWqcRS9JjbPoJalxFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklqnEUvSY2z6CWpcYt+Z+xC\nkrwZeHhgaDPwp8AbgN8FZrvxj1bVE0MnlCT1MnTRV9ULwBRAknXAEeAx4LeAe6rqkyNJKEnqZVSH\nbt4BHKyql0b0+yRJIzKqot8K7B54fluSZ5PsSnLRfCsk2Z5kJsnM7OzsfItIkkagd9EneQ3wPuBv\nu6F7mTtePwUcBe6eb72q2llV01U1PTEx0TeGJGkBo9ijfw/w1ao6BlBVx6rqVFWdBu4DrhjBNiRJ\nQxpF0W9j4LBNko0D864H9o9gG5KkIQ191g1AktcC7wJuHhj+RJIpoIBDZ82TJJ1nvYq+qv4b+Nmz\nxj7UK5EkaaS8MlaSGmfRS1LjLHpJapxFL0mNs+glqXEWvSQ1zqKXpMZZ9JLUOItekhpn0UtS4yx6\nSWqcRS9JjbPoJalxFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklqXN8vBz8E/AA4BZysqukkbwQe\nBiaZ+3LwG6rqP/vFlCQNaxR79G+vqqmqmu6e7wCerqotwNPdc0nSmKzEoZvrgAe66QeA96/ANiRJ\nS9S36At4KsneJNu7sQ1Vd
bSb/g6wYb4Vk2xPMpNkZnZ2tmcMSdJCeh2jB95aVUeS/BzwZJJvDM6s\nqkpS861YVTuBnQDT09PzLiNJ6q/XHn1VHel+HgceA64AjiXZCND9PN43pCRpeEMXfZLXJnndmWng\n3cB+YA9wY7fYjcDn+4aUJA2vz6GbDcBjSc78nger6gtJvgI8kuQm4CXghv4xNWhyx+PjjiBpDRm6\n6KvqReCX5hn/HvCOPqEkSaPjlbGS1DiLXpIaZ9FLUuMseklqnEUvSY2z6CWpcX1vgSCpQeO6VuPQ\nXdeOZbutc49ekhpn0UtS4yx6SWqcRS9JjbPoJalxFr0kNc6il6TGWfSS1DiLXpIaZ9FLUuMseklq\nXJ8vB780yT8neT7Jc0l+vxv/WJIjSfZ1j/eOLq4kabn63NTsJPCHVfXVJK8D9iZ5spt3T1V9sn88\nSVJffb4c/ChwtJv+QZKvAxePKpgkaTRGcow+ySTwy8C/d0O3JXk2ya4kFy2wzvYkM0lmZmdnRxFD\nkjSP3kWf5GeAR4E/qKr/Au4FNgNTzO3x3z3felW1s6qmq2p6YmKibwxJ0gJ6FX2Sn2au5D9bVX8H\nUFXHqupUVZ0G7gOu6B9TkjSsPmfdBPgM8PWq+ouB8Y0Di10P7B8+niSprz5n3fwq8CHga0n2dWMf\nBbYlmQIKOATc3CuhJKmXPmfd/CuQeWY9MXwcSdKoeWWsJDXOopekxln0ktQ4i16SGmfRS1LjLHpJ\napxFL0mNs+glqXEWvSQ1zqKXpMb1udeNJI3U5I7Hx7LdQ3ddO5btni/u0UtS4yx6SWqch256GNe/\nmZK0HO7RS1LjLHpJapxFL0mNs+glqXFNfBjrh6KStLAV26NPck2SF5IcSLJjpbYjSTq3FdmjT7IO\n+GvgXcBh4CtJ9lTV8yuxPUnqY5xHBc7HVbkrtUd/BXCgql6sqv8FHgKuW6FtSZLOYaWO0V8MfHvg\n+WHgVwYXSLId2N49/WGSF1Yoy3KtB7477hDLsNbywtrLvNbygpnPh5HkzZ/1Wv3nl7LQ2D6Mraqd\nwM5xbX8hSWaqanrcOZZqreWFtZd5reUFM58PaynvSh26OQJcOvD8km5MknSerVTRfwXYkmRTktcA\nW4E9K7QtSdI5rMihm6o6meT3gH8E1gG7quq5ldjWClh1h5MWsdbywtrLvNbygpnPhzWTN1U17gyS\npBXkLRAkqXEWvSQ1zqIHkvxGkueSnE4yPTA+meSVJPu6x6fHmXPQQpm7ebd3t554IcnV48p4Lkk+\nluTIwGv73nFnms9avJVHkkNJvta9rjPjznO2JLuSHE+yf2DsjUmeTPLN7udF48x4tgUyr4n3MFj0\nZ+wHPgA8M8+8g1U11T1uOc+5zmXezEkuZ+4sp7cA1wCf6m5JsRrdM/DaPjHuMGcbuJXHe4DLgW3d\n67sWvL17XVfjed73M/feHLQDeLqqtgBPd89Xk/t5dWZY5e/hMyx6oKq+XlWr5crcJTlH5uuAh6rq\nRFV9CzjA3C0ptHzeymMFVNUzwMtnDV8HPNBNPwC8/7yGWsQCmdcMi35xm7p/y76Y5G3jDrME891+\n4uIxZVnMbUme7f4tXlX/qnfW0ms5qICnkuztbjWyFmyoqqPd9HeADeMMswyr/T0M/AQVfZKnkuyf\n53GuPbSjwGVVNQV8BHgwyevPT+KhM68ai+S/F9gMTDH3Ot891rBteWv3nn0PcGuSK8cdaDlq7pzv\ntXDe95p5DzfxxSNLUVXvHGKdE8CJbnpvkoPAm4Dz8gHXMJlZRbefWGr+JPcBf7/CcYaxal7L5aiq\nI93P40keY+4Q1HyfP60mx5JsrKqjSTYCx8cdaDFVdezM9Cp+DwM/QXv0w0gyceaDzCSbgS3Ai+NN\ntag9wNYkFybZxFzmL48506t0f8xnXM/ch8urzZq7lUeS1yZ53Zlp4N2sztf2bHuAG7vpG4H
PjzHL\nkqyR9zDwE7RHfy5Jrgf+EpgAHk+yr6quBq4EPp7kR8Bp4JaqWhUfyCyUuaqeS/II8DxwEri1qk6N\nM+sCPpFkirl/0Q8BN483zqut0Vt5bAAeSwJzf98PVtUXxhvpxyXZDVwFrE9yGLgDuAt4JMlNwEvA\nDeNL+GoLZL5qtb+Hz/AWCJLUOA/dSFLjLHpJapxFL0mNs+glqXEWvSQ1zqKXpMZZ9JLUuP8DHrVB\nLFdyL6cAAAAASUVORK5CYII=\n", 213 | "text/plain": [ 214 | "" 215 | ] 216 | }, 217 | "metadata": {}, 218 | "output_type": "display_data" 219 | }, 220 | { 221 | "data": { 222 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADlZJREFUeJzt3V2sZeVdx/HvT6bF2qKFcDpOh8GZJqNmuHAgR6zSGBQj\nCMZpb8iQ2GCCTptgLdrGAF4ULyapsW+aWJJpwQ7aQicttZMWrZQ0qU0UekAEBood3mTGgTm1VtAL\nWqZ/L86i3bzM2fvstfd5eeb7SXbO2s96nr3+/zmT31ln7ZeTqkKS1K4fWekCJEnTZdBLUuMMeklq\nnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGrdu2IQkm4CbgPVAAXuq6i+SXAf8HjDfTb22qm7r\n1lwDXAEcA/6gqr602DFOP/302rx587g9SNIJ6e677/5WVc0Mmzc06IHngfdU1T1JTgHuTnJ7t+/D\nVfWBwclJtgE7gbOANwJfTvLTVXXseAfYvHkzc3NzI5QiSXpBkidGmTf00k1VHamqe7rtZ4GHgI2L\nLNkB3FJVz1XVY8BB4NxRipEkTd6SrtEn2QycDdzZDb0ryX1Jbkxyaje2EXhyYNkhFv/BIEmaopGD\nPsnrgM8CV1XVM8D1wJuA7cAR4INLOXCSXUnmkszNz88PXyBJGstIQZ/kVSyE/Cer6laAqnq6qo5V\n1feBj/HDyzOHgU0Dy8/oxl6kqvZU1WxVzc7MDH0uQZI0pqFBnyTADcBDVfWhgfENA9PeBjzQbe8H\ndiY5OckWYCtw1+RKliQtxSivujkPeDtwf5J7u7FrgcuSbGfhJZePA+8AqKoDSfYBD7Lwip0rF3vF\njSRpuoYGfVV9Dcgr7LptkTW7gd096pIkTYjvjJWkxhn0ktS4Ua7RS1LTNl/9xRU79uPvv2Tqx/CM\nXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+gl\nqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIa\nZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxg0N+iSbknwlyYNJDiR5dzd+WpLbk3yz+3rqwJpr\nkhxM8nCSC6fZgCRpcaOc0T8PvKeqtgFvBq5Msg24GrijqrYCd3T36fbtBM4CLgI+muSkaRQvSRpu\naNBX1ZGquqfbfhZ4CNgI7AD2dtP2Am/ttncAt1TVc1X1GHAQOHfShUuSRrOka/RJNgNnA3cC66vq\nSLfrKWB9t70ReHJg2aFuTJK0AkYO+iSvAz4LXFVVzwzuq6oCaikHTrIryVySufn5+aUslSQtwUhB\nn+RVLIT8J6vq1m746SQbuv0bgKPd+GFg08DyM7qxF6mqPVU1W1WzMzMz49YvSRpilFfdBLgBeKiq\nPjSwaz9webd9OfD5gfGdSU5OsgXYCtw1uZIlSUuxboQ55wFvB+5Pcm83di3wfmBfkiuAJ4BLAarq\nQJJ9wIMsvGL
nyqo6NvHKJUkjGRr0VfU1IMfZfcFx1uwGdveoS5I0Ib4zVpIaZ9BLUuMMeklqnEEv\nSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLU\nOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z\n6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjhgZ9khuTHE3ywMDYdUkOJ7m3u108sO+aJAeTPJzk\nwmkVLkkazShn9J8ALnqF8Q9X1fbudhtAkm3ATuCsbs1Hk5w0qWIlSUs3NOir6qvAt0d8vB3ALVX1\nXFU9BhwEzu1RnySppz7X6N+V5L7u0s6p3dhG4MmBOYe6sZdJsivJXJK5+fn5HmVIkhYzbtBfD7wJ\n2A4cAT641Aeoqj1VNVtVszMzM2OWIUkaZqygr6qnq+pYVX0f+Bg/vDxzGNg0MPWMbkyStELGCvok\nGwbuvg144RU5+4GdSU5OsgXYCtzVr0RJUh/rhk1IcjNwPnB6kkPA+4Dzk2wHCngceAdAVR1Isg94\nEHgeuLKqjk2ndEnSKIYGfVVd9grDNywyfzewu09RkqTJ8Z2xktQ4g16SGmfQS1LjDHpJapxBL0mN\nM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiD\nXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+gl\nqXEGvSQ1zqCXpMYZ9JLUOINekho3NOiT3JjkaJIHBsZOS3J7km92X08d2HdNkoNJHk5y4bQKlySN\nZpQz+k8AF71k7GrgjqraCtzR3SfJNmAncFa35qNJTppYtZKkJRsa9FX1VeDbLxneAezttvcCbx0Y\nv6Wqnquqx4CDwLkTqlWSNIZxr9Gvr6oj3fZTwPpueyPw5MC8Q93YyyTZlWQuydz8/PyYZUiShun9\nZGxVFVBjrNtTVbNVNTszM9O3DEnScYwb9E8n2QDQfT3ajR8GNg3MO6MbkyStkHGDfj9webd9OfD5\ngfGdSU5OsgXYCtzVr0RJUh/rhk1IcjNwPnB6kkPA+4D3A/uSXAE8AVwKUFUHkuwDHgSeB66sqmNT\nql2SNIKhQV9Vlx1n1wXHmb8b2N2nKEnS5PjOWElqnEEvSY0z6CWpcQa9JDVu6JOxkrRcNl/9xZUu\noUme0UtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnB+BIOll/CiC\ntnhGL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhrnyyulVcqXOGpSPKOXpMYZ9JLUOINekhpn0EtS\n4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1LheH4GQ5HHgWeAY8HxVzSY5Dfg0sBl4HLi0qv67\nX5mSpHFN4oz+V6pqe1XNdvevBu6oqq3AHd19SdIKmcalmx3A3m57L/DWKRxDkjSivkFfwJeT3J1k\nVze2vqqOdNtPAet7HkOS1EPfjyl+S1UdTvIG4PYk3xjcWVWVpF5pYfeDYRfAmWee2bMMSdLx9Dqj\nr6rD3dejwOeAc4Gnk2wA6L4ePc7aPVU1W1WzMzMzfcqQJC1i7KBP8tokp7ywDfw68ACwH7i8m3Y5\n8Pm+RUqSxtfn0s164HNJXnicT1XVPyT5OrAvyRXAE8Cl/cuUJI1r7KCvqkeBn3uF8f8CLuhTlCRp\ncnxnrCQ1zj8OrjXBP5Qtjc+g15IYuNLa46UbSWqcZ/RrkGfVkpbCM3pJapxBL0mN89JND15CkbQW\neEYvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNa+Kds
b5DVZKOzzN6SWqc\nQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0\nktQ4g16SGmfQS1Ljphb0SS5K8nCSg0muntZxJEmLm0rQJzkJ+CvgN4BtwGVJtk3jWJKkxU3rjP5c\n4GBVPVpV3wVuAXZM6ViSpEVMK+g3Ak8O3D/UjUmSltmK/XHwJLuAXd3d/03y8JgPdTrwrclUtaac\niH3b84nhhOo5fwaM3/NPjTJpWkF/GNg0cP+MbuwHqmoPsKfvgZLMVdVs38dZa07Evu35xGDPkzet\nSzdfB7Ym2ZLk1cBOYP+UjiVJWsRUzuir6vkkvw98CTgJuLGqDkzjWJKkxU3tGn1V3QbcNq3HH9D7\n8s8adSL2bc8nBnuesFTVNB9fkrTC/AgESWrcqg76YR+jkAV/2e2/L8k5o65drcbtOcmmJF9J8mCS\nA0nevfzVj6fP97nbf1KSf03yheWrup+e/7dfn+QzSb6R5KEkv7i81Y+nZ89/2P2/fiDJzUl+dHmr\nH88IPf9skn9O8lyS9y5l7ZJU1aq8sfAk7iPAm4BXA/8GbHvJnIuBvwcCvBm4c9S1q/HWs+cNwDnd\n9inAv7fe88D+PwI+BXxhpftZjp6BvcDvdtuvBl6/0j1Ns2cW3mz5GPCa7v4+4HdWuqcJ9fwG4OeB\n3cB7l7J2KbfVfEY/ysco7ABuqgX/Arw+yYYR165GY/dcVUeq6h6AqnoWeIi18W7kPt9nkpwBXAJ8\nfDmL7mnsnpP8BPDLwA0AVfXdqvrOchY/pl7fZxZeOPKaJOuAHwP+c7kK72Foz1V1tKq+DnxvqWuX\nYjUH/Sgfo3C8OWv1Ixj69PwDSTYDZwN3TrzCyevb80eAPwa+P60Cp6BPz1uAeeCvu8tVH0/y2mkW\nOyFj91xVh4EPAP8BHAH+p6r+cYq1TkqfHJpohq3moNcYkrwO+CxwVVU9s9L1TFOS3wSOVtXdK13L\nMloHnANcX1VnA/8HrJnnoMaR5FQWzma3AG8EXpvkt1e2qrVlNQf90I9RWGTOKGtXoz49k+RVLIT8\nJ6vq1inWOUl9ej4P+K0kj7Pwq+2vJvnb6ZU6MX16PgQcqqoXflv7DAvBv9r16fnXgMeqar6qvgfc\nCvzSFGudlD45NNkMW+knLBZ5ImMd8CgLP8VfeDLirJfMuYQXP3lz16hrV+OtZ88BbgI+stJ9LFfP\nL5lzPmvnydhePQP/BPxMt30d8Ocr3dM0ewZ+ATjAwrX5sPBk9LtWuqdJ9Dww9zpe/GTsRDNsxf8x\nhvxDXczCq0ceAf6kG3sn8M5uOyz8gZNHgPuB2cXWroXbuD0DbwEKuA+4t7tdvNL9TPv7PPAYaybo\n+/YMbAfmuu/13wGnrnQ/y9DznwLfAB4A/gY4eaX7mVDPP8nCb2nPAN/ptn/8eGvHvfnOWElq3Gq+\nRi9JmgCDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxv0/N+k1k19W6KAAAAAASUVORK5C\nYII=\n", 223 | "text/plain": [ 224 | "" 225 | ] 226 | }, 227 | "metadata": {}, 228 | "output_type": "display_data" 229 | } 230 | ], 231 | "source": [ 232 | "np.random.seed(2)\n", 233 | "\n", 234 | "d = 2 # dimension of x \n", 235 | "n = 700; \n", 236 | "mu_x = np.zeros(d); \n", 237 | "sigma_x = np.random.normal(size = (d,1))\n", 238 | "sigma_x += np.abs(np.min(sigma_x))+0.5\n", 239 | "sigma_x = np.multiply(sigma_x, 
np.eye(d))\n", 240 | "sigma_x /= 2# normalize covariances a little bit \n", 241 | "\n", 242 | "W = 1.5 #treatment effect\n", 243 | "# interact_x = 2\n", 244 | "white_noise_coef = 0.1\n", 245 | "\n", 246 | "# generate propensity model \n", 247 | "def real_prop(x, beta_prop): \n", 248 | " T_SIG = 5\n", 249 | " if len(x.shape) > 1: \n", 250 | " n= x.shape[1]\n", 251 | " else:\n", 252 | " n= len(x)\n", 253 | " return np.dot(x, beta_prop) + np.random.normal(size = (n,1))*T_SIG\n", 254 | " # T is normally distributed conditional on covariates \n", 255 | " \n", 256 | "# coefficient of treatment effect\n", 257 | "beta_cons = -5\n", 258 | "beta_x = np.random.normal(size = (d,1))\n", 259 | "# interaction term with treatment \n", 260 | "beta_x_T = np.random.normal(size = (d,1))*1.5\n", 261 | "FREQ = 20 \n", 262 | "beta_x_quad_T = np.random.normal(size = (d,1))*0.2\n", 263 | "\n", 264 | "TRUE_PROP_BETA = np.asarray(beta_x_quad_T + np.random.normal( loc= np.ones((d,1))*2, size = (d,1))).flatten()\n", 265 | "print TRUE_PROP_BETA\n", 266 | "\n", 267 | "def real_risk(T, beta_cons, beta_x, beta_x_T, beta_x_quad_T, x): \n", 268 | " n = len(T); risk = np.zeros(n)\n", 269 | " for i in range(len(T)): \n", 270 | " risk[i] = -5 + np.abs(T[i])\n", 271 | " if x[i,0] > 0 and x[i,1] > 0: \n", 272 | " risk[i] = (np.abs(T[i] - x[i,0]))**1.5 #- (np.abs(np.dot(x[i,:],beta_x_quad_T)))**1.5\n", 273 | "# risk[i] = T[i]*beta_cons + np.dot(beta_x.T, x[i,:]) + np.dot(beta_x_T.T, x[i,:]*T[i]) + (T[i]-np.dot(beta_x_quad_T.T,x[i,:]))**2#+ np.dot(beta_x_quad_T.T, (x[i,:]**2)*T[i]) + np.dot(beta_x_high_freq.T, np.sin(x[i,0:HIGH_FREQ_N]*FREQ)*T[i])\n", 274 | " return risk\n", 275 | "\n", 276 | "T_SIG = 4\n", 277 | "beta_x_quad_T = np.asarray( [1,2] )\n", 278 | "def generate_data(mu_x, sigma_x_mat, n, beta_x_quad_T, unconfounded=False): \n", 279 | " x = np.random.uniform( low = -2,high=2, size = [n,len(mu_x )] )\n", 280 | "# x = np.random.multivariate_normal(mean = mu_x, cov= sigma_x_mat, size = n ) \n", 281 
| " print x.shape \n", 282 | " print \"xshape\"\n", 283 | " T = np.random.normal(0, T_SIG, n) + np.dot(x, beta_x_quad_T) \n", 284 | " true_resid = T - np.dot(x, beta_x_quad_T); true_Q = norm.pdf( T - np.dot(x, beta_x_quad_T), loc = 0, scale = T_SIG ); y_sigma = 0.5; white_noise_coef = 5\n", 285 | " clf = LinearRegression(); clf.fit(x, T); y_hat = clf.predict(x); Y = np.zeros(n)\n", 286 | " Y = real_risk(T, beta_cons, beta_x, beta_x_T, beta_x_quad_T, x)\n", 287 | " return [x, T, Y, true_Q, clf]\n", 288 | "\n", 289 | "[x_full, T_full, Y_full, true_Q_full, clf] = generate_data(mu_x, sigma_x, n, beta_x_quad_T)\n", 290 | "\n", 291 | "[x_full, T_full, Y_full, true_Q_full, clf] = generate_data(mu_x, sigma_x, n, beta_x_quad_T,unconfounded=True)\n", 292 | "\n", 293 | "\n", 294 | "#compute real risk \n", 295 | "print np.mean( real_risk(T_full, beta_cons, beta_x, beta_x_T, beta_x_quad_T, x_full))\n", 296 | "\n", 297 | "plt.hist(Y_full); plt.title('Y'); plt.figure(); plt.title('T'); plt.hist(T_full); Q = true_Q_full; plt.figure(); plt.hist(true_Q_full)\n" 298 | ] 299 | }, 300 | { 301 | "cell_type": "markdown", 302 | "metadata": {}, 303 | "source": [ 304 | "## train/test split and scale data" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": 7, 310 | "metadata": { 311 | "collapsed": true 312 | }, 313 | "outputs": [], 314 | "source": [ 315 | "n = 300\n", 316 | "x_test = x_full[n:]; Y_test = Y_full[n:]; T_test = T_full[n:]; true_Q_test = true_Q_full[n:]\n", 317 | "x = x_full[0:n]; Y = Y_full[0:n];T = T_full[0:n]; true_Q = true_Q_full[0:n]\n", 318 | "\n", 319 | "x_ = x - np.mean(x) #demean data\n", 320 | "mean_x = np.mean(x) \n", 321 | "n = len(T)\n", 322 | "X_T = np.hstack([ x,T.reshape([n,1])])\n", 323 | "T_ = T.reshape([n,1])\n", 324 | "\n", 325 | "from sklearn import preprocessing\n", 326 | "scaler = preprocessing.StandardScaler().fit(X_T)\n", 327 | "X_T_scaled = scaler.transform(X_T) " 328 | ] 329 | }, 330 | { 331 | "cell_type": "markdown", 332 | 
"metadata": {}, 333 | "source": [ 334 | "### Use these variants for continuous policy evaluation comparison " 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": 8, 340 | "metadata": { 341 | "collapsed": true 342 | }, 343 | "outputs": [], 344 | "source": [ 345 | "## given Known propensities\n", 346 | "def off_policy_evaluation_known_Q(**params):\n", 347 | " \"\"\"\n", 348 | " Takes in a choice of kernel and dictionary of parameters and data required for evaluation\n", 349 | " tau is a vector of treatment values (assumed given)\n", 350 | " If y_samp, T_samp is present, use that instead. \n", 351 | " \"\"\"\n", 352 | " [loss, norm_sum] = off_pol_estimator_known_Q(**params)\n", 353 | " h = params['h']\n", 354 | " n = params['n']\n", 355 | " return loss/(norm_sum*1.0*h*n)\n", 356 | "\n", 357 | "def off_pol_estimator_known_Q(**params): \n", 358 | " THRESH = params['threshold']\n", 359 | " y_out = params['y']; x = params['x']; h = params['h'];n = params['n']; t_lo = params['t_lo']; t_hi = params['t_hi']\n", 360 | " kernel = params['kernel_func'];kernel_int = params['kernel_int_func']\n", 361 | " Q = params['Q_known']; \n", 362 | " if ('y_samp' in params.keys()):\n", 363 | " y_out = params['y_samp']\n", 364 | " if ('T_samp' in params.keys()): \n", 365 | " T = params['T_samp']\n", 366 | " else: \n", 367 | " T = params['T']\n", 368 | " if ('x_samp' in params.keys()):\n", 369 | " x = params['x_samp']\n", 370 | "\n", 371 | " BMI_IND = params.get('BMI_IND') # propensity score for warfarin data evaluations \n", 372 | " if (params.get('DATA_TYPE') == 'warfarin'): \n", 373 | " x = params['x'][:,BMI_IND]\n", 374 | "\n", 375 | " loss = 0\n", 376 | " tau = params['tau']\n", 377 | " clip_tau = np.clip(tau, t_lo, t_hi)\n", 378 | " Qs = np.zeros(n)\n", 379 | " for i in np.arange(n): \n", 380 | " Q_i = Q[i]\n", 381 | " if (abs(clip_tau[i] - t_lo) <= h):\n", 382 | " alpha = kernel_int((t_lo-clip_tau[i])/h, 1)\n", 383 | " elif (abs(clip_tau[i] - t_hi) <= h):\n", 
384 | " alpha = kernel_int(-1, (t_hi - clip_tau[i])/h )\n", 385 | " else:\n", 386 | " alpha = 1\n", 387 | " Qs[i] = (1.0/h)*kernel( (clip_tau[i] - T[i])/h )/max(Q_i,THRESH)\n", 388 | " loss += kernel( (clip_tau[i] - T[i])/h )*1.0 * y_out[i]/max(Q_i,THRESH) * 1.0/alpha\n", 389 | " norm_sum = np.mean(np.maximum(Qs,THRESH*np.ones(n)))\n", 390 | " return [loss, norm_sum]" 391 | ] 392 | }, 393 | { 394 | "cell_type": "markdown", 395 | "metadata": {}, 396 | "source": [ 397 | "## unit test that continuous policy evaluation works" 398 | ] 399 | }, 400 | { 401 | "cell_type": "code", 402 | "execution_count": 10, 403 | "metadata": {}, 404 | "outputs": [ 405 | { 406 | "data": { 407 | "text/plain": [ 408 | "29.92270098541192" 409 | ] 410 | }, 411 | "execution_count": 10, 412 | "metadata": {}, 413 | "output_type": "execute_result" 414 | } 415 | ], 416 | "source": [ 417 | "from scipy.stats import norm\n", 418 | "\n", 419 | "t_lo = np.min(X_T_scaled[:,d])\n", 420 | "t_hi = np.max(X_T_scaled[:,d])\n", 421 | "lr = LinearRegression(); lr.fit(X_T_scaled[:,0:d], X_T_scaled[:,d])\n", 422 | "T_hat = lr.predict(X_T_scaled[:,0:d]); #beta_T_gps = np.dot(x[trainind,:],lr.coef_)\n", 423 | "resid = X_T_scaled[:,d] - T_hat\n", 424 | "# Assume normal noise\n", 425 | "mu_resid = np.mean(resid); sigma_resid = np.std(resid)\n", 426 | "def norm_T_Q_est(x,t,t_lo,t_hi): \n", 427 | " return norm.pdf( t - np.dot(x, lr.coef_), loc = mu_resid, scale = sigma_resid )\n", 428 | "\n", 429 | "n = len(Y)\n", 430 | "data = { 'n': n, 'y': Y,'Q': norm_T_Q_est,'x_full': X_T_scaled[:,0:d], 'x': X_T_scaled[:,0:d],'x_samp': X_T_scaled[:,0:d], 'T_samp': X_T_scaled[:,d], 'd': d, 'T': X_T_scaled[:,d],'t_lo': t_lo ,'t_hi': t_hi }\n", 431 | "data['kernel_int_func'] = epanechnikov_int\n", 432 | "data['kernel_func'] = epanechnikov_kernel\n", 433 | "data['sgn'] = 1; data['threshold'] = 0.03\n", 434 | "data['h'] = 1.2; data['Q_known'] = true_Q\n", 435 | "# assign tau \n", 436 | "# standardized or not? 
\n", 437 | "data['tau'] = X_T_scaled[:,d] +2\n", 438 | "\n", 439 | "off_policy_evaluation_known_Q(**data)" 440 | ] 441 | }, 442 | { 443 | "cell_type": "markdown", 444 | "metadata": {}, 445 | "source": [ 446 | "### helper function to evaluate different methods" 447 | ] 448 | }, 449 | { 450 | "cell_type": "code", 451 | "execution_count": 545, 452 | "metadata": { 453 | "scrolled": true 454 | }, 455 | "outputs": [ 456 | { 457 | "data": { 458 | "application/javascript": [ 459 | "$(document).ready(\n", 460 | " function() {\n", 461 | " function appendUniqueDiv(){\n", 462 | " // append a div with our uuid so we can check that it's already\n", 463 | " // been sent and avoid duplicates on page reload\n", 464 | " var notifiedDiv = document.createElement(\"div\")\n", 465 | " notifiedDiv.id = \"0893a3a7-d02a-4ad9-aaa7-5bed9a750f4d\"\n", 466 | " element.append(notifiedDiv)\n", 467 | " }\n", 468 | "\n", 469 | " // only send notifications if the pageload is complete; this will\n", 470 | " // help stop extra notifications when a saved notebook is loaded,\n", 471 | " // which during testing gives us state \"interactive\", not \"complete\"\n", 472 | " if (document.readyState === 'complete') {\n", 473 | " // check for the div that signifies that the notification\n", 474 | " // was already sent\n", 475 | " if (document.getElementById(\"0893a3a7-d02a-4ad9-aaa7-5bed9a750f4d\") === null) {\n", 476 | " var notificationPayload = {\"body\": \"Cell execution has finished!\", \"requireInteraction\": false, \"icon\": \"/static/base/images/favicon.ico\"};\n", 477 | " if (Notification.permission !== 'denied') {\n", 478 | " if (Notification.permission !== 'granted') { \n", 479 | " Notification.requestPermission(function (permission) {\n", 480 | " if(!('permission' in Notification)) {\n", 481 | " Notification.permission = permission\n", 482 | " }\n", 483 | " })\n", 484 | " }\n", 485 | " if (Notification.permission === 'granted') {\n", 486 | " var notification = new Notification(\"Jupyter 
Notebook\", notificationPayload)\n", 487 | " appendUniqueDiv()\n", 488 | " notification.onclick = function () {\n", 489 | " window.focus();\n", 490 | " this.close();\n", 491 | " };\n", 492 | " } \n", 493 | " } \n", 494 | " }\n", 495 | " }\n", 496 | " }\n", 497 | ")\n" 498 | ], 499 | "text/plain": [ 500 | "" 501 | ] 502 | }, 503 | "metadata": {}, 504 | "output_type": "display_data" 505 | } 506 | ], 507 | "source": [ 508 | "%%notify\n", 509 | "def get_mse(data, mean_risk): \n", 510 | " return np.mean((data - mean_risk)**2) + (np.mean(data) - mean_risk)**2 # variance + bias \n", 511 | "\n", 512 | "'''\n", 513 | "POL_X is input wrt standardized treatment space \n", 514 | "needs to be scaled back to normal to evaluate outcome model \n", 515 | "Assume POL_X is unit normalized!\n", 516 | "'''\n", 517 | "def eval_possibilities(POL_X, x_T_test_sc, x_test, Y_test, Q, Y_mean_old): \n", 518 | " print \"true-in-sample counterfactuals: \"\n", 519 | " true_risk = np.mean(real_risk((POL_X*np.std(T))+np.mean(T), beta_cons, beta_x, beta_x_T, beta_x_quad_T, x_test))\n", 520 | "# true_risk = np.mean( real_risk((POL_X*np.std(T_))+np.mean(T_), beta_cons, beta_x, beta_x_T, beta_x_quad_T, x_test))\n", 521 | " print true_risk\n", 522 | " n = len(Y_test); POL_X=POL_X.reshape([n,1])\n", 523 | " T_sc_test = x_T_test_sc[:,d].reshape([n,1]); \n", 524 | " X_test_pi = np.hstack([ x_T_test_sc[:,0:d], POL_X ])\n", 525 | " K_T_pi = k_T(T_sc_test, POL_X)\n", 526 | " K_pi_pi = k_T(POL_X, POL_X)\n", 527 | " K_XT_XT = k_XT( x_T_test_sc, x_T_test_sc ) + np.eye(n)*10e-3\n", 528 | "\n", 529 | " print \"direct method: \"\n", 530 | " dm = np.mean(gp.predict(X_test_pi)) + Y_mean_old # add the empirical mean! 
\n", 531 | " print dm\n", 532 | " test_params = {'x_sc': x_T_test_sc[:,0:d], 'T_sc': x_T_test_sc[:,d], 'k_T': k_T, 'k_XT': k_XT, 'k_X': k_X, 'X_pi': X_test_pi, 'K_T_T':K_T_T, 'K_T_pi': K_T_pi, 'K_pi_pi': K_pi_pi, 'K_XT_XT': K_XT_XT }\n", 533 | " test_params['x_aug'] = np.hstack([x_T_test_sc[:,0:d], np.ones(n).reshape([n,1])])\n", 534 | " test_params['pi'] = assignment_rule_2d\n", 535 | " [val, test_wghts] = opt_wghts(POL_X, **test_params)\n", 536 | " print \"prop. of strictly positive weights: \" + str(sum(test_wghts >0.001)*1.0 / n)\n", 537 | " \n", 538 | " pol_eval = test_wghts.T * Y_test.reshape([n,1]) * 1.0/n \n", 539 | " print \"weight-based evaluation: \"\n", 540 | " print pol_eval + mean_x\n", 541 | " print \"continuous off policy evaluator\"\n", 542 | " data = { 'n': len(Y_test), 'y': Y_test,'Q': Q,'x_full': x_T_test_sc[:,0:d], 'x': x_T_test_sc[:,0:d],'x_samp': x_T_test_sc[:,0:d], 'T_samp': x_T_test_sc[:,d], 'd': d, 'T': x_T_test_sc[:,d],'t_lo': t_lo ,'t_hi': t_hi}\n", 543 | " data['kernel_int_func'] = epanechnikov_int; data['kernel_func'] = epanechnikov_kernel; data['sgn'] = 1; data['threshold'] = 0.03\n", 544 | " data['h'] = 1.2; data['Q_known'] = Q\n", 545 | " data['tau'] = POL_X#*np.std(T))+np.mean(T)\n", 546 | " \n", 547 | " ope = off_policy_evaluation_known_Q(**data)\n", 548 | " print ope\n", 549 | " return [true_risk, dm, pol_eval, ope]" 550 | ] 551 | }, 552 | { 553 | "cell_type": "code", 554 | "execution_count": 552, 555 | "metadata": {}, 556 | "outputs": [ 557 | { 558 | "data": { 559 | "text/plain": [ 560 | "(300, 2)" 561 | ] 562 | }, 563 | "execution_count": 552, 564 | "metadata": {}, 565 | "output_type": "execute_result" 566 | } 567 | ], 568 | "source": [ 569 | "x_sc.shape" 570 | ] 571 | }, 572 | { 573 | "cell_type": "code", 574 | "execution_count": null, 575 | "metadata": { 576 | "collapsed": true 577 | }, 578 | "outputs": [], 579 | "source": [ 580 | "def get_pol(tht,**params): \n", 581 | " n = params['x_sc'].shape[0]\n", 582 | " POL_X = 
np.asarray( [ params['pi']( tht, params['x_sc'][i,:] ) for i in range(n) ] ).flatten().reshape([n,1]) ; \n", 583 | " return POL_X\n", 584 | "\n", 585 | "n = x_test.shape[0]\n", 586 | "test_n = len(Y_test); N_REPS = 10\n", 587 | "# thts = np.asarray([ [1,0,0], [0,1,0], [.5,.5,1], [0,0,2], [2,0,3] ])\n", 588 | "\n", 589 | "\n", 590 | "def eval_over_reps(thts, N_REPS, POL_FN): \n", 591 | " n_pols = 4 # number of different evaluation methods \n", 592 | " risks = np.zeros([len(test_range), n_pols, N_REPS])\n", 593 | " for k in range(N_REPS):\n", 594 | " n=400; \n", 595 | " [x_test, T_test, Y_test, true_Q, clf] = generate_data(mu_x, sigma_x, n, beta_x_quad_T)\n", 596 | " X_test_T = np.hstack([ x_test,T_test.reshape([n,1])])\n", 597 | " x_T_test_sc = scaler.transform(X_test_T) ; T_sc_test = x_T_test_sc[:,d].reshape([n,1])\n", 598 | " POL_X = np.dot(x_T_test_sc[:,0:d], beta_x_quad_T).reshape([n,1]); X_test_pi = np.hstack([ x_T_test_sc[:,0:d], POL_X ])\n", 599 | " K_T_pi = k_T(T_sc_test, POL_X); K_T_T = k_T(T_sc_test, T_sc_test); K_pi_pi = k_T(POL_X, POL_X); K_XT_XT = k_XT( x_T_test_sc, x_T_test_sc ) + np.eye(n)*10e-3\n", 600 | "\n", 601 | " test_params = {'x_sc': x_T_test_sc[:,0:d],'n':n, 'T_sc': x_T_test_sc[:,d], 'k_T': k_T, 'k_XT': k_XT, 'k_X': k_X, 'X_pi': X_test_pi, 'K_T_T':K_T_T, 'K_T_pi': K_T_pi, 'K_pi_pi': K_pi_pi, 'K_XT_XT': K_XT_XT }\n", 602 | " test_params['pi'] = assignment_rule_2d\n", 603 | "\n", 604 | " for (ind, test_t) in enumerate(test_range): \n", 605 | " print '-------t------------'\n", 606 | " print test_t\n", 607 | " print '---------'\n", 608 | " #(np.ones(test_n)*test_t)*np.std(T)+np.mean(T)\n", 609 | " pol = get_pol(thts[ind,:], **test_params)\n", 610 | " # evaluate policy projected to scaled space \n", 611 | " risks[ind,:, k] = eval_possibilities( (pol-np.mean(T))/np.std(T), x_T_test_sc, x_test, Y_test, true_Q, np.mean(Y) )\n", 612 | " return risks\n", 613 | "\n", 614 | "\n", 615 | "risks = eval_over_reps( thts, N_REPS, get_pol )" 616 | ] 617 | }, 
618 | { 619 | "cell_type": "code", 620 | "execution_count": 551, 621 | "metadata": { 622 | "scrolled": true 623 | }, 624 | "outputs": [ 625 | { 626 | "name": "stdout", 627 | "output_type": "stream", 628 | "text": [ 629 | "-------t------------\n", 630 | "-0.2\n", 631 | "---------\n", 632 | "true-in-sample counterfactuals: \n", 633 | "2.57469696802\n", 634 | "direct method: \n", 635 | "2.36644618097\n", 636 | "prop. of strictly positive weights: [[ 0.3575]]\n", 637 | "weight-based evaluation: \n", 638 | "[[ 1.09024674]]\n", 639 | "continuous off policy evaluator\n", 640 | "[ 0.23957388]\n", 641 | "-------t------------\n", 642 | "-0.1\n", 643 | "---------\n", 644 | "true-in-sample counterfactuals: \n", 645 | "-1.41776786459\n", 646 | "direct method: \n", 647 | "-1.41513931895\n", 648 | "prop. of strictly positive weights: [[ 0.2]]\n", 649 | "weight-based evaluation: \n", 650 | "[[-1.83435438]]\n", 651 | "continuous off policy evaluator\n", 652 | "[-0.96295469]\n", 653 | "-------t------------\n", 654 | "0.0\n", 655 | "---------\n", 656 | "true-in-sample counterfactuals: \n", 657 | "-3.00004517945\n", 658 | "direct method: \n", 659 | "-2.70574447571\n", 660 | "prop. of strictly positive weights: [[ 0.2775]]\n", 661 | "weight-based evaluation: \n", 662 | "[[-3.0313785]]\n", 663 | "continuous off policy evaluator\n", 664 | "[-1.11988048]\n", 665 | "-------t------------\n", 666 | "0.1\n", 667 | "---------\n", 668 | "true-in-sample counterfactuals: \n", 669 | "0.0773798099922\n", 670 | "direct method: \n", 671 | "-0.730489219331\n", 672 | "prop. of strictly positive weights: [[ 0.3425]]\n", 673 | "weight-based evaluation: \n", 674 | "[[ 0.40965942]]\n", 675 | "continuous off policy evaluator\n", 676 | "[ 0.3345952]\n", 677 | "-------t------------\n", 678 | "0.2\n", 679 | "---------\n", 680 | "true-in-sample counterfactuals: \n", 681 | "4.0899116162\n", 682 | "direct method: \n", 683 | "3.47468214519\n", 684 | "prop. 
of strictly positive weights: [[ 0.555]]\n", 685 | "weight-based evaluation: \n", 686 | "[[ 5.0375693]]\n", 687 | "continuous off policy evaluator\n", 688 | "[ 3.69088536]\n" 689 | ] 690 | } 691 | ], 692 | "source": [ 693 | "test_params = {'x_sc': x_T_test_sc[:,0:d], 'T_sc': x_T_test_sc[:,d], 'k_T': k_T, 'k_XT': k_XT, 'k_X': k_X, 'X_pi': X_test_pi, 'K_T_T':K_T_T, 'K_T_pi': K_T_pi, 'K_pi_pi': K_pi_pi, 'K_XT_XT': K_XT_XT }\n", 694 | "test_range = np.linspace(-.2,.2,5)\n", 695 | "n_pols = 4 \n", 696 | "test_n = len(Y_test)\n", 697 | "risks = np.zeros([len(test_range), n_pols])\n", 698 | "for (ind, test_t) in enumerate(test_range): \n", 699 | " print '-------t------------'\n", 700 | " print test_t\n", 701 | " print '---------'\n", 702 | " risks[ind,:] = eval_possibilities( (np.ones(test_n)*test_t)*np.std(T)+np.mean(T) , x_T_test_sc, x_test, Y_test,true_Q_test, np.mean(Y) )\n", 703 | "\n" 704 | ] 705 | }, 706 | { 707 | "cell_type": "code", 708 | "execution_count": null, 709 | "metadata": { 710 | "collapsed": true 711 | }, 712 | "outputs": [], 713 | "source": [ 714 | "eval_possibilities(np.zeros(n), x_T_test_sc, x_test, Y_test)" 715 | ] 716 | }, 717 | { 718 | "cell_type": "code", 719 | "execution_count": null, 720 | "metadata": { 721 | "collapsed": true 722 | }, 723 | "outputs": [], 724 | "source": [ 725 | "eval_possibilities(np.ones(n), x_T_test_sc, x_test, Y_test)" 726 | ] 727 | }, 728 | { 729 | "cell_type": "code", 730 | "execution_count": null, 731 | "metadata": { 732 | "collapsed": true 733 | }, 734 | "outputs": [], 735 | "source": [ 736 | "reps = 10\n", 737 | "vals = np.zeros([reps, 4])\n", 738 | "for i in range(reps): \n", 739 | " print i \n", 740 | " [true_risk, dm, wght_eval, ope] = eval_possibilities(np.dot(x_T_test_sc[:,0:d], np.random.uniform(size=d)) , x_T_test_sc, x_test, Y_test)\n", 741 | " vals[0,:] = [true_risk, dm, wght_eval, ope]\n" 742 | ] 743 | }, 744 | { 745 | "cell_type": "code", 746 | "execution_count": null, 747 | "metadata": { 748 | "collapsed": 
true 749 | }, 750 | "outputs": [], 751 | "source": [ 752 | "np.dot(x_test,beta_x_quad_T)" 753 | ] 754 | } 755 | ], 756 | "metadata": { 757 | "kernelspec": { 758 | "display_name": "Python 2", 759 | "language": "python", 760 | "name": "python2" 761 | }, 762 | "language_info": { 763 | "codemirror_mode": { 764 | "name": "ipython", 765 | "version": 2 766 | }, 767 | "file_extension": ".py", 768 | "mimetype": "text/x-python", 769 | "name": "python", 770 | "nbconvert_exporter": "python", 771 | "pygments_lexer": "ipython2", 772 | "version": "2.7.13" 773 | } 774 | }, 775 | "nbformat": 4, 776 | "nbformat_minor": 2 777 | } 778 | -------------------------------------------------------------------------------- /y.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 4.50938874889 3 | 21.9059197339 4 | 20.3534388474 5 | 8.99165915448 6 | 0.134958072768 7 | 24.3809360139 8 | 17.4873781909 9 | 9.11742151585 10 | 3.46071035544 11 | 15.7207628789 12 | 16.3392881006 13 | 19.941466149 14 | 8.24035746969 15 | 21.0715884581 16 | 4.55858184327 17 | 15.1516150079 18 | 7.19830850516 19 | 9.15421436162 20 | 39.3717291048 21 | 12.7273571697 22 | 1.86268836156 23 | 22.3395556966 24 | 22.2240205184 25 | 14.2759043598 26 | 24.2662647931 27 | 13.9795917141 28 | 5.3644424971 29 | 2.65859488315 30 | 12.3073580371 31 | 15.903477841 32 | 6.71407609094 33 | 25.1725992351 34 | 40.4055091586 35 | 17.1927752648 36 | 18.1062082444 37 | 3.94047743545 38 | 37.5102531864 39 | 46.7622292474 40 | 3.56162504347 41 | 24.5053013157 42 | 32.1799440561 43 | 4.9688150216 44 | 28.2475862264 45 | 3.1115625088 46 | 22.8101682731 47 | 4.9785606901 48 | 22.2231330934 49 | 9.43374170626 50 | 22.8221643031 51 | 3.64502407534 52 | 29.864515705 53 | 61.0005147449 54 | 27.2398492426 55 | 4.74513592559 56 | 4.72460511714 57 | 3.82935429325 58 | 6.40266373885 59 | 5.01224813654 60 | 10.4372964092 61 | 9.67074181154 62 | 1.11348486145 63 | 15.1910170994 64 | 13.7324733254 65 | 
29.3680746037 66 | 44.0489919903 67 | 0.737857990035 68 | 10.1631331468 69 | 18.7128461779 70 | 1.35350991767 71 | 2.38987045091 72 | 19.2952247737 73 | 5.22291900874 74 | 25.6172227669 75 | 30.4319940378 76 | 28.7046036964 77 | 8.08866565332 78 | 21.1267761503 79 | 28.792682292 80 | 21.4160021793 81 | 1.91083022203 82 | 25.9468690577 83 | 35.492403132 84 | 20.2643777879 85 | 5.16552566042 86 | 4.80634957487 87 | 9.6063331138 88 | 9.13885438652 89 | 7.75678154496 90 | 5.07374871078 91 | 7.93939961138 92 | 18.9105599299 93 | 2.94652504287 94 | 4.02908739785 95 | 3.09751443412 96 | 0.305345844109 97 | 10.6211199316 98 | 27.1889668643 99 | 29.0368945861 100 | 6.90655936692 101 | 22.2256086845 102 | 6.70428798804 103 | 8.57629805668 104 | 10.1314929449 105 | 36.130798133 106 | 14.8876352404 107 | 3.72938185807 108 | 2.87965499333 109 | 26.4380530816 110 | 10.1157100397 111 | 33.2736744138 112 | 27.0466098681 113 | 7.96858822518 114 | 13.5083522589 115 | 9.90298325165 116 | 38.0533462279 117 | 44.2132791988 118 | 11.9902677495 119 | 5.22300696149 120 | 2.32438453542 121 | 26.5562564166 122 | 5.76612925974 123 | 21.762143465 124 | 17.2360882667 125 | 23.3520872962 126 | 15.961682941 127 | 3.54632513197 128 | 32.713986773 129 | 27.7316623435 130 | 28.1717928789 131 | 15.7060871349 132 | 13.055133481 133 | 5.23715545016 134 | 9.73226881958 135 | 9.51329672107 136 | 4.83531930045 137 | 6.13761213007 138 | 19.938838283 139 | 8.47460911695 140 | 4.31595398744 141 | 1.78194973341 142 | 18.9491372605 143 | 0.513282434163 144 | 0.00301787854241 145 | 8.76770339227 146 | 10.495654526 147 | 8.00177274288 148 | 15.0434054704 149 | 20.4944995229 150 | 0.918823074975 151 | 27.8177325818 152 | 16.9726973019 153 | 12.4403086655 154 | 43.561769922 155 | 33.3292382064 156 | 1.48422430657 157 | 35.7135430486 158 | 2.00767898385 159 | 8.25264581213 160 | 9.7234765085 161 | 36.8760563699 162 | 12.3761341693 163 | 25.0783257989 164 | 7.13771697211 165 | 29.9182671951 166 | 2.0451442043 167 | 
5.1263018301 168 | 5.65842976869 169 | 1.81930995482 170 | 1.04137318333 171 | 18.4678590654 172 | 4.27702773705 173 | 1.66535897635 174 | 20.9022748785 175 | 13.4457373931 176 | 19.0271524576 177 | 26.1282477749 178 | 11.2401070765 179 | 10.7964805322 180 | 26.1363029875 181 | 6.00461587039 182 | 11.5104359097 183 | 13.5287110869 184 | 21.4144270632 185 | 11.2294336434 186 | 13.2411579835 187 | 24.2844297506 188 | 0.697591822674 189 | 4.19748926889 190 | 15.5103767646 191 | 61.5376609694 192 | 58.0049906601 193 | 15.6265344859 194 | 25.6287262876 195 | 51.09107853 196 | 20.7681140226 197 | 9.97621635937 198 | 18.1905885777 199 | 21.2042432353 200 | 9.95171328308 201 | 38.3628371939 202 | 34.0434391151 203 | 15.7014662838 204 | 15.6218003637 205 | 18.4111545652 206 | 24.2636514803 207 | 14.6676898131 208 | 18.0942572663 209 | 24.3145165519 210 | 12.6841682247 211 | 3.00842717494 212 | 4.48310194706 213 | 27.0621689277 214 | 7.91810541615 215 | 8.90895187635 216 | 49.686200111 217 | 30.7699802429 218 | 2.85056042761 219 | 2.38367941229 220 | 11.2237349957 221 | 3.08393268165 222 | 22.5054484272 223 | 12.6077894791 224 | 4.43911423665 225 | 31.8104343919 226 | 2.24029142188 227 | 8.29852777463 228 | 6.39380817661 229 | 5.8943291306 230 | 27.2493347879 231 | 30.6412309314 232 | 42.6358921353 233 | 9.45158591215 234 | 2.38715245409 235 | 10.6024560824 236 | 6.83234547914 237 | 13.9861505309 238 | 10.4637154937 239 | 15.8896895979 240 | 2.47130487966 241 | 17.3111469196 242 | 14.1178881431 243 | 25.3079893495 244 | 3.46098167355 245 | 23.9646473929 246 | 37.7427483782 247 | 12.0453141489 248 | 1.87045433139 249 | 1.96285383428 250 | 5.95486727883 251 | 15.6660838273 252 | 86.4659855964 253 | 16.1328468192 254 | 21.440854166 255 | 5.4077400341 256 | 5.19656556392 257 | 18.7695526526 258 | 7.62987825977 259 | 2.57941520946 260 | 19.0752723222 261 | 8.92563232799 262 | 14.5702369957 263 | 47.2222413121 264 | 6.4530000932 265 | 7.48716043506 266 | 21.1658653475 267 | 
15.2048426594 268 | 36.4499796035 269 | 0.464338168974 270 | 0.843626104988 271 | 22.221612291 272 | 29.4464588481 273 | 9.79323333638 274 | 6.24202409534 275 | 2.52272228665 276 | 3.8208241862 277 | 11.4704398916 278 | 0.983931338979 279 | 9.81187008157 280 | 19.9524940142 281 | 1.79972073425 282 | 19.9816133897 283 | 17.7002609817 284 | 7.92885538221 285 | 12.2598333009 286 | 14.1894671324 287 | 7.35950291542 288 | 8.32853384703 289 | 10.52873644 290 | 4.84622587865 291 | 36.7636619239 292 | 11.366865345 293 | 9.50975381099 294 | 3.36403405807 295 | 21.2828709127 296 | 8.38227371964 297 | 20.763654271 298 | 18.20499288 299 | 36.8416182352 300 | 33.2346843216 301 | 13.661828358 302 | 14.6715092489 303 | 21.7645298976 304 | 7.51622081246 305 | 27.9661539737 306 | 12.2350879042 307 | 10.3932953032 308 | 15.2190703814 309 | 14.256335766 310 | 6.81761740902 311 | 20.4659931746 312 | 5.71224212106 313 | 19.8634415298 314 | 42.1400394992 315 | 18.7170310612 316 | 2.37859277741 317 | 20.2937679873 318 | 5.47265948692 319 | 13.915397818 320 | 8.6985432466 321 | 15.7709105345 322 | 27.3675485926 323 | 33.440805493 324 | 15.6199360128 325 | 16.5515319118 326 | 0.0582512114924 327 | 6.95834687139 328 | 9.79104422747 329 | 13.2714056611 330 | 5.0624910212 331 | 30.0257693068 332 | 14.1551386119 333 | 29.7487802917 334 | 39.2988943975 335 | 17.5951009121 336 | 9.50283189084 337 | 5.24484377398 338 | 10.9713539632 339 | 23.120998531 340 | 1.55692161412 341 | 35.8405336485 342 | 3.1201694273 343 | 18.8083392558 344 | 9.95353786356 345 | 18.8926766413 346 | 20.3539970471 347 | 6.99098728596 348 | 6.39500709733 349 | 16.9419850399 350 | 33.1022175163 351 | 7.99341609985 352 | 11.877357022 353 | 22.0424041393 354 | 9.67388317994 355 | 7.62326774113 356 | 19.620596486 357 | 4.43586037107 358 | 26.104780212 359 | 33.2921603799 360 | 1.78027867067 361 | 6.25387783233 362 | 1.46455454414 363 | 3.60670071767 364 | 5.94955578176 365 | 24.2314784022 366 | 10.2463920881 367 | 2.72651502055 
368 | 6.67409960865 369 | 9.02564148326 370 | 3.05717996786 371 | 21.5044505414 372 | 12.8173724982 373 | 7.81942999041 374 | 36.1323407568 375 | 18.6584429964 376 | 11.4747818388 377 | 16.3986913022 378 | 3.26650555569 379 | 5.44050514755 380 | 29.1281499258 381 | 26.4533687464 382 | 7.39626111405 383 | 3.86101082898 384 | 3.55921561963 385 | 4.38813931698 386 | 9.45967583533 387 | 34.7594056445 388 | 20.6884613228 389 | 9.66806824797 390 | 19.557424828 391 | 20.2811488112 392 | 10.3265739736 393 | 5.58849199661 394 | 18.7729469175 395 | 9.23901585053 396 | 39.1922641951 397 | 15.0936975878 398 | 27.3220358009 399 | 2.0579212177 400 | 51.0157328446 401 | 13.6950854377 402 | 3.48801359569 403 | 15.2009663456 404 | 12.5822227892 405 | 22.7406680761 406 | 10.3284324928 407 | 22.781051703 408 | 3.01478994528 409 | 26.0564661793 410 | 5.80785157527 411 | 6.86044026372 412 | 17.225122069 413 | 14.4318508443 414 | 7.63729671037 415 | 38.4360686576 416 | 6.67640881388 417 | 4.78484580275 418 | 14.0040016105 419 | 4.66587475194 420 | 15.4312818666 421 | 16.1617437214 422 | 12.7623902478 423 | 5.0235628233 424 | 9.76244321246 425 | 16.8159822936 426 | 9.8040397484 427 | 37.8017431075 428 | 12.9534276467 429 | 0.831646185023 430 | 29.4987678618 431 | 14.9457033089 432 | 3.51904969995 433 | 0.928138216384 434 | 7.21748810935 435 | 6.12801397233 436 | 11.3903534374 437 | 24.8267111023 438 | 12.5484156997 439 | 10.926039702 440 | 3.32025449499 441 | 39.0832352363 442 | 11.6515549993 443 | 7.68771982033 444 | 1.49612937691 445 | 11.842835023 446 | 24.7164778772 447 | 9.59243678878 448 | 6.44236156639 449 | 23.8965877295 450 | 18.3477533258 451 | 2.31769323708 452 | 9.25380461929 453 | 19.940997993 454 | 7.19608354642 455 | 15.9754127359 456 | 4.40713926059 457 | 3.25783996706 458 | 34.8777568273 459 | 34.8158342259 460 | 13.7195296674 461 | 14.9012273399 462 | 4.16038935007 463 | 81.9707936137 464 | 29.4740143157 465 | 1.85645636199 466 | 16.7766617834 467 | 24.8335397241 468 | 
3.30560923902 469 | 12.4898297837 470 | 12.1837809634 471 | 26.2190660864 472 | 5.9601756939 473 | 24.1139626879 474 | 28.0367297754 475 | 12.042174861 476 | 2.05058859143 477 | 14.1778709426 478 | 4.70973690848 479 | 28.6887241069 480 | 0.361942844991 481 | 14.8260536542 482 | 29.7137106478 483 | 10.0196625903 484 | 5.23131966245 485 | 23.5793049098 486 | 12.4724142473 487 | 4.13017302575 488 | 23.6385752531 489 | 6.70255223865 490 | 28.0335394117 491 | 28.0700722919 492 | 32.5497109705 493 | 22.1300973687 494 | 36.9567308272 495 | 18.1588533482 496 | 3.71690927 497 | 18.3465113527 498 | 13.4526434532 499 | 30.3537496905 500 | 13.4302050601 501 | 3.24925372578 502 | 5.42441751499 503 | 0.969283982624 504 | 19.1185683622 505 | 26.3029056358 506 | 14.8044248246 507 | 28.2886949719 508 | 21.5404916402 509 | 5.14356040615 510 | 3.20280072666 511 | 19.9824267979 512 | 3.68994635033 513 | 2.06966233654 514 | 9.53216546332 515 | 22.3308402051 516 | 8.26209969523 517 | 5.74920235589 518 | 4.68809538836 519 | 1.85718314296 520 | 23.656705963 521 | 2.25608935322 522 | 30.2423931053 523 | 30.1543905933 524 | 7.5949121691 525 | 3.86831054765 526 | 34.3536714799 527 | 7.0449188199 528 | 12.8936147809 529 | 15.9262946601 530 | 21.4472212384 531 | 66.1578495886 532 | 2.77088601181 533 | 21.9285105262 534 | 18.2457707459 535 | 20.9201136144 536 | 18.1532920361 537 | 7.22686328935 538 | 10.2053162564 539 | 11.7296106467 540 | 14.0136613488 541 | 0.536386237617 542 | 0.951769929897 543 | 0.286023926757 544 | 0.281696532068 545 | 34.062204822 546 | 6.82704936688 547 | 27.3629028601 548 | 43.6914361502 549 | 30.4520095738 550 | 37.7775313244 551 | 9.94857375676 552 | 26.0933879101 553 | 6.14371987769 554 | 18.6094815586 555 | 36.9787113677 556 | 19.9437494875 557 | 21.7186541163 558 | 15.5386935811 559 | 33.1988020439 560 | 13.3766572398 561 | 17.0469659533 562 | 13.811401541 563 | 29.0139720372 564 | 14.151440051 565 | 26.1077127035 566 | 17.8684952562 567 | 29.6804919146 568 | 
28.9679432183 569 | 4.85758076751 570 | 9.59348956138 571 | 3.14824911173 572 | 39.7775000886 573 | 21.7969616128 574 | 15.758395276 575 | 24.6970844742 576 | 10.6469373471 577 | 2.86429682262 578 | 19.9569429475 579 | 1.9332864735 580 | 8.6149537451 581 | 24.0381915613 582 | 25.528064299 583 | 17.0579186196 584 | 16.4074236277 585 | 19.7016453982 586 | 32.2566865435 587 | 38.151110508 588 | 21.3207944369 589 | 1.07154166928 590 | 8.5126253798 591 | 49.519386986 592 | 20.7723553582 593 | 14.8750998904 594 | 2.47573267732 595 | 12.6649772796 596 | 39.8539303564 597 | 3.32691938408 598 | 14.4428643064 599 | 29.6436852865 600 | 6.73250105823 601 | 1.82060668555 602 | 35.1494697672 603 | 34.258427884 604 | 27.9311356047 605 | 32.8059141425 606 | 1.95066394174 607 | 11.0937694157 608 | 10.1668970059 609 | 5.61390426645 610 | 7.79973575786 611 | 9.20788104937 612 | 7.9717414427 613 | 35.0079657332 614 | 9.27131605532 615 | 42.5276267803 616 | 1.61510353864 617 | 15.2358158756 618 | 30.400526218 619 | 1.23110072822 620 | 3.86563846975 621 | 24.8529057622 622 | 6.4370014355 623 | 19.8317321605 624 | 26.7534420198 625 | 0.949940742723 626 | 11.5136479465 627 | 5.60564534855 628 | 18.7910035786 629 | 43.7057704344 630 | 1.80222748624 631 | 19.4054648299 632 | 3.34391145807 633 | 9.76783767285 634 | 10.2291842559 635 | 22.7715519963 636 | 34.8873197771 637 | 27.4049593666 638 | 0.490758947883 639 | 9.08982646552 640 | 22.7917129797 641 | 10.0104989227 642 | 51.2389206296 643 | 6.01808918505 644 | 22.650386898 645 | 23.5660308031 646 | 21.852405747 647 | 49.230317195 648 | 25.1398549483 649 | 5.33457474263 650 | 21.1603391497 651 | 11.1817690772 652 | 9.95417282085 653 | 7.49935986717 654 | 16.4641513579 655 | 37.4753372891 656 | 28.3633802617 657 | 0.233170847677 658 | 2.72112276018 659 | 31.3367135075 660 | 9.3325151347 661 | 3.83431709092 662 | 7.9523260456 663 | 26.5056698456 664 | 17.6041229403 665 | 4.9077289705 666 | 1.9115474989 667 | 3.95458207558 668 | 10.9574594796 
669 | 8.43116400243 670 | 40.4658669505 671 | 2.67696085833 672 | 4.89811967275 673 | 23.7450288711 674 | 32.9986762455 675 | 13.1475511522 676 | 6.5781010875 677 | 16.6291670904 678 | 29.6157910949 679 | 17.211598114 680 | 2.38912416329 681 | 8.87102202385 682 | 2.8700352768 683 | 13.408120128 684 | 0.584504778298 685 | 8.08991670865 686 | 1.49657755468 687 | 7.75903535024 688 | 39.7280048578 689 | 2.02788673526 690 | 1.74708961933 691 | 20.5886669568 692 | 10.6577772814 693 | 11.5630694004 694 | 9.24250927066 695 | 0.935399719532 696 | 21.3621812111 697 | 19.7545968526 698 | 11.2701218137 699 | 3.05758290837 700 | 10.456352237 701 | 6.39344760787 702 | 13.9857439474 703 | 18.4589134799 704 | 1.86490423884 705 | 8.32551416183 706 | 58.4471849238 707 | 37.7206397154 708 | 11.9405130447 709 | 5.63299503236 710 | 16.1418538439 711 | 25.4439149955 712 | 18.6460695492 713 | 0.451346855466 714 | 9.93291363175 715 | 15.9868545252 716 | 5.77801540197 717 | 22.3063774926 718 | 33.513642806 719 | 7.1867684993 720 | 29.5303078294 721 | 3.39307390003 722 | 40.7551309516 723 | 7.70766064855 724 | 8.34051601628 725 | 20.571367374 726 | 30.5542378856 727 | 20.576395703 728 | 0.16208435139 729 | 16.1254342409 730 | 7.57138908565 731 | 14.8476697546 732 | 22.0959665063 733 | 9.870288044 734 | 37.4437265711 735 | 25.7517000817 736 | 1.24125733881 737 | 20.2566758855 738 | 15.7845581553 739 | 2.90928147889 740 | 14.9937608775 741 | 12.9089539505 742 | 11.1701859482 743 | 15.7361036249 744 | 13.1977114434 745 | 27.3907238286 746 | 31.0922114194 747 | 5.05849877687 748 | 13.3341764554 749 | 11.1357165831 750 | 26.2921232285 751 | 5.02525481039 752 | 3.24782223911 753 | 56.2341842565 754 | 22.0314035069 755 | 2.72994056028 756 | 11.21108699 757 | 25.4859726516 758 | 5.10066064825 759 | 28.85880046 760 | 2.47008903191 761 | 11.6461552821 762 | 14.4663141588 763 | 17.9939272229 764 | 16.3627015841 765 | 42.9312533336 766 | 2.88646726305 767 | 21.2029902168 768 | 52.8682426174 769 | 
1.04293043856 770 | 6.2546409125 771 | 0.737971133761 772 | 28.864790616 773 | 16.2290793603 774 | 9.27099730406 775 | 16.9917337368 776 | 0.976112226097 777 | 22.099637642 778 | 15.2834533365 779 | 25.2323183468 780 | 24.7951206067 781 | 15.3691006895 782 | 26.558869991 783 | 16.3359985751 784 | 3.91569534897 785 | 8.72209905135 786 | 9.70816782085 787 | 20.98079867 788 | 7.50044932555 789 | 9.6013207582 790 | 1.10631341753 791 | 6.098704055 792 | 3.00400003313 793 | 19.9432167406 794 | 69.285263442 795 | 3.93348029004 796 | 0.295134229504 797 | 25.7707762419 798 | 26.772966413 799 | 6.48955846013 800 | 9.74150497159 801 | 10.1586228211 802 | 4.95380070116 803 | 8.1308036808 804 | 22.6258400121 805 | 22.5899319843 806 | 5.28178475516 807 | 22.6720740119 808 | 1.43653866213 809 | 20.8190346257 810 | 0.872940532535 811 | 4.58379950667 812 | 31.8076579914 813 | 31.7028150607 814 | 14.3658103322 815 | 14.9187542687 816 | 1.4630241228 817 | 20.0459125145 818 | 10.1255908248 819 | 0.988059503778 820 | 7.81612150255 821 | 13.1133564466 822 | 17.840563398 823 | 8.61013974283 824 | 24.7168174266 825 | 25.5073337281 826 | 21.7379208683 827 | 19.8448544001 828 | 19.202631258 829 | 26.5024401906 830 | 5.23124605306 831 | 12.3370733047 832 | 15.0142579174 833 | 30.2708389857 834 | 2.17661788168 835 | 40.9279130055 836 | 27.7016131763 837 | 8.85481889928 838 | 9.20524930961 839 | 19.3139652732 840 | 13.7523038603 841 | 29.5065696761 842 | 16.360720746 843 | 27.2725257708 844 | 4.66855528798 845 | 34.0184074384 846 | 0.53134951409 847 | 19.2283795225 848 | 41.7249658196 849 | 7.6002834372 850 | 8.31254796907 851 | 0.77546012008 852 | 26.6946703016 853 | 32.3077763705 854 | 0.462699076306 855 | 5.28415145905 856 | 10.6861895818 857 | 65.3090052361 858 | 16.2202764262 859 | 7.93205378203 860 | 26.0907637527 861 | 12.2884553465 862 | 23.1258630357 863 | 16.9549522306 864 | 9.31354883326 865 | 1.18853297944 866 | 12.5955501198 867 | 14.580206065 868 | 16.4392829065 869 | 
49.5383318566 870 | 9.73418602479 871 | 31.0034167796 872 | 13.6916826737 873 | 26.456038843 874 | 7.94474036847 875 | 16.5428622244 876 | 13.6684784807 877 | 32.3877782792 878 | 64.5812024823 879 | 4.84056733276 880 | 3.90340895097 881 | 18.8446773106 882 | 15.6260492982 883 | 6.4144958234 884 | 21.824072588 885 | 5.57733386697 886 | 5.86107728427 887 | 26.4879620388 888 | 32.523804809 889 | 17.564221651 890 | 9.92129713391 891 | 4.53376241859 892 | 11.7555540538 893 | 17.7005695427 894 | 10.8526834988 895 | 13.0452242458 896 | 22.7741956015 897 | 35.6136071448 898 | 16.7985891457 899 | 0.481804947938 900 | 22.9968176803 901 | 19.6457216189 902 | 28.305517435 903 | 5.95110407179 904 | 1.07379682837 905 | 6.99125921765 906 | 40.2624672768 907 | 13.2047498791 908 | 1.14681944536 909 | 11.0654446514 910 | 13.7545845523 911 | 42.31779397 912 | 3.64175801034 913 | 1.10423272956 914 | 12.8070189353 915 | 17.8270702276 916 | 25.872971413 917 | 12.3280816777 918 | 33.9646748561 919 | 17.5024551561 920 | 23.2332038719 921 | 23.2315232833 922 | 36.4972849906 923 | 10.0569492665 924 | 1.29480580037 925 | 4.95166674119 926 | 29.6704650443 927 | 14.3105754395 928 | 11.2097351015 929 | 42.1025191679 930 | 11.8462617103 931 | 0.684952904419 932 | 18.9112249911 933 | 45.5748695308 934 | 31.2049346044 935 | 31.6981194209 936 | 48.7053080251 937 | 11.5358272148 938 | 17.6688464775 939 | 0.555632319631 940 | 11.0478407675 941 | 7.2871891261 942 | 21.364298545 943 | 30.4092615573 944 | 1.63916043764 945 | 47.6680283953 946 | 30.7232169663 947 | 17.4484214453 948 | 1.75476961638 949 | 24.6151798978 950 | 14.7261371947 951 | 15.5721534361 952 | 33.7964707633 953 | 27.985368134 954 | 3.22916990991 955 | 10.4029823342 956 | 12.8279132777 957 | 11.2216526929 958 | 40.9530685543 959 | 22.2304775299 960 | 0.989198320385 961 | 8.31766811146 962 | 8.58669288349 963 | 8.32594867128 964 | 33.5188409281 965 | 9.56954328636 966 | 25.4673342844 967 | 17.8550028493 968 | 1.44661296958 969 | 
1.11183663125 970 | 11.5391799035 971 | 13.3893396661 972 | 33.6710557883 973 | 12.2520042374 974 | 0.402343251224 975 | 7.70101924794 976 | 10.1762870563 977 | 26.4532547552 978 | 22.5762301153 979 | 17.9468993583 980 | 5.47191981067 981 | 12.4613653827 982 | 10.9340268754 983 | 15.2724414157 984 | 8.57132433903 985 | 3.76513782457 986 | 14.0096983226 987 | 25.7741269022 988 | 3.73739141698 989 | 14.6925714413 990 | 0.265750499578 991 | 30.6980543492 992 | 4.24126845472 993 | 13.9121700532 994 | 4.41901716172 995 | 8.30930027444 996 | 1.39564938679 997 | 9.89963905219 998 | 19.0218898429 999 | 21.0631745171 1000 | 21.524784562 1001 | 7.02334649169 1002 | 6.5692697341 1003 | 7.45125909593 1004 | 7.90141025316 1005 | 13.1310011678 1006 | 1.85472531959 1007 | 3.1959855395 1008 | 20.6172939694 1009 | 27.5678823345 1010 | 4.14393938668 1011 | 15.4857564709 1012 | 29.3078470079 1013 | 3.91241654604 1014 | 19.2666982127 1015 | 13.1858487059 1016 | 10.934571524 1017 | 8.95918937523 1018 | 35.1737548034 1019 | 22.2721943173 1020 | 17.2938141832 1021 | 1.16847467933 1022 | 3.83891628384 1023 | 33.1335738968 1024 | 56.02419758 1025 | 6.14940468153 1026 | 26.7939782092 1027 | 6.26581859092 1028 | 6.66806213955 1029 | 11.6804827138 1030 | 46.024760641 1031 | 8.86545565048 1032 | 46.1976456802 1033 | 30.2049024675 1034 | 24.40971947 1035 | 15.7438133461 1036 | 25.7721968389 1037 | 16.6175435983 1038 | 41.5258211784 1039 | 37.383533915 1040 | 0.446734691253 1041 | 9.9158101271 1042 | 7.65574216562 1043 | 21.6353049058 1044 | 10.0420121573 1045 | 40.2288032017 1046 | 5.47498040382 1047 | 9.23075677848 1048 | 13.9439508677 1049 | 16.7755447311 1050 | 19.5797510258 1051 | 2.72641207461 1052 | 8.54844798558 1053 | 0.473841475919 1054 | 6.50903077504 1055 | 23.7432135857 1056 | 15.2784595466 1057 | 5.2759384383 1058 | 21.2128436577 1059 | 27.0928955867 1060 | 17.323270536 1061 | 5.90903675557 1062 | 30.0666928892 1063 | 10.0761165217 1064 | 25.6106479273 1065 | 16.9140124773 1066 | 
16.0075939181 1067 | 9.81467072119 1068 | 17.0530823962 1069 | 56.8784733107 1070 | 18.1424407379 1071 | 8.93411952474 1072 | 8.49858394322 1073 | 10.3293220224 1074 | 12.8907997374 1075 | 21.8796324015 1076 | 16.1860434493 1077 | 22.9230077504 1078 | 10.917839505 1079 | 11.5565933208 1080 | 44.6164971517 1081 | 0.788931144486 1082 | 40.1235469215 1083 | 12.0420430504 1084 | 7.21346153083 1085 | 10.8325349117 1086 | 25.1479144533 1087 | 15.4987734969 1088 | 17.0568140681 1089 | 5.85226550021 1090 | 3.1900095569 1091 | 6.30074247555 1092 | 6.02712843732 1093 | 6.61573025645 1094 | 13.8894377935 1095 | 2.84772381603 1096 | 18.3342886426 1097 | 1.88214392601 1098 | 4.21567116459 1099 | 12.1743959558 1100 | 18.3373017765 1101 | 26.9137295554 1102 | 32.6625617619 1103 | 12.2952021798 1104 | 37.2193262318 1105 | 12.9378281378 1106 | 27.4714863823 1107 | 0.289333597362 1108 | 9.55235623012 1109 | 25.565726373 1110 | 17.6659587825 1111 | 29.7769692772 1112 | 15.0289520938 1113 | 11.2252125295 1114 | 21.0230642851 1115 | 0.608096022527 1116 | 38.7737368617 1117 | 14.3924256428 1118 | 16.8682424265 1119 | 9.59946442262 1120 | 35.7237369505 1121 | 1.2174804156 1122 | 13.4488324801 1123 | 16.637639607 1124 | 12.12938106 1125 | 3.79971366026 1126 | 5.43465853464 1127 | 24.7084781276 1128 | 39.0789722238 1129 | 4.60760541011 1130 | 7.11796343723 1131 | 16.8948597302 1132 | 13.0087703895 1133 | 15.4356037723 1134 | 5.04404221405 1135 | 10.4983299358 1136 | 13.327399044 1137 | 19.9870044151 1138 | 6.91584145569 1139 | 13.6931264459 1140 | 42.9290965432 1141 | 50.0879755952 1142 | 23.8474547304 1143 | 31.0198859561 1144 | 10.4802706453 1145 | 21.3113780044 1146 | 11.790774396 1147 | 25.8380855309 1148 | 6.63675860536 1149 | 8.44831313754 1150 | 35.3980469273 1151 | 42.1881327616 1152 | 17.2013726363 1153 | 5.56279612403 1154 | 4.90590575191 1155 | 30.901141269 1156 | 7.77658963118 1157 | 0.861184386934 1158 | 25.7999689533 1159 | 4.61095520215 1160 | 1.64544039711 1161 | 
6.43361798019 1162 | 27.7223166694 1163 | 21.3712205266 1164 | 35.6276206774 1165 | 2.83822388457 1166 | 5.6685652601 1167 | 2.66332245283 1168 | 9.75646267599 1169 | 3.11580708834 1170 | 1.87666438934 1171 | 16.2265262009 1172 | 9.19741312047 1173 | 65.6055766501 1174 | 40.6140229589 1175 | 11.6659338732 1176 | 18.248972901 1177 | 17.351676644 1178 | 10.8722392245 1179 | 4.38493083788 1180 | 23.7232915982 1181 | 14.7277672021 1182 | 4.00813119929 1183 | 33.8274479032 1184 | 1.11317391378 1185 | 19.7943719682 1186 | 11.8844760883 1187 | 25.9304883486 1188 | 7.82640799053 1189 | 19.442231134 1190 | 3.59330225496 1191 | 13.0386428955 1192 | 30.9587448242 1193 | 13.1178089973 1194 | 6.85775262357 1195 | 26.5201316582 1196 | 45.102655314 1197 | 2.15503735647 1198 | 12.373455334 1199 | 0.312071768502 1200 | 1.99409955086 1201 | 11.8022365906 1202 | 8.78791304634 1203 | 1.12850341063 1204 | 18.7036662656 1205 | 11.1548227696 1206 | 0.573815776864 1207 | 4.41682711424 1208 | 9.85322795513 1209 | 18.0684668691 1210 | 18.8993018818 1211 | 18.1295500424 1212 | 4.06212594277 1213 | 27.8446350446 1214 | 27.0237534488 1215 | 27.6412134954 1216 | 8.68974794551 1217 | 3.78950643286 1218 | 19.9659498942 1219 | 21.32859066 1220 | 19.3319350984 1221 | 7.72073292878 1222 | 23.6606758069 1223 | 11.2350139049 1224 | 29.9229543757 1225 | 17.9346033415 1226 | 23.223200376 1227 | 18.6313745123 1228 | 28.8832668943 1229 | 18.3491078028 1230 | 23.5942372469 1231 | 15.6779555492 1232 | 27.5317125795 1233 | 0.706325657676 1234 | 18.6024645042 1235 | 23.4840523491 1236 | 9.47458921122 1237 | 51.0312680515 1238 | 65.2251657769 1239 | 8.76927029559 1240 | 9.5800327195 1241 | 18.2261626111 1242 | 41.1376716935 1243 | 1.08549991606 1244 | 13.8290431319 1245 | 7.73347033029 1246 | 14.9919182702 1247 | 10.5633803645 1248 | 10.9638126575 1249 | 6.83080101452 1250 | 8.31659769727 1251 | 16.1883897119 1252 | 1.76265473505 1253 | 1.74381849757 1254 | 1.56412052315 1255 | 13.4189850464 1256 | 
7.7531905609 1257 | 12.5419403012 1258 | 45.7931335544 1259 | 5.01238985438 1260 | 1.65180227283 1261 | 16.3354482317 1262 | 10.8063685725 1263 | 29.1854974445 1264 | 11.4078652717 1265 | 16.2512074722 1266 | 10.8299178577 1267 | 8.00350809547 1268 | 0.157888264508 1269 | 17.2029446894 1270 | 35.1257482007 1271 | 12.6030933459 1272 | 4.81332791864 1273 | 15.5315525599 1274 | 60.3132472477 1275 | 20.0817540805 1276 | 11.8960709505 1277 | 8.72104149248 1278 | 51.6796376575 1279 | 30.9556982884 1280 | 13.4473208576 1281 | 21.4873485473 1282 | 17.3787804659 1283 | 18.2290689962 1284 | 18.2296347092 1285 | 17.0745042499 1286 | 0.893943378079 1287 | 10.2156902748 1288 | 4.2862856288 1289 | 23.9574529753 1290 | 5.63886947701 1291 | 26.1506109328 1292 | 24.1710320583 1293 | 1.6530403897 1294 | 12.02051463 1295 | 13.9580776656 1296 | 2.33626530708 1297 | 4.0964394206 1298 | 4.58253401882 1299 | 0.430068590303 1300 | 5.07652623274 1301 | 29.4503163412 1302 | 0.128208222752 1303 | 19.9782395897 1304 | 14.0066519846 1305 | 4.8056258132 1306 | 2.62371145148 1307 | 6.63190147878 1308 | 16.8575405788 1309 | 19.9873526248 1310 | 3.33789784511 1311 | 7.5607503023 1312 | 0.32324661981 1313 | 2.73605601156 1314 | 2.02508643382 1315 | 13.4415918313 1316 | 3.28755033518 1317 | 0.239797174755 1318 | 9.52039316466 1319 | 6.9200704066 1320 | 22.9959086588 1321 | 19.8849476382 1322 | 9.35295322843 1323 | 0.377376084421 1324 | 23.6713634777 1325 | 21.730385608 1326 | 5.73536249747 1327 | 12.549870755 1328 | 7.07728226305 1329 | 18.3018220269 1330 | 2.56032192181 1331 | 16.3754109322 1332 | 23.1049277871 1333 | 2.81433160074 1334 | 3.34050222296 1335 | 30.1099524818 1336 | 20.4889727794 1337 | 60.2791210329 1338 | 15.3253383291 1339 | 0.173501724894 1340 | 1.49647529372 1341 | 25.7255128357 1342 | 42.7852720555 1343 | 10.5244914444 1344 | 12.4509106607 1345 | 16.0630646201 1346 | 16.963417429 1347 | 7.78024163562 1348 | 20.0576409538 1349 | 34.8322381871 1350 | 19.8398760117 1351 | 
6.41277762523 1352 | 18.9357668625 1353 | 9.8490066398 1354 | 0.106385779062 1355 | 27.644718065 1356 | 4.35797669575 1357 | 0.729841614468 1358 | 36.538394209 1359 | 13.2431078977 1360 | 11.0377952904 1361 | 6.06542923509 1362 | 28.9816528792 1363 | 56.7970829074 1364 | 15.9211559456 1365 | 1.68051969955 1366 | 18.7697177548 1367 | 20.7275470031 1368 | 10.3822910255 1369 | 7.94152731468 1370 | 9.91298617664 1371 | 4.35254684259 1372 | 3.77496444554 1373 | 21.5356614295 1374 | 30.5826444464 1375 | 43.9905959168 1376 | 21.2561690944 1377 | 10.5740026117 1378 | 8.33205623239 1379 | 24.1892612157 1380 | 12.0841624568 1381 | 17.6336466775 1382 | 7.69943021348 1383 | 5.53038792381 1384 | 7.27707781423 1385 | 7.04428618621 1386 | 1.80353624084 1387 | 4.30015948425 1388 | 8.73195314671 1389 | 35.156035446 1390 | 5.69821857663 1391 | 2.67511547518 1392 | 40.9112042312 1393 | 27.1815027214 1394 | 34.3085600418 1395 | 6.56710313273 1396 | 40.1046979269 1397 | 35.1214667799 1398 | 7.94745363723 1399 | 25.2289549198 1400 | 25.1165093819 1401 | 0.98980441509 1402 | 40.1859610045 1403 | 8.81201348618 1404 | 4.19950855475 1405 | 7.52572473711 1406 | 20.6656541625 1407 | 9.25136338569 1408 | 12.7211989932 1409 | 14.2543234459 1410 | 2.08331066864 1411 | 36.3095009244 1412 | 23.8244090797 1413 | 11.0414484604 1414 | 7.43691167772 1415 | 8.90138465875 1416 | 32.4994222606 1417 | 0.641764198907 1418 | 1.64295781339 1419 | 29.5210737274 1420 | 42.3179484757 1421 | 20.1329771631 1422 | 15.5912079283 1423 | 18.8195849954 1424 | 37.0995131572 1425 | 4.40100423859 1426 | 16.1121557149 1427 | 7.42689657966 1428 | 35.7387484302 1429 | 0.258064208303 1430 | 21.3618542295 1431 | 22.9244942457 1432 | 5.30203272075 1433 | 1.19581297281 1434 | 13.6299471259 1435 | 12.5864479681 1436 | 21.3439849023 1437 | 11.465412161 1438 | 12.7118711958 1439 | 2.34605888139 1440 | 21.7837712816 1441 | 18.6703130562 1442 | 1.65945105081 1443 | 1.43273005952 1444 | 20.5278950438 1445 | 9.00658398611 1446 | 
4.33330856075 1447 | 3.36550689713 1448 | 3.30834018603 1449 | 6.66673478734 1450 | 29.1400982573 1451 | 6.81649694325 1452 | 28.6758850061 1453 | 3.45739385008 1454 | 10.9776774827 1455 | 12.4089392421 1456 | 25.8624667444 1457 | 17.0743120243 1458 | 2.63462149507 1459 | 7.62436015316 1460 | 21.7282121337 1461 | 13.5129465385 1462 | 7.99029728203 1463 | 28.6817635897 1464 | 18.33231053 1465 | 13.7083061951 1466 | 16.611559924 1467 | 39.4266695566 1468 | 7.47436424257 1469 | 4.12038783554 1470 | 5.12901537098 1471 | 29.2017608436 1472 | 4.91760591479 1473 | 27.2410763511 1474 | 28.3369619156 1475 | 16.0896540373 1476 | 1.13082997634 1477 | 31.0027452559 1478 | 30.1100959195 1479 | 5.34338410252 1480 | 29.0492657139 1481 | 8.70394125436 1482 | 32.1569845311 1483 | 1.23084270103 1484 | 6.91289124968 1485 | 11.6728133341 1486 | 32.8833648726 1487 | 1.02280393157 1488 | 1.46394706779 1489 | 13.4886226729 1490 | 23.5556785906 1491 | 15.3276007343 1492 | 17.5120347028 1493 | 12.6541088272 1494 | 10.2897617138 1495 | 37.8250174199 1496 | 3.47517392666 1497 | 1.47174056797 1498 | 14.367103016 1499 | 44.1302215795 1500 | 2.66799595225 1501 | 3.4575170854 1502 | 3.25943231117 1503 | 6.54431056315 1504 | 13.1435897476 1505 | 6.68697377778 1506 | 9.7549194235 1507 | 28.354486559 1508 | 12.9433370107 1509 | 12.4487788563 1510 | 5.75225232826 1511 | 29.9009307662 1512 | 3.75411285257 1513 | 2.06883986067 1514 | 5.12698893625 1515 | 12.2782015204 1516 | 9.67819760761 1517 | 10.0537746561 1518 | 5.091443259 1519 | 26.8055081853 1520 | 11.2805285578 1521 | 2.14731160429 1522 | 41.3027050849 1523 | 0.145353238925 1524 | 31.59415755 1525 | 15.9004046627 1526 | 28.703576235 1527 | 10.5809070939 1528 | 3.87374467766 1529 | 37.2506641842 1530 | 10.4428763983 1531 | 23.4412719059 1532 | 24.9793655521 1533 | 19.8424191689 1534 | 8.01681617891 1535 | 10.2344829867 1536 | 0.105313277442 1537 | 14.5640493525 1538 | 51.2264402853 1539 | 12.4136754536 1540 | 15.5103781687 1541 | 6.32981524015 
1542 | 6.73322193126 1543 | 50.7154406705 1544 | 24.9691896077 1545 | 9.55966420366 1546 | 3.46721860165 1547 | 17.3378904368 1548 | 6.57346008707 1549 | 4.44091081356 1550 | 24.6277473213 1551 | 5.77437767053 1552 | 10.8628958983 1553 | 33.1173194474 1554 | 31.5336759864 1555 | 9.02565918378 1556 | 15.3713114498 1557 | 13.889094734 1558 | 16.9145076914 1559 | 11.4472154455 1560 | 7.0847643651 1561 | 14.6128081344 1562 | 18.4117934736 1563 | 12.6473661744 1564 | 12.4568492456 1565 | 4.16586011992 1566 | 4.19471047594 1567 | 6.43100505804 1568 | 12.6062991969 1569 | 32.3647641541 1570 | 24.7669490659 1571 | 11.619196492 1572 | 8.30232002135 1573 | 19.3450569979 1574 | 33.1155964194 1575 | 0.371525705981 1576 | 15.2104621947 1577 | 0.859367712689 1578 | 20.3396536934 1579 | 11.7788890406 1580 | 23.9830487856 1581 | 51.5384280439 1582 | 14.8232138947 1583 | 14.3050728893 1584 | 3.55329749268 1585 | 6.02198613065 1586 | 23.8618197364 1587 | 12.530512818 1588 | 43.1726141109 1589 | 24.6976585003 1590 | 1.71576553876 1591 | 22.4303313428 1592 | 24.6829792789 1593 | 7.81269181169 1594 | 36.9370351731 1595 | 37.9216455304 1596 | 47.3556850311 1597 | 9.9957908246 1598 | 11.2479118729 1599 | 1.06031398126 1600 | 10.3155881724 1601 | 16.7432700111 1602 | 27.2127148398 1603 | 17.2254407498 1604 | 10.0175295046 1605 | 13.2221862609 1606 | 0.44073000358 1607 | 21.0742893078 1608 | 3.13740280355 1609 | 11.2236511304 1610 | 23.9528224675 1611 | 12.1141073003 1612 | 0.638339924053 1613 | 3.21009692504 1614 | 5.41869435328 1615 | 19.8950622115 1616 | 39.4799035696 1617 | 54.18242389 1618 | 9.25875189529 1619 | 16.3809317838 1620 | 11.9480649534 1621 | 9.07600197891 1622 | 18.6202144168 1623 | 9.40404776109 1624 | 14.96093794 1625 | 12.4858738057 1626 | 9.61934274119 1627 | 17.9588649481 1628 | 6.3967708546 1629 | 19.3041571248 1630 | 5.30579850912 1631 | 9.72716028275 1632 | 10.3903627342 1633 | 15.9110791782 1634 | 13.8784431927 1635 | 20.3396755533 1636 | 30.4466175586 1637 | 
42.9625409563 1638 | 11.9107310454 1639 | 5.45323374057 1640 | 3.76542706812 1641 | 6.62442698856 1642 | 9.10143291756 1643 | 13.1426707661 1644 | 3.20048393217 1645 | 34.145125911 1646 | 15.8942961634 1647 | 42.5102374867 1648 | 16.3781658644 1649 | 29.9032791722 1650 | 8.47741137334 1651 | 33.4372014803 1652 | 1.11719282054 1653 | 1.0551191048 1654 | 19.9182009935 1655 | 14.8352635056 1656 | 3.7047104055 1657 | 10.876379039 1658 | 12.4375733703 1659 | 4.39456615831 1660 | 33.7343632183 1661 | 6.5124490094 1662 | 15.4441665935 1663 | 13.6309109165 1664 | 1.00203076117 1665 | 7.63917649439 1666 | 1.19677907326 1667 | 74.6141365062 1668 | 13.0076753111 1669 | 19.5355175715 1670 | 29.3054085919 1671 | 28.7931420049 1672 | 5.53361428719 1673 | 0.484892707913 1674 | 24.6034989874 1675 | 8.98634091814 1676 | 15.6701287503 1677 | 20.5767129208 1678 | 9.86941475378 1679 | 4.89429723133 1680 | 19.5626299849 1681 | 5.00931221704 1682 | 3.43556403269 1683 | 0.999944974315 1684 | 3.19857264408 1685 | 20.5626476561 1686 | 5.43133033444 1687 | 14.1285668 1688 | 23.7364940957 1689 | 12.1754696917 1690 | 23.5943781778 1691 | 8.19177951349 1692 | 1.8825015554 1693 | 16.6934937256 1694 | 46.7250083001 1695 | 4.54943415362 1696 | 18.2339914855 1697 | 26.9148348247 1698 | 20.8460072981 1699 | 19.5547154073 1700 | 10.0300333777 1701 | 22.1803051055 1702 | 61.0516082357 1703 | 0.42366076814 1704 | 25.7477756537 1705 | 9.22346144809 1706 | 7.37378569287 1707 | 3.56618988282 1708 | 3.66548839756 1709 | 6.04215143173 1710 | 61.6459921418 1711 | 8.81391195269 1712 | 3.88719926278 1713 | 1.65246178236 1714 | 14.3824514087 1715 | 8.20090460816 1716 | 2.00510506109 1717 | 25.8975814693 1718 | 18.9751075313 1719 | 49.4736551724 1720 | 5.88010368007 1721 | 25.3999458944 1722 | 16.2357093597 1723 | 19.4876677517 1724 | 12.1406407273 1725 | 10.78940208 1726 | 3.30618835774 1727 | 20.44767222 1728 | 32.1025759651 1729 | 17.0584441965 1730 | 0.62121230155 1731 | 22.8595697645 1732 | 1.11355731066 
1733 | 6.68234050322 1734 | 12.431830402 1735 | 17.4482655603 1736 | 13.9448622667 1737 | 36.3810173333 1738 | 12.7866133469 1739 | 17.8143211145 1740 | 3.72569911836 1741 | 4.49023714226 1742 | 5.38419615691 1743 | 19.1045315408 1744 | 5.4987583919 1745 | 278.48817171 1746 | 14.6458084089 1747 | 12.0121867923 1748 | 37.2461175872 1749 | 0.209783464555 1750 | 19.6391755699 1751 | 2.16106825174 1752 | 31.1589634711 1753 | 21.1407595431 1754 | 12.304430265 1755 | 35.0895842516 1756 | 30.6712263911 1757 | 28.2559269613 1758 | 2.22631729243 1759 | 6.66723440262 1760 | 2.55842473211 1761 | 11.7721761987 1762 | 13.4516527314 1763 | 12.0550267469 1764 | 22.0271137 1765 | 16.7463087039 1766 | 21.9622779639 1767 | 15.5554667287 1768 | 19.9854868713 1769 | 4.05125117742 1770 | 9.35761799011 1771 | 26.6108373467 1772 | 30.1764013181 1773 | 15.0121737851 1774 | 30.4542447951 1775 | 10.7249947996 1776 | 24.8088809552 1777 | 7.20139821003 1778 | 30.1510691216 1779 | 21.0858150346 1780 | 17.6659973042 1781 | 2.13307128008 1782 | 2.04164785061 1783 | 33.8865266371 1784 | 8.05756813752 1785 | 30.5467501086 1786 | 28.6569473828 1787 | 9.16865152703 1788 | 21.0471330571 1789 | 9.35370496392 1790 | 4.827266288 1791 | 12.7383789499 1792 | 34.2581435206 1793 | 10.6544589118 1794 | 7.56229702778 1795 | 3.77336971074 1796 | 33.9315285198 1797 | 0.392902319567 1798 | 3.00709237282 1799 | 3.31201580895 1800 | 40.1079304187 1801 | 22.3187383451 1802 | 7.20531599629 1803 | 0.195899424989 1804 | 32.723039681 1805 | 5.52512148494 1806 | 10.2149758365 1807 | 5.4420564495 1808 | 40.2424854589 1809 | 4.1688345602 1810 | 11.5817719361 1811 | 1.64940683828 1812 | 13.6549873786 1813 | 12.3766189288 1814 | 17.0905562887 1815 | 13.4830679881 1816 | 26.6390530983 1817 | 20.8196723498 1818 | 3.99344778138 1819 | 1.26870850007 1820 | 15.1852765939 1821 | 0.263228022972 1822 | 13.7368756592 1823 | 8.79137219206 1824 | 27.3875395779 1825 | 31.6384062324 1826 | 50.922754934 1827 | 0.780618901187 1828 | 
29.7906736058 1829 | 8.71755390408 1830 | 10.4072099302 1831 | 8.98058526798 1832 | 9.31405572092 1833 | 0.0419048066533 1834 | 24.1501859655 1835 | 13.5848735214 1836 | 15.426072137 1837 | 21.4886971033 1838 | 2.1663107095 1839 | 36.8557204355 1840 | 9.64644881783 1841 | 6.07758205141 1842 | 15.9260177772 1843 | 7.67398839805 1844 | 8.73918036469 1845 | 5.9316023235 1846 | 19.9282586502 1847 | 25.3045635434 1848 | 30.1366237517 1849 | 3.81945934176 1850 | 14.4962587688 1851 | 21.3096086895 1852 | 36.0210266441 1853 | 6.8949197244 1854 | 23.6951632929 1855 | 13.9881871624 1856 | 16.6014302341 1857 | 5.55781203653 1858 | 15.2951596469 1859 | 13.580935664 1860 | 27.4357152645 1861 | 3.10382721226 1862 | 12.0828555087 1863 | 13.5178607906 1864 | 8.95649321134 1865 | 9.87206975642 1866 | 206.73968744 1867 | 5.93403784766 1868 | 14.8478224029 1869 | 39.3002165756 1870 | 35.1306864146 1871 | 10.8351714451 1872 | 30.7974987604 1873 | 28.2626488067 1874 | 11.1685171995 1875 | 22.6943002342 1876 | 1.77403892613 1877 | 14.3276396628 1878 | 0.746916339338 1879 | 25.8149334275 1880 | 21.055769814 1881 | 4.34055102735 1882 | 26.6983401605 1883 | 8.60219322747 1884 | 7.79851203559 1885 | 7.49802767317 1886 | 4.16976918523 1887 | 9.88708734837 1888 | 6.64615848694 1889 | 12.1242284523 1890 | 1.35212529283 1891 | 21.0152748396 1892 | 18.1001562745 1893 | 3.92368439782 1894 | 11.3557012108 1895 | 29.2247125669 1896 | 8.16464305676 1897 | 29.3048909657 1898 | 13.7023889999 1899 | 38.9270581494 1900 | 23.4654912206 1901 | 4.92231859016 1902 | 11.2026369025 1903 | 35.0764738173 1904 | 0.0471570598122 1905 | 10.0168970751 1906 | 4.92769225503 1907 | 13.7369633779 1908 | 2.90446816352 1909 | 4.0901015986 1910 | 24.2624401142 1911 | 14.322193767 1912 | 24.6821209867 1913 | 19.7920398271 1914 | 25.5147313807 1915 | 27.3547677261 1916 | 2.80141306327 1917 | 2.55567424083 1918 | 16.8231327012 1919 | 8.68435655487 1920 | 0.910604226607 1921 | 11.0234189967 1922 | 24.9233916765 1923 | 
24.4246325505 1924 | 12.3359032482 1925 | 0.609276930332 1926 | 24.5030077179 1927 | 9.95220952746 1928 | 5.02423508855 1929 | 33.032970695 1930 | 12.2369344014 1931 | 20.3492633751 1932 | 1.35272497514 1933 | 11.5619562034 1934 | 14.8848548531 1935 | 33.6222239264 1936 | 8.77549766876 1937 | 33.5310749735 1938 | 60.2811205571 1939 | 33.7353358247 1940 | 28.1640965734 1941 | 0.751613262861 1942 | 15.4319624972 1943 | 6.9556282287 1944 | 15.1744626005 1945 | 9.19300866124 1946 | 0.911061212109 1947 | 23.8425560027 1948 | 27.7599770955 1949 | 13.796560416 1950 | 19.0429074282 1951 | 6.76026249729 1952 | 3.22061766509 1953 | 8.01714120906 1954 | 17.0792402317 1955 | 21.8402862477 1956 | 17.3003840347 1957 | 29.9145085024 1958 | 5.10273671188 1959 | 4.7159757383 1960 | 7.80408163252 1961 | 37.0800618684 1962 | 2.45381644273 1963 | 23.6718156206 1964 | 0.566542344334 1965 | 13.7137591589 1966 | 0.989533572599 1967 | 28.3945470505 1968 | 25.4920170718 1969 | 9.11214227441 1970 | 13.3238835565 1971 | 2.15668233699 1972 | 17.9070159246 1973 | 8.72120417729 1974 | 21.9334138446 1975 | 9.96792135002 1976 | 1.67751122737 1977 | 11.466214401 1978 | 5.11060892126 1979 | 8.34571147262 1980 | 9.06069953137 1981 | 81.3500408849 1982 | 18.4016364294 1983 | 4.6383643468 1984 | 23.2189639468 1985 | 25.0037705722 1986 | 17.7434050167 1987 | 12.3779856325 1988 | 13.4156612118 1989 | 18.2063744506 1990 | 20.7364504051 1991 | 0.759473804839 1992 | 8.09294346975 1993 | 7.65129980147 1994 | 3.61848738663 1995 | 25.1259946565 1996 | 0.481781971203 1997 | 31.0652501128 1998 | 26.4782367219 1999 | 9.68391434225 2000 | 18.0343132366 2001 | 8.79382594175 2002 | 3.14382066936 2003 | 0.332652550309 2004 | 4.13315183968 2005 | 16.6916901787 2006 | 11.3672342027 2007 | 1.53483372319 2008 | 17.9723995766 2009 | 11.3252629926 2010 | 3.51422943179 2011 | 2.54485258802 2012 | 38.1660105753 2013 | 8.03387876491 2014 | 13.8416040416 2015 | 14.7054986655 2016 | 2.24576989321 2017 | 1.3360590932 2018 | 
37.1700582677 2019 | 15.2695190924 2020 | 9.06102645921 2021 | 15.1109022863 2022 | 23.3069394354 2023 | 16.5391656869 2024 | 14.1465703943 2025 | 13.0540847108 2026 | 8.64451101908 2027 | 18.8537549027 2028 | 6.89377071076 2029 | 13.7418042623 2030 | 41.5653456268 2031 | 30.8521142286 2032 | 0.958192284222 2033 | 1.06365709771 2034 | 14.1879267421 2035 | 6.80876645807 2036 | 6.86915673061 2037 | 22.270275724 2038 | 6.80160288233 2039 | 1.99317744501 2040 | 16.1580451425 2041 | 6.11596875878 2042 | 20.4146130057 2043 | 20.1432111193 2044 | 39.0618807969 2045 | 2.45420858304 2046 | 26.7316917116 2047 | 8.57279194502 2048 | 19.4721561311 2049 | 26.220961202 2050 | 21.109773789 2051 | 8.66755497056 2052 | 2.85397010739 2053 | 3.65942723713 2054 | 10.1949037106 2055 | 15.3402291417 2056 | 1.77615301181 2057 | 28.4265866589 2058 | 21.4304538795 2059 | 32.953104054 2060 | 8.14051498029 2061 | 18.4976668596 2062 | 18.6925779214 2063 | 25.6898568817 2064 | 0.645436047891 2065 | 29.3181088343 2066 | 6.98188169009 2067 | 11.5406417424 2068 | 18.940982726 2069 | 23.9391779208 2070 | 5.61938452023 2071 | 32.5598446452 2072 | 11.5691213562 2073 | 33.5854801755 2074 | 13.4799763582 2075 | 28.8675103537 2076 | 5.4841008208 2077 | 11.1325906567 2078 | 16.0594674613 2079 | 14.4306241246 2080 | 3.72609611079 2081 | 13.6584091174 2082 | 33.7773700414 2083 | 0.344810977141 2084 | 0.799816063085 2085 | 18.7682995525 2086 | 19.7768458 2087 | 22.0513550528 2088 | 27.1539699148 2089 | 1.34721461511 2090 | 3.58086368477 2091 | 11.383309045 2092 | 8.30860348154 2093 | 0.771600870346 2094 | 1.64362827624 2095 | 0.639282723353 2096 | 41.4475616773 2097 | 15.837441077 2098 | 18.9922422626 2099 | 14.3305222666 2100 | 35.0561397865 2101 | 7.74983039651 2102 | 28.3695526007 2103 | 17.8230352283 2104 | 16.4930510639 2105 | 13.0425043682 2106 | 4.84722730212 2107 | 36.5899315859 2108 | 7.80700161892 2109 | 18.3792739306 2110 | 16.6017336002 2111 | 21.5521194699 2112 | 8.25340105262 2113 | 
10.9687296864 2114 | 4.51350581744 2115 | 4.99717988285 2116 | 33.8535813465 2117 | 9.69302478567 2118 | 9.62352522277 2119 | 24.0567153962 2120 | 40.4842322194 2121 | 1.26170473321 2122 | 10.2565823318 2123 | 8.90503195078 2124 | 29.7853141055 2125 | 6.39873440596 2126 | 32.9458120662 2127 | 19.6600511662 2128 | 18.3543919924 2129 | 0.177207082275 2130 | 0.686892846448 2131 | 21.2709631651 2132 | 11.632404347 2133 | 6.15511063867 2134 | 35.004318092 2135 | 4.21932079575 2136 | 2.28446440514 2137 | 31.0594775715 2138 | 2.9507926148 2139 | 21.3852523237 2140 | 5.40219469246 2141 | 38.6947555558 2142 | 18.6286916597 2143 | 6.29875790984 2144 | 6.89897693329 2145 | 6.24055330008 2146 | 11.3911972178 2147 | 33.1648848128 2148 | 19.8972646275 2149 | 7.7445703148 2150 | 5.48336104631 2151 | 14.0324836862 2152 | 9.56883838159 2153 | 31.9159845438 2154 | 16.1645157568 2155 | 19.3883741489 2156 | 1.59271974463 2157 | 6.67757203462 2158 | 9.66580767317 2159 | 0.290030503501 2160 | 7.3946832025 2161 | 6.02894023786 2162 | 8.7287238112 2163 | 32.3257968615 2164 | 4.2507158172 2165 | 2.66276228236 2166 | 38.9119244372 2167 | 1.6742815381 2168 | 12.6911856217 2169 | 12.2509850801 2170 | 26.9474989552 2171 | 14.4949488222 2172 | 7.042565187 2173 | 0.840085462883 2174 | 3.0692571618 2175 | 17.2112610639 2176 | 12.4931785669 2177 | 7.25300711765 2178 | 0.0731074640224 2179 | 7.390401026 2180 | 14.9687782557 2181 | 19.5051013856 2182 | 4.59859483005 2183 | 16.6693758116 2184 | 24.1756980819 2185 | 19.5531012002 2186 | 12.139933566 2187 | 11.2083171839 2188 | 0.914502669171 2189 | 29.0541301304 2190 | 20.5943544415 2191 | 4.46558350573 2192 | 23.1727281509 2193 | 36.1966124655 2194 | 3.03725311707 2195 | 29.9896580925 2196 | 20.3176572061 2197 | 7.86688194713 2198 | 19.3485561673 2199 | 32.8148043053 2200 | 12.1638074395 2201 | 0.233657455692 2202 | 13.0340427895 2203 | 41.7853254383 2204 | 8.71922094666 2205 | 3.51852936031 2206 | 4.99126411878 2207 | 8.18146344729 2208 | 
14.513096768 2209 | 2.4744913574 2210 | 19.8648463286 2211 | 19.3190203655 2212 | 0.318758295106 2213 | 4.01935586567 2214 | 19.4843769143 2215 | 19.9578499975 2216 | 8.75411677482 2217 | 2.59343307057 2218 | 32.9978029933 2219 | 5.74545915015 2220 | 2.12956062315 2221 | 10.3670232749 2222 | 52.7954498646 2223 | 9.26044012838 2224 | 4.26121044579 2225 | 10.8393248643 2226 | 13.163015831 2227 | 16.4924566187 2228 | 7.0580798687 2229 | 47.5885844135 2230 | 17.5137110881 2231 | 24.5788465776 2232 | 1.77231770583 2233 | 9.19540730445 2234 | 12.3045511635 2235 | 12.4932200619 2236 | 16.0568397594 2237 | 52.2344728574 2238 | 24.5243472039 2239 | 12.7731402052 2240 | 13.7440175958 2241 | 7.88106760193 2242 | 13.5247646267 2243 | 12.7756080017 2244 | 4.49546166327 2245 | 23.891142718 2246 | 1.9907711443 2247 | 26.3118567872 2248 | 14.8269833142 2249 | 9.32564318015 2250 | 37.5259432477 2251 | 21.8917014799 2252 | 8.31347360645 2253 | 36.9361542484 2254 | 30.7379013354 2255 | 8.62175538975 2256 | 19.6184396225 2257 | 2.74763305078 2258 | 14.6286980641 2259 | 14.3048401936 2260 | 11.1439056306 2261 | 13.9632814993 2262 | 13.4650791346 2263 | 20.2342851502 2264 | 44.7836509912 2265 | 10.2781088015 2266 | 6.93737270044 2267 | 19.5234327471 2268 | 1.5697699519 2269 | 6.2319014362 2270 | 17.7168306252 2271 | 28.5601209883 2272 | 25.448479857 2273 | 4.21086543712 2274 | 4.39041682927 2275 | 6.70013937248 2276 | 2.93139942068 2277 | 18.5792433676 2278 | 19.4021792603 2279 | 42.7526628128 2280 | 0.27727477999 2281 | 4.75870545222 2282 | 2.93263328925 2283 | 4.34646965724 2284 | 7.89866038904 2285 | 8.80875609656 2286 | 9.18475188581 2287 | 27.9240531549 2288 | 6.14467594709 2289 | 15.2738690702 2290 | 8.58425375002 2291 | 16.0562653002 2292 | 57.5255718554 2293 | 24.2124700553 2294 | 17.8897837737 2295 | 77.2256795486 2296 | 2.68490313956 2297 | 23.9075430601 2298 | 12.4800346587 2299 | 16.3814926963 2300 | 8.9722649058 2301 | 6.7948843042 2302 | 19.1324652264 2303 | 5.07509764938 
2304 | 32.3399101916 2305 | 30.8207375101 2306 | 0.420101021028 2307 | 30.9507198519 2308 | 9.88734385899 2309 | 41.412808952 2310 | 21.4017824255 2311 | 14.9479419478 2312 | 5.36293209848 2313 | 15.7207664364 2314 | 60.3109841163 2315 | 30.2931678416 2316 | 0.78717309505 2317 | 19.8458127582 2318 | 4.63334605398 2319 | 17.8170096946 2320 | 4.13986910479 2321 | 1.45381633919 2322 | 4.55182558135 2323 | 14.5662058422 2324 | 11.255361374 2325 | 38.1285743478 2326 | 19.0203771261 2327 | 1.99493643085 2328 | 73.5606309576 2329 | 39.9329256056 2330 | 11.72361695 2331 | 16.4754804021 2332 | 20.8411599083 2333 | 6.17944247987 2334 | 7.33192165881 2335 | 4.41821171729 2336 | 10.9001375068 2337 | 26.0244881479 2338 | 19.6901524891 2339 | 19.892596655 2340 | 46.7082504642 2341 | 1.56644926417 2342 | 8.82678364255 2343 | 6.98622033556 2344 | 27.7994005077 2345 | 13.7395737688 2346 | 0.428707245184 2347 | 8.35341804976 2348 | 6.10804715509 2349 | 3.96239272333 2350 | 4.08283009868 2351 | 12.4582407385 2352 | 5.00726514329 2353 | 47.4597017543 2354 | 5.1201911696 2355 | 8.94870967825 2356 | 49.195354015 2357 | 25.8411528657 2358 | 1.59050293802 2359 | 36.8503912942 2360 | 13.8795213218 2361 | 2.50935170088 2362 | 31.5301904207 2363 | 17.3302944453 2364 | 20.4377092553 2365 | 9.09430593761 2366 | 14.2981861203 2367 | 16.81597964 2368 | 2.58804406595 2369 | 15.3165265888 2370 | 3.82086777896 2371 | 10.2138895989 2372 | 10.228684285 2373 | 29.1186113921 2374 | 15.6745185485 2375 | 3.85051663146 2376 | 16.2002421141 2377 | 7.78200463183 2378 | 27.2736598014 2379 | 1.44317355745 2380 | 5.27372021845 2381 | 10.5218485756 2382 | 3.69847248035 2383 | 0.790789337165 2384 | 16.5568023353 2385 | 12.3319449086 2386 | 30.3104783313 2387 | 13.1237812287 2388 | 34.163199663 2389 | 0.992873074691 2390 | 2.99549151283 2391 | 20.6704393083 2392 | 46.8440970158 2393 | 29.5729913311 2394 | 10.1707734324 2395 | 4.14162210025 2396 | 7.74802062017 2397 | 14.4835063605 2398 | 29.2308161915 2399 | 
16.3256082033 2400 | 13.3792163767 2401 | 62.6362272451 2402 | 7.00371263505 2403 | 0.0255887162325 2404 | 4.47304273386 2405 | 2.50343898569 2406 | 2.19159500665 2407 | 1.13321929981 2408 | 3.71106656184 2409 | 12.6461168362 2410 | 6.1725031205 2411 | 21.4084872583 2412 | 12.8261589027 2413 | 12.371413333 2414 | 20.0923909222 2415 | 3.50242034595 2416 | 4.25814882136 2417 | 33.1435499328 2418 | 13.3875214797 2419 | 5.35657307351 2420 | 21.9925359562 2421 | 23.0379859862 2422 | 19.3110060569 2423 | 0.984873277606 2424 | 13.4052571649 2425 | 14.8354148795 2426 | 19.2786309319 2427 | 16.5811379442 2428 | 15.5068999108 2429 | 31.9338553456 2430 | 19.7951705467 2431 | 24.5426801253 2432 | 32.623175965 2433 | 7.48013617015 2434 | 3.27672931948 2435 | 2.1498905103 2436 | 1.03436146267 2437 | 12.3926424748 2438 | 22.8212002832 2439 | 2.02891318946 2440 | 14.7417165024 2441 | 1.09346934983 2442 | 7.35017637637 2443 | 26.3438344099 2444 | 6.43852042197 2445 | 2.29330566078 2446 | 50.9821874741 2447 | 27.8904499841 2448 | 11.7329772552 2449 | 26.9403636872 2450 | 0.612225724751 2451 | 9.64444464546 2452 | 10.1773456808 2453 | 2.01013636272 2454 | 28.7220389858 2455 | 2.90323142382 2456 | 40.5197876692 2457 | 14.5034588314 2458 | 24.1260845477 2459 | 40.9331887951 2460 | 13.0173600709 2461 | 16.3227270656 2462 | 9.54615108156 2463 | 10.6295407215 2464 | 10.2425013371 2465 | 6.34696479975 2466 | 44.9766817042 2467 | 7.7537347231 2468 | 38.6876706922 2469 | 8.78695582024 2470 | 14.1404035824 2471 | 56.8382039381 2472 | 14.8143541168 2473 | 3.55605481506 2474 | 84.1949312745 2475 | 11.4224197671 2476 | 6.2890240443 2477 | 48.5841706138 2478 | 4.04211935524 2479 | 1.07786391746 2480 | 14.2625236228 2481 | 1.66144144245 2482 | 21.0114201931 2483 | 8.06454852477 2484 | 1.79001686371 2485 | 37.5616723414 2486 | 7.62641030809 2487 | 21.0295532022 2488 | 16.1356543472 2489 | 1.24911515312 2490 | 8.23552571038 2491 | 7.36196331718 2492 | 6.31272759366 2493 | 15.5163560954 2494 | 
11.499103959 2495 | 8.41355006971 2496 | 20.5335988701 2497 | 2.30602660077 2498 | 32.2671452066 2499 | 16.5355597353 2500 | 18.7333623935 2501 | 63.8261739797 2502 | 0.804633525171 2503 | 0.456335936231 2504 | 2.75528910164 2505 | 0.658581054124 2506 | 10.2885849944 2507 | 23.2033114405 2508 | 14.4282470999 2509 | 17.7358310457 2510 | 4.9640582883 2511 | 10.5278897373 2512 | 3.14555263766 2513 | 22.2574178706 2514 | 4.6369508119 2515 | 1.20313430728 2516 | 15.1586942204 2517 | 6.05796233674 2518 | 44.6254554953 2519 | 12.5154085283 2520 | 16.4666568379 2521 | 15.1922658464 2522 | 13.4128793059 2523 | 50.8738822226 2524 | 24.2711030474 2525 | 29.7470226435 2526 | 8.42046366619 2527 | 20.4753794928 2528 | 4.38078353092 2529 | 13.4720339817 2530 | 10.5970375162 2531 | 8.71328840558 2532 | 23.5883457899 2533 | 22.6881867966 2534 | 46.3312113268 2535 | 53.1229620878 2536 | 22.2318323664 2537 | 9.21479292948 2538 | 8.72547731144 2539 | 21.5310018018 2540 | 21.5287875774 2541 | 13.7355815029 2542 | 19.123546025 2543 | 14.1971285867 2544 | 8.86462792793 2545 | 7.76701326167 2546 | 18.4224887198 2547 | 8.16995345916 2548 | 3.56964450228 2549 | 48.103531552 2550 | 8.60010532387 2551 | 6.70059111294 2552 | 18.1794670233 2553 | 10.3966558859 2554 | 9.31442010752 2555 | 29.0837506083 2556 | 51.2786973449 2557 | 2.53396919315 2558 | 13.2575343478 2559 | 3.65716962685 2560 | 13.2558794838 2561 | 17.9403987731 2562 | 0.108180867077 2563 | 0.530041066071 2564 | 97.1197780274 2565 | 10.9400306753 2566 | 23.0287754451 2567 | 16.2704317705 2568 | 35.7366982893 2569 | 0.42130000789 2570 | 13.7737611142 2571 | 16.8843849188 2572 | 15.8549018605 2573 | 8.47527851476 2574 | 24.6861032207 2575 | 12.3081303473 2576 | 21.4508381185 2577 | 39.6698522171 2578 | 4.66142413624 2579 | 8.68784882097 2580 | 9.00712499422 2581 | 2.92161056536 2582 | 25.2635809579 2583 | 4.59942860621 2584 | 11.7194207008 2585 | 6.60135160895 2586 | 3.95614268071 2587 | 1.11522991754 2588 | 38.7795210195 2589 | 
7.80437199945 2590 | 1.19630246367 2591 | 20.3755172126 2592 | 18.6786187797 2593 | 7.30736167251 2594 | 12.601359458 2595 | 37.5223404466 2596 | 18.1498620214 2597 | 10.313460911 2598 | 12.9613496665 2599 | 2.38857642996 2600 | 24.5709935464 2601 | 22.5473392691 2602 | 25.4251702379 2603 | 20.6266951701 2604 | 36.9405161897 2605 | 7.24778672485 2606 | 12.1014984567 2607 | 10.199540115 2608 | 15.7135410634 2609 | 6.86436318733 2610 | 13.146111624 2611 | 56.0670269131 2612 | 22.6634093026 2613 | 3.76805805739 2614 | 33.9128869871 2615 | 1.82532734736 2616 | 6.15295391054 2617 | 6.19064212932 2618 | 5.47136791763 2619 | 11.5340657194 2620 | 15.0644806381 2621 | 26.2281284355 2622 | 28.4693230363 2623 | 49.0246888655 2624 | 13.0207550233 2625 | 133.225710294 2626 | 7.36015198574 2627 | 21.5233014386 2628 | 15.6202743427 2629 | 0.682720010856 2630 | 34.0294121275 2631 | 22.0737523245 2632 | 0.551244883717 2633 | 10.4376640724 2634 | 59.8440503423 2635 | 2.88328757126 2636 | 8.47621396339 2637 | 22.6021329921 2638 | 17.7902415109 2639 | 2.20064319437 2640 | 5.14990711853 2641 | 39.922975403 2642 | 12.4771549095 2643 | 18.3792779778 2644 | 17.3774568361 2645 | 6.87546337725 2646 | 4.05110333546 2647 | 13.4232555758 2648 | 16.2091325641 2649 | 23.4581819875 2650 | 4.85049819121 2651 | 1.29844562131 2652 | 13.2699764094 2653 | 16.5151024326 2654 | 36.8116071021 2655 | 43.8149494071 2656 | 14.8858140272 2657 | 34.1438267663 2658 | 12.665582965 2659 | 6.14880622359 2660 | 50.0914480816 2661 | 14.924029252 2662 | 60.4913813535 2663 | 11.9263038327 2664 | 11.0936346217 2665 | 25.7519722551 2666 | 12.890000173 2667 | 17.3273909051 2668 | 12.0779593903 2669 | 6.89331557349 2670 | 39.4860676284 2671 | 8.53789543814 2672 | 9.87111167946 2673 | 32.765814486 2674 | 3.39494905231 2675 | 10.5009526839 2676 | 12.7013526581 2677 | 0.0885514053528 2678 | 8.88188101586 2679 | 16.6936179453 2680 | 85.6514071233 2681 | 15.9533238841 2682 | 8.71442346486 2683 | 5.74813703123 2684 | 
18.5672103152 2685 | 4.6402485329 2686 | 1.49331651653 2687 | 28.6618626187 2688 | 8.80555454389 2689 | 0.273905342358 2690 | 4.04337918876 2691 | 7.87825497151 2692 | 1.11112481144 2693 | 21.7321745755 2694 | 2.4044868159 2695 | 10.3651926574 2696 | 32.3967398829 2697 | 13.0672025104 2698 | 2.11320256667 2699 | 15.4271527034 2700 | 5.76063580862 2701 | 7.21096213281 2702 | 2.84431850843 2703 | 4.22012264058 2704 | 8.57197281226 2705 | 29.9440642744 2706 | 42.8429313327 2707 | 10.570370417 2708 | 7.67247186412 2709 | 28.439407347 2710 | 0.636173305549 2711 | 11.8102273237 2712 | 21.5504154732 2713 | 3.27717633402 2714 | 25.818737665 2715 | 24.6418907373 2716 | 3.94634696602 2717 | 15.8005501654 2718 | 2.32787719018 2719 | 12.3241583643 2720 | 21.5738032784 2721 | 7.39056695695 2722 | 6.76801834988 2723 | 11.2717351721 2724 | 4.83188311527 2725 | 28.9339112809 2726 | 22.9169250326 2727 | 3.28437864283 2728 | 59.3596893716 2729 | 2.76882281262 2730 | 11.9449848799 2731 | 1.12542211451 2732 | 6.08455737955 2733 | 11.1489238131 2734 | 34.863886771 2735 | 23.9858731125 2736 | 19.0332002234 2737 | 10.7316407663 2738 | 16.4345602014 2739 | 19.2428012379 2740 | 23.6313428019 2741 | 6.2053703669 2742 | 14.6688932493 2743 | 20.6849163739 2744 | 18.0723639997 2745 | 3.60227172785 2746 | 8.39384009801 2747 | 8.50920819444 2748 | 32.3426772719 2749 | 28.2017649999 2750 | 11.4783115869 2751 | 23.5105794148 2752 | 5.74061914955 2753 | 4.8432024054 2754 | 6.51191003847 2755 | 1.72577050181 2756 | 5.59310250347 2757 | 16.9807115222 2758 | 3.51872992802 2759 | 6.42119432678 2760 | 13.5087761464 2761 | 12.0194403382 2762 | 4.31369572488 2763 | 27.7216575967 2764 | 11.297599139 2765 | 2.02962002254 2766 | 10.9316680588 2767 | 19.6569321264 2768 | 29.2972719172 2769 | 22.3066608881 2770 | 4.44845596027 2771 | 23.4683460845 2772 | 6.23083151213 2773 | 2.85979011507 2774 | 21.1362094898 2775 | 4.61733978081 2776 | 13.3077744637 2777 | 3.33107225241 2778 | 24.5839237731 2779 | 
18.6142191822 2780 | 24.890169647 2781 | 23.2693491883 2782 | 1.55174499051 2783 | 2.89191308218 2784 | 23.7504958155 2785 | 17.0916471599 2786 | 6.74219379104 2787 | 6.39869121707 2788 | 0.411275364857 2789 | 5.84113533346 2790 | 11.6930836107 2791 | 7.54297982249 2792 | 17.5223353306 2793 | 0.650171199647 2794 | 2.84254657705 2795 | 32.4411024494 2796 | 16.0442667731 2797 | 9.08521818207 2798 | 12.720889643 2799 | 33.9468646874 2800 | 13.8956000766 2801 | 17.5272480855 2802 | 13.7158907524 2803 | 25.4064468856 2804 | 25.0442066701 2805 | 8.65520612827 2806 | 22.7477175226 2807 | 7.32658084062 2808 | 21.2654368882 2809 | 6.89234809727 2810 | 4.58568028023 2811 | 21.8994614485 2812 | 6.19019084204 2813 | 40.8298081034 2814 | 2.90313582075 2815 | 28.4956667524 2816 | 15.6807715555 2817 | 47.5362400682 2818 | 15.853666015 2819 | 22.2088103589 2820 | 13.6656907596 2821 | 23.7039259379 2822 | 8.9748970322 2823 | 5.83531874965 2824 | 4.67794958337 2825 | 19.685941861 2826 | 19.7457438729 2827 | 27.5230155178 2828 | 38.7729758687 2829 | 18.257399488 2830 | 15.7355073052 2831 | 0.0620648735182 2832 | 39.5983345758 2833 | 5.24237021024 2834 | 30.6549632275 2835 | 27.1233963487 2836 | 12.7017802432 2837 | 0.305965270176 2838 | 45.9521938224 2839 | 14.0528069492 2840 | 31.9086580123 2841 | 22.4994491011 2842 | 19.3798206085 2843 | 9.2767011534 2844 | 6.55028887638 2845 | 1.52423311944 2846 | 40.6239897302 2847 | 25.0263104417 2848 | 11.1848580118 2849 | 31.7006993785 2850 | 7.88373375889 2851 | 8.28912430801 2852 | 11.6684834438 2853 | 16.6503703487 2854 | 18.8460057907 2855 | 12.3360030049 2856 | 13.782591514 2857 | 6.07499442508 2858 | 31.4902025489 2859 | 1.09569174165 2860 | 27.9522356054 2861 | 29.8719392244 2862 | 11.0954054558 2863 | 17.137296126 2864 | 25.7351137096 2865 | 21.8527175335 2866 | 15.557769383 2867 | 13.8445927364 2868 | 20.6734218264 2869 | 8.78046788123 2870 | 31.9156056026 2871 | 31.285681613 2872 | 14.8404218329 2873 | 15.144946474 2874 | 
10.8580007448 2875 | 2.70750006752 2876 | 1.79790374442 2877 | 1.4813597736 2878 | 20.0341913009 2879 | 17.2608756535 2880 | 17.3289139532 2881 | 1.29653820782 2882 | 6.48704907267 2883 | 21.367207371 2884 | 15.6061120025 2885 | 19.3232700613 2886 | 2.26388302333 2887 | 10.8324147153 2888 | 15.2530724488 2889 | 42.2797825699 2890 | 7.48245315252 2891 | 0.563618539185 2892 | 0.389022591145 2893 | 4.47156165782 2894 | 7.16702381389 2895 | 21.1573330179 2896 | 35.3636317875 2897 | 0.213928705058 2898 | 37.5833600664 2899 | 13.8063364701 2900 | 39.5490597368 2901 | 2.80100812375 2902 | 31.1327730489 2903 | 10.8506288895 2904 | 20.3337551104 2905 | 2.62409291926 2906 | 18.0152972715 2907 | 13.4194350405 2908 | 4.98795796798 2909 | 3.562511154 2910 | 1.57031382158 2911 | 17.911103796 2912 | 25.54989626 2913 | 4.06449597306 2914 | 32.767363357 2915 | 25.825833055 2916 | 18.138986284 2917 | 7.89909647372 2918 | 29.8346417757 2919 | 10.9836262382 2920 | 17.6982653571 2921 | 32.432363133 2922 | 14.6875136776 2923 | 2.26987056572 2924 | 33.8194866763 2925 | 5.43380808455 2926 | 24.6260802528 2927 | 19.1139928423 2928 | 16.4589533558 2929 | 17.910872992 2930 | 20.6726350403 2931 | 6.10232249092 2932 | 35.2940340376 2933 | 6.14714436767 2934 | 30.2412485588 2935 | 4.41521752043 2936 | 30.3785845682 2937 | 2.86673603721 2938 | 32.9694958115 2939 | 7.57305373109 2940 | 27.0820378774 2941 | 18.9363953298 2942 | 42.8997494954 2943 | 48.4818053915 2944 | 46.0129753956 2945 | 39.1016364499 2946 | 49.4927880296 2947 | 21.94855538 2948 | 25.9909757945 2949 | 4.2912402367 2950 | 8.80223296071 2951 | 16.7377515403 2952 | 14.2019319933 2953 | 17.4018988877 2954 | 40.3552662173 2955 | 2.01872728634 2956 | 6.10110511996 2957 | 22.94522726 2958 | 52.0379018984 2959 | 6.43786139611 2960 | 29.479683268 2961 | 19.8855301583 2962 | 21.8583326357 2963 | 12.3922821086 2964 | 17.8909049776 2965 | 19.0849003686 2966 | 40.1802438485 2967 | 22.5406400451 2968 | 3.86678509338 2969 | 10.1680992129 2970 
| 29.0681755496 2971 | 18.8775148993 2972 | 4.815572758 2973 | 21.2824305539 2974 | 3.56761627035 2975 | 17.3847982613 2976 | 25.1114614249 2977 | 3.62897724536 2978 | 3.91856273209 2979 | 1.92639323248 2980 | 1.94443998537 2981 | 10.3461226309 2982 | 23.3350362241 2983 | 20.6905064008 2984 | 4.91645658071 2985 | 14.8357606521 2986 | 11.1753834457 2987 | 6.47742805831 2988 | 29.4955734648 2989 | 9.24289218568 2990 | 2.34741281818 2991 | 12.3891310197 2992 | 0.542730319547 2993 | 14.2804403688 2994 | 33.3716975793 2995 | 33.4810334836 2996 | 8.2858659308 2997 | 11.6269591189 2998 | 12.7248699624 2999 | 29.7218420002 3000 | 4.85652708961 3001 | 17.4802282467 3002 | 3.66068215652 3003 | 18.7391305782 3004 | 20.1641675661 3005 | 0.68269170992 3006 | 5.6865453971 3007 | 19.017502565 3008 | 16.3865558391 3009 | 11.3217991655 3010 | 49.210580165 3011 | 22.8104007428 3012 | 22.1658053881 3013 | 15.4938250128 3014 | 11.8982845496 3015 | 31.7234430044 3016 | 2.82796556608 3017 | 5.53271014744 3018 | 10.826970041 3019 | 19.5800834182 3020 | 31.556046653 3021 | 17.9624183979 3022 | 9.17275303369 3023 | 8.82084279162 3024 | 38.2414628152 3025 | 40.5950111145 3026 | 21.267944566 3027 | 27.6375187842 3028 | 16.7700170025 3029 | 37.5711349241 3030 | 18.8170086559 3031 | 7.64329405098 3032 | 2.87765595661 3033 | 15.872426313 3034 | 12.7090726951 3035 | 37.484264927 3036 | 9.39345788714 3037 | 20.7553494924 3038 | 4.84868669081 3039 | 23.5371677717 3040 | 24.6551411266 3041 | 1.18199782301 3042 | 23.5940838793 3043 | 7.93428349736 3044 | 9.55648973708 3045 | 2.46102202198 3046 | 2.94654471333 3047 | 4.26185601435 3048 | 6.55222202899 3049 | 17.0129060756 3050 | 11.7226070782 3051 | 3.70497526301 3052 | 7.89466326786 3053 | 9.96054062531 3054 | 15.0779130588 3055 | 14.0246468502 3056 | 2.35608582371 3057 | 42.175046166 3058 | 24.3515851714 3059 | 18.9516182231 3060 | 12.9482577275 3061 | 19.4655491789 3062 | 11.5455220678 3063 | 11.3141519023 3064 | 19.5923563265 3065 | 12.0256259578 
3066 | 13.8420577708 3067 | 52.8788248349 3068 | 43.3011279198 3069 | 7.36927229971 3070 | 5.5930446207 3071 | 11.2072025087 3072 | 30.6684988824 3073 | 2.3021670489 3074 | 2.29133590208 3075 | 3.86128858011 3076 | 6.25482297358 3077 | 18.5891854555 3078 | 27.3753886373 3079 | 29.1111749204 3080 | 42.1087403021 3081 | 15.521355352 3082 | 6.84189390813 3083 | 4.5432231832 3084 | 13.4911984878 3085 | 67.8460740759 3086 | 25.6277838396 3087 | 15.7900008018 3088 | 21.1783333258 3089 | 1.23530451417 3090 | 8.25068897129 3091 | 10.0476326962 3092 | 8.65641064477 3093 | 14.9706059611 3094 | 21.3720839551 3095 | 15.2811807476 3096 | 1.35700017663 3097 | 16.8537298144 3098 | 24.6834902266 3099 | 12.0596063009 3100 | 4.92245656824 3101 | 13.0990669899 3102 | 0.508357458912 3103 | 0.0960242007079 3104 | 11.6016517343 3105 | 18.3437335349 3106 | 16.1557339474 3107 | 7.62739187641 3108 | 22.9061758157 3109 | 22.1922635849 3110 | 2.44488714805 3111 | 15.3001352025 3112 | 24.6100562479 3113 | 21.7795359671 3114 | 31.5915587618 3115 | 35.7982279057 3116 | 22.1910214154 3117 | 18.1749683104 3118 | 25.037221092 3119 | 6.28955995973 3120 | 2.0063384465 3121 | 0.557602438851 3122 | 6.95512822686 3123 | 0.878535928374 3124 | 0.567675342687 3125 | 0.453066370165 3126 | 32.1544535983 3127 | 2.17982519159 3128 | 0.566672137613 3129 | 8.58922437199 3130 | 65.2339601385 3131 | 18.126850229 3132 | 7.88796572728 3133 | 10.5704681416 3134 | 2.17232773562 3135 | 19.141773491 3136 | 11.7620552411 3137 | 9.00115045636 3138 | 9.91443190241 3139 | 43.2179409513 3140 | 30.3036634868 3141 | 93.6731616533 3142 | 11.7821390817 3143 | 19.8798191606 3144 | 21.3827349557 3145 | 5.49212934666 3146 | 10.331202834 3147 | 8.93247892699 3148 | 21.4643586568 3149 | 2.49477588863 3150 | 10.851946239 3151 | 6.47728392474 3152 | 17.9478062369 3153 | 20.3254904861 3154 | 3.11125924845 3155 | 6.22881318806 3156 | 9.28261824079 3157 | 5.37651642966 3158 | 6.35256632623 3159 | 4.73095126353 3160 | 13.4301052967 3161 | 
20.7386671411 3162 | 26.555731016 3163 | 40.5178050339 3164 | 11.2239689858 3165 | 9.22863719076 3166 | 40.1288543422 3167 | 15.4190021568 3168 | 10.1152716046 3169 | 26.0157897011 3170 | 6.28289476203 3171 | 4.67545158955 3172 | 3.28665342371 3173 | 32.1349561299 3174 | 17.2670914795 3175 | 25.7343315442 3176 | 34.2286886871 3177 | 18.082072531 3178 | 19.4059133612 3179 | 51.3594400147 3180 | 13.8426361985 3181 | 47.1392994415 3182 | 0.50464736127 3183 | 21.2593918277 3184 | 4.04816756136 3185 | 9.59692699379 3186 | 0.668636541941 3187 | 13.6256521756 3188 | 34.5715830487 3189 | 12.6333870703 3190 | 19.1035482561 3191 | 20.9690436586 3192 | 3.38437213852 3193 | 18.0270679054 3194 | 6.12883164975 3195 | 14.0957965187 3196 | 7.05082917762 3197 | 0.396934026792 3198 | 9.99132052216 3199 | 46.9342307406 3200 | 1.16568007243 3201 | 0.804622702214 3202 | 31.7324847957 3203 | 4.66849114477 3204 | 6.32350690337 3205 | 23.2087742801 3206 | 20.3623791243 3207 | 7.78148623867 3208 | 35.1891051243 3209 | 21.3546765625 3210 | 14.1669962851 3211 | 12.7526211136 3212 | 10.1085457421 3213 | 16.7644890295 3214 | 18.265293913 3215 | 2.37580724406 3216 | 34.5017018184 3217 | 9.11967599099 3218 | 12.1119667826 3219 | 0.953958130675 3220 | 2.38978929401 3221 | 41.9644394533 3222 | 0.857621296145 3223 | 12.0904761407 3224 | 43.7952954476 3225 | 13.9858278648 3226 | 19.8195297712 3227 | 11.8664204993 3228 | 1.48008983155 3229 | 14.9798355675 3230 | 20.6521886986 3231 | 6.16619269532 3232 | 20.832600882 3233 | 18.2412019106 3234 | 22.2691332316 3235 | 13.3975475467 3236 | 0.792029766218 3237 | 4.43470820612 3238 | 13.2717831984 3239 | 24.651905794 3240 | 26.1900066757 3241 | 8.57578108075 3242 | 8.79446327755 3243 | 15.3935544297 3244 | 1.22258700933 3245 | 18.0022130196 3246 | 5.95416276204 3247 | 2.63187418187 3248 | 5.17707381057 3249 | 20.1743639289 3250 | 1.87655286772 3251 | 6.96569240786 3252 | 3.81307207167 3253 | 7.48804336288 3254 | 11.8443684615 3255 | 34.9029042409 3256 | 
1.70256952498 3257 | 14.8843651081 3258 | 0.716944851056 3259 | 6.83233239358 3260 | 24.2302024984 3261 | 17.1958168475 3262 | 30.9784308032 3263 | 37.9794452612 3264 | 1.45964325575 3265 | 8.28623666258 3266 | 3.70709415354 3267 | 19.4398905533 3268 | 22.6464718035 3269 | 1.76143208641 3270 | 23.2802980091 3271 | 5.15881928224 3272 | 26.2685245483 3273 | 23.7300652186 3274 | 23.4534731607 3275 | 3.73280219334 3276 | 63.3658164584 3277 | 4.70095643901 3278 | 33.7759669996 3279 | 19.6077747778 3280 | 8.4757162034 3281 | 1.70762303673 3282 | 1.20187571923 3283 | 44.1118477443 3284 | 10.3653803263 3285 | 34.4217801456 3286 | 10.6952069836 3287 | 23.6350075817 3288 | 10.7318322538 3289 | 2.41072302066 3290 | 7.84986678567 3291 | 19.4224538993 3292 | 39.8785922537 3293 | 8.60342122038 3294 | 0.531401732492 3295 | 3.53694466539 3296 | 2.7158921637 3297 | 42.0451449144 3298 | 13.005007049 3299 | 31.845382678 3300 | 20.4988259296 3301 | 22.6628261198 3302 | 10.0607749032 3303 | 13.3862871884 3304 | 38.4193881674 3305 | 16.4059346249 3306 | 54.966530521 3307 | 16.205418952 3308 | 18.2158010336 3309 | 20.6531727609 3310 | 20.3968842417 3311 | 37.8929622584 3312 | 0.862629136794 3313 | 8.04679684022 3314 | 9.52928423434 3315 | 21.9249592021 3316 | 31.0479123839 3317 | 40.8009664048 3318 | 1.33788028709 3319 | 4.31599565794 3320 | 21.2104281531 3321 | 15.3625678513 3322 | 13.745482216 3323 | 12.0097451003 3324 | 2.98517279847 3325 | 16.2358393585 3326 | 14.8006907045 3327 | 4.17934546531 3328 | 17.958433872 3329 | 18.5845183353 3330 | 15.8278173511 3331 | 6.34167285905 3332 | 26.6437435846 3333 | 1.64685969564 3334 | 41.8059111489 3335 | 4.66015219899 3336 | 4.61559271882 3337 | 9.1394828718 3338 | 17.7403126736 3339 | 20.4536578822 3340 | 14.5715378252 3341 | 15.9707372638 3342 | 17.6466409766 3343 | 3.15798474717 3344 | 4.0150532071 3345 | 3.07325724934 3346 | 24.8527526987 3347 | 16.6952153526 3348 | 13.0712359579 3349 | 11.7427525112 3350 | 5.27557530358 3351 | 
9.68866212646 3352 | 0.330799502387 3353 | 4.15595175008 3354 | 1.92419353786 3355 | 9.50751076098 3356 | 20.5168974964 3357 | 0.102337794419 3358 | 5.39849205664 3359 | 8.78109061847 3360 | 9.66891571181 3361 | 48.1968854521 3362 | 17.5663909978 3363 | 7.97767461491 3364 | 20.1129751268 3365 | 29.3128583336 3366 | 23.0603921067 3367 | 20.3755451353 3368 | 15.5739270478 3369 | 2.98414600426 3370 | 7.9562555111 3371 | 3.54497188674 3372 | 23.1467058815 3373 | 6.05803951274 3374 | 24.0294953615 3375 | 40.4067686144 3376 | 12.3408251811 3377 | 28.2551170889 3378 | 3.42527741472 3379 | 0.415193192618 3380 | 7.92103853694 3381 | 8.6576455759 3382 | 18.5135739153 3383 | 18.6776500512 3384 | 6.60041700295 3385 | 3.66693404952 3386 | 19.1304218545 3387 | 13.4866159387 3388 | 31.7433625024 3389 | 56.9109929041 3390 | 2.69963018389 3391 | 18.0423614959 3392 | 25.3532571572 3393 | 1.81212615379 3394 | 0.511703131164 3395 | 17.6836905109 3396 | 9.49262599052 3397 | 35.6853531013 3398 | 44.6116917476 3399 | 20.5721272197 3400 | 8.22683202303 3401 | 21.9957481355 3402 | 14.5505351753 3403 | 26.0278923403 3404 | 16.8351511339 3405 | 18.1525837017 3406 | 11.659382585 3407 | 27.674189703 3408 | 19.0142066139 3409 | 12.1522989804 3410 | 13.4425284431 3411 | 18.2412980482 3412 | 21.1615474768 3413 | 30.1277116611 3414 | 7.41399138468 3415 | 13.5033255193 3416 | 8.45901762023 3417 | 10.1522976557 3418 | 1.5021997673 3419 | 13.2136053637 3420 | 19.0495558334 3421 | 5.82887662664 3422 | 1.80421343892 3423 | 5.17274406258 3424 | 19.8264570169 3425 | 20.5013791948 3426 | 3.60846854028 3427 | 41.1253790739 3428 | 11.5060623323 3429 | 9.96908518332 3430 | 9.55122262359 3431 | 29.9738240965 3432 | 19.0497313638 3433 | 7.62216248023 3434 | 10.6058679231 3435 | 36.5236905973 3436 | 23.2446116982 3437 | 1.33716019646 3438 | 7.35438571754 3439 | 24.6657101849 3440 | 20.5382742386 3441 | 3.86482824537 3442 | 2.92842713067 3443 | 22.8698542093 3444 | 2.60599989183 3445 | 8.82993121142 3446 | 
15.671980137 3447 | 37.4163723813 3448 | 0.206295068185 3449 | 23.8642222666 3450 | 36.0874525765 3451 | 12.1178393601 3452 | 8.05759064159 3453 | 29.5699924135 3454 | 14.458732876 3455 | 23.0483804835 3456 | 10.1328890876 3457 | 8.86207140777 3458 | 27.6117356164 3459 | 21.0934074278 3460 | 12.5859997221 3461 | 3.75607550125 3462 | 71.9610464174 3463 | 35.1161736081 3464 | 7.95574983706 3465 | 21.5429670128 3466 | 36.8863859917 3467 | 20.3914042472 3468 | 34.5908030552 3469 | 13.1065751788 3470 | 6.68883647822 3471 | 22.0726431505 3472 | 7.888874155 3473 | 11.8969448248 3474 | 2.35821324884 3475 | 6.96166702686 3476 | 32.3620730172 3477 | 14.2651004864 3478 | 36.0039968608 3479 | 19.103899397 3480 | 34.2492076967 3481 | 13.5856167499 3482 | 34.0466306088 3483 | 1.54301619984 3484 | 27.6955537099 3485 | 0.964816258304 3486 | 0.966858449566 3487 | 16.7258683659 3488 | 14.4262583208 3489 | 21.4749963654 3490 | 1.40515070167 3491 | 13.3798023025 3492 | 28.5012588247 3493 | 1.54896252345 3494 | 1.82777103416 3495 | 9.23976502308 3496 | 9.47307207951 3497 | 23.7443714087 3498 | 23.7717853617 3499 | 14.750044628 3500 | 19.7856260827 3501 | 9.47595376203 3502 | 26.7450345719 3503 | 6.07850283001 3504 | 9.42718247674 3505 | 1.50174053691 3506 | 28.6517940967 3507 | 4.11700582221 3508 | 3.34477529199 3509 | 10.3416686853 3510 | 12.7433469211 3511 | 4.11275352146 3512 | 27.9649063596 3513 | 62.7166504383 3514 | 9.50144423822 3515 | 42.3513809684 3516 | 27.4062028447 3517 | 53.6594842409 3518 | 33.2046319795 3519 | 27.766096888 3520 | 3.29797357611 3521 | 45.8981217767 3522 | 66.4989263312 3523 | 22.2094110475 3524 | 16.2029834053 3525 | 14.1886188591 3526 | 2.21474127039 3527 | 20.4395025891 3528 | 13.332437137 3529 | 17.5978623797 3530 | 2.87133017156 3531 | 20.7822966264 3532 | 23.5903613189 3533 | 3.75249043777 3534 | 19.9226247187 3535 | 22.2023566721 3536 | 11.3310792568 3537 | 2.12002782265 3538 | 40.981565912 3539 | 1.8527897688 3540 | 18.4025256982 3541 | 
10.5879304428 3542 | 7.77543259375 3543 | 7.50536142369 3544 | 18.1674868534 3545 | 5.43242019586 3546 | 31.2912273616 3547 | 19.6388828465 3548 | 26.747596589 3549 | 0.69779193182 3550 | 20.6533643973 3551 | 7.91833106737 3552 | 2.56364213736 3553 | 33.4283256976 3554 | 15.2595515301 3555 | 68.105982121 3556 | 56.381044423 3557 | 13.7281357 3558 | 7.37653124014 3559 | 6.86542919189 3560 | 30.8739413659 3561 | 17.6467146397 3562 | 25.962603515 3563 | 3.05091708338 3564 | 9.02502250874 3565 | 20.5788151932 3566 | 15.9253871036 3567 | 9.71212082558 3568 | 15.3434357802 3569 | 4.11175414608 3570 | 29.3184606038 3571 | 56.9398157582 3572 | 8.15061153558 3573 | 14.5193926255 3574 | 10.6383261254 3575 | 16.0533973755 3576 | 25.0878146687 3577 | 4.96122301882 3578 | 11.5751442818 3579 | 12.2345753947 3580 | 15.7757844613 3581 | 37.793109588 3582 | 18.1102915597 3583 | 23.948206502 3584 | 40.5827872282 3585 | 16.6742433308 3586 | 12.8832499769 3587 | 37.4075635194 3588 | 17.9080769818 3589 | 10.2591699802 3590 | 5.9168152334 3591 | 16.3201160705 3592 | 28.9904442167 3593 | 17.7719434146 3594 | 19.6540179767 3595 | 11.5729869667 3596 | 19.2864040613 3597 | 7.02673638395 3598 | 7.91995469157 3599 | 14.802349282 3600 | 0.143004501214 3601 | 24.4609718915 3602 | 5.33624349382 3603 | 12.6368326548 3604 | 25.7293544907 3605 | 18.12786743 3606 | 10.3068643318 3607 | 11.974773521 3608 | 31.825737969 3609 | 45.3843922527 3610 | 31.827932984 3611 | 46.6254984443 3612 | 4.18862504434 3613 | 24.5432675439 3614 | 3.29814932678 3615 | 8.14107407187 3616 | 18.8853439831 3617 | 9.97119052766 3618 | 0.393596554328 3619 | 30.5253278917 3620 | 14.1976880881 3621 | 1.52965423631 3622 | 13.2692826366 3623 | 26.3695157742 3624 | 2.21763155558 3625 | 3.50313387307 3626 | 1.00403000522 3627 | 35.5891595871 3628 | 8.8286606389 3629 | 30.704433887 3630 | 0.837263467026 3631 | 13.3002635281 3632 | 20.7920390804 3633 | 22.9326639432 3634 | 21.933586915 3635 | 24.3593897093 3636 | 20.5841676595 3637 | 
15.6405318226 3638 | 9.4058782458 3639 | 24.5535494977 3640 | 1.32300081612 3641 | 33.1961510161 3642 | 4.75967624237 3643 | 3.58874618185 3644 | 22.8898178341 3645 | 14.9575578025 3646 | 30.8357466341 3647 | 4.76574298531 3648 | 1.80573714441 3649 | 5.91870112137 3650 | 15.1638111813 3651 | 22.3483696288 3652 | 26.234471235 3653 | 2.43622349894 3654 | 5.29427438181 3655 | 18.2645304689 3656 | 3.67353102919 3657 | 3.07866100673 3658 | 18.1680992104 3659 | 45.6521008946 3660 | 13.7583484704 3661 | 0.622902324252 3662 | 38.9501860402 3663 | 3.9392216353 3664 | 16.2140479085 3665 | 8.4534671277 3666 | 19.450796596 3667 | 25.004965717 3668 | 30.3134826507 3669 | 8.4336459128 3670 | 18.9034267294 3671 | 2.5481031619 3672 | 20.6928284762 3673 | 26.2884063169 3674 | 9.31393041396 3675 | 43.6475363598 3676 | 9.74453372375 3677 | 15.0521715133 3678 | 26.925851453 3679 | 5.88994495276 3680 | 4.83542485326 3681 | 0.134658422138 3682 | 21.8159292846 3683 | 6.4093891297 3684 | 25.7000828976 3685 | 21.6514630554 3686 | 16.8591255676 3687 | 14.0649096316 3688 | 25.8185979633 3689 | 11.5870482251 3690 | 28.3614574962 3691 | 14.1532480288 3692 | 35.003867155 3693 | 8.81770710633 3694 | 17.2046355255 3695 | 14.5332395457 3696 | 4.84374833739 3697 | 11.8520779102 3698 | 16.685308567 3699 | 12.7398616584 3700 | 11.1009517588 3701 | 39.6023644935 3702 | 1.42081003459 3703 | 19.1437409609 3704 | 31.7480973731 3705 | 6.94414317995 3706 | 0.669539158324 3707 | 2.73019392701 3708 | 8.16606690583 3709 | 22.2717124981 3710 | 0.707188523047 3711 | 11.8492630936 3712 | 16.512257009 3713 | 26.12377147 3714 | 31.0793780348 3715 | 14.0703381708 3716 | 1.3158672825 3717 | 0.472705500851 3718 | 22.7704794435 3719 | 16.885112206 3720 | 13.0695895506 3721 | 13.0566239112 3722 | 23.9534248732 3723 | 2.49122255665 3724 | 3.85670530736 3725 | 16.5383434985 3726 | 30.7069463758 3727 | 7.62347898983 3728 | 21.3119621022 3729 | 9.09856351366 3730 | 27.2909184219 3731 | 6.21839637276 3732 | 2.99824890316 
3733 | 16.7736414836 3734 | 12.3920577236 3735 | 21.6215436194 3736 | 9.18296043404 3737 | 30.620491505 3738 | 6.0066251571 3739 | 5.44592093041 3740 | 0.250882886619 3741 | 20.2984798148 3742 | 4.29405142183 3743 | 0.292869394261 3744 | 21.4509388041 3745 | 27.0885834302 3746 | 9.46807982964 3747 | 67.6415313336 3748 | 32.6691895317 3749 | 4.11332648544 3750 | 1.78652283431 3751 | 18.984150158 3752 | 29.0569577531 3753 | 5.35406443023 3754 | 35.3800906293 3755 | 14.3502554715 3756 | 4.29784856303 3757 | 9.62761946742 3758 | 5.53546278062 3759 | 17.4787821329 3760 | 8.70837648782 3761 | 25.2989603224 3762 | 15.2862147883 3763 | 1.77802302811 3764 | 13.9665645304 3765 | 40.8888802209 3766 | 21.5753411863 3767 | 5.5149086881 3768 | 19.8834299124 3769 | 5.63404217113 3770 | 2.14238765453 3771 | 27.9296380428 3772 | 8.96108141518 3773 | 5.73424943755 3774 | 23.7356502706 3775 | 6.61440369997 3776 | 14.4912334722 3777 | 19.3077902637 3778 | 20.9047719346 3779 | 3.03032745232 3780 | 27.036556242 3781 | 13.4002654906 3782 | 25.4502494699 3783 | 10.2743903792 3784 | 9.06090705699 3785 | 2.40519858515 3786 | 6.07219991486 3787 | 18.3032583451 3788 | 13.2049958133 3789 | 1.15270095726 3790 | 22.8265390011 3791 | 0.320237579382 3792 | 4.88949103372 3793 | 27.5472378441 3794 | 12.4902554035 3795 | 6.63693010758 3796 | 23.6064184158 3797 | 39.8561267955 3798 | 21.3768991165 3799 | 12.8439661805 3800 | 0.56338664346 3801 | 33.9122891119 3802 | 19.4829092404 3803 | 2.02626618217 3804 | 1.09009156143 3805 | 21.8839288184 3806 | 10.2016003468 3807 | 10.7982730635 3808 | 6.92625245349 3809 | 37.7129018519 3810 | 10.2176355858 3811 | 18.1942699598 3812 | 23.4811980094 3813 | 29.5052005901 3814 | 7.14035855482 3815 | 2.34584816294 3816 | 0.617114636651 3817 | 7.7026940273 3818 | 6.49550445676 3819 | 23.9251670936 3820 | 15.4560904783 3821 | 10.6497468496 3822 | 5.28967526385 3823 | 7.55896510217 3824 | 14.0965776438 3825 | 17.0755517089 3826 | 19.3201898336 3827 | 28.2886326718 3828 | 
24.597055574 3829 | 4.9421622367 3830 | 13.9656808758 3831 | 21.2352028288 3832 | 21.8198302535 3833 | 8.59894602492 3834 | 41.181040103 3835 | 56.7206665172 3836 | 18.186240603 3837 | 2.23742289767 3838 | 14.0520975295 3839 | 26.0625355765 3840 | 24.5861851988 3841 | 3.50174552205 3842 | 27.5114990324 3843 | 31.6617996596 3844 | 1.65530370275 3845 | 30.7339765406 3846 | 5.08408734775 3847 | 30.9541641023 3848 | 3.1319026553 3849 | 0.76047884944 3850 | 19.1151482478 3851 | 26.4809199366 3852 | 23.9653558073 3853 | 14.0420280051 3854 | 14.9118080314 3855 | 62.0704917951 3856 | 12.7979664791 3857 | 59.0268918367 3858 | 12.992827656 3859 | 0.0148341482564 3860 | 19.2607730574 3861 | 14.3820536518 3862 | 1.33105968349 3863 | 10.5093922101 3864 | 15.7963921207 3865 | 53.4017008271 3866 | 33.1269381212 3867 | 26.2509806642 3868 | 39.3614580025 3869 | 22.8342512134 3870 | 3.39723502296 3871 | 49.5690584329 3872 | 17.215127906 3873 | 10.5427006274 3874 | 7.06973274383 3875 | 25.4118503263 3876 | 24.4785614917 3877 | 15.5220983876 3878 | 44.8539852555 3879 | 30.7767419158 3880 | 21.2615750498 3881 | 24.4851426739 3882 | --------------------------------------------------------------------------------