├── .gitignore ├── README.md ├── data ├── mice.plink.bed ├── mice.plink.bim └── mice.plink.fam ├── lmm.png ├── lmm.py ├── model ├── LMM.py ├── Lasso.py ├── __init__.py └── helpingMethods.py ├── setup.py └── utility ├── __init__.py └── dataLoader.py /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/* 2 | *.pyc 3 | 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![lmm-python](lmm.png "LMM-Python") 2 | 3 | # LMM-Python 4 | 5 | Implementation of the Python Package of Linear Mixed Model, associated with the following paper: 6 | 7 | [Wang, Haohan, Bryon Aragam, and Eric P. Xing. "Trade-offs of Linear Mixed Models in Genome-Wide Association Studies." Journal of Computational Biology 29.3 (2022): 233-242.](http://www.cs.cmu.edu/~haohanw/papers/2021/Wang_etal_JCB21.pdf) 8 | 9 | ## Introduction 10 | 11 | LMM-Python is a python package of linear mixed model, including several popular methods used to calculate the kinship matrix, including 12 | 13 | * with selected SNPs (LMM-select): 14 | * [FaST-LMM-Select for addressing confounding from spatial structure and rare variants](https://www.ncbi.nlm.nih.gov/pubmed/23619783) 15 | * with low rank structure kinship: 16 | * [Variable Selection in Heterogeneous Datasets: A Truncated-rank Sparse Linear Mixed Model with Applications to Genome-wide Association Studies](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5889139/) 17 | * with masked kinship: 18 | * [Two-Variance-Component Model Improves Genetic Prediction in Family Datasets](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4667134/) 19 | 20 | ## File Structure: 21 | 22 | * [models/](https://github.com/HaohanWang/LMM-Python/tree/master/model) main method for the package 23 | * [utility/](https://github.com/HaohanWang/LMM-Python/tree/master/utility) other helper files 24 | * [lmm.py](https://github.com/HaohanWang/LMM-Python/blob/master/lmm.py) main entry point of using the package 25 | 26 | ## An Example Command: 27 | 28 | ``` 29 | python lmm.py -n data/mice.plink 30 | ``` 31 | #### Instructions 32 | ``` 33 | Options: 34 | -h, --help show this help message and exit 35 | 36 | Data Options: 37 | -f FILETYPE choices of input file type 38 | -n FILENAME name of the input file 39 | 40 | Model Options: 41 | -s Construct kinship matrix with selected SNPs 42 | -l Construct kinship matrix with low rank structure 43 | -t THRESHOLD Construct kinship matrix with smaller values masked (smaller than the specificed THRESHOLD) 44 | -q Run in quiet mode 45 | -m Run without missing genotype imputation 46 | -p Generate a simple Manhattan plot after running 47 | ``` 48 | #### Data Support 49 | * The package currently supports CSV and binary PLINK files. 50 | * Extensions to other data format can be easily implemented through `FileReader` in `utility/dataLoadear`. Feel free to contact us for the support of other data format. 51 | 52 | ## Python Users 53 | Proficient python users can directly call the method with python code, see example starting at [Line 75](https://github.com/HaohanWang/LMM-Python/blob/master/lmm.py#L75) 54 | 55 | ## Installation (Not Required) 56 | * Dependencies: 57 | * numpy 58 | * scipy 59 | * pysnptool 60 | * matplotlib 61 | 62 | You can install LMM-Python using pip by doing the following 63 | 64 | ``` 65 | pip install git+https://github.com/HaohanWang/LMM-Python 66 | ``` 67 | 68 | You can also clone the repository and do a manual install. 69 | ``` 70 | git clone https://github.com/HaohanWang/LMM-Python 71 | python setup.py install 72 | ``` 73 | 74 | ## Contact 75 | [Haohan Wang](http://www.cs.cmu.edu/~haohanw/) 76 | · 77 | [@HaohanWang](https://twitter.com/HaohanWang) 78 | -------------------------------------------------------------------------------- /data/mice.plink.bed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HaohanWang/LMM-Python/aee09e7bd18714cb7de352499f6bd73e965c77d6/data/mice.plink.bed -------------------------------------------------------------------------------- /data/mice.plink.fam: -------------------------------------------------------------------------------- 1 | 1 1 0.578326458116525 2 | 2 2 0.195782345383697 3 | 3 3 -0.521305382786239 4 | 4 4 -1.91220116906579 5 | 5 5 -0.128693364667383 6 | 6 6 0.342611123625638 7 | 7 7 0.664426074904099 8 | 8 8 -1.94655063807574 9 | 9 9 -0.507760091974233 10 | 10 10 -1.23151463167658 11 | 11 11 0.866158264867329 12 | 12 12 -0.708523793820819 13 | 13 13 0.291522028943484 14 | 14 14 -0.403630184456081 15 | 15 15 -0.770415240918274 16 | 16 16 1.18961163384266 17 | 17 17 1.0061972751307 18 | 18 18 0.0515545997177002 19 | 19 19 -0.457779594662491 20 | 20 20 0.490373860396776 21 | 21 21 -1.09846881276981 22 | 22 22 -2.43742829265459 23 | 23 23 0.557236524262319 24 | 24 24 -1.41504246318052 25 | 25 25 1.15703889546097 26 | 26 26 -0.679218444234306 27 | 27 27 0.730784488455396 28 | 28 28 0.275248757387192 29 | 29 29 -0.24017051163396 30 | 30 30 -0.542975409627802 31 | 31 31 1.66710558786658 32 | 32 32 0.968785184478699 33 | 33 33 0.826705387379194 34 | 34 34 0.891677356841673 35 | 35 35 -0.719655346343397 36 | 36 36 0.210982658165147 37 | 37 37 -2.08915491884732 38 | 38 38 0.368025936432148 39 | 39 39 -0.787178190674824 40 | 40 40 0.500572212621736 41 | 41 41 0.46780223641754 42 | 42 42 -1.45293776982881 43 | 43 43 0.553878100093788 44 | 44 44 -1.09125638076215 45 | 45 45 1.19058522819809 46 | 46 46 -0.333683522325528 47 | 47 47 0.459196738314866 48 | 48 48 -1.70204432492341 49 | 49 49 -0.944593632686406 50 | 50 50 -0.910669668567478 51 | 51 51 -0.0143537266610671 52 | 52 52 -1.1330946780401 53 | 53 53 -0.867841968602546 54 | 54 54 -0.169996764500546 55 | 55 55 -1.00220412638787 56 | 56 56 1.10358540465666 57 | 57 57 -1.51798526349098 58 | 58 58 0.973772654199901 59 | 59 59 0.0955028789566893 60 | 60 60 -0.043643527695341 61 | 61 61 -0.14830179714797 62 | 62 62 0.550390760807613 63 | 63 63 -0.0571035529216743 64 | 64 64 0.547060369057177 65 | 65 65 -0.542581511861614 66 | 66 66 1.21923194069937 67 | 67 67 1.34909674048756 68 | 68 68 -2.31270864528667 69 | 69 69 -1.34162911011508 70 | 70 70 1.36025307420299 71 | 71 71 -0.176671505210721 72 | 72 72 0.189852859587438 73 | 73 73 1.19293730104554 74 | 74 74 0.650414169199763 75 | 75 75 1.33592428557338 76 | 76 76 0.141541397552307 77 | 77 77 0.797132528611964 78 | 78 78 -0.560675948837918 79 | 79 79 1.87363901660132 80 | 80 80 -3.05597942863777 81 | 81 81 0.873190724421922 82 | 82 82 -0.854483477829469 83 | 83 83 0.0794037682290658 84 | 84 84 0.723341057950123 85 | 85 85 -0.49147227416804 86 | 86 86 1.21133955459618 87 | 87 87 0.133545418481342 88 | 88 88 0.110961916732653 89 | 89 89 0.319576816169153 90 | 90 90 -0.111002382817046 91 | 91 91 -0.211086876473512 92 | 92 92 -1.04553013707887 93 | 93 93 0.199817010985625 94 | 94 94 0.129775030216272 95 | 95 95 1.30270156771202 96 | 96 96 0.296087212173019 97 | 97 97 1.36451407045215 98 | 98 98 1.91191108537188 99 | 99 99 1.08938966094974 100 | 100 100 -0.133190201741845 101 | 101 101 -1.33553779401024 102 | 102 102 0.632009490692488 103 | 103 103 0.0256438415545642 104 | 104 104 0.310392214799678 105 | 105 105 -0.0149467099072475 106 | 106 106 -1.0448025242384 107 | 107 107 -0.127653993425332 108 | 108 108 -0.309516726331201 109 | 109 109 1.04544628341715 110 | 110 110 1.36809730824662 111 | 111 111 0.983350251452794 112 | 112 112 -0.772735953547016 113 | 113 113 1.702009544958 114 | 114 114 -1.40672205849522 115 | 115 115 1.51879192585006 116 | 116 116 -1.14805487456007 117 | 117 117 0.0449764314463272 118 | 118 118 0.865389030290827 119 | 119 119 0.521487824885001 120 | 120 120 1.4242700584653 121 | 121 121 -0.545181765415915 122 | 122 122 -1.24300091939234 123 | 123 123 0.649454669693675 124 | 124 124 -1.16256426718809 125 | 125 125 0.0487176390333451 126 | 126 126 -0.600856902912926 127 | 127 127 0.636003412569337 128 | 128 128 -0.0457818842606848 129 | 129 129 -0.0763752619343752 130 | 130 130 0.363856689893951 131 | 131 131 2.27134379822417 132 | 132 132 1.3218586591688 133 | 133 133 0.710614003890091 134 | 134 134 0.504338850625114 135 | 135 135 -1.04111809349092 136 | 136 136 1.44202806054358 137 | 137 137 1.14272276207842 138 | 138 138 1.22221765715212 139 | 139 139 -0.247404268366069 140 | 140 140 -0.925473025136144 141 | 141 141 0.427074591883455 142 | 142 142 1.02353435445236 143 | 143 143 0.021555325716735 144 | 144 144 1.58618914800925 145 | 145 145 1.62602321388192 146 | 146 146 0.733150040463656 147 | 147 147 0.747369718957557 148 | 148 148 -0.865050512321836 149 | 149 149 0.882506844364719 150 | 150 150 1.29047968064168 151 | 151 151 0.0221646233751824 152 | 152 152 0.630866061221563 153 | 153 153 -1.29953157009172 154 | 154 154 1.30217480182917 155 | 155 155 0.086244645602874 156 | 156 156 -0.169650165905271 157 | 157 157 -0.0169926396150218 158 | 158 158 0.806889688261012 159 | 159 159 1.03023698103876 160 | 160 160 0.589162397671658 161 | 161 161 -0.097002770031942 162 | 162 162 -0.693995355058148 163 | 163 163 1.521383959726 164 | 164 164 0.376459375191069 165 | 165 165 1.39567993464104 166 | 166 166 -1.50790830543716 167 | 167 167 1.02653075487634 168 | 168 168 0.312237504065192 169 | 169 169 -0.452673735664909 170 | 170 170 -0.387406697676818 171 | 171 171 0.741278109972149 172 | 172 172 -0.567922097095984 173 | 173 173 -0.956825712840666 174 | 174 174 0.478420776364722 175 | 175 175 -0.254297333830417 176 | 176 176 1.80774196315707 177 | 177 177 -0.198445397658032 178 | 178 178 0.747456936476228 179 | 179 179 -0.198638351035813 180 | 180 180 1.03720052695906 181 | 181 181 -2.18384988397017 182 | 182 182 -1.03030429944458 183 | 183 183 0.0511422575962806 184 | 184 184 -1.62875698414319 185 | 185 185 1.86356794157945 186 | 186 186 -0.330846002034974 187 | 187 187 0.508121644659424 188 | 188 188 -2.82508797127958 189 | 189 189 0.346884880760978 190 | 190 190 0.595063639248732 191 | 191 191 -1.05798169493196 192 | 192 192 2.51714410611647 193 | 193 193 -1.55553834244038 194 | 194 194 -0.0932777149784873 195 | 195 195 -1.1674430291188 196 | 196 196 0.36584499716066 197 | 197 197 0.322228604593166 198 | 198 198 -0.425810297382625 199 | 199 199 -1.18935219794646 200 | 200 200 -0.0079083281035136 201 | 201 201 1.97668126030989 202 | 202 202 -2.19303705158071 203 | 203 203 1.10728919114735 204 | 204 204 2.37951571451436 205 | 205 205 1.43733599782977 206 | 206 206 -0.00322115101236579 207 | 207 207 2.06044093222258 208 | 208 208 0.733317447183233 209 | 209 209 -0.85752505137047 210 | 210 210 -1.11252548948199 211 | 211 211 0.186512602873248 212 | 212 212 -0.460266569512581 213 | 213 213 0.386332518828279 214 | 214 214 0.565823508378553 215 | 215 215 0.345522234434647 216 | 216 216 -0.317813575156581 217 | 217 217 0.00022263522485855 218 | 218 218 -0.0815590415834106 219 | 219 219 -0.845437086609451 220 | 220 220 1.1815960775075 221 | 221 221 -0.559673153447105 222 | 222 222 -1.45652588168293 223 | 223 223 0.818163314437268 224 | 224 224 -1.10966437992269 225 | 225 225 -1.65098982669428 226 | 226 226 -0.735798150575336 227 | 227 227 0.323720455606259 228 | 228 228 -0.422908323342144 229 | 229 229 2.03006101106716 230 | 230 230 -1.35440362918302 231 | 231 231 0.0147856495145441 232 | 232 232 0.132069862850304 233 | 233 233 1.28857918719064 234 | 234 234 -1.34844178253104 235 | 235 235 0.163332371092126 236 | 236 236 -0.27388419290083 237 | 237 237 -0.653386945390125 238 | 238 238 -0.321728283437608 239 | 239 239 -1.16335931729679 240 | 240 240 1.64614130087898 241 | 241 241 1.20731691086136 242 | 242 242 0.354696396484179 243 | 243 243 1.19680247889328 244 | 244 244 -0.0117964330901182 245 | 245 245 -0.233851260971167 246 | 246 246 -1.27143702348702 247 | 247 247 -0.261107220533331 248 | 248 248 -0.0247017713710243 249 | 249 249 0.125998103583255 250 | 250 250 1.65734294063089 251 | 251 251 0.0770094530125398 252 | 252 252 0.795171188511506 253 | 253 253 -0.942435881704081 254 | 254 254 1.25188921405861 255 | 255 255 0.352096018217151 256 | 256 256 0.804771416998551 257 | 257 257 -0.676357176476529 258 | 258 258 0.00424446638476624 259 | 259 259 -0.422285813133926 260 | 260 260 0.423177473018176 261 | 261 261 0.464814192756453 262 | 262 262 0.665480415833176 263 | 263 263 1.67559727017351 264 | 264 264 -1.38883740328671 265 | 265 265 0.577388782965662 266 | 266 266 -1.54691228937599 267 | 267 267 0.675390887206472 268 | 268 268 -0.284481797288655 269 | 269 269 -0.29235950365634 270 | 270 270 0.963736356506599 271 | 271 271 -0.168156651951719 272 | 272 272 -0.895373391658098 273 | 273 273 0.402954474990335 274 | 274 274 -1.73578122172373 275 | 275 275 -1.38985326542401 276 | 276 276 1.49060236991162 277 | 277 277 -0.161541845848141 278 | 278 278 1.88508179740274 279 | 279 279 -0.221819984248985 280 | 280 280 -1.18777380583341 281 | 281 281 0.928279410703693 282 | 282 282 -0.223715611767975 283 | 283 283 -1.4608896777589 284 | 284 284 -0.866197340852763 285 | 285 285 0.984952219011726 286 | 286 286 0.595681663406518 287 | 287 287 -1.71001993416038 288 | 288 288 -0.727181152152915 289 | 289 289 0.0887360643640421 290 | 290 290 -1.18916576521653 291 | 291 291 -1.2676633627829 292 | 292 292 -0.245433979968461 293 | 293 293 -0.943204764449095 294 | 294 294 -0.957876860592745 295 | 295 295 0.845726608390842 296 | 296 296 0.715996745179194 297 | 297 297 0.112556822023627 298 | 298 298 1.2994806469069 299 | 299 299 -0.423826541089281 300 | 300 300 -0.125675337219647 301 | 301 301 0.131626459115789 302 | 302 302 -0.488482257136846 303 | 303 303 -0.36632181112562 304 | 304 304 -0.0829341055341336 305 | 305 305 1.04671712159328 306 | 306 306 -0.164220712574756 307 | 307 307 -0.894756654535985 308 | 308 308 1.50175790502377 309 | 309 309 -0.450809662003257 310 | 310 310 0.571138104156823 311 | 311 311 -0.913119371279957 312 | 312 312 -0.781213360934724 313 | 313 313 1.16034358757217 314 | 314 314 0.173649928529965 315 | 315 315 0.569033082365486 316 | 316 316 -0.38362783245465 317 | 317 317 0.262673979535344 318 | 318 318 -0.732679298842521 319 | 319 319 -1.85152275794679 320 | 320 320 0.350985808636169 321 | 321 321 0.127610082070859 322 | 322 322 -1.60321851068759 323 | 323 323 0.184501890016734 324 | 324 324 -0.545224504635007 325 | 325 325 0.967813666386325 326 | 326 326 -0.572316556478888 327 | 327 327 -0.583675008909763 328 | 328 328 -1.48826500989001 329 | 329 329 0.388021948032952 330 | 330 330 0.665592023051811 331 | 331 331 -0.0193104515527054 332 | 332 332 -1.47517772007711 333 | 333 333 -0.739850109401815 334 | 334 334 0.50561718771254 335 | 335 335 1.29390326173991 336 | 336 336 -0.264846226304917 337 | 337 337 1.48398405306832 338 | 338 338 0.355665110818614 339 | 339 339 -1.11766210541825 340 | 340 340 -1.31451799152525 341 | 341 341 -0.0434841582085514 342 | 342 342 0.588534432067989 343 | 343 343 0.43099156071492 344 | 344 344 1.40098933267225 345 | 345 345 0.732493580279622 346 | 346 346 0.365231893370316 347 | 347 347 -0.372621947544908 348 | 348 348 0.70577652411543 349 | 349 349 -0.700239220187378 350 | 350 350 0.280887277229157 351 | 351 351 0.0506796313054341 352 | 352 352 0.217890533720017 353 | 353 353 1.3066620730224 354 | 354 354 -0.628802044997488 355 | 355 355 0.973494912557713 356 | 356 356 0.193938093596555 357 | 357 357 0.303158985311571 358 | 358 358 0.571448208625529 359 | 359 359 1.64974889398756 360 | 360 360 -1.42003049385365 361 | 361 361 0.944528220993382 362 | 362 362 1.30401071354372 363 | 363 363 -1.18058299543401 364 | 364 364 -0.435291913231341 365 | 365 365 0.903469995708917 366 | 366 366 0.0143965033439768 367 | 367 367 -0.758960146241604 368 | 368 368 0.819569200339739 369 | 369 369 0.677920745947816 370 | 370 370 -0.594520175212589 371 | 371 371 -0.714460586131618 372 | 372 372 0.561362760353497 373 | 373 373 0.193472748212954 374 | 374 374 1.49896037577192 375 | 375 375 2.08492392454263 376 | 376 376 1.27311563511908 377 | 377 377 0.210279328784529 378 | 378 378 0.0737803308751015 379 | 379 379 -0.672881102033891 380 | 380 380 -0.461816766289609 381 | 381 381 -0.575065972124118 382 | 382 382 -0.454237848442053 383 | 383 383 0.325784765319103 384 | 384 384 2.16351841840363 385 | 385 385 -0.0539771306186573 386 | 386 386 1.00002318211923 387 | 387 387 0.420995889469939 388 | 388 388 0.0521255998768494 389 | 389 389 0.632740831212586 390 | 390 390 -0.913424314232543 391 | 391 391 -1.08589349503984 392 | 392 392 -0.984849062747177 393 | 393 393 -0.39826229934191 394 | 394 394 -0.458057333977137 395 | 395 395 -0.542814341627292 396 | 396 396 -1.37193730529718 397 | 397 397 1.19771736429353 398 | 398 398 0.4438998450767 399 | 399 399 -0.092208890500299 400 | 400 400 -0.468151469330967 401 | 401 401 -0.37237448839694 402 | 402 402 0.310036017593725 403 | 403 403 -0.931946422599881 404 | 404 404 -1.1634327165916 405 | 405 405 -0.142705950920818 406 | 406 406 -1.54360088930744 407 | 407 407 0.848985536792503 408 | 408 408 1.03979387832077 409 | 409 409 0.965004819112038 410 | 410 410 -0.486520356347963 411 | 411 411 0.00559445025838622 412 | 412 412 -0.866791923223554 413 | 413 413 -1.27892531474749 414 | 414 414 -1.19229139436318 415 | 415 415 -1.09548619497005 416 | 416 416 -0.508740672479416 417 | 417 417 -0.501341016822338 418 | 418 418 0.384388908166904 419 | 419 419 -0.152226859268863 420 | 420 420 0.104164261383566 421 | 421 421 -1.60321317995902 422 | 422 422 -0.734418919596604 423 | 423 423 0.6657529590601 424 | 424 424 0.664687535911109 425 | 425 425 1.02421857778428 426 | 426 426 -0.931629635193844 427 | 427 427 0.0207991818236737 428 | 428 428 -1.90177956296221 429 | 429 429 1.69327372678862 430 | 430 430 -1.33152389889587 431 | 431 431 0.800214038704413 432 | 432 432 2.53167217433265 433 | 433 433 0.800478728416751 434 | 434 434 -0.491276720531612 435 | 435 435 1.32629788645812 436 | 436 436 -3.28731914134699 437 | 437 437 -0.675820587859871 438 | 438 438 -0.328888334302369 439 | 439 439 0.870284021671896 440 | 440 440 0.274510487551725 441 | 441 441 -0.272037239578637 442 | 442 442 -0.00506084582887342 443 | 443 443 -1.36241792355678 444 | 444 444 0.361873786613421 445 | 445 445 -0.0100699662556648 446 | 446 446 -0.458950709522541 447 | 447 447 1.08899936177891 448 | 448 448 -1.16272577131026 449 | 449 449 0.309880854226053 450 | 450 450 -0.940237819653282 451 | 451 451 0.390396729172948 452 | 452 452 0.116338716022761 453 | 453 453 0.359358257986564 454 | 454 454 0.454255138813906 455 | 455 455 -0.509760949879084 456 | 456 456 -1.10946622603717 457 | 457 457 -1.98594304838403 458 | 458 458 1.22322873749238 459 | 459 459 -0.0271492237417874 460 | 460 460 0.505850375286026 461 | 461 461 -0.629447137212153 462 | 462 462 -2.3925189228799 463 | 463 463 0.421945650491585 464 | 464 464 -1.1500237564364 465 | 465 465 0.956011618415529 466 | 466 466 0.0878458071315042 467 | 467 467 1.81581351205189 468 | 468 468 1.6383026679351 469 | 469 469 -0.450627283266704 470 | 470 470 0.631715854681423 471 | 471 471 -0.608290296970952 472 | 472 472 -1.94291471625481 473 | 473 473 -0.986971958404098 474 | 474 474 1.64310243677033 475 | 475 475 0.645050516229686 476 | 476 476 0.202114605841121 477 | 477 477 1.26746171515797 478 | 478 478 -1.7405144925332 479 | 479 479 0.945201403465822 480 | 480 480 0.00625406770021817 481 | 481 481 -0.663504758037114 482 | 482 482 0.294893191586028 483 | 483 483 0.289738893310652 484 | 484 484 -0.16467546408554 485 | 485 485 -1.49173088473295 486 | 486 486 -0.237831602628859 487 | 487 487 1.32346960258824 488 | 488 488 1.68348093179519 489 | 489 489 -0.628926774633454 490 | 490 490 0.338269319351233 491 | 491 491 -1.66729729181629 492 | 492 492 0.399409025686734 493 | 493 493 0.822653829576472 494 | 494 494 -0.65389193562412 495 | 495 495 0.639549951348571 496 | 496 496 -0.284656262547133 497 | 497 497 -0.440167220772825 498 | 498 498 0.769362381229842 499 | 499 499 -1.27388937612657 500 | 500 500 -0.248976306572129 501 | 501 501 1.34101158610558 502 | 502 502 1.26628043959491 503 | 503 503 -1.51594973091728 504 | 504 504 2.29545050145156 505 | 505 505 -0.173251428967561 506 | 506 506 0.50393490359524 507 | 507 507 1.50489607504974 508 | 508 508 0.878302742162869 509 | 509 509 -1.67596188194901 510 | 510 510 -0.558781229700373 511 | 511 511 0.0449443739469055 512 | 512 512 -0.515390179894747 513 | 513 513 -1.13863805460269 514 | 514 514 0.510316500739831 515 | 515 515 -0.99303402281578 516 | 516 516 -0.498259564139026 517 | 517 517 1.18824513881634 518 | 518 518 -1.00702197157014 519 | 519 519 -0.651342925891101 520 | 520 520 -1.14542596665589 521 | 521 521 1.38632218302722 522 | 522 522 0.126997784189033 523 | 523 523 0.294695868244159 524 | 524 524 1.09665827694435 525 | 525 525 0.361950767817277 526 | 526 526 -1.10274250158646 527 | 527 527 -2.22598392441938 528 | 528 528 0.35974142637907 529 | 529 529 0.187367872551852 530 | 530 530 1.49737671340357 531 | 531 531 0.539145207735135 532 | 532 532 1.17436472463525 533 | 533 533 -0.902209115508248 534 | 534 534 -0.595323454075403 535 | 535 535 1.65628235217372 536 | 536 536 -1.02208152811503 537 | 537 537 -0.229095070788077 538 | 538 538 0.454323265397076 539 | 539 539 -1.18402367222034 540 | 540 540 0.689994527511207 541 | 541 541 1.72765277776684 542 | 542 542 -0.827215726400863 543 | 543 543 -1.46029179176677 544 | 544 544 1.15665971483052 545 | 545 545 0.988927964046068 546 | 546 546 0.87747025421714 547 | 547 547 0.223447341202459 548 | 548 548 -0.40860763044406 549 | 549 549 1.28085660179362 550 | 550 550 -0.309261351213902 551 | 551 551 0.451109713726357 552 | 552 552 0.492936659721424 553 | 553 553 -0.00437723055378422 554 | 554 554 0.455657574288598 555 | 555 555 -0.39614888693249 556 | 556 556 -0.575509131709891 557 | 557 557 -1.71236028931617 558 | 558 558 1.08743042216912 559 | 559 559 -0.172390063273484 560 | 560 560 0.373713983349088 561 | 561 561 1.28971558573908 562 | 562 562 0.791943969776967 563 | 563 563 0.653526011887572 564 | 564 564 -1.4540924866428 565 | 565 565 -0.864249217780984 566 | 566 566 -1.30086907963633 567 | 567 567 1.07770092315615 568 | 568 568 0.219376445576377 569 | 569 569 1.22778709176904 570 | 570 570 0.0431902777599682 571 | 571 571 -0.100710299245552 572 | 572 572 -0.28896946011697 573 | 573 573 -1.00395883271085 574 | 574 574 2.43030023180524 575 | 575 575 -1.11937707777949 576 | 576 576 0.32493818328184 577 | 577 577 0.209562136244498 578 | 578 578 0.660636639873187 579 | 579 579 0.580498955066073 580 | 580 580 -0.1895117053715 581 | 581 581 1.2889000298888 582 | 582 582 0.0839576099083289 583 | 583 583 -0.808314320462118 584 | 584 584 -0.125327828803886 585 | 585 585 -0.377221692679322 586 | 586 586 -1.4979291044032 587 | 587 587 0.220115923941376 588 | 588 588 0.315647060183415 589 | 589 589 1.27494243950103 590 | 590 590 -1.58627134519583 591 | 591 591 1.75608459629166 592 | 592 592 1.20732195539693 593 | 593 593 -1.86334062689748 594 | 594 594 -0.332652265770898 595 | 595 595 0.628591864650295 596 | 596 596 -1.45769806604135 597 | 597 597 0.352987893320488 598 | 598 598 1.19113226586781 599 | 599 599 0.0207502306911613 600 | 600 600 -0.101103946729359 601 | 601 601 -1.86939862041158 602 | 602 602 -0.490139981392883 603 | 603 603 2.58712472324109 604 | 604 604 -0.954381570235624 605 | 605 605 2.05901643601611 606 | 606 606 1.34801371975398 607 | 607 607 -0.94289530521195 608 | 608 608 1.50921277906219 609 | 609 609 0.973807159308351 610 | 610 610 -0.267224435484423 611 | 611 611 -0.232536338695761 612 | 612 612 -0.509608074925974 613 | 613 613 0.214136062337953 614 | 614 614 0.828726724907193 615 | 615 615 -1.27164749923286 616 | 616 616 -1.0459203748346 617 | 617 617 -1.82816923696542 618 | 618 618 0.483251032263338 619 | 619 619 0.585836642330193 620 | 620 620 -0.640775892719629 621 | 621 621 -0.701753669603853 622 | 622 622 0.68612966334385 623 | 623 623 1.84352895164102 624 | 624 624 -0.601542673048195 625 | 625 625 -0.560031756641758 626 | 626 626 0.846924918810983 627 | 627 627 0.70085312755495 628 | 628 628 -1.58186889085038 629 | 629 629 0.682322080224622 630 | 630 630 0.635095380417219 631 | 631 631 -2.17522441479057 632 | 632 632 -0.0400594748486982 633 | 633 633 -2.80322870830178 634 | 634 634 -0.11264809678505 635 | 635 635 0.78576648522565 636 | 636 636 -0.429625923310891 637 | 637 637 0.673448622413728 638 | 638 638 -0.251993142995242 639 | 639 639 0.98119419169302 640 | 640 640 1.93363679009645 641 | 641 641 1.68313128999358 642 | 642 642 0.341604822295608 643 | 643 643 -0.708675208451394 644 | 644 644 0.598590225469535 645 | 645 645 0.225226504155243 646 | 646 646 0.407641709667575 647 | 647 647 1.0570443937951 648 | 648 648 0.476454710522726 649 | 649 649 -0.0837196373194183 650 | 650 650 -1.01685638444585 651 | 651 651 -1.87442999970146 652 | 652 652 -0.0573336155245096 653 | 653 653 -0.724651930150089 654 | 654 654 0.546673557595427 655 | 655 655 -0.382115726169747 656 | 656 656 0.296443108732044 657 | 657 657 1.61077384347317 658 | 658 658 1.09065337021971 659 | 659 659 0.552921672753564 660 | 660 660 -0.781910277399013 661 | 661 661 0.10789420419679 662 | 662 662 -0.991645109049635 663 | 663 663 0.727324166685002 664 | 664 664 0.481388314051578 665 | 665 665 -0.92950525127386 666 | 666 666 -1.06755545727684 667 | 667 667 0.385654104146012 668 | 668 668 0.707094780301183 669 | 669 669 -2.00340130245861 670 | 670 670 -1.35190450410511 671 | 671 671 -0.687559983228342 672 | 672 672 -0.355988635600759 673 | 673 673 0.415497428837224 674 | 674 674 -1.31306521808429 675 | 675 675 -0.818588400942759 676 | 676 676 1.24042513026022 677 | 677 677 -0.527825792262915 678 | 678 678 -0.350500354808268 679 | 679 679 -1.14922871510124 680 | 680 680 0.0370393040855162 681 | 681 681 0.620091261300061 682 | 682 682 -0.39621570612438 683 | 683 683 0.313221159352008 684 | 684 684 -1.7535965464189 685 | 685 685 -2.03445747976237 686 | 686 686 0.967359405397107 687 | 687 687 -1.66470156795687 688 | 688 688 1.06679711424472 689 | 689 689 0.951724537293982 690 | 690 690 0.689929777515913 691 | 691 691 -0.377722983698516 692 | 692 692 -1.63065148798644 693 | 693 693 0.190596344306524 694 | 694 694 -0.288610640061331 695 | 695 695 1.02620457439326 696 | 696 696 -0.830261719616852 697 | 697 697 0.257448799093562 698 | 698 698 -1.50109041391255 699 | 699 699 1.20537482677809 700 | 700 700 -0.0934504982004204 701 | 701 701 -0.364102522845903 702 | 702 702 1.99956113237189 703 | 703 703 -0.495779770123273 704 | 704 704 -0.373680278977773 705 | 705 705 0.871678714216433 706 | 706 706 -1.50602956465311 707 | 707 707 -0.542351689125362 708 | 708 708 -1.33252683489743 709 | 709 709 -1.1777348472076 710 | 710 710 1.40209046519523 711 | 711 711 0.567313212495617 712 | 712 712 -1.54199935189321 713 | 713 713 0.880484427552787 714 | 714 714 0.0695551965564044 715 | 715 715 -1.37775034285365 716 | 716 716 0.0748663591660813 717 | 717 717 -0.352938978514575 718 | 718 718 0.977296942922311 719 | 719 719 0.0621461125313566 720 | 720 720 -0.19627535830953 721 | 721 721 0.252405160804789 722 | 722 722 0.00867083782830697 723 | 723 723 -0.992402648064642 724 | 724 724 0.380612006321424 725 | 725 725 0.699414313889325 726 | 726 726 -0.92283423715099 727 | 727 727 -0.0231122821143559 728 | 728 728 -0.14797388053842 729 | 729 729 0.017416420192546 730 | 730 730 1.03033386438233 731 | 731 731 -0.599777914154979 732 | 732 732 -0.477876098988569 733 | 733 733 0.288558404470913 734 | 734 734 3.14422775504541 735 | 735 735 0.48032245858908 736 | 736 736 -1.34609237987779 737 | 737 737 0.509169985641762 738 | 738 738 0.243020515956327 739 | 739 739 0.617761630701846 740 | 740 740 -1.81419247270563 741 | 741 741 1.26396879582406 742 | 742 742 -1.07273768892108 743 | 743 743 -0.00258795438978194 744 | 744 744 -0.955681291299179 745 | 745 745 0.0633622098322168 746 | 746 746 0.57606943933952 747 | 747 747 0.952873966645395 748 | 748 748 -0.177074027938983 749 | 749 749 0.888197698874071 750 | 750 750 -0.159937380124124 751 | 751 751 -0.257788005066155 752 | 752 752 0.751081895818972 753 | 753 753 0.966442680434852 754 | 754 754 -0.972656387127648 755 | 755 755 0.363210442321346 756 | 756 756 -0.307918827714191 757 | 757 757 0.810800175975817 758 | 758 758 -0.0390888055324478 759 | 759 759 0.926306671591415 760 | 760 760 0.964875875691789 761 | 761 761 0.823117029462257 762 | 762 762 1.60208828520769 763 | 763 763 -0.878319923867945 764 | 764 764 0.701547410892486 765 | 765 765 -0.318197317221084 766 | 766 766 -1.97888335260498 767 | 767 767 1.50327783469821 768 | 768 768 0.546214369817492 769 | 769 769 0.592548196514232 770 | 770 770 0.693964654156111 771 | 771 771 -0.52138283214078 772 | 772 772 0.522074803167734 773 | 773 773 0.863958097834409 774 | 774 774 -0.308055559564143 775 | 775 775 0.0548002107041571 776 | 776 776 2.03769733400008 777 | 777 777 -0.374103454053649 778 | 778 778 -0.605590309888563 779 | 779 779 -0.0873188636372408 780 | 780 780 -0.966217091098784 781 | 781 781 1.38120796531999 782 | 782 782 -0.171092690017699 783 | 783 783 -0.999804470018856 784 | 784 784 2.07783789825055 785 | 785 785 0.220809804236357 786 | 786 786 0.452496375066468 787 | 787 787 -0.614566149832638 788 | 788 788 1.1466215614438 789 | 789 789 0.402232022138661 790 | 790 790 -2.33990151400286 791 | 791 791 -2.03159252058559 792 | 792 792 -0.678825699364864 793 | 793 793 -1.420485525167 794 | 794 794 1.30821782049379 795 | 795 795 0.179107635994324 796 | 796 796 0.129034102040146 797 | 797 797 -0.598261888800378 798 | 798 798 -0.0541341003192937 799 | 799 799 0.163277923948031 800 | 800 800 -0.963456688442969 801 | 801 801 -1.6926126735285 802 | 802 802 -0.407568733072237 803 | 803 803 -1.43296427020816 804 | 804 804 -1.06729860729041 805 | 805 805 0.97972970914944 806 | 806 806 -0.886967026894436 807 | 807 807 2.15968512767744 808 | 808 808 0.522571072155775 809 | 809 809 -0.0346438137794573 810 | 810 810 1.70767149345523 811 | 811 811 2.16004387489654 812 | 812 812 2.6645356419964 813 | 813 813 -0.311231222486198 814 | 814 814 -0.482263593320542 815 | 815 815 -0.084649480484566 816 | 816 816 -0.349370347162588 817 | 817 817 -2.4407303623914 818 | 818 818 -0.278853573206237 819 | 819 819 0.441233363625248 820 | 820 820 0.816690293462777 821 | 821 821 0.434408888081685 822 | 822 822 -0.0161302481714744 823 | 823 823 -0.733951699657738 824 | 824 824 -0.354258199675501 825 | 825 825 0.843282841507463 826 | 826 826 -1.25072615412519 827 | 827 827 0.192330336683251 828 | 828 828 -0.217942513430395 829 | 829 829 0.791087609431406 830 | 830 830 0.0138198202724494 831 | 831 831 0.23763306270741 832 | 832 832 0.0275601871681827 833 | 833 833 -0.426220374565942 834 | 834 834 -1.77755098036224 835 | 835 835 1.87459797382923 836 | 836 836 0.677744840736292 837 | 837 837 -0.543057998181942 838 | 838 838 -1.22448818892215 839 | 839 839 -1.06269403955843 840 | 840 840 -0.323642221349741 841 | 841 841 -0.956213900056552 842 | 842 842 -1.78818787925385 843 | 843 843 -0.243244008386269 844 | 844 844 2.50097912440502 845 | 845 845 -1.19932379871137 846 | 846 846 -1.32907293758571 847 | 847 847 -1.86015792973063 848 | 848 848 0.243743863454794 849 | 849 849 0.052869972051037 850 | 850 850 -2.23669426849631 851 | 851 851 -0.564553751545923 852 | 852 852 -0.538220439475084 853 | 853 853 -0.280598568987564 854 | 854 854 0.813283014008976 855 | 855 855 -1.80941489647098 856 | 856 856 0.136530083970148 857 | 857 857 0.105865843383871 858 | 858 858 -0.65298791996646 859 | 859 859 1.27363845045396 860 | 860 860 -0.157127914364577 861 | 861 861 0.420687094359494 862 | 862 862 -0.566260980683921 863 | 863 863 1.26914833117578 864 | 864 864 -0.868657514512405 865 | 865 865 -0.528983032570891 866 | 866 866 0.47867581930213 867 | 867 867 0.684660978142026 868 | 868 868 1.04561207470218 869 | 869 869 -1.13879841519413 870 | 870 870 0.902125415920553 871 | 871 871 1.27860598265304 872 | 872 872 1.01942215150554 873 | 873 873 0.911126257289815 874 | 874 874 2.39134134000275 875 | 875 875 -1.07771928197909 876 | 876 876 0.346325076168195 877 | 877 877 0.859760174831497 878 | 878 878 0.130418308508469 879 | 879 879 -0.39812354733325 880 | 880 880 -0.218157868876553 881 | 881 881 -0.615608323571358 882 | 882 882 0.53184006865183 883 | 883 883 0.607522486911059 884 | 884 884 -0.0325735472088869 885 | 885 885 -0.491125661327161 886 | 886 886 0.605286457863997 887 | 887 887 -0.501067988119815 888 | 888 888 0.0743053828040702 889 | 889 889 -0.438969316892805 890 | 890 890 0.483484273755289 891 | 891 891 -0.740311477604801 892 | 892 892 0.814843961808801 893 | 893 893 0.516019881148034 894 | 894 894 -2.00581164986269 895 | 895 895 0.191054021395597 896 | 896 896 -0.333701799390394 897 | 897 897 0.258796428641509 898 | 898 898 -0.394623705823787 899 | 899 899 0.525245910257797 900 | 900 900 -0.215742612182889 901 | 901 901 -0.502114875726312 902 | 902 902 0.993872828945388 903 | 903 903 -1.08402736643302 904 | 904 904 -0.80112148157939 905 | 905 905 0.379129610248663 906 | 906 906 0.531111808470233 907 | 907 907 0.751106545659857 908 | 908 908 -1.01112214782304 909 | 909 909 -0.128921242484947 910 | 910 910 0.965093696426287 911 | 911 911 -0.249341895656547 912 | 912 912 1.47032170588483 913 | 913 913 -0.720612116362076 914 | 914 914 -0.806863233143418 915 | 915 915 -1.53289977971815 916 | 916 916 -1.34161736730603 917 | 917 917 0.568284324857315 918 | 918 918 -0.494521495061654 919 | 919 919 -1.28293180978407 920 | 920 920 0.209497336925205 921 | 921 921 1.96231301727703 922 | 922 922 2.06125996198828 923 | 923 923 0.834744752670082 924 | 924 924 1.37899578755463 925 | 925 925 -2.08752662546199 926 | 926 926 -0.113597390949072 927 | 927 927 0.239706881092107 928 | 928 928 -1.06349922709523 929 | 929 929 -0.60352794817836 930 | 930 930 -0.0069200974513153 931 | 931 931 -0.870981910563755 932 | 932 932 -0.492546579326967 933 | 933 933 1.54495205665164 934 | 934 934 -1.75486257502207 935 | 935 935 1.020810452154 936 | 936 936 -0.942832503678945 937 | 937 937 0.402573754550463 938 | 938 938 -1.72370971853608 939 | 939 939 1.3552659669632 940 | 940 940 -1.7951622299182 941 | 941 941 0.0238411604911366 942 | 942 942 0.344938792033839 943 | 943 943 -0.538585767315982 944 | 944 944 -1.34050215741547 945 | 945 945 -0.621947920836141 946 | 946 946 -2.59351836214142 947 | 947 947 -0.524099420910256 948 | 948 948 -0.956106575638653 949 | 949 949 -0.0371307656060382 950 | 950 950 0.637682241447732 951 | 951 951 -0.139094097651018 952 | 952 952 1.11183242120575 953 | 953 953 -2.05706108785635 954 | 954 954 2.06333333253494 955 | 955 955 -0.16343336968899 956 | 956 956 0.981083181634195 957 | 957 957 1.59994466430786 958 | 958 958 1.66329011890505 959 | 959 959 -0.674972962840394 960 | 960 960 -0.363794652516363 961 | 961 961 -1.59675630236477 962 | 962 962 -1.4537616460578 963 | 963 963 0.686097341295175 964 | 964 964 -0.462713312861854 965 | 965 965 2.05344009594649 966 | 966 966 -0.126377844138069 967 | 967 967 -0.186377110910673 968 | 968 968 -0.433462408047171 969 | 969 969 -0.48332376384547 970 | 970 970 0.299086565682116 971 | 971 971 -0.235804909921767 972 | 972 972 0.171035038043438 973 | 973 973 -0.541282340832921 974 | 974 974 1.44772992125809 975 | 975 975 -0.138763434145803 976 | 976 976 0.0654403785865433 977 | 977 977 0.450410955325329 978 | 978 978 -2.04496447835605 979 | 979 979 -0.218144790188637 980 | 980 980 0.790604983383392 981 | 981 981 2.19610086568879 982 | 982 982 -0.644406546770486 983 | 983 983 0.949419965006283 984 | 984 984 2.13992724379992 985 | 985 985 -0.161325712634616 986 | 986 986 1.14074029812665 987 | 987 987 -0.321483461028062 988 | 988 988 -0.829824077656794 989 | 989 989 -0.0601375441477454 990 | 990 990 1.4661143286612 991 | 991 991 0.672727025544037 992 | 992 992 0.481499127005913 993 | 993 993 -0.868086798263547 994 | 994 994 1.05926565211773 995 | 995 995 -0.656729560138415 996 | 996 996 1.02436442724149 997 | 997 997 0.0865972091186409 998 | 998 998 0.564187264741685 999 | 999 999 -0.844323329846435 1000 | 1000 1000 -0.160601767383363 1001 | 1001 1001 -1.35625585639806 1002 | 1002 1002 0.816854194360791 1003 | 1003 1003 -0.599574940238411 1004 | 1004 1004 0.467127895524276 1005 | 1005 1005 1.10244210226651 1006 | 1006 1006 -0.145561691240101 1007 | 1007 1007 -0.783702363819497 1008 | 1008 1008 0.322211216315776 1009 | 1009 1009 1.43709596927058 1010 | 1010 1010 -1.29043845198999 1011 | 1011 1011 0.604078518158706 1012 | 1012 1012 -0.709263532342798 1013 | 1013 1013 -0.990768115797301 1014 | 1014 1014 1.01187782707636 1015 | 1015 1015 -0.828738905124668 1016 | 1016 1016 -1.02065543416323 1017 | 1017 1017 0.730772606778963 1018 | 1018 1018 0.687479439696051 1019 | 1019 1019 -0.385857044277023 1020 | 1020 1020 -0.319626283237668 1021 | 1021 1021 -1.37449448893124 1022 | 1022 1022 1.69211021186992 1023 | 1023 1023 -0.229416301827666 1024 | 1024 1024 -0.194534545333168 1025 | 1025 1025 -0.202425886439182 1026 | 1026 1026 0.22609096263155 1027 | 1027 1027 0.204129026279692 1028 | 1028 1028 -0.0746406053664831 1029 | 1029 1029 0.53280653670479 1030 | 1030 1030 -0.289609827736964 1031 | 1031 1031 -0.0557379207474039 1032 | 1032 1032 0.502567033823192 1033 | 1033 1033 0.092971515698293 1034 | 1034 1034 0.547498183464671 1035 | 1035 1035 0.740293723166002 1036 | 1036 1036 -0.920927028997258 1037 | 1037 1037 0.0134913285080151 1038 | 1038 1038 0.875803901333755 1039 | 1039 1039 1.41194309986557 1040 | 1040 1040 -1.01899225290165 1041 | 1041 1041 0.721599365773271 1042 | 1042 1042 1.10860975807229 1043 | 1043 1043 0.874566334833937 1044 | 1044 1044 0.282461692283677 1045 | 1045 1045 -1.34482840244734 1046 | 1046 1046 -1.28769641775728 1047 | 1047 1047 0.268067986309329 1048 | 1048 1048 1.56229520024945 1049 | 1049 1049 0.160479930798567 1050 | 1050 1050 0.339993971799562 1051 | 1051 1051 0.207490415036378 1052 | 1052 1052 -0.276037690702472 1053 | 1053 1053 -0.681462251809987 1054 | 1054 1054 0.783942887928524 1055 | 1055 1055 1.12401399415792 1056 | 1056 1056 -0.563974534718373 1057 | 1057 1057 -1.63846053064263 1058 | 1058 1058 0.152343877583849 1059 | 1059 1059 0.215412903579939 1060 | 1060 1060 -0.254179890533108 1061 | 1061 1061 1.20131874371278 1062 | 1062 1062 -0.519774881906727 1063 | 1063 1063 1.39046344089032 1064 | 1064 1064 1.15134815444536 1065 | 1065 1065 -0.947136235139075 1066 | 1066 1066 1.80796902550676 1067 | 1067 1067 -1.40765560772364 1068 | 1068 1068 1.00023735448534 1069 | 1069 1069 -1.45052538574653 1070 | 1070 1070 0.0953457953441216 1071 | 1071 1071 0.430591063296857 1072 | 1072 1072 1.75877698621047 1073 | 1073 1073 1.30906479702458 1074 | 1074 1074 0.348672810848394 1075 | 1075 1075 0.766227674613301 1076 | 1076 1076 1.72073593722906 1077 | 1077 1077 0.936035021760088 1078 | 1078 1078 0.521516429651707 1079 | 1079 1079 -0.752703831004624 1080 | 1080 1080 -0.816825684670737 1081 | 1081 1081 1.21431318944423 1082 | 1082 1082 -0.184649118902779 1083 | 1083 1083 -0.590694625009465 1084 | 1084 1084 1.24272917185376 1085 | 1085 1085 0.828297406272783 1086 | 1086 1086 0.453648869259089 1087 | 1087 1087 -1.39583248582543 1088 | 1088 1088 -1.39112633450899 1089 | 1089 1089 0.494679031623715 1090 | 1090 1090 -0.286305055154897 1091 | 1091 1091 0.35865522804103 1092 | 1092 1092 1.06665811706962 1093 | 1093 1093 2.30051318297387 1094 | 1094 1094 -1.90900936926959 1095 | 1095 1095 -0.727058074627997 1096 | 1096 1096 -0.318160782825145 1097 | 1097 1097 -0.196831867821681 1098 | 1098 1098 0.396361954490237 1099 | 1099 1099 -1.01581196359444 1100 | 1100 1100 0.0488728493153561 1101 | 1101 1101 1.05151166888478 1102 | 1102 1102 0.454877400977546 1103 | 1103 1103 0.556462546676518 1104 | 1104 1104 0.199721063259406 1105 | 1105 1105 1.27763639985176 1106 | 1106 1106 -1.23399141075239 1107 | 1107 1107 -1.31638336194097 1108 | 1108 1108 1.54311476036956 1109 | 1109 1109 -0.708902902769269 1110 | 1110 1110 -1.20544131503888 1111 | 1111 1111 -1.34775691390571 1112 | 1112 1112 1.44566633518221 1113 | 1113 1113 0.346939928008969 1114 | 1114 1114 -1.25023839506123 1115 | 1115 1115 -1.2569703764953 1116 | 1116 1116 -1.15487905903709 1117 | 1117 1117 1.03484982128106 1118 | 1118 1118 0.97211167918077 1119 | 1119 1119 0.643190682417187 1120 | 1120 1120 0.480749821237588 1121 | 1121 1121 0.472910592231118 1122 | 1122 1122 0.709096654181311 1123 | 1123 1123 1.12182665608522 1124 | 1124 1124 1.49049070018702 1125 | 1125 1125 -0.518961609845313 1126 | 1126 1126 -1.44448515502326 1127 | 1127 1127 -0.162357359399894 1128 | 1128 1128 -1.10351831284211 1129 | 1129 1129 1.68542587132182 1130 | 1130 1130 0.175398388229135 1131 | 1131 1131 2.09377580148202 1132 | 1132 1132 -0.752017151809253 1133 | 1133 1133 0.0210569316852188 1134 | 1134 1134 0.945878195948718 1135 | 1135 1135 0.872496222685229 1136 | 1136 1136 0.429674349700101 1137 | 1137 1137 -0.130226353931167 1138 | 1138 1138 0.589758965285816 1139 | 1139 1139 0.0414245668460364 1140 | 1140 1140 -0.137171823076906 1141 | 1141 1141 0.0162941902843969 1142 | 1142 1142 -0.415466301974188 1143 | 1143 1143 -0.335267056506067 1144 | 1144 1144 0.530057376440621 1145 | 1145 1145 -0.565013096484698 1146 | 1146 1146 -0.725817488550875 1147 | 1147 1147 -0.252635347947109 1148 | 1148 1148 -1.48401185454624 1149 | 1149 1149 -0.238884287299247 1150 | 1150 1150 0.463323813500733 1151 | 1151 1151 0.696574534366873 1152 | 1152 1152 -1.46312748966409 1153 | 1153 1153 0.514734344915007 1154 | 1154 1154 0.800340604231711 1155 | 1155 1155 -0.186990597240868 1156 | 1156 1156 1.31888033455244 1157 | 1157 1157 -0.0550398005859455 1158 | 1158 1158 -0.0603621185307241 1159 | 1159 1159 1.29066931335901 1160 | 1160 1160 0.0943671206314975 1161 | 1161 1161 -0.549232379956651 1162 | 1162 1162 -0.125408091982191 1163 | 1163 1163 0.468814944847596 1164 | 1164 1164 -0.400193301565526 1165 | 1165 1165 2.44798542242501 1166 | 1166 1166 0.479701994028617 1167 | 1167 1167 -0.348903127736659 1168 | 1168 1168 -1.71793328371275 1169 | 1169 1169 -1.07061338238171 1170 | 1170 1170 -1.54906422980804 1171 | 1171 1171 1.46265063929291 1172 | 1172 1172 0.370696956805799 1173 | 1173 1173 -0.249766315904071 1174 | 1174 1174 0.824320019997683 1175 | 1175 1175 -1.19991870135692 1176 | 1176 1176 -0.569892736598544 1177 | 1177 1177 -0.171878826830332 1178 | 1178 1178 -0.476676436971038 1179 | 1179 1179 0.423694910176453 1180 | 1180 1180 -1.92916764897977 1181 | 1181 1181 0.49047954474508 1182 | 1182 1182 0.117261800006212 1183 | 1183 1183 -0.485484154511417 1184 | 1184 1184 -0.501845349696449 1185 | 1185 1185 0.269826585443971 1186 | 1186 1186 1.18611363028793 1187 | 1187 1187 0.280365655512276 1188 | 1188 1188 -0.0824603423768123 1189 | 1189 1189 -0.991477564808931 1190 | 1190 1190 -0.454606091116527 1191 | 1191 1191 -1.56006352974182 1192 | 1192 1192 1.74546288505755 1193 | 1193 1193 0.978733590552772 1194 | 1194 1194 0.16937911217355 1195 | 1195 1195 -1.34941391132425 1196 | 1196 1196 0.309143967767199 1197 | 1197 1197 0.356212734662192 1198 | 1198 1198 -1.71327092239964 1199 | 1199 1199 0.788399953867675 1200 | 1200 1200 -0.00450036267858971 1201 | 1201 1201 -0.0619996150546118 1202 | 1202 1202 0.139399436206039 1203 | 1203 1203 0.486570892461434 1204 | 1204 1204 0.339000280186033 1205 | 1205 1205 -0.575680878508633 1206 | 1206 1206 0.737922940679626 1207 | 1207 1207 0.79700495129357 1208 | 1208 1208 -1.594624574502 1209 | 1209 1209 0.111764701072141 1210 | 1210 1210 -0.4127295525447 1211 | 1211 1211 0.2950739968543 1212 | 1212 1212 0.180410334770304 1213 | 1213 1213 1.57945774328874 1214 | 1214 1214 -0.0742825369067975 1215 | 1215 1215 -1.15491588553402 1216 | 1216 1216 1.09932247534741 1217 | 1217 1217 -0.0531141224715516 1218 | 1218 1218 -1.05164649925624 1219 | 1219 1219 -0.429984979449038 1220 | -------------------------------------------------------------------------------- /lmm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HaohanWang/LMM-Python/aee09e7bd18714cb7de352499f6bd73e965c77d6/lmm.png -------------------------------------------------------------------------------- /lmm.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | 4 | # Main file for usage of (CS-LMM) Constrained Sparse multi-locus Linear Mixed Model 5 | # Cite information: 6 | # Wang H, Aragam B, Lee S, Xing EP, and Wu W. 7 | # Discovering Weaker Genetic Associations Guided by Known Associations, with Application to Alcoholism and Alzheimer's Disease Studies 8 | # 9 | 10 | def printOutHead(): out.write("\t".join(["RANK", "SNP_ID", "EFFECT_SIZE_ABS"]) + "\n") 11 | 12 | 13 | def outputResult(rank, id, beta): 14 | out.write("\t".join([str(x) for x in [rank, id, beta]]) + "\n") 15 | 16 | 17 | from optparse import OptionParser, OptionGroup 18 | 19 | usage = """usage: %prog [options] -n fileName 20 | This program provides the basic usage to CS-LMM, e.g: 21 | python cslmm.py -n data/mice.plink 22 | """ 23 | parser = OptionParser(usage=usage) 24 | 25 | dataGroup = OptionGroup(parser, "Data Options") 26 | modelGroup = OptionGroup(parser, "Model Options") 27 | 28 | ## data options 29 | dataGroup.add_option("-f", dest='fileType', default='plink', help="choices of input file type") 30 | dataGroup.add_option("-n", dest='fileName', help="name of the input file") 31 | 32 | ## model options 33 | modelGroup.add_option('-s', action='store_true', dest='select', default=False, 34 | help='Construct Kinship with Selected Covariates (LMM-select)') 35 | modelGroup.add_option('-l', action='store_true', dest='lowRank', default=False, 36 | help='Construct Kinship with low Rank Matrix (truncated rank LMM)') 37 | modelGroup.add_option('-t', dest='threshold', default=0, 38 | help='Construct Kinship with thresholded kinship matrix') 39 | modelGroup.add_option('-q', action='store_true', dest='quiet', default=False, help='Run in quiet mode') 40 | modelGroup.add_option('-p', action='store_true', dest='plot', default=False, help='Generate Manhattan plot') 41 | modelGroup.add_option('-m', action='store_true', dest='missing', default=False, 42 | help='Run without missing genotype imputation') 43 | 44 | ## advanced options 45 | parser.add_option_group(dataGroup) 46 | parser.add_option_group(modelGroup) 47 | 48 | (options, args) = parser.parse_args() 49 | 50 | 51 | import sys 52 | from utility.dataLoader import FileReader 53 | from model.LMM import LinearMixedModel 54 | from model.Lasso import Lasso 55 | from model.helpingMethods import * 56 | 57 | fileType = 0 58 | IN = None 59 | 60 | if len(args) != 0: 61 | parser.print_help() 62 | sys.exit() 63 | 64 | outFile = options.fileName + '.output' 65 | 66 | print ('Running ... ') 67 | 68 | reader = FileReader(fileName=options.fileName, fileType=options.fileType, imputation=(not options.missing)) 69 | X, Y, Xname = reader.readFiles() 70 | 71 | model = LinearMixedModel() 72 | 73 | print ('Computation starts ... ') 74 | 75 | if options.select: 76 | linearRegression = Lasso(lam=0) 77 | linearRegression.fit(X, Y) 78 | beta_lr = np.abs(linearRegression.getBeta()) 79 | 80 | Xselected = selectTopX(beta_lr, X, X.shape[1], [0.01, ])[0] 81 | lmm = LinearMixedModel() 82 | K = matrixMult(Xselected, Xselected.T)/float(Xselected.shape[1]) 83 | lmm.fit(X=X, K=K, Kva=None, Kve=None, y=Y) 84 | pvalue= lmm.getPvalues() 85 | else: 86 | if options.threshold != 0: 87 | lmm = LinearMixedModel(tau=0.001) 88 | K = matrixMult(X, X.T) / float(X.shape[1]) 89 | X, y = lmm.correctData(X=X, K=K, Kva=None, Kve=None, y=Y) 90 | K = matrixMult(X, X.T) / float(X.shape[1]) 91 | lmm.fit(X=X, K=K, Kva=None, Kve=None, y=y) 92 | pvalue = lmm.getPvalues() 93 | elif options.lowRank: 94 | lmm = LinearMixedModel(lowRankFit=True) 95 | K = matrixMult(X, X.T) / float(X.shape[1]) 96 | X, y = lmm.correctData(X=X, K=K, Kva=None, Kve=None, y=Y) 97 | K = matrixMult(X, X.T) / float(X.shape[1]) 98 | lmm.fit(X=X, K=K, Kva=None, Kve=None, y=y) 99 | pvalue = lmm.getPvalues() 100 | else: 101 | lmm = LinearMixedModel() 102 | K = matrixMult(X, X.T) / float(X.shape[1]) 103 | lmm.fit(X=X, K=K, Kva=None, Kve=None, y=Y) 104 | pvalue = lmm.getPvalues() 105 | 106 | 107 | 108 | ind = np.where(pvalue != 0)[0] 109 | bs = pvalue[ind].tolist() 110 | xname = [] 111 | for i in ind: 112 | xname.append(i) 113 | 114 | beta_name = zip(pvalue, Xname) 115 | bn = sorted(beta_name) 116 | 117 | out = open(outFile, 'w') 118 | printOutHead() 119 | 120 | for i in range(len(bn)): 121 | outputResult(i + 1, bn[i][1], bn[i][0]) 122 | 123 | out.close() 124 | 125 | print ('\nComputation ends normally, check the output file at ', outFile) 126 | 127 | if options.plot: 128 | from matplotlib import pyplot as plt 129 | fig, ax = plt.subplots(figsize=[100, 5]) 130 | plt.scatter(range(pvalue.shape[0]), -np.log(pvalue)) 131 | plt.savefig(outFile[:-7]+'.png') 132 | print ('\nManhattan plot drawn, check the output file at ', outFile) 133 | -------------------------------------------------------------------------------- /model/LMM.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | import scipy.optimize as opt 4 | 5 | import sys 6 | sys.path.append('../') 7 | 8 | from helpingMethods import * 9 | 10 | class LinearMixedModel: 11 | def __init__(self, numintervals=100, ldeltamin=-5, ldeltamax=5, mode='lmm', alpha=0.05, fdr=False, lowRankFit=False, tau=None): 12 | self.numintervals = numintervals 13 | self.ldeltamin = ldeltamin 14 | self.ldeltamax = ldeltamax 15 | self.mode = mode 16 | self.alpha = alpha 17 | self.fdr = fdr 18 | self.lowRankFit = lowRankFit 19 | self.tau = tau 20 | 21 | def correctData(self, X, K, Kva, Kve, y): 22 | [n_s, n_f] = X.shape 23 | assert X.shape[0] == y.shape[0], 'dimensions do not match' 24 | assert K.shape[0] == K.shape[1], 'dimensions do not match' 25 | assert K.shape[0] == X.shape[0], 'dimensions do not match' 26 | 27 | if self.tau is not None: 28 | K[K <= self.tau] = 0 29 | 30 | if y.ndim == 1: 31 | y = scipy.reshape(y, (n_s, 1)) 32 | 33 | 34 | if self.lowRankFit: 35 | S, U, ldelta0 = self.train_nullmodel_lowRankFit(y, K, S=Kva, U=Kve, numintervals=self.numintervals, 36 | ldeltamin=self.ldeltamin, ldeltamax=self.ldeltamax, 37 | p=n_f) 38 | else: 39 | S, U, ldelta0 = self.train_nullmodel(y, K, S=Kva, U=Kve, numintervals=self.numintervals, 40 | ldeltamin=self.ldeltamin, ldeltamax=self.ldeltamax, p=n_f) 41 | 42 | delta0 = scipy.exp(ldelta0) 43 | Sdi = 1. / (S + delta0) 44 | Sdi_sqrt = scipy.sqrt(Sdi) 45 | SUX = scipy.dot(U.T, X) 46 | # SUX = SUX * scipy.tile(Sdi_sqrt, (n_f, 1)).T 47 | for i in range(n_f): 48 | SUX[:, i] = SUX[:, i] * Sdi_sqrt.T 49 | SUy = scipy.dot(U.T, y) 50 | SUy = SUy * scipy.reshape(Sdi_sqrt, (n_s, 1)) 51 | 52 | return SUX, SUy 53 | 54 | def fit(self, X, K, Kva, Kve, y): 55 | [n_s, n_f] = X.shape 56 | assert X.shape[0] == y.shape[0], 'dimensions do not match' 57 | assert K.shape[0] == K.shape[1], 'dimensions do not match' 58 | assert K.shape[0] == X.shape[0], 'dimensions do not match' 59 | 60 | if self.tau is not None: 61 | K[K<=self.tau] = 0 62 | 63 | if y.ndim == 1: 64 | y = scipy.reshape(y, (n_s, 1)) 65 | 66 | X0 = np.ones(len(y)).reshape(len(y), 1) 67 | 68 | if self.lowRankFit: 69 | S, U, ldelta0 = self.train_nullmodel_lowRankFit(y, K, S=Kva, U=Kve, numintervals=self.numintervals, 70 | ldeltamin=self.ldeltamin, ldeltamax=self.ldeltamax, p=n_f) 71 | else: 72 | S, U, ldelta0 = self.train_nullmodel(y, K, S=Kva, U=Kve, numintervals=self.numintervals, 73 | ldeltamin=self.ldeltamin, ldeltamax=self.ldeltamax, p=n_f) 74 | 75 | delta0 = scipy.exp(ldelta0) 76 | Sdi = 1. / (S + delta0) 77 | Sdi_sqrt = scipy.sqrt(Sdi) 78 | SUX = scipy.dot(U.T, X) 79 | for i in range(n_f): 80 | SUX[:, i] = SUX[:, i] * Sdi_sqrt.T 81 | SUy = scipy.dot(U.T, y) 82 | SUy = SUy * scipy.reshape(Sdi_sqrt, (n_s, 1)) 83 | SUX0 = scipy.dot(U.T, X0) 84 | SUX0 = SUX0 * scipy.tile(Sdi_sqrt, (1, 1)).T 85 | 86 | self.pvalues = self.hypothesisTest(SUX, SUy, X, SUX0, X0) 87 | 88 | return np.exp(ldelta0) 89 | 90 | def rescale(self, a): 91 | return a / np.max(np.abs(a)) 92 | 93 | def selectValues(self, Kva): 94 | r = np.zeros_like(Kva) 95 | n = r.shape[0] 96 | tmp = self.rescale(Kva) 97 | ind = 0 98 | for i in range(n / 2, n - 1): 99 | if tmp[i + 1] - tmp[i] > 1.0 / n: 100 | ind = i + 1 101 | break 102 | r[ind:] = Kva[ind:] 103 | r[n - 1] = Kva[n - 1] 104 | return r 105 | 106 | def fdrControl(self): 107 | tmp = self.pvalues 108 | tmp = sorted(tmp) 109 | threshold = 1e-8 110 | n = len(tmp) 111 | for i in range(n): 112 | if tmp[i] < (i+1)*self.alpha/n: 113 | threshold = tmp[i] 114 | self.pvalues[self.pvalues>threshold] = 1 115 | 116 | def getPvalues(self): 117 | if not self.fdr: 118 | # self.beta[self.beta < -np.log(self.alpha)] = 0 119 | return self.pvalues 120 | else: 121 | self.fdrControl() 122 | return self.pvalues 123 | 124 | def getEstimatedBeta(self): 125 | return self.estimatedBeta 126 | 127 | def hypothesisTest(self, UX, Uy, X, UX0, X0): 128 | [m, n] = X.shape 129 | p = [] 130 | betas = [] 131 | for i in range(n): 132 | if UX0 is not None: 133 | UXi = np.hstack([UX0, UX[:, i].reshape(m, 1)]) 134 | XX = matrixMult(UXi.T, UXi) 135 | XX_i = linalg.pinv(XX) 136 | beta = matrixMult(matrixMult(XX_i, UXi.T), Uy) 137 | Uyr = Uy - matrixMult(UXi, beta) 138 | Q = np.dot(Uyr.T, Uyr) 139 | sigma = Q * 1.0 / m 140 | else: 141 | Xi = np.hstack([X0, UX[:, i].reshape(m, 1)]) 142 | XX = matrixMult(Xi.T, Xi) 143 | XX_i = linalg.pinv(XX) 144 | beta = matrixMult(matrixMult(XX_i, Xi.T), Uy) 145 | Uyr = Uy - matrixMult(Xi, beta) 146 | Q = np.dot(Uyr.T, Uyr) 147 | sigma = Q * 1.0 / m 148 | betas.append(beta[1][0]) 149 | ts, ps = tstat(beta[1], XX_i[1, 1], sigma, 1, m) 150 | if -1e30 < ts < 1e30: 151 | p.append(ps) 152 | else: 153 | p.append(1) 154 | # print beta[1][0], XX_i[1, 1], sigma, ps 155 | p = np.array(p) 156 | p[p<=1e-100] = 1e-100 157 | self.estimatedBeta = np.array(betas) 158 | return p 159 | 160 | def train_nullmodel(self, y, K, S=None, U=None, numintervals=500, ldeltamin=-5, ldeltamax=5, scale=0, mode='lmm', p=1): 161 | ldeltamin += scale 162 | ldeltamax += scale 163 | 164 | if S is None or U is None: 165 | S, U = linalg.eigh(K) 166 | 167 | Uy = scipy.dot(U.T, y) 168 | 169 | # grid search 170 | nllgrid = scipy.ones(numintervals + 1) * scipy.inf 171 | ldeltagrid = scipy.arange(numintervals + 1) / (numintervals * 1.0) * (ldeltamax - ldeltamin) + ldeltamin 172 | for i in scipy.arange(numintervals + 1): 173 | nllgrid[i] = nLLeval(ldeltagrid[i], Uy, S) # the method is in helpingMethods 174 | 175 | nllmin = nllgrid.min() 176 | ldeltaopt_glob = ldeltagrid[nllgrid.argmin()] 177 | 178 | for i in scipy.arange(numintervals - 1) + 1: 179 | if (nllgrid[i] < nllgrid[i - 1] and nllgrid[i] < nllgrid[i + 1]): 180 | ldeltaopt, nllopt, iter, funcalls = opt.brent(nLLeval, (Uy, S), 181 | (ldeltagrid[i - 1], ldeltagrid[i], ldeltagrid[i + 1]), 182 | full_output=True) 183 | if nllopt < nllmin: 184 | nllmin = nllopt 185 | ldeltaopt_glob = ldeltaopt 186 | 187 | return S, U, ldeltaopt_glob 188 | 189 | def train_nullmodel_lowRankFit(self, y, K, S=None, U=None, numintervals=500, ldeltamin=-5, ldeltamax=5, scale=0, mode='lmm', p=1): 190 | ldeltamin += scale 191 | ldeltamax += scale 192 | 193 | if S is None or U is None: 194 | S, U = linalg.eigh(K) 195 | 196 | Uy = scipy.dot(U.T, y) 197 | 198 | S = self.selectValues(S) 199 | nllgrid = scipy.ones(numintervals + 1) * scipy.inf 200 | ldeltagrid = scipy.arange(numintervals + 1) / (numintervals * 1.0) * (ldeltamax - ldeltamin) + ldeltamin 201 | for i in scipy.arange(numintervals + 1): 202 | nllgrid[i] = nLLeval(ldeltagrid[i], Uy, S) # the method is in helpingMethods 203 | 204 | nllmin = nllgrid.min() 205 | ldeltaopt_glob = ldeltagrid[nllgrid.argmin()] 206 | 207 | for i in scipy.arange(numintervals - 1) + 1: 208 | if (nllgrid[i] < nllgrid[i - 1] and nllgrid[i] < nllgrid[i + 1]): 209 | ldeltaopt, nllopt, iter, funcalls = opt.brent(nLLeval, (Uy, S), 210 | (ldeltagrid[i - 1], ldeltagrid[i], ldeltagrid[i + 1]), 211 | full_output=True) 212 | if nllopt < nllmin: 213 | nllmin = nllopt 214 | ldeltaopt_glob = ldeltaopt 215 | 216 | return S, U, ldeltaopt_glob -------------------------------------------------------------------------------- /model/Lasso.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | import numpy as np 4 | from numpy import linalg 5 | 6 | class Lasso: 7 | def __init__(self, lam=1., lr=1., tol=1e-5, logistic=False): 8 | self.lam = lam 9 | self.lr = lr 10 | self.tol = tol 11 | self.decay = 0.5 12 | self.maxIter = 500 13 | self.logistic = logistic 14 | 15 | def setLambda(self, lam): 16 | self.lam = lam 17 | 18 | def setLogisticFlag(self, logistic): 19 | self.logistic = logistic 20 | 21 | def setLearningRate(self, lr): 22 | self.lr = lr 23 | 24 | def setMaxIter(self, a): 25 | self.maxIter = a 26 | 27 | def setTol(self, t): 28 | self.tol = t 29 | 30 | def fit(self, X, y): 31 | X0 = np.ones(len(y)).reshape(len(y), 1) 32 | X = np.hstack([X, X0]) 33 | shp = X.shape 34 | self.beta = np.zeros([shp[1], 1]) 35 | resi_prev = np.inf 36 | resi = self.cost(X, y) 37 | step = 0 38 | while np.abs(resi_prev - resi) > self.tol and step < self.maxIter: 39 | keepRunning = True 40 | resi_prev = resi 41 | runningStep = 0 42 | while keepRunning and runningStep < 10: 43 | runningStep += 1 44 | prev_beta = self.beta 45 | pg = self.proximal_gradient(X, y) 46 | self.beta = self.proximal_proj(self.beta - pg * self.lr) 47 | keepRunning = self.stopCheck(prev_beta, self.beta, pg, X, y) 48 | if keepRunning: 49 | self.lr = self.decay * self.lr 50 | step += 1 51 | resi = self.cost(X, y) 52 | return self.beta 53 | 54 | def cost(self, X, y): 55 | if self.logistic: 56 | tmp = (np.dot(X, self.beta)).T 57 | return -0.5 * np.sum(y*tmp - np.log(1+np.exp(tmp))) + self.lam * linalg.norm( 58 | self.beta, ord=1) 59 | else: 60 | return 0.5 * np.sum(np.square(y - np.dot(X, self.beta)).transpose()) + self.lam * linalg.norm( 61 | self.beta, ord=1) 62 | 63 | def proximal_gradient(self, X, y): 64 | if self.logistic: 65 | return -np.dot(X.transpose(), (y.reshape((y.shape[0], 1)) - 1. / (1 + np.exp(-np.dot(X, self.beta))))) 66 | else: 67 | return -np.dot(X.transpose(), (y.reshape((y.shape[0], 1)) - (np.dot(X, self.beta)))) 68 | 69 | def proximal_proj(self, B): 70 | t = self.lam * self.lr 71 | zer = np.zeros_like(B) 72 | result = np.maximum(zer, B - t) - np.maximum(zer, -B - t) 73 | return result 74 | 75 | def predict(self, X): 76 | X0 = np.ones(X.shape[0]).reshape(X.shape[0], 1) 77 | X = np.hstack([X, X0]) 78 | if not self.logistic: 79 | return np.dot(X, self.beta) 80 | else: 81 | t = 1. / (1 + np.exp(-np.dot(X, self.beta))) 82 | y = np.zeros_like(t) 83 | y[t>0.5] = 1 84 | return y 85 | 86 | def getBeta(self): 87 | self.beta = self.beta.reshape(self.beta.shape[0]) 88 | return self.beta[:-1] 89 | 90 | def stopCheck(self, prev, new, pg, X, y): 91 | if np.square(linalg.norm((y - (np.dot(X, new))))) <= \ 92 | np.square(linalg.norm((y - (np.dot(X, prev))))) + np.dot(pg.transpose(), ( 93 | new - prev)) + 0.5 * self.lam * np.square(linalg.norm(prev - new)): 94 | return False 95 | else: 96 | return True -------------------------------------------------------------------------------- /model/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | -------------------------------------------------------------------------------- /model/helpingMethods.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | import scipy.linalg as linalg 4 | import scipy 5 | import numpy as np 6 | from scipy import stats 7 | import operator 8 | 9 | def matrixMult(A, B): 10 | try: 11 | linalg.blas 12 | except AttributeError: 13 | return np.dot(A, B) 14 | 15 | if not A.flags['F_CONTIGUOUS']: 16 | AA = A.T 17 | transA = True 18 | else: 19 | AA = A 20 | transA = False 21 | 22 | if not B.flags['F_CONTIGUOUS']: 23 | BB = B.T 24 | transB = True 25 | else: 26 | BB = B 27 | transB = False 28 | 29 | return linalg.blas.dgemm(alpha=1., a=AA, b=BB, trans_a=transA, trans_b=transB) 30 | 31 | def factor(X, rho): 32 | """ 33 | computes cholesky factorization of the kernel K = 1/rho*XX^T + I 34 | Input: 35 | X design matrix: n_s x n_f (we assume n_s << n_f) 36 | rho: regularizaer 37 | Output: 38 | L lower triangular matrix 39 | U upper triangular matrix 40 | """ 41 | n_s, n_f = X.shape 42 | K = 1 / rho * scipy.dot(X, X.T) + scipy.eye(n_s) 43 | U = linalg.cholesky(K) 44 | return U 45 | 46 | def tstat(beta, var, sigma, q, N, log=False): 47 | 48 | """ 49 | Calculates a t-statistic and associated p-value given the estimate of beta and its standard error. 50 | This is actually an F-test, but when only one hypothesis is being performed, it reduces to a t-test. 51 | """ 52 | ts = beta / np.sqrt(var * sigma) 53 | # ts = beta / np.sqrt(sigma) 54 | # ps = 2.0*(1.0 - stats.t.cdf(np.abs(ts), self.N-q)) 55 | # sf == survival function - this is more accurate -- could also use logsf if the precision is not good enough 56 | if log: 57 | ps = 2.0 + (stats.t.logsf(np.abs(ts), N - q)) 58 | else: 59 | ps = 2.0 * (stats.t.sf(np.abs(ts), N - q)) 60 | if not len(ts) == 1 or not len(ps) == 1: 61 | raise Exception("Something bad happened :(") 62 | # return ts, ps 63 | return ts.sum(), ps.sum() 64 | 65 | def nLLeval(ldelta, Uy, S): 66 | """ 67 | evaluate the negative log likelihood of a random effects model: 68 | nLL = 1/2(n_s*log(2pi) + logdet(K) + 1/ss * y^T(K + deltaI)^{-1}y, 69 | where K = USU^T. 70 | Uy: transformed outcome: n_s x 1 71 | S: eigenvectors of K: n_s 72 | ldelta: log-transformed ratio sigma_gg/sigma_ee 73 | """ 74 | n_s = Uy.shape[0] 75 | delta = scipy.exp(ldelta) 76 | 77 | # evaluate log determinant 78 | Sd = S + delta 79 | ldet = scipy.sum(scipy.log(Sd)) 80 | 81 | # evaluate the variance 82 | Sdi = 1.0 / Sd 83 | Uy = Uy.flatten() 84 | ss = 1. / n_s * (Uy * Uy * Sdi).sum() 85 | 86 | # evalue the negative log likelihood 87 | nLL = 0.5 * (n_s * scipy.log(2.0 * scipy.pi) + ldet + n_s + n_s * scipy.log(ss)) 88 | 89 | return nLL 90 | 91 | def nLLeval_singleValue(ldelta_singleValue, ldelta, ind, Uy, S): 92 | n_s = Uy.shape[0] 93 | ldelta[ind] = ldelta_singleValue 94 | delta = scipy.exp(ldelta) 95 | 96 | Sd = S + delta 97 | ldet = scipy.sum(scipy.log(Sd)) 98 | 99 | Sdi = 1.0 / Sd 100 | Uy = Uy.flatten() 101 | ss = 1. / n_s * (Uy * Uy * Sdi).sum() 102 | 103 | nLL = 0.5 * (n_s * scipy.log(2.0 * scipy.pi) + ldet + n_s + n_s * scipy.log(ss)) 104 | 105 | return nLL 106 | 107 | def nLLeval_delta(delta, Uy, S): 108 | n_s = Uy.shape[0] 109 | Sd = S + delta 110 | ldet = scipy.sum(scipy.log(Sd)) 111 | 112 | Sdi = 1.0 / Sd 113 | Uy = Uy.flatten() 114 | ss = 1. / n_s * (Uy * Uy * Sdi).sum() 115 | 116 | nLL = 0.5 * (n_s * scipy.log(2.0 * scipy.pi) + ldet + n_s + n_s * scipy.log(ss)) 117 | 118 | return nLL 119 | 120 | def nLLeval_delta_grad(delta, Uy, S): 121 | n_s = Uy.shape[0] 122 | Sd = S + delta 123 | Sdi = 1.0 / Sd 124 | 125 | ldet_grad = Sdi 126 | 127 | Uy = Uy.flatten() 128 | ss = 1. / n_s * (Uy * Uy * Sdi).sum() 129 | 130 | nll_grad = 0.5*(ldet_grad - np.exp(-ss)*(Uy*Uy/(Sdi*Sdi))) 131 | return nll_grad 132 | 133 | def KFold(X,y,k=5): 134 | foldsize = int(X.shape[0]/k) 135 | for idx in range(k): 136 | testlst = range(idx*foldsize,idx*foldsize+foldsize) 137 | Xtrain = np.delete(X,testlst,0) 138 | ytrain = np.delete(y,testlst,0) 139 | Xtest = X[testlst] 140 | ytest = y[testlst] 141 | yield Xtrain, ytrain, Xtest, ytest 142 | 143 | def selectTopX(beta_t, Xchrr, pnum, percentages): 144 | beta_order_dic = dict((i,beta_t[i]) for i in range(pnum)) 145 | beta_rank = sorted(beta_order_dic.items(), key=operator.itemgetter(1)) 146 | beta_rank.reverse() 147 | Xs = [] 148 | for p in percentages: 149 | num = int(pnum*p) 150 | indices = np.array([beta_rank[i][0] for i in range(num)]) 151 | X = Xchrr[:,indices] 152 | Xs.append(X) 153 | return Xs 154 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | setup( 4 | name='lmm-python', 5 | version='0.99', 6 | author = "Haohan Wang", 7 | author_email='haohanw@cs.cmu.edu', 8 | url = "https://github.com/HaohanWang/LMM-Python", 9 | description = "Tradeoffs of Linear Mixed Models in Genome-wide Association Studies", 10 | packages=['models', 'utility'], 11 | scripts=['lmm.py'], 12 | ) -------------------------------------------------------------------------------- /utility/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | -------------------------------------------------------------------------------- /utility/dataLoader.py: -------------------------------------------------------------------------------- 1 | __author__ = 'Haohan Wang' 2 | 3 | import sys 4 | import numpy as np 5 | import pysnptools 6 | import operator 7 | 8 | 9 | class FileReader(): 10 | def __init__(self, fileName, imputation=True, fileType=None): 11 | self.fileName = fileName 12 | self.imputationFlag = imputation 13 | if fileType is None: 14 | self.fileType = 'plink' 15 | else: 16 | self.fileType = fileType 17 | 18 | def famReader(self, fileName): 19 | d = [] 20 | text = [line.strip() for line in open(fileName)] 21 | for line in text: 22 | d.append(float(line.split()[-1])) 23 | return np.array(d) 24 | 25 | def imputation(self, X): 26 | print 'Missing genotype imputation ... ' 27 | print 'This may take a while, use -m to skip this step' 28 | [n, p] = X.shape 29 | dis = np.zeros([n, n]) 30 | for i in range(n): 31 | for j in range(i+1, n): 32 | d = np.nanmean(np.square(X[i,:]-X[j,:])) 33 | dis[i,j] = d 34 | dis[j,i] = d 35 | 36 | mx, xy = np.where(np.isnan(X)==1) 37 | 38 | missing = {} 39 | for x in mx: 40 | if x not in missing: 41 | missing[x] = 1 42 | else: 43 | missing[x] += 1 44 | 45 | ms = sorted(missing.items(), key=operator.itemgetter(1)) 46 | ms.reverse() 47 | for (x, k) in ms: 48 | neighbors = np.argsort(dis[x,:])[1:] 49 | for i in range(n-1): 50 | n = neighbors[i] 51 | ind = np.where(np.isnan(X[x,:])==1)[0] 52 | if len(ind) == 0: 53 | break 54 | X[x,ind] = X[n,ind] 55 | return X 56 | 57 | 58 | def simpleImputation(self, X): 59 | X[np.isnan(X)] = 0 60 | return X 61 | 62 | 63 | def readFiles(self): 64 | print 'Reading Data ...' 65 | X = None 66 | y = None 67 | Xname = None 68 | if self.fileType == 'plink': 69 | from pysnptools.snpreader import Bed 70 | snpreader = Bed(self.fileName+'.bed') 71 | snpdata = snpreader.read() 72 | X = snpdata.val 73 | Xname = snpdata.sid 74 | y = self.famReader(self.fileName+".fam") 75 | 76 | if self.fileType == 'csv': 77 | X = np.loadtxt(self.fileName+'.geno.csv', delimiter=',') 78 | y = np.loadtxt(self.fileName+'.pheno.csv', delimiter=',') 79 | try: 80 | Xname = np.loadtxt(self.fileName+'.marker.csv', delimiter=',') 81 | except: 82 | Xname = ['geno ' + str(i+1) for i in range(X.shape[1])] 83 | if self.imputationFlag: 84 | X = self.imputation(X) 85 | keep = True - np.isnan(y) 86 | return X[keep,:], y[keep], Xname 87 | else: 88 | X = self.simpleImputation(X) 89 | keep = np.isnan(y)==False 90 | return X[keep,:], y[keep], Xname 91 | --------------------------------------------------------------------------------