├── .idea ├── misc.xml └── vcs.xml ├── Data ├── FTRLtest.txt └── FTRLtrain.txt ├── FOBOS.py ├── FTML.py ├── FTRL.py ├── FTRL_Optimizer.py ├── OGD.py ├── RDA.py ├── README.md └── TG.py /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /Data/FTRLtest.txt: -------------------------------------------------------------------------------- 1 | -0.0264327236841 -0.877065160154 -0.327464537305 -0.585718307338 0.0 2 | -0.785449036632 0.791255037993 0.30042139101 0.714386271546 1.0 3 | -0.0632711308657 -0.780223939543 -0.283494130194 -0.837712007583 0.0 4 | -0.197693758982 -0.62748160403 -0.141260150542 0.0553709572777 0.0 5 | -0.184280282082 -0.931221567059 0.176947710235 0.33293522504 1.0 6 | 0.962153375684 0.741777968906 0.712268465597 0.114729105236 1.0 7 | 0.599794020242 -0.840687719998 -0.406327941303 -0.981821597685 0.0 8 | 0.776574361421 0.936871235098 -0.755241813599 0.820352331106 0.0 9 | -0.407890177986 -0.689125731905 -0.898532418769 -0.707587228428 0.0 10 | -0.12651856794 0.848149793743 -0.277318062932 -0.161056648855 0.0 11 | 0.938459419255 0.255251381931 -0.202871819103 -0.421802067093 1.0 12 | 0.195567855921 -0.20924441656 -0.734016209556 0.696119647919 0.0 13 | 0.725529856645 -0.0668523135094 -0.0620421451881 -0.568056872216 1.0 14 | -0.267527704315 -0.448446574243 -0.360071307846 -0.558650536267 0.0 15 | 0.00078711025425 0.223782662562 0.193729961853 0.774965151927 1.0 16 | -0.825452763394 -0.0166134681682 -0.66548045346 0.606724021995 0.0 17 | -0.309365585416 -0.7341319305 -0.272356976536 0.699974468694 0.0 18 | -0.259210858836 0.431505001543 -0.133124585336 -0.79839509734 0.0 19 | -0.572318507396 -0.539861503681 
0.672815109754 -0.0443712717373 1.0 20 | -0.426550558935 0.424515340106 -0.0617813538434 0.222150247362 0.0 21 | 0.557113588903 -0.350896321932 -0.0825636491504 0.686249732848 1.0 22 | 0.71213596608 0.227978793454 0.920239062163 -0.076941529238 1.0 23 | 0.196443878987 -0.261057834162 -0.444701950011 -0.827295791987 0.0 24 | 0.968456418303 0.732428291632 0.721116521691 0.156620478197 1.0 25 | -0.628380511379 0.441075005034 -0.827935384492 0.149554064179 0.0 26 | 0.156798484268 -0.997880305224 0.109798122785 0.197936607174 1.0 27 | 0.573588633674 -0.656866274591 0.260606974698 -0.794572709649 1.0 28 | 0.768990446351 -0.520979330747 0.458096625939 -0.553469843849 1.0 29 | 0.404052389049 0.483805143618 0.969572497552 -0.701018613426 1.0 30 | 0.1946343045 0.0253589204742 -0.047630713912 0.718697199259 1.0 31 | 0.417717288537 -0.428281674367 -0.0231835872526 0.921707120297 1.0 32 | -0.571995708399 0.426636269003 -0.815537103296 -0.391218075533 0.0 33 | -0.725067174332 0.656652808765 0.43386505652 0.052845349388 1.0 34 | 0.206776412387 -0.242983518331 -0.135724985103 -0.576490824388 0.0 35 | 0.173374718866 -0.0670120641592 0.767977140993 -0.0779312912752 1.0 36 | 0.600613241296 -0.951010243328 -0.195575811431 0.968303567414 1.0 37 | -0.629507337173 0.37623404701 -0.343805827334 0.208254235018 0.0 38 | -0.319150708055 -0.895678668838 -0.169205904399 -0.938639495519 0.0 39 | -0.254272548876 -0.531837013651 -0.619080421461 0.787404421671 0.0 40 | 0.252186249622 0.698679323638 0.906515185948 0.674814452255 1.0 41 | -0.384728720913 0.998181020905 0.681178569814 -0.354838744529 1.0 42 | -0.87171428779 -0.826207217968 0.416144318931 -0.607258816864 0.0 43 | -0.8192735217 0.344427480618 -0.0484892396544 0.609470017304 0.0 44 | 0.27436841193 0.994917095774 -0.0998417474423 -0.369162211712 1.0 45 | 0.802453772313 -0.781768311584 0.188245816427 -0.733570410222 1.0 46 | 0.499292593837 -0.121280939216 0.836273110148 0.821115868565 1.0 47 | 0.419529032623 0.812052859219 0.946630748862 
0.186136196358 1.0 48 | 0.864112998287 0.269785015955 0.409357475621 -0.339292703147 1.0 49 | 0.114769832815 -0.706104240285 0.195237655187 0.244280924894 1.0 50 | 0.519420118051 -0.524675185582 -0.191649791152 -0.0937664132789 0.0 51 | 0.711579583084 -0.82287758624 0.379872041956 -0.399164332495 1.0 52 | 0.233388808253 0.216863610677 0.642541708616 0.240247299333 1.0 53 | -0.281442923565 0.0655772578866 0.898076447359 -0.918454543409 1.0 54 | 0.402322237614 -0.359713607987 -0.172249268031 -0.495921576246 0.0 55 | -0.183347458771 0.557933574148 0.468504685632 -0.517742553709 1.0 56 | 0.679165597453 -0.145743951651 0.458332679868 -0.131778912067 1.0 57 | 0.580724208319 0.70713404535 0.697945412852 0.683648932503 1.0 58 | -0.281036736691 -0.501143494697 0.473183591438 -0.893046132923 0.0 59 | -0.177676782224 0.929724812501 0.253327270897 -0.80918697206 1.0 60 | 0.373463320863 0.700254398351 0.871881196034 0.128867773036 1.0 61 | 0.478096369507 -0.537447893188 0.560955602061 0.550975050641 1.0 62 | 0.195131201666 -0.558772668497 0.498935484459 0.176562611941 1.0 63 | -0.987120437417 -0.432998322037 -0.862265135865 -0.537397615336 0.0 64 | 0.779794050101 0.238989087406 -0.562947352861 0.981189253365 1.0 65 | -0.199094343554 0.201039035885 -0.498364938821 0.0916471059989 0.0 66 | 0.339558911995 -0.493815500004 0.129156751438 0.0733113650456 1.0 67 | 0.480409331585 0.234528908932 -0.703131725577 -0.194011967854 0.0 68 | 0.958264347065 0.735346821362 -0.0121408129397 -0.158687640881 1.0 69 | -0.369752602296 -0.0596249904958 0.571448089683 0.306390671102 1.0 70 | 0.336957204292 -0.830434008579 0.679214118064 0.283865340165 1.0 71 | -0.117882652565 0.149370801245 0.30142082426 -0.105724990851 1.0 72 | -0.00646896510893 -0.580967491614 -0.065308777998 0.712802388436 1.0 73 | -0.790140984352 -0.794139585431 0.338294195948 -0.5151643627 0.0 74 | -0.50683528593 -0.585522636417 -0.294499159457 0.910575115682 0.0 75 | 0.879926328403 0.420796939898 0.935839145281 0.302885784244 
1.0 76 | -0.135713446999 -0.448307787665 0.257526860938 -0.148834858006 1.0 77 | 0.378645824735 0.884120335899 -0.0947764070766 -0.644646610364 1.0 78 | -0.969520754714 0.0406348758957 -0.655280399599 -0.624061492737 0.0 79 | 0.669825194803 -0.632510539385 0.0951774639864 -0.415398507095 1.0 80 | 0.933032608307 -0.613607983888 -0.965899844187 -0.657132228816 0.0 81 | 0.766178148693 0.208415468659 -0.208941089513 0.285403330059 1.0 82 | -0.958554483046 -0.0349146283133 0.587655155586 -0.742900925601 0.0 83 | 0.771214642521 0.897284585388 -0.918634300155 -0.426851198554 0.0 84 | 0.594646019261 -0.170037709888 -0.286569511172 -0.0870995669456 0.0 85 | 0.40226973883 0.204791598257 0.201856449036 -0.473538223972 1.0 86 | -0.841415533846 0.601843767278 -0.577036914894 0.203206108917 0.0 87 | 0.899624693485 -0.306799869594 -0.914103233193 -0.479734653275 0.0 88 | 0.762639551769 0.753926088799 0.807313630305 0.0720156578233 1.0 89 | -0.678456935225 -0.392838752573 -0.889362924752 0.390736880218 0.0 90 | 0.0963328096658 -0.916783169647 -0.820112322776 0.284659645382 0.0 91 | 0.808168625727 0.476120354233 0.241112588838 -0.235174288458 1.0 92 | -0.290607585391 -0.91665755915 0.624295129268 -0.0847859167408 1.0 93 | 0.15912438326 0.58384614319 -0.737263908281 0.890760497773 0.0 94 | 0.908697255706 0.480033288567 -0.266942963499 -0.301391608196 1.0 95 | -0.507713559306 0.023982623908 0.705123490784 -0.828883587202 1.0 96 | -0.089941680624 0.605052810092 -0.0419950117423 -0.148867108509 0.0 97 | 0.726945332666 -0.294447985774 -0.789653873267 0.684052698942 0.0 98 | -0.628706582636 -0.520485678716 0.181526626162 -0.333340853814 0.0 99 | 0.132355600014 0.524391014814 0.852758856237 -0.83540855165 1.0 100 | -0.757642092747 0.749558295311 0.67960126473 0.230039576669 1.0 101 | 0.298712861442 0.478306139956 -0.0254706021219 -0.554910322416 1.0 102 | 0.390905758969 -0.326412224646 0.0442630893767 0.0834614898867 1.0 103 | -0.869958414655 -0.647791838906 0.93493454387 -0.797484124709 
1.0 104 | 0.944811489223 0.0174594903465 0.726924403506 -0.0404190537329 1.0 105 | -0.636008958096 0.237657822076 -0.605998490448 0.0257534057514 0.0 106 | 0.642267139309 0.0301389173795 0.568683791587 -0.696571631395 1.0 107 | -0.841600644906 -0.35107321911 -0.460304443876 0.791040928224 0.0 108 | 0.377333201924 0.337310243947 -0.651642498902 0.208865354446 0.0 109 | 0.756723368664 0.0512912176609 0.549337990522 -0.589120338823 1.0 110 | -0.400369095982 -0.729750850193 -0.965336755315 -0.177223796718 0.0 111 | 0.7446062373 -0.728038961558 -0.195225004584 0.781636747663 1.0 112 | -0.523995863513 0.467528462095 0.474743309585 -0.352113534577 1.0 113 | 0.323565461952 -0.895550503224 0.164555522056 -0.498599104653 0.0 114 | 0.0661391757234 -0.0765767737421 -0.977543573866 0.564598346645 0.0 115 | -0.0422467602078 0.465770254747 0.855812211887 0.607979358706 1.0 116 | 0.500167023324 -0.678440340609 -0.130255480897 -0.880795459002 0.0 117 | 0.557636261828 -0.0591062365345 -0.572317761387 -0.133051369702 0.0 118 | 0.0798751782612 -0.749257821953 0.267919628159 0.512776228203 1.0 119 | -0.84033281074 -0.684062699589 -0.748271239087 -0.386256402263 0.0 120 | 0.688688993237 -0.173333755854 -0.377544719925 -0.398184879701 0.0 121 | 0.0323628238674 -0.893365045294 -0.655222934306 0.0155889315908 0.0 122 | -0.167986635004 0.658701291165 -0.572958866205 -0.172545828169 0.0 123 | -0.253523366967 0.259489486656 0.482114415481 0.305712650838 1.0 124 | 0.603291405618 0.971391579016 -0.794783659861 0.563917125066 0.0 125 | 0.505289115851 0.447956594057 0.23919891809 0.679688883946 1.0 126 | -0.391008989826 -0.961172352659 0.177867843106 -0.611323932276 0.0 127 | 0.66225947395 0.683674134046 0.483414604771 -0.014413849783 1.0 128 | -0.569688803296 0.619601854667 -0.742320999314 -0.949482326803 0.0 129 | 0.611521624812 0.661474802104 0.35079993684 0.362037086231 1.0 130 | 0.845483619829 0.483475093086 0.892446385678 0.106675582355 1.0 131 | 0.903566323227 0.218975398292 
-0.453359667998 -0.315467502 0.0 132 | -0.350923001512 -0.246840578598 -0.249258412408 -0.470751041076 0.0 133 | -0.585272056306 -0.202886766507 0.177522283163 -0.76643630599 0.0 134 | 0.754175592138 0.363015875656 0.236594785998 -0.738348072516 1.0 135 | 0.615565489451 -0.277791854612 -0.738237261382 0.60437472091 0.0 136 | 0.072864500882 -0.073951704782 0.746183897559 0.888000905413 1.0 137 | -0.69651327323 0.742150806626 -0.8585522836 -0.00468404109658 0.0 138 | 0.201907660233 0.359495603814 0.102372979573 -0.980919816081 0.0 139 | 0.960718505017 -0.747423808054 0.0467792509463 -0.7682034771 1.0 140 | -0.990333184253 -0.89522176233 0.14391762962 0.718597359558 0.0 141 | -0.932467012627 -0.119753942178 0.0141493051199 0.595908383006 0.0 142 | -0.98708922779 0.657123879063 0.204390400532 0.203305651201 1.0 143 | -0.217338844808 0.756562536525 -0.0195900925743 0.130409544495 1.0 144 | -0.0170548990103 -0.328393416665 -0.495976721672 -0.66852409325 0.0 145 | 0.141065442483 0.794810022623 0.0487568195882 0.939271650663 1.0 146 | 0.300529868732 -0.798406907243 0.0256359020875 -0.106058677796 0.0 147 | 0.761597767955 -0.499091438528 0.546874478065 0.127250350573 1.0 148 | -0.243106397338 0.310269186158 -0.452519366017 -0.732093087911 0.0 149 | -0.491059896227 0.757199821363 -0.653986018384 0.733709482027 0.0 150 | -0.932045509088 0.202955584531 0.619785570807 -0.264233327192 1.0 151 | -0.574162786485 -0.430743000435 -0.960812968023 0.904083129633 0.0 152 | -0.885743661461 -0.527169854109 0.380771924923 -0.644596901806 0.0 153 | -0.539095356925 -0.352252987319 -0.926187352923 0.397521098728 0.0 154 | -0.244653141421 -0.0832037099434 0.76466736653 0.257362881699 1.0 155 | 0.398161452528 -0.915581696469 -0.312834423304 -0.0989148475017 0.0 156 | 0.786778735324 -0.307817639835 -0.234020644667 0.651369711817 1.0 157 | -0.641918004651 0.326264358642 0.825614118994 -0.708172931224 1.0 158 | 0.92463749865 0.894455870583 0.683626584038 -0.383418123017 1.0 159 | 0.785028297582 
-0.463665585993 -0.0664274846688 0.2473812924 1.0 160 | -0.56473206044 -0.808494532261 0.349921531255 -0.393904609043 0.0 161 | -0.195769401863 0.219995999866 0.390167801258 0.328530933254 1.0 162 | -0.896054875292 -0.386077171027 -0.443258050809 -0.999216096259 0.0 163 | 0.74225011114 -0.430290030515 -0.51151225929 -0.287038894744 0.0 164 | -0.929885255851 0.658860708033 0.791588397891 -0.733970017015 1.0 165 | 0.145571936394 -0.847495915564 -0.0427920529881 -0.837884233488 0.0 166 | -0.70955415413 -0.341968119137 0.799972562451 0.38690992939 1.0 167 | -0.690076558059 0.192548033258 -0.42000810843 -0.687830298549 0.0 168 | -0.98454769276 0.889400300651 0.226188142807 -0.278291527279 0.0 169 | 0.163679620056 0.578785101122 -0.730680549041 0.1124395934 0.0 170 | -0.0518182985087 0.36938328659 -0.548753541869 -0.653351733297 0.0 171 | -0.125625346557 -0.578168241861 -0.674861725981 0.548101147074 0.0 172 | -0.500637062945 -0.583685133995 -0.524767872406 -0.29115411524 0.0 173 | 0.0946599983022 0.10038564731 -0.365699834556 0.840619052623 0.0 174 | 0.485175071223 0.707794895041 0.0838495637362 0.401009699009 1.0 175 | 0.28942225826 0.0471279425865 0.882787311566 0.816962811258 1.0 176 | -0.133468381308 -0.149533830137 0.754494328972 -0.493575778645 1.0 177 | -0.697180497802 0.780450867491 -0.449270057609 0.877402574712 0.0 178 | -0.844844736339 -0.631320300232 0.183050651341 -0.51105064077 0.0 179 | 0.448712274419 -0.246876346495 -0.659893450376 0.049153377403 0.0 180 | 0.690616718216 0.991853099266 0.615106276743 0.100788461992 1.0 181 | 0.926684619988 -0.412891507193 -0.578331191756 -0.685681846739 0.0 182 | -0.533612605947 0.241008859456 0.969313658884 -0.844268965404 1.0 183 | 0.160639330024 0.0542974824067 0.0304272913887 0.968823338301 1.0 184 | 0.487788412898 0.886899214698 -0.717373528719 -0.248701401493 0.0 185 | -0.384981282995 0.244733860725 -0.202918417136 -0.655980166072 0.0 186 | -0.592401846206 0.976266366418 -0.262882846336 -0.266256773712 0.0 187 | 
-0.548402890896 0.99954742179 -0.0619293263551 0.34091756848 1.0 188 | 0.259962400712 -0.443418247715 0.840014865452 0.688207776032 1.0 189 | -0.664517104086 0.648815039883 -0.78181432186 0.716998117869 0.0 190 | -0.984751979848 0.330632999124 0.281506769915 -0.858927070629 0.0 191 | -0.311197258781 -0.929989092508 -0.685820974459 0.667592497884 0.0 192 | -0.383002192331 0.889191137948 -0.081358775884 0.115297309434 0.0 193 | -0.697493914619 0.748082951361 0.953623660289 -0.318431432166 1.0 194 | 0.12171948134 0.392739261364 0.189455233847 0.442998820043 1.0 195 | 0.228159774871 -0.768193413349 -0.324919271944 0.681380310031 0.0 196 | -0.595873152585 -0.592581387922 0.460041943473 -0.830380204105 0.0 197 | 0.948745078055 0.595507104488 -0.574847290703 -0.0592645832805 0.0 198 | -0.279356136189 -0.844793238264 -0.0600124569989 -0.533353203398 0.0 199 | 0.0286099808385 -0.135181458223 0.456230112912 0.977821238507 1.0 200 | 0.861108409089 0.694380159323 -0.486512287675 0.0429279010222 0.0 201 | 0.477923700989 -0.214613779909 0.803024215535 -0.669656559808 1.0 202 | -0.458885028201 0.885883284845 -0.885615481519 0.212593429052 0.0 203 | -0.429097265218 -0.30324600027 0.422311584166 -0.993120198371 0.0 204 | 0.47935463937 -0.412516451362 -0.921192273179 -0.824193895486 0.0 205 | -0.646686875939 0.632770042925 -0.0729242955942 -0.407641000659 0.0 206 | -0.941516953518 0.327675374606 0.074888482739 -0.907252887777 0.0 207 | 0.468735826241 -0.468964788579 0.172368165758 -0.240946046527 1.0 208 | -0.68914945506 -0.234277633262 -0.833158687995 0.302968942702 0.0 209 | 0.967114126911 -0.890012507577 0.794050077652 -0.895035456385 1.0 210 | -0.88904175061 -0.483573238857 0.775752433076 0.908966896701 1.0 211 | 0.15819432167 0.602019574625 0.367887495206 -0.33053717052 1.0 212 | 0.490776193412 -0.29029398092 -0.216691069604 0.179638613755 0.0 213 | -0.0962786233947 0.706022101175 -0.607335745195 -0.851194508189 0.0 214 | 0.847311583089 -0.7206842818 -0.62347792958 
-0.794194627117 0.0 215 | 0.491006613378 0.728944694061 0.437155265638 -0.82886444594 1.0 216 | -0.140132083453 0.0809583382835 0.340288124099 -0.264708807211 1.0 217 | -0.142054608714 0.339156431224 -0.318340822372 -0.090282844087 0.0 218 | -0.428782444314 0.445870508289 -0.194401138559 -0.215124007077 0.0 219 | 0.149728341755 0.338771911606 -0.63009412736 -0.802989826306 0.0 220 | 0.726017239603 -0.454870535413 0.315409508932 -0.760298168453 1.0 221 | 0.936360671439 0.784711344896 0.273771634639 -0.270153471358 1.0 222 | -0.0381328796869 0.544782419225 0.789482953359 0.843840536109 1.0 223 | 0.0415355829321 0.814679621637 -0.95226727951 -0.520072636541 0.0 224 | 0.250710476199 -0.395310730042 0.235270702444 0.408012468021 1.0 225 | -0.0924617300676 0.118946623236 -0.380300856968 -0.661504655636 0.0 226 | 0.0444181775449 -0.974884032057 0.483009434125 0.83132635163 1.0 227 | 0.463914822914 -0.742810541983 -0.797446848832 -0.573177509177 0.0 228 | -0.305171685281 -0.395164450898 -0.192220971096 0.126809311142 0.0 229 | -0.749011843222 -0.701562449729 -0.348740041743 0.833009876645 0.0 230 | -0.542756882227 0.668888102199 0.539668360882 0.18789377412 1.0 231 | 0.513205828429 0.485426153294 -0.660423017721 -0.60883159747 0.0 232 | 0.667471880707 -0.331805151349 0.742236623588 -0.473173978812 1.0 233 | 0.593903055066 0.859240595604 -0.883062906081 0.551814618271 0.0 234 | -0.712613853663 0.0741763606389 -0.0164369924469 -0.975622710024 0.0 235 | -0.299641527959 -0.619067480729 0.186898156453 -0.151599462532 0.0 236 | -0.687138868235 0.949278292373 0.318702383426 0.163257794375 1.0 237 | -0.944301084039 -0.00670177866728 0.855338079145 -0.0873354761626 1.0 238 | 0.130053410623 -0.758682552425 -0.477703088893 0.432423378273 0.0 239 | -0.312025841182 -0.105092129748 0.150398115758 0.628243003509 1.0 240 | 0.401839799987 0.918966548532 0.48319621879 0.888058656943 1.0 241 | -0.435556072353 -0.872580797025 -0.748890146131 -0.958931978558 0.0 242 | -0.676955319632 
-0.708345172954 -0.319796336341 0.223905758814 0.0 243 | -0.357406310955 -0.239862262808 0.595732903333 0.926959485946 1.0 244 | 0.55722856216 -0.365639592597 -0.386422368931 0.441549983606 0.0 245 | 0.332119945532 0.466967564284 -0.965129037291 -0.951853390087 0.0 246 | -0.346229046324 -0.228797931623 0.956929740788 0.682986270123 1.0 247 | 0.861429992786 -0.0579408805548 0.813772524313 -0.393548840811 1.0 248 | 0.892700697813 0.750002481754 -0.903429501673 -0.165517504753 0.0 249 | -0.202197677399 -0.43208191658 0.641618993554 0.640059729261 1.0 250 | -0.37485064459 0.93573434575 0.0145860486864 0.989616939031 1.0 251 | -0.781676683074 0.730876661631 0.979193618565 0.124168086354 1.0 252 | -0.701604883381 0.0486911552468 0.642001776428 -0.669806424452 1.0 253 | -0.983203419996 0.316501320144 -0.920775716972 0.7789037727 0.0 254 | -0.826445277855 0.870033760332 0.650931269244 -0.328063419485 1.0 255 | -0.484610721757 0.364462798335 -0.473302635876 -0.198023241387 0.0 256 | -0.800822767329 0.0419605972242 0.684122203457 0.0756169686966 1.0 257 | 0.392028384348 0.568108886904 0.606935107025 0.844385615816 1.0 258 | -0.646657420596 0.0451803376543 -0.230741404377 -0.684996101929 0.0 259 | 0.0744881206251 0.00198343552336 -0.761995409766 -0.11658496468 0.0 260 | 0.515167408868 0.230270345736 0.526975391072 -0.451529527171 1.0 261 | -0.905523617813 0.685069272277 0.576698314092 -0.901492559707 1.0 262 | 0.62158047623 -0.550292212126 -0.681388108711 -0.715248008632 0.0 263 | 0.682635884344 -0.463447711022 0.537609836524 0.00190815303283 1.0 264 | 0.209927011532 -0.276834330137 -0.955597777463 -0.270449543995 0.0 265 | -0.986157777507 -0.348132583219 -0.227989274471 -0.442689319873 0.0 266 | -0.926272229602 0.934800421551 -0.976516036468 0.914284136246 0.0 267 | 0.227317884315 -0.116566436559 0.674834898102 0.976342716132 1.0 268 | 0.0683294805845 -0.874495157312 -0.306570527343 -0.16980458648 0.0 269 | -0.373655418061 -0.86009515523 0.117455072759 -0.720590731062 0.0 
270 | 0.98071466254 0.833766905698 0.695831335192 -0.30430809506 1.0 271 | -0.607863794255 -0.792017120799 0.875096040804 0.034259600697 1.0 272 | -0.240170467856 0.422930864588 0.305807667095 0.953666363896 1.0 273 | -0.842026376962 -0.465782611575 -0.952228847355 0.484049523744 0.0 274 | 0.298221702942 0.653294246859 -0.404848236397 0.909651810107 1.0 275 | 0.349067368719 0.511887432778 0.0085735659412 0.0944663658031 1.0 276 | -0.208319037255 0.360194705517 -0.622705001447 0.0564713775274 0.0 277 | 0.338637426006 -0.0318288313507 0.958430957452 -0.890922018703 1.0 278 | 0.1896967201 -0.0831956039236 -0.427229554982 0.433721265352 0.0 279 | -0.0618780972111 0.61333295384 -0.418452500715 0.468030644479 0.0 280 | -0.393556638253 -0.962291471288 0.576415720276 0.247302834715 1.0 281 | -0.0570876748805 -0.918222173927 0.515215208989 -0.28244134817 1.0 282 | 0.766605540527 -0.149640227839 0.420460931346 0.70304941855 1.0 283 | 0.592202679761 0.404875114558 0.955874644989 -0.620026829321 1.0 284 | 0.571264293383 -0.513023875432 -0.896666617772 0.164337815104 0.0 285 | 0.0795054734095 0.591945913997 0.982118494463 0.114501133969 1.0 286 | 0.00500248521127 0.452945983887 -0.0126408303283 -0.0619896063915 1.0 287 | 0.879453224865 0.839999752535 0.212106098532 0.560432678117 1.0 288 | 0.381144674525 0.938709490399 -0.469871260081 0.505473029875 1.0 289 | 0.5445980594 0.220989037852 -0.0720735107418 -0.506262496673 1.0 290 | 0.179300723819 0.512848703501 0.791862410008 -0.963464214502 1.0 291 | 0.890069146227 0.836785563193 -0.97284543574 0.276965619174 0.0 292 | -0.377563269387 0.723182382437 -0.908031000402 0.276408815867 0.0 293 | 0.439731342006 0.473947682402 0.714144385342 -0.99901908323 1.0 294 | -0.99865479962 -0.495789669116 -0.850769424774 -0.748081930634 0.0 295 | -0.860164161069 0.613660098189 -0.214022235162 0.0096084321158 0.0 296 | -0.887061097921 0.329765766126 0.724341058716 0.314438132145 1.0 297 | 0.527116536736 -0.199980869661 0.397799212171 
-0.344284857234 1.0 298 | -0.579844758143 0.271695438255 -0.376103602374 -0.474120741605 0.0 299 | -0.383830145505 -0.356180671088 0.297599587549 0.858426977774 1.0 300 | 0.53433286931 0.463867548892 -0.521209353507 0.54774689834 0.0 301 | 0.813433871334 0.299793437474 -0.220619184595 0.376955018041 1.0 302 | 0.795506874507 -0.443841599486 -0.785664150286 0.875919792336 0.0 303 | -0.980566980194 0.821803358498 -0.49411520987 0.0606433091852 0.0 304 | -0.226805317092 -0.324663605684 -0.909526882133 0.19997889025 0.0 305 | 0.109830635646 0.447474979347 -0.252424936481 -0.738301899253 0.0 306 | -0.772581883645 -0.686625687176 0.611855444667 -0.660115194681 0.0 307 | -0.733282544868 -0.224074393014 -0.0429963273964 0.535530560312 0.0 308 | 0.838520750208 0.777234377891 0.707690779302 -0.627054377589 1.0 309 | 0.465843844226 0.715726930164 -0.131883727289 0.301547193163 1.0 310 | -0.315342693241 0.105186768135 -0.659332880449 -0.811569437836 0.0 311 | -0.560768558785 -0.596709345707 -0.0477786968166 0.241072370284 0.0 312 | 0.615338380316 0.890207476586 -0.273844867919 -0.271239760332 1.0 313 | -0.840521066779 -0.551047017858 0.552944780291 0.0860310358732 1.0 314 | 0.464209903457 0.247956154217 -0.56003888786 0.187370799587 0.0 315 | -0.887696635478 -0.916059337997 -0.136794719217 -0.485348291528 0.0 316 | 0.655575031589 -0.624077426431 0.308147731356 0.112008670247 1.0 317 | -0.206721060962 -0.306838737912 -0.834646966951 -0.463575807883 0.0 318 | -0.936347994003 -0.878206348427 0.476522231064 0.624128675672 1.0 319 | 0.859997309592 0.49215841239 0.195630853364 -0.839794812086 1.0 320 | -0.500211921918 -0.764756556947 0.664111418118 -0.989948663374 1.0 321 | 0.651773002766 0.56947825136 0.173694092902 -0.80042176775 1.0 322 | 0.17476718558 -0.763798958015 0.526774397984 -0.220321260623 1.0 323 | -0.0238913740188 -0.324879147809 -0.157770195101 0.926920571109 1.0 324 | -0.470575713769 0.650864001254 -0.178176180932 0.202738832539 0.0 325 | -0.821724442716 
0.274746997526 -0.999081075223 -0.843939061937 0.0 326 | 0.720121149735 -0.467363080142 -0.777853897383 0.404613704207 0.0 327 | 0.17284224405 0.5211261253 -0.565806810698 -0.716465052943 0.0 328 | -0.238508661597 0.169715633793 0.101395186028 -0.450878044 0.0 329 | -0.451128253265 -0.138927031673 0.0210022978354 0.158219127431 0.0 330 | 0.407244247637 -0.488457896284 -0.969737730013 0.596033590665 0.0 331 | -0.597075885586 0.470398273349 -0.632649253936 -0.805164178061 0.0 332 | -0.0388139261413 -0.763861301308 -0.347406292699 -0.230288501214 0.0 333 | 0.173681370154 0.866038125435 -0.799235408902 -0.0936245129512 0.0 334 | -0.411349744874 0.412102333562 -0.118244072105 0.916195420088 1.0 335 | -0.914520147295 -0.225838013763 -0.185923057545 -0.580066504356 0.0 336 | -0.243725563053 -0.325866177409 -0.178620065079 0.669653798993 0.0 337 | -0.677431578214 -0.333599944756 -0.514837949348 0.824333388911 0.0 338 | -0.976245817664 0.0731681114779 -0.654601678937 -0.394852238943 0.0 339 | 0.659588927492 -0.563702741326 0.0181157481363 0.940075481812 1.0 340 | 0.596522178734 0.0821154502301 0.402844211121 0.900484860725 1.0 341 | -0.879985402686 -0.915759331138 -0.981907916958 0.797194068258 0.0 342 | 0.45085785338 -0.986119086841 -0.388388559756 -0.354785073177 0.0 343 | 0.367582007217 -0.197154359804 -0.607561621931 -0.298101119686 0.0 344 | -0.289920151685 0.471318292781 0.721926020681 -0.782810096198 1.0 345 | -0.866515842419 -0.716217880229 0.819731958162 -0.326447855859 1.0 346 | 0.475375111052 -0.437400406077 0.485957459839 -0.168683939165 1.0 347 | 0.947146335222 -0.28458788456 -0.96954202712 0.233395567594 0.0 348 | 0.115640906481 0.434727942481 -0.470146958268 0.865138988864 0.0 349 | -0.688010691806 0.0855592213385 0.263127784899 0.377622306494 1.0 350 | 0.596460162072 0.931491637033 -0.822663767482 0.28072123463 0.0 351 | -0.964367382869 0.616212387344 0.166791351755 0.53310005642 1.0 352 | 0.0598506977725 0.822178587717 0.507538147384 0.827330551383 1.0 353 
| -0.951786591187 -0.381812237196 0.10673076653 -0.413691102682 0.0 354 | 0.107186692501 0.266294246506 0.823344773162 0.0858090411371 1.0 355 | 0.832434098282 0.855754340998 0.951842138809 0.145247380107 1.0 356 | -0.863802900804 0.257584477853 -0.314555887195 0.187771912177 0.0 357 | -0.945020222679 -0.670094023196 -0.495620170622 -0.629137950164 0.0 358 | 0.00701399615263 -0.492156525197 -0.886834395564 0.00867107235341 0.0 359 | -0.16658812404 -0.169989687369 0.906841378561 -0.598445912944 1.0 360 | 0.629004556736 -0.766607304687 0.962634248853 -0.661566020626 1.0 361 | 0.634510557318 -0.898931610193 0.403715227805 -0.398215240944 1.0 362 | -0.569231782432 -0.955627799467 -0.993574042901 -0.468050473385 0.0 363 | 0.543416381693 -0.473214857925 0.940211088935 -0.633576922024 1.0 364 | 0.995656486527 0.535619628498 0.285022943667 -0.917866091705 1.0 365 | -0.692679667832 0.588274709975 0.628970018365 0.725705232179 1.0 366 | 0.302228680464 -0.929592706001 -0.578006195451 0.168085850432 0.0 367 | -0.238178661121 -0.0716364731308 -0.126006700735 -0.33917914859 0.0 368 | -0.213869028527 0.489047670186 -0.54417382311 0.830625461955 0.0 369 | -0.776126883497 -0.641279452113 -0.0533122325925 -0.423378470045 0.0 370 | 0.908089475674 -0.200676608634 -0.145041363074 -0.923108044496 0.0 371 | -0.468356268871 0.771146586835 -0.995167221441 0.202149648623 0.0 372 | 0.800264224092 0.385659086608 -0.341758337274 0.753897592165 1.0 373 | 0.455782586847 -0.364274737314 -0.124120572309 -0.961727267128 0.0 374 | -0.309246703587 0.1074763136 -0.946671833725 -0.254420397401 0.0 375 | -0.505719954042 0.389348314829 -0.164888340173 0.121976384953 0.0 376 | 0.689237088568 0.165418924772 -0.440217439503 0.801653077951 1.0 377 | -0.465188354796 0.854735638301 -0.752215974299 -0.30755965236 0.0 378 | -0.782805095453 -0.292104217631 -0.674309828538 -0.40811294188 0.0 379 | 0.61147213939 0.927216575509 -0.778226985684 -0.0813228443926 0.0 380 | -0.853268358237 0.376650173082 0.376685692902 
-0.696673554783 0.0 381 | -0.896969538851 0.311537701953 -0.575681243541 0.541704690515 0.0 382 | -0.433073015154 -0.428778155197 -0.409373818353 -0.492128406279 0.0 383 | 0.968825711721 -0.372244935771 0.254438946695 0.712209421444 1.0 384 | 0.345805079015 0.447167006896 -0.24407952708 0.201924447172 1.0 385 | -0.534626500867 -0.215677247678 0.123231657666 -0.170834835602 0.0 386 | 0.720643079609 0.580225271948 0.404507312397 -0.380940925306 1.0 387 | -0.163308882115 0.171096874393 -0.243949650156 -0.352839796838 0.0 388 | -0.999544050423 0.864564707923 0.0460771090923 0.012269655171 0.0 389 | -0.183207397076 0.0779260142203 0.560050387301 0.419118323695 1.0 390 | -0.569967657472 0.938531554874 -0.182121614638 -0.533587557854 0.0 391 | 0.439972556927 -0.214874353819 -0.9175556335 -0.793650810231 0.0 392 | -0.394934944215 0.136523297984 0.338529665051 -0.939121708551 0.0 393 | 0.292602396981 -0.965649144854 -0.57964945771 0.807327126931 0.0 394 | 0.470480031067 -0.0985524073734 -0.277457761436 -0.518706107816 0.0 395 | -0.94575737826 0.708844716236 0.841739121589 0.909109805819 1.0 396 | 0.536354547956 -0.210718590297 -0.728669915776 -0.464469401532 0.0 397 | 0.282106025115 0.0334521658817 -0.972217819321 -0.661955688358 0.0 398 | -0.934890593577 -0.749301462724 -0.14456512437 0.669992887862 0.0 399 | -0.815479781645 0.314514350548 -0.086660354999 0.0889110177359 0.0 400 | 0.0976386210839 0.721038157808 0.666816959314 -0.806995671659 1.0 401 | 0.917885213796 0.203366290948 0.22486580266 0.573249972846 1.0 402 | -0.158345095028 -0.53166232487 0.40493092203 -0.0124998927742 1.0 403 | -0.0672266990142 0.476828849909 -0.64531516799 -0.0419635854275 0.0 404 | -0.16464192571 0.490737842787 -0.0299664039999 0.490061707664 1.0 405 | -0.196022729179 0.16793546988 0.304164680465 0.918090950259 1.0 406 | 0.995331041517 -0.274416190907 -0.70954472292 -0.16231724541 0.0 407 | 0.0563677283433 0.0294431131738 0.0462446998559 0.749156268183 1.0 408 | -0.127532899587 0.220890092166 
-0.456845052513 -0.564093854394 0.0 409 | -0.360524732374 -0.541568146653 0.113262870222 0.831991769311 1.0 410 | -0.308684155975 0.986426984286 0.886698494857 0.126200613654 1.0 411 | -0.895867094188 -0.661986214998 -0.10647641671 0.125947001025 0.0 412 | 0.678600402095 -0.881964489696 0.996097477787 0.742637019586 1.0 413 | 0.798820243674 -0.338196442293 -0.559929754099 -0.285871365255 0.0 414 | -0.949070415199 -0.174488577476 0.369569796026 0.512731539511 1.0 415 | 0.448816727388 -0.263989331532 0.113825269313 -0.0901433793143 1.0 416 | -0.421131708972 -0.843216566848 -0.215809032626 -0.249870552396 0.0 417 | -0.996985459391 0.931758785913 0.993751900556 0.174606902731 1.0 418 | -0.0935723738669 0.240661477782 0.479073724405 0.137038717018 1.0 419 | -0.57214932683 -0.462402052991 0.0103724657067 -0.867844829524 0.0 420 | 0.655537294214 0.0372757940085 0.197280102806 0.211747888842 1.0 421 | 0.641112155537 0.704839590904 0.62247771489 -0.646454141857 1.0 422 | -0.33396706393 0.656699011457 0.295995778735 -0.662454651046 1.0 423 | -0.41524502091 0.845502677485 0.419921092448 -0.961462246163 1.0 424 | -0.176616287556 0.532287160183 0.265954244525 0.53504553891 1.0 425 | 0.628068375662 -0.015680889518 -0.386484155146 -0.621045223958 0.0 426 | -0.760202784531 0.391333762912 0.393534682819 -0.132824420989 1.0 427 | -0.577578820413 -0.60156003101 0.993823692023 -0.170566442643 1.0 428 | -0.857112336709 0.396968031867 0.391730110606 0.560838225568 1.0 429 | 0.801606779083 0.217635899973 -0.484098579273 -0.312427391811 0.0 430 | -0.339325816062 0.834329120523 0.759734146869 0.804975621505 1.0 431 | 0.681248498397 -0.0795402775475 -0.76145543511 0.267747696734 0.0 432 | 0.113244534598 0.90059700547 -0.586682497307 0.973646919483 0.0 433 | -0.540801153312 0.104840255856 0.637438894076 -0.626472202542 1.0 434 | 0.0982703842235 0.78946580533 -0.709826937378 0.410565475325 0.0 435 | 0.9242031312 -0.94123113096 0.904590507365 -0.391359892114 1.0 436 | 0.686863579513 
0.990372247747 0.950505817893 0.944485326913 1.0 437 | 0.235373678141 0.370400454774 0.674376898652 -0.428517627572 1.0 438 | 0.868241045045 -0.191171212024 0.295722051526 0.414341749172 1.0 439 | -0.877680531372 -0.162791929679 0.135571215392 0.895155515252 1.0 440 | 0.661252170615 0.995443223314 -0.642271125137 -0.469195240266 0.0 441 | -0.0913671817006 0.352615227657 0.826158191527 0.23044082638 1.0 442 | -0.366211369693 -0.509917893638 -0.815087094187 -0.947653304509 0.0 443 | 0.502200162444 -0.8828773941 -0.670979153082 0.625558600125 0.0 444 | -0.0239854543665 0.778363792908 0.489450910982 0.0816532469376 1.0 445 | -0.888667282308 0.78396118388 0.959775727952 0.4896729125 1.0 446 | -0.241849216604 -0.490527880326 0.412113944777 0.565516516307 1.0 447 | 0.301267951699 -0.356167635756 -0.791746523844 0.465701216215 0.0 448 | -0.928309602365 0.66595708081 -0.446253279934 -0.655803839073 0.0 449 | -0.610855968982 -0.855285593118 0.634206320756 0.215262521526 1.0 450 | 0.984210872344 0.194276817778 0.235026756422 0.488081650716 1.0 451 | -0.653504006574 0.482713960009 -0.00103010202814 0.275136755724 0.0 452 | 0.651231727581 0.171366076372 -0.609012128252 -0.00712494026599 0.0 453 | 0.203384525523 0.731020863533 0.836429915216 -0.327077165998 1.0 454 | 0.491264742822 0.0470833839776 0.878505898003 0.0618929705149 1.0 455 | -0.193472552206 -0.585422691395 0.192707241645 0.142164116773 1.0 456 | -0.0982361883593 0.682219830942 -0.407993195095 0.19370281628 0.0 457 | -0.584778855702 -0.572104997859 0.249099144184 0.427995648703 1.0 458 | -0.720776213411 0.408704543102 -0.171060987939 0.0099737971612 0.0 459 | -0.404263693902 -0.515164395453 -0.600754817319 -0.317844637101 0.0 460 | -0.490529348934 0.811280280909 -0.141324267576 -0.754398240158 0.0 461 | -0.0960084193193 0.915795767165 0.838127520548 0.298089340645 1.0 462 | 0.0741201533159 0.0994805585264 -0.552705408349 0.0806330098711 0.0 463 | -0.0815099588902 -0.583216695681 -0.770861738492 0.568898759726 0.0 464 
| 0.116480320867 -0.0893911679537 0.309705500084 0.22338445306 1.0 465 | -0.176553765963 0.668842787657 -0.752600042106 0.765466629122 0.0 466 | -0.0406933532463 -0.72484108561 0.170394365551 0.510287359259 1.0 467 | 0.0509009458928 0.951045774805 -0.402619388722 -0.45656760111 0.0 468 | -0.134802155621 0.901083856324 0.0250721198287 -0.958352602749 0.0 469 | -0.519482533571 0.878646592878 -0.77039243499 0.283589514848 0.0 470 | 0.135452709343 -0.530829097565 0.642487473934 -0.869684851882 1.0 471 | -0.918657089786 -0.750427062092 0.278393028004 -0.661328415891 0.0 472 | 0.539633030008 -0.0222048270532 -0.845935677539 -0.395481619537 0.0 473 | 0.291685291799 -0.177885177001 0.749889397481 -0.861946629123 1.0 474 | -0.718746622565 -0.340023300729 -0.403308729034 -0.874243945922 0.0 475 | 0.171309677842 0.819216924547 0.353281947376 -0.261694057017 1.0 476 | -0.577229968501 0.662094691451 -0.6167552945 0.227057999316 0.0 477 | -0.773690069645 -0.365533405455 -0.745076689918 0.788210961082 0.0 478 | 0.593712596067 0.0624183946105 -0.736012402317 0.176296545642 0.0 479 | -0.86013534971 -0.929417220674 -0.09297383891 0.446105500901 0.0 480 | -0.506119563468 -0.791476834608 -0.370609603595 -0.779110253939 0.0 481 | 0.722123541403 0.487262440824 -0.772444148578 -0.978407534855 0.0 482 | -0.81186839802 -0.483881907036 0.841831896344 -0.319150189705 1.0 483 | 0.626571248856 -0.206844523226 0.319680397139 0.420254163465 1.0 484 | 0.148725124973 0.972583786834 -0.148284695623 0.585489230435 1.0 485 | 0.216193719002 0.326001931064 0.888061420631 0.668415332361 1.0 486 | -0.0375743729992 0.754908454997 -0.227606228218 -0.224488558659 0.0 487 | 0.503200226441 0.772940911249 -0.161139237661 0.110272571591 1.0 488 | -0.408044288383 0.709159858633 0.822424875839 0.692086121461 1.0 489 | -0.665659179505 0.533519056543 0.980436165709 0.54275641488 1.0 490 | -0.00610411178513 -0.710748856153 0.237010173328 -0.159242440468 1.0 491 | 0.934169151589 -0.603896739759 0.453529224294 
0.51258597976 1.0 492 | 0.981026819526 -0.803450904793 -0.60978286564 -0.245141598816 0.0 493 | -0.417821763273 0.690935804879 0.0078834254446 0.032896319463 0.0 494 | 0.0134166672688 0.541119795472 0.67167502108 -0.373347158327 1.0 495 | -0.876858695236 0.368279981949 0.35756577964 0.29663734853 1.0 496 | 0.786915113762 -0.808654492256 -0.22051231005 0.291458690551 1.0 497 | 0.609671330249 -0.418811647271 -0.833893582738 0.242264657167 0.0 498 | -0.77140838288 0.983392771344 -0.806694252048 -0.810796373826 0.0 499 | -0.189720756809 0.678531520067 0.422784591536 0.308028987184 1.0 500 | -0.498420312179 0.510914959046 -0.965245723209 0.496135943699 0.0 501 | -0.675441703174 0.648763636779 -0.546979847614 0.759873280529 0.0 502 | 0.190866679454 0.897180806392 0.201947637707 0.436116085401 1.0 503 | -0.249189246842 -0.311885873856 0.186361679207 0.203679855917 1.0 504 | -0.652975851494 -0.146984878458 -0.555478528041 -0.0410341613081 0.0 505 | 0.0591320773535 -0.770471897435 0.0169559151703 0.173946681845 0.0 506 | -0.370190240017 -0.0906700572177 0.15046717969 0.314992449401 1.0 507 | -0.407335992291 -0.665041514152 -0.978126565236 -0.461871217204 0.0 508 | -0.0278244700191 -0.519707425037 0.851213933449 0.668478862666 1.0 509 | -0.797362083907 0.0541162447922 0.442020888098 -0.909846360138 0.0 510 | -0.0850370707506 -0.915594345226 0.433808855351 0.100236500468 1.0 511 | -0.250878464111 -0.682800600463 -0.598512687113 -0.785700012589 0.0 512 | 0.690051115444 0.633714049405 -0.901299227742 0.949712688854 0.0 513 | 0.633297429951 -0.566451556918 -0.510963555458 -0.264823136494 0.0 514 | 0.965620866584 0.0695464848563 0.837656832935 -0.776744851101 1.0 515 | 0.717737793476 0.849082994512 -0.84182918191 -0.795584036666 0.0 516 | -0.296569962017 -0.550886092027 0.934650171503 0.691780170231 1.0 517 | 0.652957924618 0.545251147037 -0.483606713663 0.785548042829 1.0 518 | -0.846752852743 -0.43149313671 0.632025082133 -0.058768287339 1.0 519 | -0.391727852112 
-0.699009852709 0.436060234074 0.878213960135 1.0 520 | -0.118455879791 0.0497200633729 -0.300841159712 -0.673303395229 0.0 521 | 0.336824575779 0.0849208569329 0.0384744544927 -0.0847632084372 1.0 522 | 0.72562126348 0.572115522412 0.128760232239 -0.0371164333604 1.0 523 | -0.0786201905031 0.0262277711263 -0.400028922146 0.787042258773 0.0 524 | -0.0742829990883 0.718063235129 0.802456192978 0.71267743056 1.0 525 | -0.997614560789 -0.884203943522 -0.982894429579 -0.248778775044 0.0 526 | -0.432027266144 -0.145790015547 0.673973927382 0.0968009521144 1.0 527 | -0.159272975413 0.759674995824 -0.884895333632 0.868706284037 0.0 528 | 0.140583197692 0.966620627068 0.950388974019 0.797214588106 1.0 529 | -0.701364415564 -0.144597393592 0.144416795176 0.971079179796 1.0 530 | 0.541100668292 0.145736105016 -0.522867828495 -0.647302030229 0.0 531 | 0.524700461544 -0.401609441335 -0.320551396766 0.405602785028 0.0 532 | 0.0835421936072 -0.794111269044 0.597534618432 0.471947238426 1.0 533 | 0.477413459734 0.0768771993777 0.803027686434 0.226159934762 1.0 534 | 0.438904228612 -0.567631755476 -0.261883972785 -0.557637071893 0.0 535 | 0.671891485446 0.104389941747 0.511222428381 0.989120322045 1.0 536 | 0.580754064449 -0.317119573738 -0.212192892156 0.657349619361 1.0 537 | 0.0921194400607 -0.845952654835 0.450577760665 0.808298506671 1.0 538 | 0.0101597844256 0.201255999589 -0.259082512314 -0.446557053406 0.0 539 | 0.0959042227079 -0.0511152088256 -0.7408165525 -0.847249700436 0.0 540 | -0.621546328938 -0.967537352675 0.351108219926 -0.991262256906 0.0 541 | 0.845064842192 -0.921674458681 0.245508986096 -0.210392556659 1.0 542 | 0.865993179169 0.401216543976 -0.461827347717 0.301433183027 1.0 543 | 0.167072543675 0.780760807306 -0.841380137296 -0.443327042811 0.0 544 | -0.33227980537 -0.286334625555 -0.109373888192 -0.809720175899 0.0 545 | -0.589096813744 0.579248549397 -0.581127407958 -0.845628610275 0.0 546 | -0.765607777101 -0.261604186161 0.794075169438 -0.629817468031 
1.0 547 | -0.516346044975 -0.0365662139273 0.444037393965 -0.316540057136 1.0 548 | 0.600602589868 0.156565505168 -0.667840718074 -0.0665506127227 0.0 549 | -0.596695165879 -0.827682550165 0.724459960195 -0.840676537751 1.0 550 | -0.116247820889 -0.915147726001 -0.366049933708 -0.229804797241 0.0 551 | 0.145702964089 0.0994624437286 0.000142254334204 0.235469832525 1.0 552 | 0.693230811516 -0.92315041115 -0.330265373145 0.0660398722825 0.0 553 | -0.871734571138 0.811818847431 0.748332367824 -0.201388482405 1.0 554 | 0.400534112439 -0.309263573621 0.465242085668 0.918605737948 1.0 555 | 0.944398610478 -0.926424967363 -0.694113190522 -0.795762151069 0.0 556 | -0.846030156336 -0.0231484437624 0.135182451227 -0.597411984948 0.0 557 | -0.453629693895 0.260626938399 -0.636250636246 -0.694293085175 0.0 558 | -0.751693712443 -0.762558841602 -0.533830114013 0.82615140859 0.0 559 | 0.266821407973 0.656765307861 -0.0216005132343 0.787131784898 1.0 560 | -0.484411492909 -0.00829304819725 0.199462554573 -0.994035528882 0.0 561 | -0.331513958755 -0.508148270438 -0.596688045666 0.166494749315 0.0 562 | -0.865248220627 0.714354714515 0.77622790639 0.272428423316 1.0 563 | -0.723333638403 -0.177750977562 -0.994901317567 0.732782801611 0.0 564 | -0.131227645372 -0.836519442888 0.00265497915776 0.777445273203 1.0 565 | 0.864278784707 0.256852677376 -0.829608244831 -0.726217275983 0.0 566 | -0.474869293518 -0.163762169801 0.288791540229 -0.185125164256 1.0 567 | 0.387989144127 0.595907293986 -0.841394764145 0.735537497065 0.0 568 | -0.947827925002 -0.837000095363 -0.684391740502 -0.155766145748 0.0 569 | 0.265586159044 -0.292360350275 0.385095731668 0.608925864331 1.0 570 | 0.783401098971 -0.000565915694922 0.447935967461 0.321686781562 1.0 571 | 0.731844140682 -0.135008370456 0.902860773467 0.815041296355 1.0 572 | -0.14727274379 -0.479271625908 0.754354449418 -0.960286209706 1.0 573 | 0.665696369841 0.222795170024 0.641362245127 -0.887031988432 1.0 574 | 0.535520642625 
0.373720063258 0.618004419979 0.4052839643 1.0 575 | 0.346897888476 0.482020234477 -0.302115420123 0.818009779055 1.0 576 | 0.916411075868 0.436272632819 0.675083913866 0.624297148771 1.0 577 | 0.0850905600309 0.510959934038 -0.182561680189 0.0579033754087 0.0 578 | -0.641993134809 0.0949370513135 -0.302296411372 -0.974013744899 0.0 579 | -0.826610317956 -0.781375655572 -0.701663938089 -0.943399880323 0.0 580 | 0.574679493721 0.24096994205 0.0396732707653 0.578533869254 1.0 581 | -0.96088181318 0.662944211427 0.0240539764967 0.883941313592 1.0 582 | 0.920726678074 -0.35211856202 -0.413676966347 -0.598124317598 0.0 583 | -0.985744746421 -0.184300497698 0.524874610939 0.806834506751 1.0 584 | 0.938331548195 -0.866288995383 0.661282580438 -0.345587966523 1.0 585 | -0.668240309773 0.649215918042 0.710881121334 0.596135493318 1.0 586 | 0.51044356906 0.757004783581 -0.82965153425 0.975542969019 0.0 587 | 0.897149590195 -0.640021462307 -0.870969560866 0.728357400951 0.0 588 | -0.830081309507 0.665134591753 -0.646403574669 0.253878061583 0.0 589 | 0.303761304834 0.664275268894 -0.723471501448 -0.511723286524 0.0 590 | 0.340319647443 -0.536119282539 0.779229655578 -0.825851959348 1.0 591 | 0.033828863001 0.818680653431 0.805871024183 0.820009035329 1.0 592 | -0.832477560364 0.939685825484 -0.981624354041 0.975564735205 0.0 593 | 0.176331190158 -0.443446352763 -0.0463273491559 0.289298028768 1.0 594 | 0.379054569243 0.460778777052 0.675098173931 -0.113127424072 1.0 595 | 0.737095556185 0.357240306764 -0.847213982255 -0.300404828386 0.0 596 | -0.359521941885 -0.851624326117 -0.460800138502 0.480196441604 0.0 597 | -0.0951244990358 -0.500607057995 0.958380520581 0.165694996378 1.0 598 | 0.980063642184 -0.830286331635 0.066845756032 -0.0953996120289 1.0 599 | 0.496510320629 0.896206196905 0.434342560203 -0.149159690532 1.0 600 | 0.169506738757 0.112514183959 0.52173657093 -0.139212903288 1.0 601 | 0.231863490104 0.613352098774 -0.413880407124 0.664383157575 0.0 602 | 
0.23934423703 0.126333071245 0.978360289679 0.51696235652 1.0 603 | 0.0112602969894 0.400690827598 0.357231687375 -0.646300427907 1.0 604 | 0.121985392808 0.602477587447 0.407603486462 -0.862793683081 1.0 605 | -0.571009243882 0.383643663708 -0.380489134917 0.268560822086 0.0 606 | 0.335589890454 0.379944382904 0.909813205083 -0.463509225337 1.0 607 | -0.699340428802 -0.545623609779 0.083665782291 0.404557830756 0.0 608 | -0.813023345048 0.436211710215 0.686876808197 -0.152351265544 1.0 609 | 0.606204007561 0.678746767572 -0.946064532251 0.960993942539 0.0 610 | 0.123589787422 0.955922396596 -0.538955898218 -0.474103214517 0.0 611 | 0.24206003238 -0.0124412341892 -0.581224312212 -0.714835731705 0.0 612 | 0.0658787631018 -0.458642930492 -0.431529697044 -0.869029690093 0.0 613 | -0.53170139543 -0.30016301236 0.350871460309 0.588345602311 1.0 614 | -0.674956630746 0.628022368407 0.998877187341 -0.411834358117 1.0 615 | -0.370026765611 -0.185763637134 0.947035327908 -0.646946260134 1.0 616 | -0.879887448429 0.0892053574206 -0.368744814314 0.942140131286 0.0 617 | -0.549616272387 0.982317791714 0.894431402673 -0.998107281367 1.0 618 | 0.986989581714 0.675268301588 0.0568582886956 0.517791447076 1.0 619 | -0.838818549192 -0.126782449877 -0.432502768091 0.816363752312 0.0 620 | 0.0664795038002 -0.487494043574 0.49621513699 -0.634421718111 1.0 621 | -0.666019490401 -0.125512266115 -0.514462510808 0.712648475729 0.0 622 | 0.309503491899 0.254184654951 0.341589275799 -0.964277569868 1.0 623 | -0.673928800852 -0.794684515561 -0.0765283692182 0.187128340769 0.0 624 | 0.0691774926449 -0.637761187249 -0.853565281582 0.666580582226 0.0 625 | 0.459597318338 0.987770978846 0.0169771803048 0.55137730859 1.0 626 | -0.223891516185 -0.643861935415 0.865955028062 -0.286215233457 1.0 627 | -0.943567203675 -0.08540066057 0.542219772553 0.936435799829 1.0 628 | 0.75973302118 0.311436657377 -0.158414862524 -0.824050505986 0.0 629 | 0.685287674941 0.368946596447 -0.0270466636858 
0.398537096253 1.0 630 | -0.411675015204 0.818839251027 0.983235708905 -0.623901328634 1.0 631 | -0.818452706383 0.412721670716 0.0722039943048 0.468367335051 0.0 632 | -0.173912040179 -0.18636904398 -0.889682357379 0.515701694802 0.0 633 | -0.969951479374 -0.64800736524 -0.597615639185 -0.0457411277267 0.0 634 | -0.936599616742 -0.557097391629 0.342394573108 0.347283880811 0.0 635 | 0.635347700334 -0.0140126714273 -0.540095718137 0.900444955714 0.0 636 | 0.971235132103 0.300031181693 0.107576707349 0.777999804719 1.0 637 | -0.586286210954 -0.0898578885708 -0.560327698835 -0.638718194182 0.0 638 | 0.584269008333 0.865894354966 0.939038202564 -0.113381221252 1.0 639 | 0.619307640336 0.208651765227 0.902730136015 0.493612815441 1.0 640 | 0.306055909459 -0.381546325032 -0.50142159059 0.482345343136 0.0 641 | 0.430698150825 0.776100417164 -0.173248881563 0.209007578701 1.0 642 | -0.715498877555 -0.682352105613 -0.0789904496264 0.297587644086 0.0 643 | 0.483520093465 0.767460475815 -0.12007460454 0.339996886841 1.0 644 | 0.811966728184 0.758773047038 -0.423333995709 0.514676598792 1.0 645 | 0.803878292845 0.861303949104 0.779267875574 -0.747102512473 1.0 646 | 0.569487511651 -0.554824346042 0.934259827377 -0.993880022985 1.0 647 | 0.634901118678 -0.0652731049803 -0.53038044468 -0.893923974763 0.0 648 | 0.831455998375 -0.402845069936 -0.677684364621 0.250593557951 0.0 649 | -0.649322891356 0.88066275422 -0.204634559385 -0.737615108835 0.0 650 | -0.935273180055 0.255998556725 0.440237094567 0.181009651991 1.0 651 | 0.517104762733 0.614324840195 0.324231859193 -0.123079919202 1.0 652 | 0.734950985303 -0.684541619288 0.666359784752 0.31745864061 1.0 653 | -0.140237947953 0.880318993667 0.80279405318 0.273241345217 1.0 654 | -0.262703762684 0.875844867715 0.158838345988 -0.192868605152 1.0 655 | 0.87980872306 0.133967152676 0.415721150356 0.896035962511 1.0 656 | 0.366350554547 0.949484382154 -0.175207644729 0.132201222276 1.0 657 | 0.458242344776 -0.44084566659 
0.494850857518 0.658986468275 1.0 658 | -0.0888282936503 -0.333063269056 0.195393476044 -0.45497866936 0.0 659 | 0.979475693056 0.989878224315 -0.0896548510216 0.165493023516 1.0 660 | -0.0190468220094 0.507677895672 0.9559854419 -0.604234101682 1.0 661 | 0.194074535961 -0.46050147525 0.806751253273 0.117986861348 1.0 662 | -0.541199735833 -0.513249495553 0.15580039546 0.156504858357 0.0 663 | 0.821760372686 0.844030781132 0.102441529902 -0.851116189404 1.0 664 | 0.84053723632 0.378753348844 0.485088645205 0.412289182532 1.0 665 | 0.921853869453 0.39935573974 -0.383201468447 -0.0593403862051 1.0 666 | -0.901766676751 0.692624350407 -0.359837698401 0.364810844427 0.0 667 | -0.69841856914 0.255919264575 0.845740224446 -0.3573137403 1.0 668 | -0.372475692922 0.647087899496 -0.0201096975599 -0.588106515117 0.0 669 | -0.651198823503 0.101523949135 0.417232475781 0.344201713389 1.0 670 | 0.928828571839 0.0917852658737 0.938829403199 0.390048360075 1.0 671 | -0.259390007333 -0.610031853845 0.33812396756 0.605160270357 1.0 672 | -0.860358405591 -0.260544745663 0.773733546717 -0.707913450422 1.0 673 | -0.482846221647 -0.311524295668 -0.975719870444 -0.181409609167 0.0 674 | 0.892776278734 0.850049167087 -0.654693385736 -0.0139468401003 0.0 675 | 0.201564211378 -0.806313299805 0.21121739142 0.0654497934868 1.0 676 | 0.777973789118 0.541827251804 0.76041663619 -0.308776203377 1.0 677 | -0.940599908278 0.173724134783 -0.171048300037 0.67097453627 0.0 678 | 0.929558136148 0.458058260621 0.925391145187 -0.69789577466 1.0 679 | 0.54882962017 -0.264834289844 0.978274714516 0.951596991459 1.0 680 | -0.115605699759 0.842530913831 0.649043078304 -0.850290755071 1.0 681 | -0.848251333992 0.66497948922 -0.185937807033 -0.651597269912 0.0 682 | 0.307118307294 0.188193597334 -0.018224679896 0.188113049218 1.0 683 | 0.908996612217 -0.389037900393 -0.113610329823 -0.274929304821 1.0 684 | 0.844435195103 0.275029756895 0.486165656377 0.409561169043 1.0 685 | -0.552232425019 0.638996900053 
0.778852585365 -0.712270863988 1.0 686 | 0.221219079204 0.201834432184 0.523392425775 0.858583784178 1.0 687 | -0.763395617151 -0.905347275061 -0.0864123021061 -0.172406800732 0.0 688 | 0.0301191044614 -0.723547471644 -0.479976038091 -0.540621502689 0.0 689 | -0.271329140995 -0.51724869625 0.339634302352 -0.126449977453 1.0 690 | 0.751137817433 -0.192080977198 0.822883703003 0.328713545396 1.0 691 | -0.638934848561 -0.365149869593 -0.388665698653 0.791008256466 0.0 692 | -0.120760236606 0.435406042285 0.100474819287 -0.554771596561 0.0 693 | 0.283432745495 -0.311653057804 -0.768597434654 -0.827166980742 0.0 694 | -0.941606728007 0.933282462589 0.96435771783 -0.403952868596 1.0 695 | 0.0693638016524 -0.251801368987 0.350932311928 -0.779483800384 1.0 696 | 0.505186855532 0.233854364961 0.491285189326 0.631355320337 1.0 697 | -0.633725253971 -0.08752862347 -0.571367704683 0.588095700876 0.0 698 | 0.568001492228 0.584485993528 0.860284487834 0.893021861284 1.0 699 | -0.166180330906 0.9587062558 0.822441442038 0.610491118836 1.0 700 | -0.301589727681 -0.110576252799 -0.388591402336 -0.584130527079 0.0 701 | -0.792712648965 0.908171529558 -0.461568468635 -0.183978277162 0.0 702 | -0.328989235293 -0.911371749987 -0.736092583386 0.245893295248 0.0 703 | -0.867549601079 -0.288368333941 -0.871834468352 0.449083669491 0.0 704 | -0.871034521903 0.414009815704 -0.123332677021 0.557904116477 0.0 705 | -0.602688310287 -0.580339809143 0.0667513662232 0.420154641497 0.0 706 | -0.991487921305 -0.197209929617 0.889908404156 0.21113827221 1.0 707 | 0.582748472381 0.733964019582 -0.227140909818 0.887293982207 1.0 708 | -0.476833733972 -0.247284778683 0.290884412583 -0.923228508293 0.0 709 | 0.790275432432 0.116106440611 -0.929919549469 -0.83261102104 0.0 710 | -0.761995401231 -0.463261859834 -0.98421986092 0.783862517642 0.0 711 | 0.342269913809 -0.326022372124 -0.726568280181 0.481830192205 0.0 712 | 0.642306909656 0.711249001576 0.855243289081 0.576287182492 1.0 713 | 0.171310350778 
-0.266425300246 0.619594596681 0.145097022483 1.0 714 | 0.0572661089618 -0.437342919902 -0.559904623127 0.627491989016 0.0 715 | 0.376600987564 -0.403660570192 -0.278132724069 0.261741701928 0.0 716 | -0.137454316907 -0.305702098089 -0.407657796725 0.829231604646 0.0 717 | -0.924102729797 -0.760422343198 -0.841589008675 -0.318024139711 0.0 718 | 0.490967578818 -0.792934792594 0.330509002911 -0.263251535402 1.0 719 | -0.869680107234 0.330545943125 0.548124256124 0.884049648318 1.0 720 | -0.368205350669 -0.914517291205 -0.612503026276 0.835939857041 0.0 721 | -0.612016816008 -0.388950640039 0.675777510305 -0.845071186892 1.0 722 | -0.956862441487 -0.440543724833 0.973353342778 0.932445798245 1.0 723 | -0.383469990257 -0.439915831772 0.983180422785 -0.63555095869 1.0 724 | 0.58359196674 -0.70176213308 -0.500879675839 0.821020365526 0.0 725 | -0.678441946433 -0.0472434722354 0.821361247508 0.762051393076 1.0 726 | -0.992559932802 -0.131674386352 0.799119817565 -0.23206413593 1.0 727 | -0.949367503339 -0.462703932265 -0.553660613151 -0.977141870395 0.0 728 | 0.479073726505 -0.0868766470863 -0.0883995072381 0.211930279413 1.0 729 | -0.0300521245207 -0.971435314701 0.240204212138 -0.640191909799 0.0 730 | -0.263511778912 -0.176439710397 0.115001689139 0.0378231749717 0.0 731 | 0.0244293489569 -0.588422596663 -0.568733050079 0.191145129968 0.0 732 | -0.267753040981 -0.460957834778 0.907961151918 0.740531255392 1.0 733 | -0.578609241224 -0.306098098139 0.990789790556 0.673990403648 1.0 734 | -0.0674491049171 0.673878612273 0.914322261845 0.528959645584 1.0 735 | -0.731568225045 -0.103165739889 0.686538973779 -0.10504933642 1.0 736 | -0.497620643083 0.823776339302 0.746252467506 -0.260600213156 1.0 737 | 0.453806781496 -0.401108881307 0.374143078792 -0.615332129971 1.0 738 | 0.829433462672 -0.269538093242 0.882560581837 0.246799824072 1.0 739 | -0.114685434019 0.123234931912 0.610658233639 -0.833916388492 1.0 740 | -0.550986877739 -0.12434082895 -0.577220008947 
-0.800627666713 0.0 741 | 0.363404928801 0.162343776198 0.325007010839 0.893661927956 1.0 742 | -0.0388501495762 -0.393458028211 -0.524826854637 -0.272988901165 0.0 743 | -0.910761155433 0.368540867764 -0.908855788717 -0.943397678003 0.0 744 | -0.443327089978 -0.365889703557 -0.394744836921 0.641632904457 0.0 745 | -0.747904300123 0.49963579054 -0.829708957548 0.542024641421 0.0 746 | -0.559818946595 -0.23267422664 0.552804047637 -0.501409573996 1.0 747 | -0.205465670428 -0.297795927234 -0.882554212135 0.408486144978 0.0 748 | -0.404047519825 0.65523652139 0.884890718544 0.330811598 1.0 749 | -0.233578723397 -0.932472124317 -0.678405094025 0.353725876794 0.0 750 | 0.728889892222 0.579622611277 0.459878420866 0.737488314594 1.0 751 | -0.931008571784 -0.694969845676 0.0981426262048 0.643507094549 0.0 752 | -0.428641805656 -0.525939702794 0.58497379203 -0.691872624043 1.0 753 | -0.0887370706053 -0.0917246008037 0.548737745297 0.222444178696 1.0 754 | 0.196872889808 0.0999844158657 -0.933746628885 0.00658832520434 0.0 755 | 0.276208599837 -0.92765745147 0.972908101243 -0.885962838094 1.0 756 | 0.0964464087109 0.127823288444 0.872752327861 0.395066122279 1.0 757 | 0.662751127445 0.481864627174 -0.979611209658 0.0744473058736 0.0 758 | -0.0667120082245 -0.657854877896 0.32786433826 -0.586705348741 1.0 759 | -0.650106034837 0.343100654864 -0.19561692864 0.710213028873 0.0 760 | 0.906454494298 0.616677227099 0.556126433096 -0.571383510085 1.0 761 | -0.998965781425 -0.376839633345 -0.0454228261314 0.284439120031 0.0 762 | 0.820894341863 -0.290294692167 0.0187095222996 -0.334679912798 1.0 763 | -0.334403039986 -0.897947251437 0.0590357131595 -0.310736943211 0.0 764 | 0.179325406978 -0.274773196476 0.102163732862 -0.335181591915 1.0 765 | 0.692850420888 0.658703013364 -0.78415674352 -0.342363348433 0.0 766 | -0.0161936473646 -0.169536373897 0.669953050186 0.277412092314 1.0 767 | -0.457677494642 0.692436210127 0.557015957204 -0.495597324522 1.0 768 | -0.494708384035 
0.658145795121 0.0475908883633 0.782694249597 1.0 769 | 0.784850305347 -0.999062737453 0.517201541716 -0.55552530284 1.0 770 | -0.384635617447 0.811621104591 -0.159792961296 0.746453358417 1.0 771 | -0.697111430674 -0.747417542888 0.107073120311 0.545779929361 0.0 772 | 0.305776340969 0.792493237473 0.0674596263939 0.780685532668 1.0 773 | 0.480738134436 0.737429363078 -0.196404702893 0.554474116618 1.0 774 | 0.416725699781 0.474240261501 -0.189182928816 -0.416393404383 0.0 775 | 0.608285312351 -0.497151580205 -0.272739272447 -0.0265869827631 0.0 776 | 0.794538777291 0.38369823213 0.0979329472544 0.896917929632 1.0 777 | 0.70021053473 -0.514111020595 -0.20900854117 -0.682334391733 0.0 778 | 0.982541548365 -0.586592388006 -0.842886075909 0.202448923978 0.0 779 | 0.751922751964 0.72115647152 -0.736567484804 -0.579230421785 0.0 780 | 0.0078819373884 0.647693254839 -0.991333640741 0.633724642075 0.0 781 | 0.62596226115 0.784173777879 0.690105045591 -0.994065906247 1.0 782 | -0.896261884827 -0.368850279951 -0.0275624290983 0.820592930069 0.0 783 | 0.236892890794 0.743669698325 -0.879670888524 0.348187426549 0.0 784 | 0.843100151682 -0.54424417031 0.610123936746 0.164084667673 1.0 785 | 0.639272173402 -0.535113313921 0.110238600398 0.904942896953 1.0 786 | -0.455469923768 -0.814711836844 0.797302264615 -0.253819481474 1.0 787 | -0.638117409596 0.369728668839 -0.00607709149052 0.611476674331 1.0 788 | 0.990341125998 0.416621145523 -0.500061609975 -0.0923327218333 0.0 789 | -0.488918745775 -0.604327917916 0.156468240304 -0.155195499368 0.0 790 | 0.238338866245 0.110817048332 0.926357696317 0.215276471459 1.0 791 | 0.432336378572 0.465212780335 -0.396912315742 -0.0927135295365 0.0 792 | 0.572586636791 0.130893704782 0.761552023445 -0.506802722796 1.0 793 | -0.090208155604 -0.165516667177 0.712886285246 -0.540332438546 1.0 794 | 0.738941648918 -0.529423759554 0.68925753686 0.693755631777 1.0 795 | 0.223304165903 -0.382011892632 0.72474051678 0.0423017385706 1.0 796 | 
-0.921825543495 -0.622627587177 0.299776001983 -0.887502870198 0.0 797 | -0.632132289715 0.429725373967 -0.398203641014 -0.0877661592236 0.0 798 | -0.195621220723 -0.601744563845 0.0319624095246 -0.955768938881 0.0 799 | 0.627292218471 -0.256831056112 -0.524644073833 -0.130203043734 0.0 800 | 0.317948637729 -0.895416126622 0.955435388818 0.197692710884 1.0 801 | 0.00352210161521 0.724743683313 0.0250210139263 -0.868949410802 0.0 802 | 0.0439206773901 0.443698579414 0.932951405987 -0.816067720182 1.0 803 | -0.406250206205 -0.263667011751 -0.832857718706 -0.210819424902 0.0 804 | -0.0850916576168 0.788283920302 -0.874262145631 0.816183188626 0.0 805 | -0.999967435557 0.799542978655 -0.227989133749 0.462182381306 0.0 806 | -0.610924172059 -0.690225935534 0.759450691543 -0.312883104263 1.0 807 | 0.342316557012 -0.803393337373 -0.529304334414 0.824896499638 0.0 808 | -0.472776360059 0.628562039005 0.892971142366 -0.604151537117 1.0 809 | 0.976187508957 0.384194847723 0.505291300014 0.636187585187 1.0 810 | 0.171495019237 -0.0435080320733 -0.216212378324 -0.516534007086 0.0 811 | 0.456129966291 0.830170133694 0.691985743118 -0.898779831232 1.0 812 | 0.154734795203 -0.265860630938 0.941360857883 0.758672548447 1.0 813 | -0.0517303521721 0.862376456584 0.724613322588 -0.649705376005 1.0 814 | -0.505414956212 -0.759675588511 -0.329306902246 -0.630984751977 0.0 815 | 0.264122775181 0.696499016866 0.162332242883 -0.246971263685 1.0 816 | 0.450625157332 -0.46373046279 0.0236674418308 0.201816620762 1.0 817 | 0.0637463641191 -0.787004805235 -0.321434952535 0.702520428554 0.0 818 | -0.613211242225 -0.892286817604 0.0708939991564 0.171777904746 0.0 819 | -0.031799121483 -0.242130730098 -0.908530041424 0.880296273432 0.0 820 | -0.874361497507 0.403179892959 -0.173630386889 0.0689616192258 0.0 821 | -0.845840820395 -0.0633671252879 0.468333483382 0.959234174914 1.0 822 | -0.440866283575 -0.849670592144 0.712297714236 -0.00334027290686 1.0 823 | -0.164220969658 0.0513548259977 
-0.646485718156 -0.752152872067 0.0 824 | 0.733484898635 -0.200640151099 -0.0642488174918 0.0435070613497 1.0 825 | -0.763297341889 -0.53345018471 0.235365647967 -0.28732643426 0.0 826 | -0.283070347927 -0.375039985417 -0.206854195188 -0.62979875397 0.0 827 | -0.132382772241 -0.239943742409 -0.806454544271 0.738848641259 0.0 828 | -0.71692523277 -0.767553770384 0.350215188718 0.528690637705 1.0 829 | -0.321413629679 -0.153103857705 0.573629416419 -0.534539267416 1.0 830 | 0.557123598689 -0.311021848876 -0.443290331337 0.390143201992 0.0 831 | -0.384264324098 -0.881010251864 0.453064284692 0.119468312811 1.0 832 | -0.463198368165 -0.282967238763 -0.123785619143 -0.0921601034151 0.0 833 | 0.308531498693 0.629009007338 0.129934450294 -0.488069397578 1.0 834 | 0.399079605703 0.147669304199 -0.876401305018 0.0873087963857 0.0 835 | 0.99775911411 0.647704259407 -0.860999693373 0.961690700396 0.0 836 | 0.142687067118 -0.823554066003 0.076510775128 -0.372854230257 0.0 837 | -0.12484824207 0.920999077782 -0.965445342164 0.313428792234 0.0 838 | 0.417599543921 0.187313607089 -0.317294395079 0.126105091627 0.0 839 | 0.963629923735 -0.99938037456 -0.158124771794 0.134418242153 1.0 840 | -0.0792880197875 0.20684688567 0.393372256523 -0.483016638604 1.0 841 | 0.836868262354 0.945688243607 -0.291610335907 0.481506513208 1.0 842 | -0.406911757993 0.487865636361 -0.102262885705 0.525034156453 0.0 843 | -0.705124516529 0.752792534681 -0.705675677242 0.975240215986 0.0 844 | -0.83929952383 -0.0462838139795 -0.739852172874 -0.126890291753 0.0 845 | 0.931021317029 0.0184342703056 0.786645786753 0.378666855066 1.0 846 | -0.857763704311 -0.0959454405682 0.798880940063 -0.916060577051 1.0 847 | 0.917968320533 0.928466430561 0.284210698011 0.399846184204 1.0 848 | 0.162234224347 -0.307045747374 0.107700121833 -0.152446805827 1.0 849 | -0.305694377979 -0.534689053802 0.234853053751 0.0474990298277 1.0 850 | -0.693367600079 -0.139366291383 0.323885281069 -0.405457884994 0.0 851 | 
0.911310393709 -0.946765116846 0.53777586371 -0.594255595967 1.0 852 | 0.698835403043 -0.744890466017 -0.626004905413 0.660238948853 0.0 853 | 0.522407470777 -0.147583345359 -0.146303323053 -0.925268745155 0.0 854 | 0.367772809893 -0.650434949549 -0.445351402226 -0.145955207191 0.0 855 | 0.420334946303 -0.384024095794 -0.102418861025 -0.519583030812 0.0 856 | -0.55098358496 0.998631887252 -0.937292154942 0.923175397465 0.0 857 | -0.00422803916075 -0.326253350953 0.10900564487 0.437416809186 1.0 858 | 0.26037315272 0.499720964946 -0.0343978605578 0.785017939137 1.0 859 | -0.037016818112 -0.11704517169 -0.117585269532 0.480237444922 0.0 860 | 0.474474005546 -0.0762903196115 -0.602071297758 0.623576866419 0.0 861 | 0.719875840209 0.326548351231 -0.921432819848 -0.405351959519 0.0 862 | 0.281718478446 0.247690648127 0.00375122522984 0.857421859195 1.0 863 | 0.41602049511 0.63409400959 -0.0179746600361 0.958507377219 1.0 864 | 0.106139383463 -0.517167696599 -0.226403874541 -0.152605275386 0.0 865 | -0.129448899403 -0.989195526626 -0.47334413739 0.394013607489 0.0 866 | 0.839406776957 0.70102342217 0.184520997823 -0.955000115697 1.0 867 | -0.921418219749 0.0991354622365 0.866026812527 -0.64820533861 1.0 868 | -0.509293122597 -0.992534546342 0.55601847257 0.671327712712 1.0 869 | 0.501299962602 0.135830056047 -0.409684383581 -0.0165711423856 0.0 870 | -0.247461615908 -0.00241003225242 0.770687197142 0.410618697849 1.0 871 | 0.240314942849 0.699294638139 0.338165208838 0.199278999006 1.0 872 | 0.31764438213 0.320241133065 0.751772594888 -0.103654648142 1.0 873 | -0.210755120623 0.600547414844 -0.433939142917 0.892717006254 0.0 874 | 0.823579772196 -0.802887301547 0.625947465101 -0.543020797619 1.0 875 | -0.0571024174576 -0.392942349576 -0.736020825862 -0.230873478995 0.0 876 | -0.019797630185 -0.0448083307269 0.281815443429 -0.793381246533 1.0 877 | 0.0958650212824 -0.424559433433 -0.0355166520277 -0.0288448152578 0.0 878 | 0.0531591903996 -0.661090989001 0.129103779434 
0.0192819372298 1.0 879 | -0.877819204146 -0.361103222811 0.02914925583 0.52548153683 0.0 880 | -0.841383820001 0.379442145289 0.449685515264 0.196113378898 1.0 881 | 0.450093495858 0.586954413949 0.521607669736 0.844964450629 1.0 882 | -0.0674296733483 -0.590716587049 -0.704505813467 0.56362683918 0.0 883 | 0.857912573541 0.958015773895 -0.276197108601 -0.558636999415 1.0 884 | 0.905097588513 0.340773083214 0.783567727965 -0.773909832566 1.0 885 | -0.526073632269 0.43661537736 -0.00385884599044 0.0483964670686 0.0 886 | -0.502254770775 -0.953283107603 0.318087817134 -0.759778171385 0.0 887 | 0.170836796451 -0.416880710142 0.514256272776 -0.152549766113 1.0 888 | 0.486294680551 0.181422254572 -0.18724076775 -0.543223847991 0.0 889 | -0.681537876882 -0.725065034054 0.284879561124 0.718432301669 1.0 890 | 0.72044281853 -0.195794423643 0.889713452129 0.738105113791 1.0 891 | 0.875900959359 0.908335003112 0.742814311312 0.298240496973 1.0 892 | -0.242348828916 -0.514662505638 0.766968823522 -0.976809077235 1.0 893 | -0.824323628268 -0.0809141026706 0.148965212804 0.397315861774 0.0 894 | 0.740974317733 0.504770813379 -0.851316897467 0.508821306371 0.0 895 | -0.934468172688 -0.343561697138 0.564016845651 0.63154498843 1.0 896 | -0.853243680327 0.112986430075 -0.0269799553619 -0.561921522219 0.0 897 | -0.201867654579 0.738272767497 -0.39640422434 0.478608188853 0.0 898 | -0.191668978642 0.611861314877 0.212238653497 0.384597349037 1.0 899 | 0.491682397418 -0.893755159074 -0.790752831114 0.437028736394 0.0 900 | 0.897177221302 0.897660987799 -0.0973574879113 0.207948175544 1.0 901 | 0.462363296456 -0.713383572681 0.557363313852 0.227969036884 1.0 902 | -0.87617299787 -0.126800640884 0.359920382178 -0.805679085404 0.0 903 | 0.282944751048 -0.881338919479 -0.965010549787 -0.766323204038 0.0 904 | 0.245416554635 0.505654892594 -0.901226761816 0.993402042339 0.0 905 | 0.154578420726 0.954020836071 0.650950850208 -0.830795016905 1.0 906 | -0.129766217091 0.252248797313 
0.538084644882 0.0873685949022 1.0 907 | -0.281265326255 -0.717772405759 0.846484287314 0.0431384966983 1.0 908 | -0.0709011072196 -0.251468014961 0.00774914750371 -0.864651690961 0.0 909 | 0.460054883093 0.560757710048 -0.44512662257 0.93137086676 1.0 910 | 0.734207840906 -0.686217993232 -0.663552980143 -0.605632242332 0.0 911 | -0.297050587933 0.865822289251 -0.288440177909 -0.728442365708 0.0 912 | 0.285569042993 -0.904208988373 -0.608673331007 -0.824104165192 0.0 913 | -0.668900717306 -0.37845301444 0.912469243785 0.915869388923 1.0 914 | -0.548963989012 0.936270774312 0.161237298239 0.890759772491 1.0 915 | 0.550496823972 -0.716901751049 -0.388480967275 0.31517926487 0.0 916 | 0.215379295261 -0.91586654893 0.95615675086 -0.863041849126 1.0 917 | 0.609689460388 0.103554397744 -0.3827575099 -0.821226037818 0.0 918 | 0.735549914505 -0.0240640528727 0.807113890451 -0.960893452692 1.0 919 | -0.975168832168 0.449552935638 0.87993792283 0.70492294404 1.0 920 | -0.981276001533 0.758140758196 0.478080077902 0.305960031127 1.0 921 | 0.269473194958 0.854254772828 -0.432872231955 -0.786271356298 0.0 922 | -0.327462856764 0.689080619259 0.0753045493781 -0.0371130287889 1.0 923 | 0.599572430365 0.99426996484 -0.722227664689 -0.00576257066606 0.0 924 | 0.792159682295 -0.262143121584 0.660237253106 0.0787172488402 1.0 925 | 0.178281905819 -0.0354661116672 -0.824398573189 -0.997834907279 0.0 926 | -0.725951957222 -0.577938816346 -0.256780185199 0.307672737951 0.0 927 | -0.472926914778 0.065287664969 0.846579182881 -0.483254434566 1.0 928 | 0.863796935905 -0.895294251109 0.403650462393 0.411495867774 1.0 929 | -0.892948395736 -0.305781489965 0.254708613434 -0.26349098351 0.0 930 | -0.813921397054 -0.436283432422 0.732236185172 -0.497498093081 1.0 931 | -0.232080414971 -0.659018990594 0.823512929874 0.307334209557 1.0 932 | 0.800336139863 -0.821036177033 -0.775431052047 -0.97427924001 0.0 933 | -0.793308438475 -0.288202941801 0.990504939155 -0.344640244772 1.0 934 | 
0.299114203894 0.938319564499 0.499031725688 -0.108495997704 1.0 935 | -0.981445836753 0.841803304881 -0.207935356161 -0.341182809479 0.0 936 | -0.32674468274 -0.0270182115593 0.7242606277 -0.59758890457 1.0 937 | 0.515927939392 0.404038412244 -0.893573868825 0.517066661396 0.0 938 | -0.59096168405 0.671445912586 -0.0643943805561 0.382717508035 0.0 939 | 0.389768467826 0.490298699259 -0.0500856510001 -0.496245033521 1.0 940 | 0.546061142524 0.571312000285 0.500178318733 -0.65979378976 1.0 941 | 0.557392740213 -0.482237047847 0.200628275607 -0.746177576044 1.0 942 | 0.333616403041 -0.964711949714 0.363394560841 -0.0762853885184 1.0 943 | 0.657462848726 0.514348033598 -0.155709843977 -0.457601379521 1.0 944 | 0.550870528163 -0.0719790465118 0.355608679776 -0.452109846353 1.0 945 | 0.542476090703 -0.165583407363 -0.552433904464 0.531137341992 0.0 946 | 0.582305734324 0.252382171568 -0.730281252194 0.60496626655 0.0 947 | -0.649728602172 0.739009541625 0.591271028291 0.351735292715 1.0 948 | -0.592539243146 -0.81749416011 -0.266303297781 0.32373299196 0.0 949 | -0.138304226027 0.856033302807 -0.423983004906 0.978950916395 0.0 950 | -0.710243896445 0.694534173101 -0.674753992433 0.614956841439 0.0 951 | 0.37127886636 -0.32669659564 -0.410587058548 -0.288943272329 0.0 952 | -0.615386307356 -0.277036800959 0.30042215803 -0.554588541683 0.0 953 | 0.0250026423245 -0.678511956365 0.0725129261695 -0.43251444674 0.0 954 | -0.64124694464 -0.703455612722 -0.399974559467 0.933107833559 0.0 955 | 0.643956078168 0.3475233379 -0.919383837447 -0.9047705182 0.0 956 | -0.701034919819 -0.778392003139 0.4981973839 -0.285949392954 0.0 957 | -0.78249578523 -0.190278602724 -0.484246555075 -0.0753475758248 0.0 958 | 0.971708124133 -0.196222734891 -0.351038046472 0.992968443052 1.0 959 | -0.960290728633 -0.596676494865 0.955226869403 0.843617569443 1.0 960 | -0.85282807521 0.956234574232 -0.591638901214 -0.666919066258 0.0 961 | -0.479141915708 0.169319022689 0.962734345815 0.593230513214 1.0 
962 | 0.239468686945 -0.559635268469 0.974416420038 -0.518066349207 1.0 963 | -0.440931194677 0.772929341369 0.183821784695 0.0738081849671 1.0 964 | -0.22034828454 -0.650218971629 -0.922839565308 0.902066892014 0.0 965 | -0.671085024786 0.925512430008 0.303597252597 0.462087953728 1.0 966 | -0.55438164153 -0.592861442933 -0.517712712476 0.300585596055 0.0 967 | -0.584812630785 0.874638704429 0.916577585931 -0.66057102149 1.0 968 | -0.948076252525 -0.379797190945 0.333421292843 -0.30663066664 0.0 969 | -0.270595365105 0.218774316529 0.346885535298 -0.984931600786 0.0 970 | 0.902301584316 0.289318067932 0.162477387639 0.797381341117 1.0 971 | -0.955484167438 -0.759963669325 -0.113461766433 -0.737072702639 0.0 972 | -0.732662550596 -0.790987999995 -0.057678239461 -0.809996240464 0.0 973 | 0.307236362837 -0.600602569257 0.453709806091 -0.451998713808 1.0 974 | 0.908940927854 0.902067232476 -0.585689012863 -0.095615759948 0.0 975 | -0.866019168884 0.472612549275 0.52147318057 0.413223051229 1.0 976 | 0.120970430681 0.48453578729 0.0936969348161 -0.330855654012 1.0 977 | -0.676313112435 0.95926294241 0.233443266879 -0.420604988184 1.0 978 | -0.327958729631 -0.725157181511 -0.772376291413 0.798123637328 0.0 979 | -0.0140530029193 -0.422123714268 0.599292514722 -0.93404232875 1.0 980 | 0.133048176951 -0.822702827441 0.159780449828 0.657263357698 1.0 981 | -0.448359936158 -0.340499202354 0.151681918584 -0.595763558163 0.0 982 | 0.0974074358433 -0.337762753527 0.212180913395 0.85828900996 1.0 983 | 0.927331851747 -0.146145736458 -0.678014167001 0.47511089996 0.0 984 | 0.932036131619 -0.00251387033138 0.797061369432 -0.305772199117 1.0 985 | -0.82583681856 -0.801129049859 0.472104288433 -0.178217923547 0.0 986 | 0.132890719729 0.840007011824 -0.140948765606 0.575533293451 1.0 987 | 0.119488173095 -0.241384566385 -0.911946500254 -0.00803007367759 0.0 988 | -0.842957119649 0.176166784964 -0.519031699488 -0.0855644966521 0.0 989 | 0.384039441173 0.606292700705 0.0187094839398 
# Date: 2018-08-17 8:47
# Author: Enneng Yang
# Abstract: FOBOS (Forward-Backward Splitting) for logistic regression.
#
# NOTE(review): reconstructed/reformatted from a flattened dump. Unused
# imports (sys, random, tensorflow, mpl_toolkits) were dropped and the
# matplotlib import deferred into __main__ so importing the module has
# no side effects.

import numpy as np


class LR(object):
    """Logistic-regression primitives used as the decision function."""

    @staticmethod
    def fn(w, x):
        """Sigmoid of the linear score w.x."""
        return 1.0 / (1.0 + np.exp(-w.dot(x)))

    @staticmethod
    def loss(y, y_hat):
        """Cross-entropy loss; nan_to_num guards log(0)."""
        return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat)))

    @staticmethod
    def grad(y, y_hat, x):
        """Gradient of the cross-entropy loss w.r.t. w."""
        return (y_hat - y) * x


class FOBOS(object):
    """FOBOS online learner: gradient step followed by an L1 proximal
    (soft-threshold) step, giving sparse weights."""

    def __init__(self, K, alpha, lambda_, decisionFunc=LR, dim=4):
        # K is kept for interface compatibility with the sibling TG learner;
        # this implementation shrinks on every step.
        self.K = K
        self.alpha = alpha          # base learning rate (damped by 1/sqrt(t))
        self.lambda_ = lambda_      # L1 regularization strength
        self.dim = dim              # feature dimension (was hard-coded to 4)
        self.w = np.zeros(dim)      # weight vector
        self.decisionFunc = decisionFunc

    def predict(self, x):
        """Probability estimate for feature vector x."""
        return self.decisionFunc.fn(self.w, x)

    def update(self, x, y, step):
        """One FOBOS step on example (x, y); returns the pre-update loss."""
        y_hat = self.predict(x)
        g = self.decisionFunc.grad(y, y_hat, x)

        learning_rate = self.alpha / np.sqrt(step + 1)    # eta_t
        learning_rate_p = self.alpha / np.sqrt(step + 2)  # eta_{t+1/2} for the proximal step

        # Gradient step, then component-wise soft thresholding.
        for i in range(self.dim):
            w_e_g = self.w[i] - learning_rate * g[i]
            self.w[i] = np.sign(w_e_g) * max(0., np.abs(w_e_g) - learning_rate_p * self.lambda_)

        return self.decisionFunc.loss(y, y_hat)

    def training(self, trainSet, max_itr=100000):
        """Cycle over trainSet until max_itr updates; returns (losses, steps)."""
        n = 0
        all_loss = []
        all_step = []
        while True:
            for var in trainSet:
                x = var[:self.dim]
                y = var[self.dim:self.dim + 1]
                loss = self.update(x, y, n)

                all_loss.append(loss)
                all_step.append(n)
                print("itr=" + str(n) + "\tloss=" + str(loss))

                n += 1
                if n > max_itr:
                    print("reach max iteration", max_itr)
                    return all_loss, all_step


if __name__ == '__main__':
    import matplotlib.pyplot as plt

    trainSet = np.loadtxt('Data/FTRLtrain.txt')
    fobos = FOBOS(K=5, alpha=0.01, lambda_=1.)  # renamed: no longer shadows the class
    all_loss, all_step = fobos.training(trainSet, max_itr=100000)
    print(fobos.w)

    testSet = np.loadtxt('Data/FTRLtest.txt')
    correct = 0
    wrong = 0
    for var in testSet:
        x = var[:4]
        y = var[4:5]
        y_hat = 1.0 if fobos.predict(x) > 0.5 else 0.0
        if y == y_hat:
            correct += 1
        else:
            wrong += 1
    print("correct ratio:", 1.0 * correct / (correct + wrong), "\t correct:", correct, "\t wrong:", wrong)

    plt.title('FOBOS')
    plt.xlabel('training_epochs')
    plt.ylabel('loss')
    plt.plot(all_step, all_loss)
    plt.show()
# Date: 2018-08-18 19:47
# Author: Enneng Yang
# Abstract: FTML (Follow the Moving Leader) for logistic regression.
#
# NOTE(review): reconstructed/reformatted from a flattened dump; unused
# imports dropped, matplotlib deferred into __main__.

import numpy as np


class LR(object):
    """Logistic-regression primitives used as the decision function."""

    @staticmethod
    def fn(w, x):
        """Sigmoid of the linear score w.x."""
        return 1.0 / (1.0 + np.exp(-w.dot(x)))

    @staticmethod
    def loss(y, y_hat):
        """Cross-entropy loss; nan_to_num guards log(0)."""
        return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat)))

    @staticmethod
    def grad(y, y_hat, x):
        """Gradient of the cross-entropy loss w.r.t. w."""
        return (y_hat - y) * x


class FTRL(object):
    """FTML learner (class named FTRL upstream, kept for compatibility):
    FTRL-Proximal whose accumulators decay with beta1/beta2 so the model
    follows the *moving* leader (Zheng & Kwok, ICML 2017)."""

    def __init__(self, dim, l1, l2, alpha, epsilon, beta1, beta2, decisionFunc=LR):
        self.dim = dim
        self.l1 = l1            # L1 strength (sparsity)
        self.l2 = l2            # L2 strength
        self.alpha = alpha      # base learning rate
        self.epsilon = epsilon  # numerical-stability constant
        self.beta1 = beta1      # EMA factor for gradients / d_t
        self.beta2 = beta2      # EMA factor for squared gradients
        self.decisionFunc = decisionFunc

        self.z = np.zeros(dim)  # dual accumulator
        self.q = np.zeros(dim)  # EMA of squared gradients (v_t)
        self.d = np.zeros(dim)  # previous d_t
        self.w = np.zeros(dim)

    def predict(self, x):
        """Probability estimate for feature vector x."""
        return self.decisionFunc.fn(self.w, x)

    def update(self, x, y, step):
        """One FTML step; `step` is 1-based. Returns the pre-update loss."""
        # Closed-form weights from the accumulators (FTRL soft threshold).
        # (Original wrote sign(z*l1); equivalent to sign(z)*l1 for l1 > 0,
        # normalized here for consistency with FTRL.py.)
        self.w = np.array([0 if np.abs(self.z[i]) <= self.l1
                           else (np.sign(self.z[i]) * self.l1 - self.z[i]) /
                                (self.l2 + (self.epsilon + np.sqrt(self.q[i])) / self.alpha)
                           for i in range(self.dim)])

        y_hat = self.predict(x)
        g = self.decisionFunc.grad(y, y_hat, x)

        self.q = self.beta2 * self.q + (1 - self.beta2) * (g * g)
        eta_t = self.alpha / np.sqrt(step)
        # BUG FIX: original parenthesized as (1 - beta1**t / eta_t); FTML
        # defines d_t = ((1 - beta1**t) / eta_t) * (sqrt(q_t / (1 - beta2**t)) + eps).
        dt = ((1 - np.power(self.beta1, step)) / eta_t) * \
             (np.sqrt(self.q / (1 - np.power(self.beta2, step))) + self.epsilon)
        # FTRL analogue: sigma = (sqrt(q + g*g) - sqrt(q)) / alpha
        sigma = dt - self.beta1 * self.d
        self.d = dt
        self.z = self.beta1 * self.z + (1 - self.beta1) * g - sigma * self.w
        return self.decisionFunc.loss(y, y_hat)

    def training(self, trainSet, max_itr):
        """Cycle over trainSet until max_itr updates; returns (losses, steps)."""
        n = 0
        all_loss = []
        all_step = []
        while True:
            for var in trainSet:
                loss = self.update(var[:4], var[4:5], n + 1)  # 1-based step

                all_loss.append(loss)
                all_step.append(n)
                print("itr=" + str(n) + "\tloss=" + str(loss))

                n += 1
                if n > max_itr:
                    print("reach max iteration", max_itr)
                    return all_loss, all_step


if __name__ == '__main__':
    import matplotlib.pyplot as plt

    d = 4
    trainSet = np.loadtxt('Data/FTRLtrain.txt')
    ftrl = FTRL(dim=d, l1=0.001, l2=0.1, alpha=0.1, epsilon=0.01, beta1=0.6, beta2=0.999)
    all_loss, all_step = ftrl.training(trainSet, max_itr=100000)
    print(ftrl.w)

    testSet = np.loadtxt('Data/FTRLtest.txt')
    correct = 0
    wrong = 0
    for var in testSet:
        x = var[:4]
        y = var[4:5]
        y_hat = 1.0 if ftrl.predict(x) > 0.5 else 0.0
        if y == y_hat:
            correct += 1
        else:
            wrong += 1
    print("correct ratio:", 1.0 * correct / (correct + wrong), "\t correct:", correct, "\t wrong:", wrong)

    plt.title('FTML')
    plt.xlabel('training_epochs')
    plt.ylabel('loss')
    plt.plot(all_step, all_loss)
    plt.show()
# Date: 2018-08-17 9:47
# Author: Enneng Yang
# Abstract:ftrl ref:http://www.cnblogs.com/zhangchaoyang/articles/6854175.html

import numpy as np


class LR(object):
    """Logistic regression used as the decision function."""

    @staticmethod
    def fn(w, x):
        """Sigmoid of the linear score w.x."""
        return 1.0 / (1.0 + np.exp(-w.dot(x)))

    @staticmethod
    def loss(y, y_hat):
        """Cross-entropy loss; nan_to_num guards log(0)."""
        return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat)))

    @staticmethod
    def grad(y, y_hat, x):
        """Gradient of the cross-entropy loss w.r.t. w."""
        return (y_hat - y) * x


class FTRL(object):
    """FTRL-Proximal online learner with per-coordinate adaptive rates."""

    def __init__(self, dim, l1, l2, alpha, beta, decisionFunc=LR):
        self.dim = dim
        self.l1 = l1          # L1 strength (drives sparsity)
        self.l2 = l2          # L2 strength
        self.alpha = alpha    # learning-rate scale
        self.beta = beta      # learning-rate smoothing
        self.decisionFunc = decisionFunc

        self.z = np.zeros(dim)  # dual accumulator
        self.q = np.zeros(dim)  # running sum of squared gradients
        self.w = np.zeros(dim)  # lazily derived weights

    def predict(self, x):
        """Probability estimate for feature vector x."""
        return self.decisionFunc.fn(self.w, x)

    def _weight_at(self, i):
        """Closed-form per-coordinate weight from the accumulators."""
        zi = self.z[i]
        if np.abs(zi) <= self.l1:
            return 0
        denom = self.l2 + (self.beta + np.sqrt(self.q[i])) / self.alpha
        return (np.sign(zi) * self.l1 - zi) / denom

    def update(self, x, y):
        """One FTRL step on (x, y); returns the loss before the update."""
        self.w = np.array([self._weight_at(i) for i in range(self.dim)])

        y_hat = self.predict(x)
        g = self.decisionFunc.grad(y, y_hat, x)
        sigma = (np.sqrt(self.q + g * g) - np.sqrt(self.q)) / self.alpha
        self.z += g - sigma * self.w
        self.q += g * g
        return self.decisionFunc.loss(y, y_hat)

    def training(self, trainSet, max_itr):
        """Cycle over trainSet until max_itr updates; returns (losses, steps)."""
        itr = 0
        all_loss = []
        all_step = []
        while True:
            for var in trainSet:
                loss = self.update(var[:4], var[4:5])

                all_loss.append(loss)
                all_step.append(itr)
                print("itr=" + str(itr) + "\tloss=" + str(loss))

                itr += 1
                if itr > max_itr:
                    print("reach max iteration", max_itr)
                    return all_loss, all_step


if __name__ == '__main__':
    import matplotlib.pyplot as plt

    d = 4
    trainSet = np.loadtxt('Data/FTRLtrain.txt')
    ftrl = FTRL(dim=d, l1=0.001, l2=0.1, alpha=0.1, beta=1e-8)
    all_loss, all_step = ftrl.training(trainSet, max_itr=100000)
    print(ftrl.w)

    testSet = np.loadtxt('Data/FTRLtest.txt')
    correct = 0
    wrong = 0
    for var in testSet:
        x, y = var[:4], var[4:5]
        y_hat = 1.0 if ftrl.predict(x) > 0.5 else 0.0
        if y == y_hat:
            correct += 1
        else:
            wrong += 1
    print("correct ratio:", 1.0 * correct / (correct + wrong), "\t correct:", correct, "\t wrong:", wrong)

    plt.title('FTRL')
    plt.xlabel('training_epochs')
    plt.ylabel('loss')
    plt.plot(all_step, all_loss)
    plt.show()
recognition: 10 classes 24 | 25 | # Set model weights 26 | W = tf.Variable(tf.zeros([784, 10])) 27 | b = tf.Variable(tf.zeros([10])) 28 | 29 | # Construct model 30 | pred = tf.nn.softmax(tf.matmul(x, W) + b) # Softmax 31 | 32 | # Minimize error using cross entropy 33 | cost = tf.reduce_mean(-tf.reduce_sum(y * tf.log(pred), reduction_indices=1)) 34 | 35 | optimizer = tf.train.FtrlOptimizer(learning_rate=learning_rate).minimize(cost) 36 | 37 | 38 | # Initializing the variables 39 | init = tf.global_variables_initializer() 40 | 41 | all_loss = [] 42 | all_step = [] 43 | 44 | plt.title('Optimization method:'+ method_name) 45 | plt.xlabel('training_epochs') 46 | plt.ylabel('loss') 47 | 48 | # Launch the graph 49 | with tf.Session() as sess: 50 | sess.run(init) 51 | # Training cycle 52 | for epoch in range(training_epochs): 53 | 54 | avg_cost = 0. 55 | epoch_cost = 0. 56 | 57 | total_batch = int(mnist.train.num_examples/batch_size) 58 | 59 | # Loop over all batches 60 | for i in range(total_batch): 61 | 62 | batch_x, batch_y = mnist.train.next_batch(batch_size) 63 | 64 | # Run optimization op (backprop) and cost op (to get loss value) 65 | _, c_ = sess.run([optimizer, cost], feed_dict={x: batch_x, y: batch_y}) 66 | 67 | # Compute average loss 68 | epoch_cost += c_ 69 | 70 | avg_cost = epoch_cost / total_batch 71 | 72 | # opt loss 73 | all_loss.append(avg_cost) 74 | all_step.append(epoch) 75 | 76 | # Display logs per epoch step 77 | if epoch % display_step == 0: 78 | print("Epoch:", '%04d' % (epoch+1), "cost=", "{:.9f}".format(avg_cost)) 79 | 80 | print("Optimization Finished!") 81 | 82 | plt.plot(all_step, all_loss, color='red', label=method_name) 83 | plt.legend(loc='best') 84 | 85 | plt.show() 86 | plt.pause(1000) 87 | 88 | 89 | 90 | 91 | -------------------------------------------------------------------------------- /OGD.py: -------------------------------------------------------------------------------- 1 | # Date: 2018-08-17 8:47 2 | # Author: Enneng Yang 3 | # 
Abstract:OGD 4 | 5 | import sys 6 | import matplotlib.pyplot as plt 7 | from mpl_toolkits.mplot3d import Axes3D 8 | import random 9 | import numpy as np 10 | import tensorflow as tf 11 | 12 | # logistic regression 13 | class LR(object): 14 | 15 | @staticmethod 16 | def fn(w, x): 17 | ''' sigmod function ''' 18 | return 1.0 / (1.0 + np.exp(-w.dot(x))) 19 | 20 | @staticmethod 21 | def loss(y, y_hat): 22 | '''cross-entropy loss function''' 23 | return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1-y)*np.log(1-y_hat))) 24 | 25 | @staticmethod 26 | def grad(y, y_hat, x): 27 | '''gradient function''' 28 | return (y_hat - y) * x 29 | 30 | class OGD(object): 31 | 32 | def __init__(self,alpha,decisionFunc=LR): 33 | self.alpha = alpha 34 | self.w = np.zeros(4) 35 | self.decisionFunc = decisionFunc 36 | 37 | def predict(self, x): 38 | return self.decisionFunc.fn(self.w, x) 39 | 40 | def update(self, x, y,step): 41 | y_hat = self.predict(x) 42 | g = self.decisionFunc.grad(y, y_hat, x) 43 | learning_rate = self.alpha / np.sqrt(step + 1) # damping step size 44 | # SGD Update rule theta = theta - learning_rate * gradient 45 | self.w = self.w - learning_rate * g 46 | return self.decisionFunc.loss(y,y_hat) 47 | 48 | def training(self, trainSet, max_itr=100000): 49 | n = 0 50 | 51 | all_loss = [] 52 | all_step = [] 53 | while True: 54 | for var in trainSet: 55 | x= var[:4] 56 | y= var[4:5] 57 | loss = self.update(x, y,n) 58 | 59 | all_loss.append(loss) 60 | all_step.append(n) 61 | 62 | print("itr=" + str(n) + "\tloss=" + str(loss)) 63 | 64 | n += 1 65 | if n > max_itr: 66 | print("reach max iteration", max_itr) 67 | return all_loss, all_step 68 | 69 | if __name__ == '__main__': 70 | 71 | trainSet = np.loadtxt('Data/FTRLtrain.txt') 72 | OGD = OGD(alpha=0.01) 73 | all_loss, all_step = OGD.training(trainSet, max_itr=100000) 74 | w = OGD.w 75 | print(w) 76 | 77 | testSet = np.loadtxt('Data/FTRLtest.txt') 78 | correct = 0 79 | wrong = 0 80 | for var in testSet: 81 | x = var[:4] 82 | y = 
# Date: 2018-08-17 8:47
# Author: Enneng Yang
# Abstract: RDA (Regularized Dual Averaging)
#
# NOTE(review): the middle of this file was lost in the flattened dump (an
# HTML-eaten `<` swallowed original lines ~50-92). The update rule below is
# the standard simple-RDA L1 closed form (Xiao 2010), consistent with the
# visible fragments (running-average gradient self.g, lambda_/gamma params).
# Upstream names this class FOBOS even though it implements RDA; the name is
# kept for interface compatibility.

import numpy as np


class LR(object):
    """Logistic-regression primitives used as the decision function."""

    @staticmethod
    def fn(w, x):
        """Sigmoid of the linear score w.x."""
        return 1.0 / (1.0 + np.exp(-w.dot(x)))

    @staticmethod
    def loss(y, y_hat):
        """Cross-entropy loss; nan_to_num guards log(0)."""
        return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat)))

    @staticmethod
    def grad(y, y_hat, x):
        """Gradient of the cross-entropy loss w.r.t. w."""
        return (y_hat - y) * x


class FOBOS(object):
    """RDA online learner: keeps the running-average gradient and sets the
    weights in closed form with L1 soft truncation."""

    def __init__(self, K, alpha, lambda_, gamma, decisionFunc=LR):
        self.K = K                  # kept for interface parity with siblings
        self.alpha = alpha          # learning rate (kept for interface parity)
        self.lambda_ = lambda_      # L1 truncation threshold
        self.gamma = gamma          # scale of the sqrt(t) proximal term
        self.w = np.zeros(4)        # weights
        self.decisionFunc = decisionFunc
        self.g = np.zeros(4)        # running-average gradient \bar g_t

    def predict(self, x):
        """Probability estimate for feature vector x."""
        return self.decisionFunc.fn(self.w, x)

    def update(self, x, y, step):
        """One RDA step; `step` must be >= 1 (1-based) because of the 1/step
        running average. Returns the pre-update loss."""
        y_hat = self.predict(x)
        g = self.decisionFunc.grad(y, y_hat, x)
        self.g = (step - 1) / step * self.g + (1 / step) * g

        # Closed-form RDA solution: zero inside the lambda ball, otherwise
        # a -sqrt(t)/gamma scaled, soft-thresholded average gradient.
        for i in range(4):
            if abs(self.g[i]) < self.lambda_:
                self.w[i] = 0
            else:
                self.w[i] = -(np.sqrt(step) / self.gamma) * \
                            (self.g[i] - self.lambda_ * np.sign(self.g[i]))

        return self.decisionFunc.loss(y, y_hat)

    def training(self, trainSet, max_itr=100000):
        """Cycle over trainSet until max_itr updates; returns (losses, steps)."""
        n = 0
        all_loss = []
        all_step = []
        while True:
            for var in trainSet:
                x = var[:4]
                y = var[4:5]
                loss = self.update(x, y, n + 1)  # 1-based step

                all_loss.append(loss)
                all_step.append(n)
                print("itr=" + str(n) + "\tloss=" + str(loss))

                n += 1
                if n > max_itr:
                    print("reach max iteration", max_itr)
                    return all_loss, all_step


if __name__ == '__main__':
    import matplotlib.pyplot as plt

    trainSet = np.loadtxt('Data/FTRLtrain.txt')
    rda = FOBOS(K=5, alpha=0.01, lambda_=1., gamma=1.)
    all_loss, all_step = rda.training(trainSet, max_itr=100000)
    print(rda.w)

    testSet = np.loadtxt('Data/FTRLtest.txt')
    correct = 0
    wrong = 0
    for var in testSet:
        x = var[:4]
        y = var[4:5]
        y_hat = 1.0 if rda.predict(x) > 0.5 else 0.0
        if y == y_hat:
            correct += 1
        else:
            wrong += 1
    print("correct ratio:", 1.0 * correct / (correct + wrong), "\t correct:", correct, "\t wrong:", wrong)

    plt.title('RDA')
    plt.xlabel('training_epochs')
    plt.ylabel('loss')
    plt.plot(all_step, all_loss)
    plt.show()
3 | principal line: sparsity
4 | Algorithm: TG -> FOBOS -> RDA -> FTRL -> FTML 5 | 6 | ## Algorithm list: 7 | > { Online gradient descent } Algorithm: [OGD.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/OGD.py)<br>
8 | > { Truncated Gradient } Algorithm: [TG.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/TG.py)
9 | > { Forward-Backward Splitting } Algorithm: [FOBOS.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/FOBOS.py)
10 | > { Regularized Dual Averaging } Algorithm: [RDA.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/RDA.py)
11 | > { Follow-the-regularized-Leader } Algorithm: [FTRL.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/FTRL.py)
12 | > { Follow-the-regularized-Leader based on Tensorflow } Algorithm: [FTRL_Optimizer.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/FTRL_Optimizer.py)
13 | > { Follow the Moving Leader } Algorithm: [FTML.py](https://github.com/YEN-GitHub/OnlineLearning_BasicAlgorithm/blob/master/FTML.py)
# Date: 2018-08-17 8:47
# Author: Enneng Yang
# Abstract: Truncated Gradient
#
# NOTE(review): the middle of this file was lost in the flattened dump (an
# HTML-eaten `<` swallowed original lines ~55-105). The truncation operator
# below is the standard T1 operator from Langford et al. 2009 and matches the
# visible fragments (w_e_g, temp_lambda = K * lambda_, step % K == 0 gate);
# the __main__ hyperparameters are reconstructed — confirm against upstream.

import numpy as np


class LR(object):
    """Logistic-regression primitives used as the decision function."""

    @staticmethod
    def fn(w, x):
        """Sigmoid of the linear score w.x."""
        return 1.0 / (1.0 + np.exp(-w.dot(x)))

    @staticmethod
    def loss(y, y_hat):
        """Cross-entropy loss; nan_to_num guards log(0)."""
        return np.sum(np.nan_to_num(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat)))

    @staticmethod
    def grad(y, y_hat, x):
        """Gradient of the cross-entropy loss w.r.t. w."""
        return (y_hat - y) * x


class TG(object):
    """Truncated Gradient: plain SGD whose weights are pushed toward zero
    every K-th step by a bounded truncation operator."""

    def __init__(self, K, alpha, theta, lambda_, decisionFunc=LR):
        self.K = K              # truncate every K online steps
        self.alpha = alpha      # base learning rate
        self.theta = theta      # truncation threshold
        self.lambda_ = lambda_  # truncation strength
        self.w = np.zeros(4)    # weights
        self.decisionFunc = decisionFunc

    def predict(self, x):
        """Probability estimate for feature vector x."""
        return self.decisionFunc.fn(self.w, x)

    def update(self, x, y, step):
        """One TG step; truncates when step % K == 0, plain SGD otherwise.
        Returns the pre-update loss."""
        y_hat = self.predict(x)
        g = self.decisionFunc.grad(y, y_hat, x)
        learning_rate = self.alpha / np.sqrt(step + 1)  # damped step size

        if step % self.K == 0:
            # gravity accumulated over the K skipped steps
            temp_lambda = self.K * self.lambda_
            for i in range(4):
                w_e_g = self.w[i] - learning_rate * g[i]
                if 0 < w_e_g <= self.theta:
                    self.w[i] = max(0., w_e_g - learning_rate * temp_lambda)
                elif -self.theta <= w_e_g < 0:
                    self.w[i] = min(0., w_e_g + learning_rate * temp_lambda)
                else:
                    self.w[i] = w_e_g
        else:
            # plain SGD step between truncations
            self.w = self.w - learning_rate * g

        return self.decisionFunc.loss(y, y_hat)

    def training(self, trainSet, max_itr=100000):
        """Cycle over trainSet until max_itr updates; returns (losses, steps)."""
        n = 0
        all_loss = []
        all_step = []
        while True:
            for var in trainSet:
                x = var[:4]
                y = var[4:5]
                loss = self.update(x, y, n)

                all_loss.append(loss)
                all_step.append(n)
                print("itr=" + str(n) + "\tloss=" + str(loss))

                n += 1
                if n > max_itr:
                    print("reach max iteration", max_itr)
                    return all_loss, all_step


if __name__ == '__main__':
    import matplotlib.pyplot as plt

    trainSet = np.loadtxt('Data/FTRLtrain.txt')
    tg = TG(K=5, alpha=0.01, theta=0.1, lambda_=1.)  # renamed: no longer shadows the class
    all_loss, all_step = tg.training(trainSet, max_itr=100000)
    print(tg.w)

    testSet = np.loadtxt('Data/FTRLtest.txt')
    correct = 0
    wrong = 0
    for var in testSet:
        x = var[:4]
        y = var[4:5]
        y_hat = 1.0 if tg.predict(x) > 0.5 else 0.0
        if y == y_hat:
            correct += 1
        else:
            wrong += 1
    print("correct ratio:", 1.0 * correct / (correct + wrong), "\t correct:", correct, "\t wrong:", wrong)

    plt.title('Truncated Gradient')
    plt.xlabel('training_epochs')
    plt.ylabel('loss')
    plt.plot(all_step, all_loss)
    plt.show()