├── .gitignore ├── LICENSE.txt ├── README.md ├── build.sbt ├── data ├── KMeans.txt ├── MultiVariateLinearRegression.txt ├── RegularizedLogisticRegression.txt ├── SupportVectorMachineWithGaussianKernel.txt └── UniVariateLinearRegression.txt ├── project └── plugins │ └── build.sbt ├── sbt ├── sbt-launch-0.10.0.jar └── src ├── main └── scala │ └── org │ └── everpeace │ └── scalala │ └── sample │ ├── KMeansSample.scala │ ├── MultiVariateLinearRegressionSample.scala │ ├── RegularizedLogisticRegressionSample.scala │ ├── SupportVectorMachineWithGaussianKernel.scala │ ├── UniVariateLinearRegressionSample.scala │ └── package.scala └── test └── scala └── dummy /.gitignore: -------------------------------------------------------------------------------- 1 | # Eclipse 2 | .classpath 3 | .settings 4 | .metadata 5 | .project 6 | .hsproject 7 | .dist-scion 8 | Servers 9 | bin 10 | target 11 | 12 | # cabal 13 | dist 14 | 15 | # SpringSource Tool Suite 16 | .springBeans 17 | 18 | # IntelliJ IDEA 19 | *.iml 20 | *.ipr 21 | *.iws 22 | *.idea 23 | .idea 24 | 25 | # Simple Build Tool 26 | project/boot 27 | project/plugins/project 28 | lib_managed 29 | src_managed 30 | target 31 | 32 | #ensime 33 | .ensime 34 | .ensime~ 35 | 36 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2011 OMURA Shingo 4 | 5 | Permission is hereby granted, free of charge, to any person 6 | obtaining a copy of this software and associated documentation 7 | files (the "Software"), to deal in the Software without restriction, 8 | including without limitation the rights to use, copy, modify, merge, 9 | publish, distribute, sublicense, and/or sell copies of the Software, 10 | and to permit persons to whom the Software is furnished to do so, 11 | subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included 14 | in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 20 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 21 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR 22 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Machine Learning Algorithm Samples By [Scalala](https://github.com/scalala/Scalala). 2 | 3 | 4 | ### Supervised Learning Samples 5 | * [Uni-Variate Linear Regression](https://github.com/everpeace/ml-examples-by-scalala/blob/master/src/main/scala/org/everpeace/scalala/sample/UniVariateLinearRegressionSample.scala): performs uni-variate linear regression by the gradient descent algorithm. 6 | * [Multi-Variate Linear Regression](https://github.com/everpeace/ml-examples-by-scalala/blob/master/src/main/scala/org/everpeace/scalala/sample/MultiVariateLinearRegressionSample.scala): performs 2-variate linear regression by the gradient descent algorithm. 7 | * [Regularized Logistic Regression](https://github.com/everpeace/ml-examples-by-scalala/blob/master/src/main/scala/org/everpeace/scalala/sample/RegularizedLogisticRegressionSample.scala): learns a regularized logistic classifier by the gradient descent algorithm. 8 | * [Support Vector Machine With Gaussian Kernel](https://github.com/everpeace/ml-examples-by-scalala/blob/master/src/main/scala/org/everpeace/scalala/sample/SupportVectorMachineWithGaussianKernel.scala): learns a non-linear decision boundary using an SVM with a Gaussian kernel, trained by the simplified SMO (Sequential Minimal Optimization) algorithm. 9 | 10 | ### Unsupervised Learning Sample 11 | * [K-Means](https://github.com/everpeace/ml-examples-by-scalala/blob/master/src/main/scala/org/everpeace/scalala/sample/KMeansSample.scala): applies the K-Means algorithm to 2-dimensional data. 12 | 13 | ### How to try? 14 | 1. Clone the repository. 15 | 2. Run `./sbt update run` (see the example below). 16 | 3. Choose a sample number from the displayed options.
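For example, in a Unix-like shell (a sketch assuming the repository URL from the links above; the bundled `sbt` script simply wraps the included `sbt-launch-0.10.0.jar`):

```
git clone https://github.com/everpeace/ml-examples-by-scalala.git
cd ml-examples-by-scalala
./sbt update run    # resolve dependencies, then run
# sbt lists the sample main classes; enter the number of the sample to run.
```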
17 | 18 | ### License 19 | All sample programs are licensed under the MIT License. See LICENSE.txt for further details. 20 | 21 | ### Copyright 22 | Copyright (c) 2011 [everpeace](http://twitter.com/everpeace). 23 | 24 | ### Caution 25 | This repository is under construction. 26 | -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | name:="machine-learning-examples-by-scalala" 2 | 3 | version:="0.0.1" 4 | 5 | scalaVersion:="2.9.1" 6 | 7 | organization:="everpeace.org" 8 | 9 | libraryDependencies ++= Seq( 10 | // other dependencies here 11 | "org.scalala" %% "scalala" % "1.0.0.RC2-SNAPSHOT" 12 | ) 13 | 14 | resolvers ++= Seq( 15 | // other resolvers here 16 | "Scala Tools Snapshots" at "http://scala-tools.org/repo-snapshots/", 17 | "ScalaNLP Maven2" at "http://repo.scalanlp.org/repo" 18 | ) 19 | 20 | //compile options 21 | scalacOptions ++= Seq("-unchecked", "-deprecation") -------------------------------------------------------------------------------- /data/KMeans.txt: -------------------------------------------------------------------------------- 1 | 1.842079531126156,4.607571604482282 2 | 5.658583120618816,4.799964054441543 3 | 6.35257892020234,3.290854498754266 4 | 2.904016525281363,4.612204112815066 5 | 3.231979157207443,4.939894052826295 6 | 1.247922679700377,4.93267846009402 7 | 1.97619886162512,4.434896742381875 8 | 2.23454135329173,5.055471681344978 9 | 2.983407571660435,4.840464061253519 10 | 2.979703906925933,4.806710807986595 11 | 2.114964111296854,5.373735865287625 12 | 2.121695429851929,5.208542115552275 13 | 1.514352903108069,4.77003303264906 14 | 2.169792270988234,5.274353538341508 15 | 0.4185237324421214,4.883125218349286 16 | 2.47053694822282,4.804189440854675 17 | 4.060691324409988,4.995038620740097 18 | 3.007089337903088,4.678977577376825 19 | 0.6663234559392024,4.871879490418842 20 | 3.162186503015328,4.836583005763322 21 | 0.5115525803972185,4.910529229502589 22 | 3.134280099558564,4.961781139153404 23 | 2.049745950070817,5.624139495988095 24 | 0.665827852422697,5.243992574839312 25 | 1.017320125829354,4.844736469192718 26 | 2.178935676285175,5.29758701369049 27 | 2.859626149667426,5.260419973323453 28 | 1.308825883599691,5.301587009101879 29 | 0.9925324554451718,5.015674237577476 30 | 1.403726381719334,4.575276844394842 31 | 2.660465724208499,5.196238478566125 32 | 2.799958819293554,5.115263227051593 33 | 2.069953453750165,4.68467130341947 34 | 3.29765180919281,5.592055349989062 35 |
1.892976595802534,4.890432088944249 36 | 2.559830639620333,5.263977564175146 37 | 1.153540312065502,4.678667174337322 38 | 2.251507538328767,5.44500310210926 39 | 2.209602955078801,4.914692641452207 40 | 1.591419371792057,4.832125726643392 41 | 1.678380376029915,5.269038217480221 42 | 2.591486417187874,4.925933935454911 43 | 2.809964422336821,5.538498989714164 44 | 0.9531162720189421,5.580371075704837 45 | 1.51775275912809,5.038366376905697 46 | 3.231142476556044,5.784296646013455 47 | 2.541800105726059,4.810987375514933 48 | 3.814228654429137,4.735267964925186 49 | 1.684958290504217,4.5964355345526 50 | 2.177771733641957,4.861540188503138 51 | 1.81733279599126,5.133339067003756 52 | 1.857765534416548,4.869624135455342 53 | 3.030843007696991,5.240575821340838 54 | 2.926582948784921,5.096679228841688 55 | 3.43493543156672,5.340807411956873 56 | 3.203671162333278,4.859247586559391 57 | 0.1051180394588671,4.729163438461413 58 | 1.405979162897246,5.066368224996453 59 | 2.241850515528486,4.924461702324725 60 | 1.36678395256625,5.261610953537723 61 | 1.707254821599876,4.042314794400387 62 | 1.919095658945418,5.578484465015508 63 | 1.601567313150977,4.644530115981849 64 | 0.3796343714386488,5.261947294103527 65 | 2.02134502433816,4.412674449222839 66 | 1.120367372903417,5.208807468302654 67 | 2.269014280465731,4.618188827708471 68 | -0.2451271276617017,5.740192366387896 69 | 2.128578426246194,5.01149793096124 70 | 1.844199811025123,5.031539479749028 71 | 2.325582532272868,4.748679619050756 72 | 1.523341131057926,4.879161593256661 73 | 1.022851284640786,5.010506504958272 74 | 1.853827372072783,5.007524823118074 75 | 2.203216583558012,4.945163787321349 76 | 1.200999811913184,4.578297628829255 77 | 1.020627027388979,4.629911191127603 78 | 1.604932266103602,5.136631390741864 79 | 0.4764735513254672,5.135359772867848 80 | 0.3639172040966812,4.73332822591586 81 | 0.3131984500482181,5.546946435563959 82 | 2.286648391015867,5.007669903345754 83 | 2.154601389641329,5.462829590785801 84 | 2.052885176506151,4.779585588568983 85 | 4.888043316568204,5.506707952236728 86 | 2.403047466924176,5.081473257321243 87 | 2.568694529536674,5.206878859954676 88 | 1.829759925428527,4.596572882818212 89 | 0.5484522320181779,5.02672980169041 90 | 3.171096186656912,5.594645195347297 91 | 3.042020694060989,5.007583730318273 92 | 2.404277749844845,5.02587069662433 93 | 0.1778346644668876,5.297650317664919 94 | 2.614286784239835,5.222874141673731 95 | 2.300977982316242,4.972358439910461 96 | 3.907793172331622,5.094646759998636 97 | 2.05670542408801,5.233913255151575 98 | 1.381334970069743,5.001949624092176 99 | 1.160741778518876,4.677279268477293 100 | 1.728181994347231,5.36028437434454 101 | 3.203606205777914,0.7222149034236587 102 | 3.061929184157545,1.571921099248526 103 | 4.017149169469283,1.160706467852803 104 | 1.40260821937859,1.087265358063056 105 | 4.081649507530082,0.872003428739607 106 | 3.152730808507298,0.9815587114697814 107 | 3.451863508927066,0.427840834415254 108 | 3.853843137593207,0.7920479033506194 109 | 1.5744925512127,1.348111259491027 110 | 4.72372077755589,0.6204413584594055 111 | 2.879610840681258,0.7541374110660162 112 | 0.9679134767334232,1.161668194764475 113 | 1.531781070482273,1.100548524232475 114 | 4.138359152178729,1.247809793250773 115 | 3.161090214815601,1.294228931841527 116 | 2.951770389082617,0.8958314324641911 117 | 3.278442954622743,1.750439261520662 118 | 2.127018499311802,0.9567204249582623 119 | 3.326488848134789,1.280190663949387 120 | 2.54371489456784,0.9573271589986067 121 | 
3.233946999894362,1.082023240176862 122 | 4.431529762704597,0.5404100030365326 123 | 3.564786253895481,1.117647142657849 124 | 4.255884820986278,0.906439572236069 125 | 4.053865813388372,0.5329186210157868 126 | 3.089701760267874,1.088144482376987 127 | 2.847344593332542,0.2675925319316859 128 | 3.635860486925822,1.121601943492355 129 | 1.955388637259492,1.321568569161503 130 | 2.883840047350313,0.8045450649676619 131 | 3.484443872790626,1.135514482437953 132 | 3.497984124763063,1.100464021034776 133 | 2.455759339638348,0.7890465416425023 134 | 3.203800095269826,1.027280746355822 135 | 3.006772536867357,0.625191281541267 136 | 1.965479737608877,1.217307604928878 137 | 2.179893331974446,1.308798305086229 138 | 2.612070293192836,0.9907685574084989 139 | 3.955499115382459,0.8326929897165098 140 | 3.648464816547516,1.628496974308694 141 | 4.184500106526444,0.4535620312008207 142 | 3.787572301756839,1.4544290445227 143 | 3.300636549872138,1.281075882108406 144 | 3.02836363232283,1.356351889867326 145 | 3.184121756205688,1.414107988729598 146 | 4.169118966225332,0.2058103774770208 147 | 3.240242111019093,1.14876237276451 148 | 3.915960677768852,1.012257735490282 149 | 2.969797161873892,1.012103059929201 150 | 1.1299385570386,0.770852843682668 151 | 2.717307986328911,0.486975546180956 152 | 3.118901695897268,0.6943833555225435 153 | 2.405180200260553,1.117781226884269 154 | 2.958184290787181,1.018870956875362 155 | 1.654563085079055,1.186311747601043 156 | 2.397758068153118,1.247213873549756 157 | 2.284093050088675,0.6486546903964623 158 | 2.795887243076254,0.9952666365513957 159 | 3.411562770859122,1.159636297470659 160 | 3.50663521276111,0.7387810441941597 161 | 3.936160288603507,1.462029340688243 162 | 3.902066572515992,1.277787507048313 163 | 2.610363960633363,0.8802760188273794 164 | 4.372718610315967,1.02914091957094 165 | 3.083491363119661,1.196326444356825 166 | 2.115993501587732,0.7930365000883433 167 | 2.156534044871015,0.4035886068351918 168 | 2.144911011682092,1.135823990498392 169 | 1.849355240758471,1.022326436980281 170 | 4.159081601434126,0.6172073250362708 171 | 2.764944992381324,1.431489505711269 172 | 3.905611531758359,1.165753149843201 173 | 2.540716724478098,0.9839251586297414 174 | 4.277830681418135,1.180136796728559 175 | 3.310581669018694,1.031244609451246 176 | 2.155206613462027,0.8069656235345628 177 | 3.713636588104567,0.4581320816161256 178 | 3.540101863554706,0.864461352047468 179 | 1.605199914477712,1.109805302134234 180 | 1.751643371561828,0.6885353609048434 181 | 3.124051226487953,0.6782175665304078 182 | 2.371987852894733,1.427896071916304 183 | 2.534460192788522,1.215620812788234 184 | 3.683446503425531,1.228345375360799 185 | 3.267013396283058,0.3205667586236743 186 | 3.941591391535048,0.8257743797704139 187 | 3.264551401883844,1.38368689842555 188 | 4.304711382466017,1.107259950849141 189 | 2.684993757562959,0.3534494332134992 190 | 3.126351840441264,1.280689297523756 191 | 2.942943556919985,1.028250762556938 192 | 3.118765410908555,1.332854585759118 193 | 2.023589779094434,0.447716138160535 194 | 3.622029314554192,1.286437630092413 195 | 2.428658786114777,0.8649928452669559 196 | 2.095172961865599,1.140104911050688 197 | 5.292394521306933,0.3687329803035045 198 | 2.072917085533818,1.167638505750342 199 | 0.9462320825757167,0.2452225283820372 200 | 2.739119075886319,1.100722839009583 201 | 6.005065336398827,2.727841713587384 202 | 6.056964109173649,2.949704332520405 203 | 6.770127666721521,3.214114215383589 204 | 5.640346781001489,2.69385282204864 205 | 
5.633254027407438,2.99002338627959 206 | 6.174431565363438,3.290264880367543 207 | 7.246947941745138,2.968774242127756 208 | 5.581629063930656,3.335103746206046 209 | 5.362720504336735,3.146811922058337 210 | 4.70775773362377,2.787108694840931 211 | 7.428920981004157,3.466794896488929 212 | 6.641072482533531,3.05998737926515 213 | 6.374736515665913,2.562530591931918 214 | 7.287803244048407,2.751798846544767 215 | 6.202952309484854,2.678561789876971 216 | 5.387360412681998,2.267373457548288 217 | 5.667310300677457,2.964778673052191 218 | 6.597021551786485,3.070823760206949 219 | 7.756605593713768,3.15604464618241 220 | 6.632627452617938,3.147991826330151 221 | 5.766349587768342,3.142717074434685 222 | 5.994231542030925,2.757078584992307 223 | 6.378704072854217,2.650223208757134 224 | 5.740362333785032,3.103913060350143 225 | 4.616524417150913,2.793207150855647 226 | 5.335339987326496,3.039286944297795 227 | 5.372939124793692,2.816847755729278 228 | 5.036111615427723,2.924860873447376 229 | 5.529086767660337,3.336815764978288 230 | 6.050869424857447,2.807025944351663 231 | 5.132008998429942,2.198121948150304 232 | 5.732849448865801,2.877381321676975 233 | 6.781107319547244,3.05676866020901 234 | 6.448344486992528,3.352992249622101 235 | 6.399414819366499,2.897569484468338 236 | 5.860679247147698,2.995771287055466 237 | 6.447651830577588,3.165609454634354 238 | 5.367081109651559,3.195025516188134 239 | 5.887355652684954,3.346155661401808 240 | 3.961624654186922,2.72025046330054 241 | 6.284381932579608,3.173606431643382 242 | 4.205847888245449,2.816473683437164 243 | 5.326155813846448,3.033140472272608 244 | 7.171352036284695,3.412272700756582 245 | 7.494927502838001,2.840187540021823 246 | 7.398072414628061,3.484870305939167 247 | 5.024329841014982,2.986831785334856 248 | 5.317124783074046,2.817413560279757 249 | 5.876552365538852,3.216611091154561 250 | 6.037628331680621,2.683035115089214 251 | 5.912802727354569,2.856319376516589 252 | 6.694513582424757,2.890560826096426 253 | 6.010179778860985,2.724013381935843 254 | 6.927219684374198,3.199600255791053 255 | 6.335595219108058,3.30864291189389 256 | 6.242570707552161,2.791792687595131 257 | 5.578122938363613,3.247660162539342 258 | 6.407738625745769,2.675549509470509 259 | 6.800295256441381,3.17579577996112 260 | 7.216840328780716,2.728965745190048 261 | 6.511007399653392,2.727319065926274 262 | 4.606305339926791,3.329458003252386 263 | 7.655032263472198,2.870956284238776 264 | 5.502957590616259,2.629246344004256 265 | 6.630606985789793,3.015023010300451 266 | 3.459280064345287,2.684784447523264 267 | 8.203398153359817,2.416934952039439 268 | 4.95679427569342,2.897762969043582 269 | 5.370526670765466,2.449548134199833 270 | 5.697978664307992,2.949771319733885 271 | 6.273762709954783,2.242560360890311 272 | 5.05274526001882,2.756921629697973 273 | 6.885755837329,2.888452692573059 274 | 4.187744196259964,2.892834633977075 275 | 5.975103279293128,3.025919099846584 276 | 6.094571294802686,2.618679749914378 277 | 5.723956974354073,3.044542193790471 278 | 4.372497673493347,3.054882171401887 279 | 6.29206262170893,2.775738556384176 280 | 5.145330350868728,4.132256915057223 281 | 6.587056504854597,3.375083452766874 282 | 5.7876909544557,3.292551273226857 283 | 6.727980978712887,3.004398300225435 284 | 6.640789386230871,2.410688388590533 285 | 6.232288777398539,2.728509022331939 286 | 6.217727237731909,2.80994633217706 287 | 5.781163010356058,3.079877867015621 288 | 6.624472531850962,2.744537425706766 289 | 5.195908230361076,3.06972936535178 290 | 
5.871771805724644,3.255177304624756 291 | 5.895620994170579,2.898439774276882 292 | 5.617543198026376,2.597507103274049 293 | 5.631761031004175,3.047587472381008 294 | 5.502586590242818,3.118690754276496 295 | 6.482126282928347,2.550851397135404 296 | 7.30278708280303,3.380159788521076 297 | 6.991984340611751,2.98706728722547 298 | 4.825534095511381,2.779616639180507 299 | 6.117680550153719,2.854756552164795 300 | 0.940489440353196,5.715568023943128 301 | -------------------------------------------------------------------------------- /data/MultiVariateLinearRegression.txt: -------------------------------------------------------------------------------- 1 | 2104,3,399900 2 | 1600,3,329900 3 | 2400,3,369000 4 | 1416,2,232000 5 | 3000,4,539900 6 | 1985,4,299900 7 | 1534,3,314900 8 | 1427,3,198999 9 | 1380,3,212000 10 | 1494,3,242500 11 | 1940,4,239999 12 | 2000,3,347000 13 | 1890,3,329999 14 | 4478,5,699900 15 | 1268,3,259900 16 | 2300,4,449900 17 | 1320,2,299900 18 | 1236,3,199900 19 | 2609,4,499998 20 | 3031,4,599000 21 | 1767,3,252900 22 | 1888,2,255000 23 | 1604,3,242900 24 | 1962,4,259900 25 | 3890,3,573900 26 | 1100,3,249900 27 | 1458,3,464500 28 | 2526,3,469000 29 | 2200,3,475000 30 | 2637,3,299900 31 | 1839,2,349900 32 | 1000,1,169900 33 | 2040,4,314900 34 | 3137,3,579900 35 | 1811,4,285900 36 | 1437,3,249900 37 | 1239,3,229900 38 | 2132,4,345000 39 | 4215,4,549000 40 | 2162,4,287000 41 | 1664,2,368500 42 | 2238,3,329900 43 | 2567,4,314000 44 | 1200,3,299000 45 | 852,2,179900 46 | 1852,4,299900 47 | 1203,3,239500 48 | -------------------------------------------------------------------------------- /data/RegularizedLogisticRegression.txt: -------------------------------------------------------------------------------- 1 | 0.051267,0.69956,1 2 | -0.092742,0.68494,1 3 | -0.21371,0.69225,1 4 | -0.375,0.50219,1 5 | -0.51325,0.46564,1 6 | -0.52477,0.2098,1 7 | -0.39804,0.034357,1 8 | -0.30588,-0.19225,1 9 | 0.016705,-0.40424,1 10 | 0.13191,-0.51389,1 11 | 0.38537,-0.56506,1 12 | 0.52938,-0.5212,1 13 | 0.63882,-0.24342,1 14 | 0.73675,-0.18494,1 15 | 0.54666,0.48757,1 16 | 0.322,0.5826,1 17 | 0.16647,0.53874,1 18 | -0.046659,0.81652,1 19 | -0.17339,0.69956,1 20 | -0.47869,0.63377,1 21 | -0.60541,0.59722,1 22 | -0.62846,0.33406,1 23 | -0.59389,0.005117,1 24 | -0.42108,-0.27266,1 25 | -0.11578,-0.39693,1 26 | 0.20104,-0.60161,1 27 | 0.46601,-0.53582,1 28 | 0.67339,-0.53582,1 29 | -0.13882,0.54605,1 30 | -0.29435,0.77997,1 31 | -0.26555,0.96272,1 32 | -0.16187,0.8019,1 33 | -0.17339,0.64839,1 34 | -0.28283,0.47295,1 35 | -0.36348,0.31213,1 36 | -0.30012,0.027047,1 37 | -0.23675,-0.21418,1 38 | -0.06394,-0.18494,1 39 | 0.062788,-0.16301,1 40 | 0.22984,-0.41155,1 41 | 0.2932,-0.2288,1 42 | 0.48329,-0.18494,1 43 | 0.64459,-0.14108,1 44 | 0.46025,0.012427,1 45 | 0.6273,0.15863,1 46 | 0.57546,0.26827,1 47 | 0.72523,0.44371,1 48 | 0.22408,0.52412,1 49 | 0.44297,0.67032,1 50 | 0.322,0.69225,1 51 | 0.13767,0.57529,1 52 | -0.0063364,0.39985,1 53 | -0.092742,0.55336,1 54 | -0.20795,0.35599,1 55 | -0.20795,0.17325,1 56 | -0.43836,0.21711,1 57 | -0.21947,-0.016813,1 58 | -0.13882,-0.27266,1 59 | 0.18376,0.93348,0 60 | 0.22408,0.77997,0 61 | 0.29896,0.61915,0 62 | 0.50634,0.75804,0 63 | 0.61578,0.7288,0 64 | 0.60426,0.59722,0 65 | 0.76555,0.50219,0 66 | 0.92684,0.3633,0 67 | 0.82316,0.27558,0 68 | 0.96141,0.085526,0 69 | 0.93836,0.012427,0 70 | 0.86348,-0.082602,0 71 | 0.89804,-0.20687,0 72 | 0.85196,-0.36769,0 73 | 0.82892,-0.5212,0 74 | 0.79435,-0.55775,0 75 | 0.59274,-0.7405,0 76 | 0.51786,-0.5943,0 77 | 
0.46601,-0.41886,0 78 | 0.35081,-0.57968,0 79 | 0.28744,-0.76974,0 80 | 0.085829,-0.75512,0 81 | 0.14919,-0.57968,0 82 | -0.13306,-0.4481,0 83 | -0.40956,-0.41155,0 84 | -0.39228,-0.25804,0 85 | -0.74366,-0.25804,0 86 | -0.69758,0.041667,0 87 | -0.75518,0.2902,0 88 | -0.69758,0.68494,0 89 | -0.4038,0.70687,0 90 | -0.38076,0.91886,0 91 | -0.50749,0.90424,0 92 | -0.54781,0.70687,0 93 | 0.10311,0.77997,0 94 | 0.057028,0.91886,0 95 | -0.10426,0.99196,0 96 | -0.081221,1.1089,0 97 | 0.28744,1.087,0 98 | 0.39689,0.82383,0 99 | 0.63882,0.88962,0 100 | 0.82316,0.66301,0 101 | 0.67339,0.64108,0 102 | 1.0709,0.10015,0 103 | -0.046659,-0.57968,0 104 | -0.23675,-0.63816,0 105 | -0.15035,-0.36769,0 106 | -0.49021,-0.3019,0 107 | -0.46717,-0.13377,0 108 | -0.28859,-0.060673,0 109 | -0.61118,-0.067982,0 110 | -0.66302,-0.21418,0 111 | -0.59965,-0.41886,0 112 | -0.72638,-0.082602,0 113 | -0.83007,0.31213,0 114 | -0.72062,0.53874,0 115 | -0.59389,0.49488,0 116 | -0.48445,0.99927,0 117 | -0.0063364,0.99927,0 118 | 0.63265,-0.030612,0 119 | -------------------------------------------------------------------------------- /data/SupportVectorMachineWithGaussianKernel.txt: -------------------------------------------------------------------------------- 1 | 0.107143,0.60307,1 2 | 0.093318,0.649854,1 3 | 0.09792629999999999,0.705409,1 4 | 0.15553,0.784357,1 5 | 0.210829,0.866228,1 6 | 0.328341,0.929094,1 7 | 0.429724,0.9349420000000001,1 8 | 0.620968,0.9349420000000001,1 9 | 0.756912,0.910088,1 10 | 0.819124,0.879386,1 11 | 0.809908,0.836988,1 12 | 0.75,0.819444,1 13 | 0.773041,0.854532,1 14 | 0.770737,0.88962,1 15 | 0.7661289999999999,0.899854,1 16 | 0.779954,0.835526,1 17 | 0.75,0.791667,1 18 | 0.761521,0.765351,1 19 | 0.814516,0.832602,1 20 | 0.814516,0.885234,1 21 | 0.563364,0.921784,1 22 | 0.49424,0.899854,1 23 | 0.480415,0.863304,1 24 | 0.448157,0.819444,1 25 | 0.425115,0.857456,1 26 | 0.450461,0.899854,1 27 | 0.510369,0.9378649999999999,1 28 | 0.498848,0.945175,1 29 | 0.351382,0.9349420000000001,1 30 | 0.273041,0.901316,1 31 | 0.233871,0.861842,1 32 | 0.215438,0.858918,1 33 | 0.134793,0.861842,1 34 | 0.0748848,0.872076,1 35 | 0.0587558,0.872076,1 36 | 0.10023,0.83845,1 37 | 0.120968,0.83114,1 38 | 0.132488,0.813596,1 39 | 0.137097,0.774123,1 40 | 0.116359,0.749269,1 41 | 0.104839,0.727339,1 42 | 0.107143,0.699561,1 43 | 0.09101380000000001,0.65424,1 44 | 0.0679724,0.633772,1 45 | 0.0679724,0.623538,1 46 | 0.093318,0.585526,1 47 | 0.162442,0.579678,1 48 | 0.178571,0.59576,1 49 | 0.144009,0.642544,1 50 | 0.109447,0.645468,1 51 | 0.146313,0.703947,1 52 | 0.148618,0.741959,1 53 | 0.160138,0.777047,1 54 | 0.123272,0.791667,1 55 | 0.104839,0.8150579999999999,1 56 | 0.0817972,0.82383,1 57 | 0.0587558,0.88231,1 58 | 0.0956221,0.894006,1 59 | 0.144009,0.88962,1 60 | 0.169355,0.864766,1 61 | 0.164747,0.844298,1 62 | 0.118664,0.839912,1 63 | 0.162442,0.883772,1 64 | 0.236175,0.899854,1 65 | 0.27765,0.936404,1 66 | 0.298387,0.955409,1 67 | 0.337558,0.961257,1 68 | 0.342166,0.958333,1 69 | 0.293779,0.93348,1 70 | 0.259217,0.924708,1 71 | 0.252304,0.9422509999999999,1 72 | 0.213134,0.921784,1 73 | 0.203917,0.891082,1 74 | 0.192396,0.880848,1 75 | 0.362903,0.961257,1 76 | 0.418203,0.964181,1 77 | 0.429724,0.967105,1 78 | 0.390553,0.9422509999999999,1 79 | 0.390553,0.936404,1 80 | 0.418203,0.923246,1 81 | 0.413594,0.901316,1 82 | 0.41129,0.888158,1 83 | 0.457373,0.864766,1 84 | 0.46659,0.848684,1 85 | 0.508065,0.907164,1 86 | 0.521889,0.927632,1 87 | 0.434332,0.93348,1 88 | 0.448157,0.958333,1 89 | 0.464286,0.972953,1 90 | 
0.508065,0.956871,1 91 | 0.524194,0.9349420000000001,1 92 | 0.567972,0.927632,1 93 | 0.586406,0.955409,1 94 | 0.591014,0.958333,1 95 | 0.625576,0.952485,1 96 | 0.662442,0.949561,1 97 | 0.690092,0.936404,1 98 | 0.68318,0.923246,1 99 | 0.740783,0.91886,1 100 | 0.7661289999999999,0.914474,1 101 | 0.715438,0.892544,1 102 | 0.729263,0.88962,1 103 | 0.7453920000000001,0.875,1 104 | 0.738479,0.850146,1 105 | 0.738479,0.839912,1 106 | 0.779954,0.81652,1 107 | 0.8053,0.798977,1 108 | 0.796083,0.790205,1 109 | 0.782258,0.763889,1 110 | 0.7661289999999999,0.756579,1 111 | 0.720046,0.842836,1 112 | 0.738479,0.88962,1 113 | 0.740783,0.923246,1 114 | 0.715438,0.939327,1 115 | 0.676267,0.940789,1 116 | 0.637097,0.921784,1 117 | 0.680876,0.915936,1 118 | 0.740783,0.915936,1 119 | 0.756912,0.9378649999999999,1 120 | 0.77765,0.951023,1 121 | 0.8329490000000001,0.91886,1 122 | 0.779954,0.872076,1 123 | 0.77765,0.85307,1 124 | 0.770737,0.92617,1 125 | 0.671659,0.955409,1 126 | 0.574885,0.946637,1 127 | 0.5449310000000001,0.929094,1 128 | 0.503456,0.914474,1 129 | 0.443548,0.923246,1 130 | 0.390553,0.930556,1 131 | 0.337558,0.92617,1 132 | 0.369816,0.923246,1 133 | 0.436636,0.910088,1 134 | 0.535714,0.959795,1 135 | 0.457373,0.959795,1 136 | 0.422811,0.939327,1 137 | 0.408986,0.905702,1 138 | 0.367512,0.908626,1 139 | 0.314516,0.920322,1 140 | 0.332949,0.924708,1 141 | 0.457373,0.923246,1 142 | 0.508065,0.914474,1 143 | 0.473502,0.949561,1 144 | 0.538018,0.962719,1 145 | 0.697005,0.955409,1 146 | 0.75,0.951023,1 147 | 0.800691,0.913012,1 148 | 0.823733,0.864766,1 149 | 0.710829,0.861842,1 150 | 0.701613,0.886696,1 151 | 0.637097,0.910088,1 152 | 0.593318,0.917398,1 153 | 0.563364,0.917398,1 154 | 0.540323,0.910088,1 155 | 0.510369,0.883772,1 156 | 0.501152,0.863304,1 157 | 0.480415,0.841374,1 158 | 0.432028,0.855994,1 159 | 0.404378,0.876462,1 160 | 0.365207,0.898392,1 161 | 0.314516,0.898392,1 162 | 0.27765,0.895468,1 163 | 0.263825,0.891082,1 164 | 0.236175,0.875,1 165 | 0.22235,0.86038,1 166 | 0.1947,0.850146,1 167 | 0.171659,0.834064,1 168 | 0.176267,0.819444,1 169 | 0.18318,0.787281,1 170 | 0.160138,0.753655,1 171 | 0.111751,0.747807,1 172 | 0.0887097,0.739035,1 173 | 0.093318,0.701023,1 174 | 0.125576,0.68348,1 175 | 0.162442,0.65424,1 176 | 0.12788,0.630848,1 177 | 0.134793,0.58845,1 178 | 0.185484,0.58845,1 179 | 0.192396,0.625,1 180 | 0.132488,0.664474,1 181 | 0.10023,0.670322,1 182 | 0.0748848,0.671784,1 183 | 0.07027650000000001,0.705409,1 184 | 0.093318,0.721491,1 185 | 0.10023,0.752193,1 186 | 0.0656682,0.759503,1 187 | 0.0817972,0.774123,1 188 | 0.09792629999999999,0.774123,1 189 | 0.821429,0.89693,1 190 | 0.858295,0.84576,1 191 | 0.814516,0.820906,1 192 | 0.789171,0.798977,1 193 | 0.791475,0.749269,1 194 | 0.37212,0.851608,0 195 | 0.351382,0.863304,0 196 | 0.3053,0.854532,0 197 | 0.323733,0.841374,0 198 | 0.289171,0.834064,0 199 | 0.27765,0.829678,0 200 | 0.293779,0.810673,0 201 | 0.298387,0.801901,0 202 | 0.31682,0.841374,0 203 | 0.353687,0.834064,0 204 | 0.353687,0.82383,0 205 | 0.3053,0.804825,0 206 | 0.291475,0.784357,0 207 | 0.261521,0.800439,0 208 | 0.268433,0.800439,0 209 | 0.270737,0.81652,0 210 | 0.247696,0.806287,0 211 | 0.259217,0.781433,0 212 | 0.261521,0.778509,0 213 | 0.233871,0.762427,0 214 | 0.229263,0.725877,0 215 | 0.266129,0.706871,0 216 | 0.302995,0.696637,0 217 | 0.293779,0.679094,0 218 | 0.268433,0.66155,0 219 | 0.282258,0.652778,0 220 | 0.312212,0.644006,0 221 | 0.349078,0.626462,0 222 | 0.351382,0.605994,0 223 | 0.379032,0.58114,0 224 | 0.420507,0.553363,0 225 | 
0.379032,0.529971,0 226 | 0.346774,0.518275,0 227 | 0.337558,0.525585,0 228 | 0.346774,0.5650579999999999,0 229 | 0.360599,0.584064,0 230 | 0.319124,0.620614,0 231 | 0.328341,0.66886,0 232 | 0.335253,0.682018,0 233 | 0.307604,0.730263,0 234 | 0.3053,0.763889,0 235 | 0.319124,0.809211,0 236 | 0.335253,0.820906,0 237 | 0.360599,0.828216,0 238 | 0.388249,0.822368,0 239 | 0.404378,0.798977,0 240 | 0.413594,0.788743,0 241 | 0.374424,0.768275,0 242 | 0.365207,0.765351,0 243 | 0.367512,0.777047,0 244 | 0.369816,0.800439,0 245 | 0.346774,0.798977,0 246 | 0.332949,0.779971,0 247 | 0.332949,0.739035,0 248 | 0.358295,0.690789,0 249 | 0.353687,0.66155,0 250 | 0.367512,0.63962,0 251 | 0.395161,0.61769,0 252 | 0.385945,0.608918,0 253 | 0.43894,0.591374,0 254 | 0.422811,0.572368,0 255 | 0.450461,0.534357,0 256 | 0.464286,0.538743,0 257 | 0.471198,0.591374,0 258 | 0.461982,0.629386,0 259 | 0.436636,0.648392,0 260 | 0.406682,0.67617,0 261 | 0.374424,0.702485,0 262 | 0.358295,0.762427,0 263 | 0.367512,0.739035,0 264 | 0.406682,0.667398,0 265 | 0.422811,0.611842,0 266 | 0.485023,0.598684,0 267 | 0.480415,0.614766,0 268 | 0.508065,0.63231,0 269 | 0.514977,0.649854,0 270 | 0.526498,0.660088,0 271 | 0.558756,0.667398,0 272 | 0.574885,0.674708,0 273 | 0.593318,0.67617,0 274 | 0.641705,0.66155,0 275 | 0.678571,0.63962,0 276 | 0.703917,0.616228,0 277 | 0.726959,0.586988,0 278 | 0.738479,0.5577490000000001,0 279 | 0.779954,0.528509,0 280 | 0.789171,0.527047,0 281 | 0.784562,0.556287,0 282 | 0.784562,0.597222,0 283 | 0.75,0.642544,0 284 | 0.738479,0.66155,0 285 | 0.729263,0.673246,0 286 | 0.678571,0.693713,0 287 | 0.664747,0.696637,0 288 | 0.591014,0.706871,0 289 | 0.549539,0.6922509999999999,0 290 | 0.531106,0.674708,0 291 | 0.49424,0.65424,0 292 | 0.441244,0.652778,0 293 | 0.427419,0.66886,0 294 | 0.397465,0.698099,0 295 | 0.397465,0.702485,0 296 | 0.46659,0.67617,0 297 | 0.508065,0.682018,0 298 | 0.508065,0.6849420000000001,0 299 | 0.547235,0.705409,0 300 | 0.584101,0.709795,0 301 | 0.611751,0.714181,0 302 | 0.669355,0.711257,0 303 | 0.6347930000000001,0.68348,0 304 | 0.710829,0.6849420000000001,0 305 | 0.692396,0.664474,0 306 | 0.747696,0.655702,0 307 | 0.729263,0.63231,0 308 | 0.736175,0.607456,0 309 | 0.743088,0.594298,0 310 | 0.773041,0.582602,0 311 | 0.747696,0.553363,0 312 | 0.761521,0.543129,0 313 | 0.807604,0.537281,0 314 | 0.812212,0.546053,0 315 | 0.830645,0.576754,0 316 | 0.823733,0.585526,0 317 | 0.793779,0.625,0 318 | 0.77765,0.641082,0 319 | 0.756912,0.652778,0 320 | 0.740783,0.671784,0 321 | 0.72235,0.686404,0 322 | 0.657834,0.686404,0 323 | 0.6440090000000001,0.686404,0 324 | 0.593318,0.674708,0 325 | 0.540323,0.899854,1 326 | 0.623272,0.895468,1 327 | 0.650922,0.891082,1 328 | 0.692396,0.88231,1 329 | 0.639401,0.877924,1 330 | 0.549539,0.886696,1 331 | 0.540323,0.892544,1 332 | 0.5656679999999999,0.908626,1 333 | 0.6324880000000001,0.915936,1 334 | 0.703917,0.907164,1 335 | 0.75,0.875,1 336 | 0.8053,0.793129,1 337 | 0.837558,0.779971,1 338 | 0.8675119999999999,0.775585,1 339 | 0.904378,0.772661,1 340 | 0.952765,0.774123,1 341 | 0.975806,0.777047,1 342 | 0.968894,0.801901,1 343 | 0.927419,0.803363,1 344 | 0.881336,0.800439,1 345 | 0.851382,0.806287,1 346 | 0.807604,0.84576,1 347 | 0.800691,0.866228,1 348 | 0.860599,0.850146,1 349 | 0.908986,0.836988,1 350 | 0.920507,0.826754,1 351 | 0.9550689999999999,0.822368,1 352 | 0.9665899999999999,0.822368,1 353 | 0.968894,0.820906,1 354 | 0.897465,0.819444,1 355 | 0.851382,0.788743,1 356 | 0.851382,0.785819,1 357 | 0.8559909999999999,0.839912,1 358 | 
0.828341,0.850146,1 359 | 0.60023,0.940789,1 360 | 0.60023,0.940789,1 361 | 0.482719,0.930556,1 362 | 0.254608,0.951023,1 363 | 0.226959,0.899854,1 364 | 0.160138,0.891082,1 365 | 0.176267,0.86038,1 366 | 0.111751,0.866228,1 367 | 0.0748848,0.885234,1 368 | 0.0541475,0.883772,1 369 | 0.0449309,0.914474,1 370 | 0.111751,0.913012,1 371 | 0.104839,0.89693,1 372 | 0.153226,0.885234,1 373 | 0.199309,0.883772,1 374 | 0.0610599,0.91155,1 375 | 0.0564516,0.89693,1 376 | 0.0748848,0.879386,1 377 | 0.093318,0.858918,1 378 | 0.353687,0.714181,0 379 | 0.309908,0.774123,0 380 | 0.270737,0.744883,0 381 | 0.300691,0.698099,0 382 | 0.392857,0.673246,0 383 | 0.404378,0.65424,0 384 | 0.420507,0.638158,0 385 | 0.445853,0.616228,0 386 | 0.464286,0.604532,0 387 | 0.374424,0.563596,0 388 | 0.425115,0.528509,0 389 | 0.369816,0.509503,0 390 | 0.480415,0.642544,0 391 | 0.429724,0.66886,0 392 | 0.5541469999999999,0.679094,0 393 | 0.620968,0.705409,0 394 | 0.616359,0.739035,0 395 | 0.662442,0.722953,0 396 | 0.68318,0.705409,0 397 | 0.692396,0.651316,0 398 | 0.720046,0.641082,0 399 | 0.726959,0.667398,0 400 | 0.680876,0.695175,0 401 | 0.6555299999999999,0.711257,0 402 | 0.620968,0.722953,0 403 | 0.591014,0.718567,0 404 | 0.567972,0.6878649999999999,0 405 | 0.567972,0.66155,0 406 | 0.5656679999999999,0.648392,0 407 | 0.62788,0.591374,1 408 | 0.563364,0.59576,1 409 | 0.618664,0.600146,1 410 | 0.607143,0.622076,1 411 | 0.58871,0.620614,1 412 | 0.577189,0.594298,1 413 | 0.584101,0.576754,1 414 | 0.611751,0.56652,1 415 | 0.623272,0.5621350000000001,1 416 | 0.623272,0.5577490000000001,1 417 | 0.597926,0.554825,1 418 | 0.5541469999999999,0.547515,1 419 | 0.5449310000000001,0.535819,1 420 | 0.5449310000000001,0.532895,1 421 | 0.570276,0.528509,1 422 | 0.595622,0.527047,1 423 | 0.597926,0.527047,1 424 | 0.630184,0.529971,1 425 | 0.657834,0.529971,1 426 | 0.703917,0.527047,1 427 | 0.706221,0.516813,1 428 | 0.701613,0.510965,1 429 | 0.664747,0.535819,1 430 | 0.6440090000000001,0.563596,1 431 | 0.6440090000000001,0.575292,1 432 | 0.6440090000000001,0.58845,1 433 | 0.637097,0.607456,1 434 | 0.6347930000000001,0.616228,1 435 | 0.58871,0.619152,1 436 | 0.551843,0.607456,1 437 | 0.56106,0.594298,1 438 | 0.577189,0.541667,1 439 | 0.551843,0.522661,1 440 | 0.547235,0.510965,1 441 | 0.547235,0.508041,1 442 | 0.593318,0.516813,1 443 | 0.669355,0.522661,1 444 | 0.673963,0.524123,1 445 | 0.662442,0.56652,1 446 | 0.662442,0.56652,1 447 | 0.641705,0.535819,1 448 | 0.664747,0.516813,1 449 | 0.579493,0.5621350000000001,1 450 | 0.5449310000000001,0.570906,1 451 | 0.547235,0.578216,1 452 | 0.604839,0.59576,1 453 | 0.650922,0.59576,1 454 | 0.388249,0.645468,0 455 | 0.369816,0.693713,0 456 | 0.312212,0.706871,0 457 | 0.312212,0.728801,0 458 | 0.273041,0.712719,0 459 | 0.3053,0.740497,0 460 | 0.312212,0.753655,0 461 | 0.254608,0.756579,0 462 | 0.289171,0.741959,0 463 | 0.273041,0.717105,0 464 | 0.296083,0.6878649999999999,0 465 | 0.34447,0.665936,0 466 | 0.367512,0.651316,0 467 | 0.392857,0.652778,0 468 | 0.388249,0.608918,0 469 | 0.406682,0.591374,0 470 | 0.390553,0.563596,0 471 | 0.402074,0.541667,0 472 | 0.358295,0.543129,0 473 | 0.339862,0.611842,0 474 | 0.351382,0.689327,0 475 | 0.3053,0.733187,0 476 | 0.240783,0.749269,0 477 | 0.282258,0.765351,0 478 | 0.487327,0.667398,0 479 | 0.519585,0.677632,0 480 | 0.586406,0.686404,0 481 | 0.6347930000000001,0.712719,0 482 | 0.648618,0.728801,0 483 | 0.664747,0.730263,0 484 | 0.699309,0.701023,0 485 | 0.752304,0.633772,0 486 | 0.784562,0.551901,0 487 | 0.802995,0.538743,0 488 | 0.830645,0.532895,0 489 
| 0.609447,0.81652,0 490 | 0.611751,0.785819,0 491 | 0.574885,0.756579,0 492 | 0.5449310000000001,0.727339,0 493 | 0.558756,0.718567,0 494 | 0.58871,0.722953,0 495 | 0.595622,0.733187,0 496 | 0.602535,0.752193,0 497 | 0.570276,0.765351,0 498 | 0.563364,0.777047,0 499 | 0.577189,0.796053,0 500 | 0.60023,0.813596,0 501 | 0.618664,0.825292,0 502 | 0.646313,0.796053,0 503 | 0.637097,0.765351,0 504 | 0.648618,0.739035,0 505 | 0.650922,0.743421,0 506 | 0.660138,0.741959,0 507 | 0.660138,0.740497,0 508 | 0.669355,0.752193,0 509 | 0.6347930000000001,0.782895,0 510 | 0.602535,0.775585,0 511 | 0.604839,0.771199,0 512 | 0.616359,0.755117,0 513 | 0.657834,0.747807,0 514 | 0.680876,0.740497,0 515 | 0.697005,0.705409,0 516 | 0.708525,0.674708,0 517 | 0.736175,0.644006,0 518 | 0.164747,0.8121350000000001,1 519 | 0.150922,0.847222,1 520 | 0.141705,0.81652,1 521 | 0.134793,0.84576,1 522 | 0.12788,0.877924,1 523 | 0.08410140000000001,0.86038,1 524 | 0.0679724,0.826754,1 525 | 0.123272,0.790205,1 526 | 0.0817972,0.801901,1 527 | 0.137097,0.760965,1 528 | 0.137097,0.712719,1 529 | 0.150922,0.749269,1 530 | 0.148618,0.673246,1 531 | 0.213134,0.629386,1 532 | 0.141705,0.59576,1 533 | 0.169355,0.626462,1 534 | 0.0771889,0.623538,1 535 | 0.0564516,0.6849420000000001,1 536 | 0.0864055,0.740497,1 537 | 0.07027650000000001,0.765351,1 538 | 0.06336410000000001,0.820906,1 539 | 0.0541475,0.762427,1 540 | 0.0518433,0.695175,1 541 | 0.0817972,0.648392,1 542 | 0.153226,0.594298,1 543 | 0.208525,0.582602,1 544 | 0.729263,0.6922509999999999,0 545 | 0.8053,0.6922509999999999,0 546 | 0.828341,0.6922509999999999,0 547 | 0.8559909999999999,0.689327,0 548 | 0.927419,0.674708,0 549 | 0.975806,0.648392,0 550 | 0.980415,0.636696,0 551 | 0.89977,0.629386,0 552 | 0.87212,0.620614,0 553 | 0.849078,0.644006,0 554 | 0.814516,0.65424,0 555 | 0.784562,0.673246,0 556 | 0.775346,0.6849420000000001,0 557 | 0.775346,0.686404,0 558 | 0.7453920000000001,0.6922509999999999,0 559 | 0.809908,0.671784,0 560 | 0.858295,0.679094,0 561 | 0.862903,0.66155,0 562 | 0.922811,0.657164,0 563 | 0.941244,0.638158,0 564 | 0.989631,0.638158,0 565 | 0.987327,0.64693,0 566 | 0.918203,0.679094,0 567 | 0.842166,0.673246,0 568 | 0.809908,0.658626,0 569 | 0.726959,0.702485,0 570 | 0.690092,0.703947,0 571 | 0.690092,0.725877,0 572 | 0.579493,0.737573,0 573 | 0.473502,0.689327,0 574 | 0.434332,0.674708,0 575 | 0.25,0.6878649999999999,0 576 | 0.224654,0.722953,0 577 | 0.180876,0.693713,1 578 | 0.118664,0.572368,1 579 | 0.164747,0.546053,1 580 | 0.171659,0.529971,1 581 | 0.15553,0.491959,1 582 | 0.132488,0.5486839999999999,1 583 | 0.125576,0.587573,1 584 | 0.178571,0.575292,1 585 | 0.22235,0.542544,1 586 | 0.229263,0.567105,1 587 | 0.201613,0.622368,1 588 | 0.173963,0.620322,1 589 | 0.141705,0.536404,1 590 | 0.137097,0.509795,1 591 | 0.0771889,0.577339,1 592 | 0.0748848,0.59576,1 593 | 0.132488,0.569152,1 594 | 0.192396,0.540497,1 595 | 0.0748848,0.5486839999999999,1 596 | 0.116359,0.544591,1 597 | 0.125576,0.679678,1 598 | 0.09101380000000001,0.769737,1 599 | 0.150922,0.861842,1 600 | 0.203917,0.927339,1 601 | 0.266129,0.93348,1 602 | 0.321429,0.93962,1 603 | 0.50576,0.851754,1 604 | 0.482719,0.818421,1 605 | 0.508065,0.813158,1 606 | 0.485023,0.795614,1 607 | 0.478111,0.772807,1 608 | 0.478111,0.746491,1 609 | 0.459677,0.746491,1 610 | 0.459677,0.762281,1 611 | 0.478111,0.786842,1 612 | 0.459677,0.80614,1 613 | 0.461982,0.832456,1 614 | 0.473502,0.874561,1 615 | 0.475806,0.911404,1 616 | 0.448157,0.900877,1 617 | 0.496544,0.8570179999999999,1 618 | 0.514977,0.818421,1 
619 | 0.478111,0.772807,1 620 | 0.475806,0.730702,1 621 | 0.436636,0.753509,1 622 | 0.468894,0.764035,1 623 | 0.383641,0.485088,0 624 | 0.353687,0.471053,0 625 | 0.369816,0.451754,0 626 | 0.355991,0.435965,0 627 | 0.34447,0.42193,0 628 | 0.349078,0.411404,0 629 | 0.381336,0.428947,0 630 | 0.381336,0.455263,0 631 | 0.379032,0.492105,0 632 | 0.385945,0.514912,0 633 | 0.415899,0.535965,0 634 | 0.413594,0.516667,0 635 | 0.404378,0.500877,0 636 | 0.41129,0.474561,0 637 | 0.404378,0.45,0 638 | 0.395161,0.434211,0 639 | 0.397465,0.423684,0 640 | 0.408986,0.416667,0 641 | 0.789171,0.523684,0 642 | 0.807604,0.488596,0 643 | 0.8053,0.47807,0 644 | 0.784562,0.441228,0 645 | 0.782258,0.425439,0 646 | 0.830645,0.42193,0 647 | 0.839862,0.441228,0 648 | 0.823733,0.465789,0 649 | 0.839862,0.499123,0 650 | 0.839862,0.52193,0 651 | 0.8329490000000001,0.553509,0 652 | 0.842166,0.574561,0 653 | 0.860599,0.586842,0 654 | 0.812212,0.511404,0 655 | 0.798387,0.502632,0 656 | 0.802995,0.476316,0 657 | 0.775346,0.428947,0 658 | 0.736175,0.416667,0 659 | 0.782258,0.469298,0 660 | 0.782258,0.460526,0 661 | 0.759217,0.45,0 662 | 0.7453920000000001,0.451754,0 663 | 0.740783,0.446491,0 664 | 0.7338710000000001,0.439474,0 665 | 0.699309,0.448246,0 666 | 0.697005,0.465789,0 667 | 0.692396,0.465789,0 668 | 0.676267,0.464035,0 669 | 0.657834,0.458772,0 670 | 0.657834,0.458772,0 671 | 0.6440090000000001,0.455263,0 672 | 0.648618,0.437719,0 673 | 0.618664,0.423684,0 674 | 0.558756,0.414912,0 675 | 0.5541469999999999,0.416667,0 676 | 0.591014,0.441228,0 677 | 0.609447,0.448246,0 678 | 0.620968,0.471053,0 679 | 0.62788,0.47807,0 680 | 0.6440090000000001,0.47807,0 681 | 0.662442,0.474561,0 682 | 0.680876,0.474561,0 683 | 0.6324880000000001,0.474561,0 684 | 0.58871,0.451754,0 685 | 0.581797,0.432456,0 686 | 0.570276,0.411404,0 687 | 0.558756,0.40614,0 688 | 0.5541469999999999,0.499123,1 689 | 0.514977,0.516667,1 690 | 0.510369,0.504386,1 691 | 0.517281,0.495614,1 692 | 0.5541469999999999,0.490351,1 693 | 0.5541469999999999,0.476316,1 694 | 0.540323,0.472807,1 695 | 0.528802,0.486842,1 696 | 0.503456,0.485088,1 697 | 0.491935,0.465789,1 698 | 0.517281,0.462281,1 699 | 0.526498,0.458772,1 700 | 0.540323,0.439474,1 701 | 0.517281,0.435965,1 702 | 0.496544,0.42193,1 703 | 0.489631,0.418421,1 704 | 0.473502,0.416667,1 705 | 0.487327,0.448246,1 706 | 0.508065,0.45,1 707 | 0.514977,0.488596,1 708 | 0.450461,0.455263,1 709 | 0.475806,0.435965,1 710 | 0.473502,0.420175,1 711 | 0.457373,0.411404,1 712 | 0.450461,0.409649,1 713 | 0.487327,0.458772,1 714 | 0.5334100000000001,0.535965,1 715 | 0.538018,0.572807,1 716 | 0.528802,0.535965,1 717 | 0.512673,0.499123,1 718 | 0.185484,0.511404,1 719 | 0.208525,0.527193,1 720 | 0.190092,0.5201750000000001,1 721 | 0.187788,0.497368,1 722 | 0.190092,0.479825,1 723 | 0.197005,0.45,1 724 | 0.1947,0.427193,1 725 | 0.176267,0.407895,1 726 | 0.146313,0.402632,1 727 | 0.132488,0.441228,1 728 | 0.157834,0.464035,1 729 | 0.164747,0.464035,1 730 | 0.118664,0.488596,1 731 | 0.12788,0.509649,1 732 | 0.160138,0.467544,1 733 | 0.173963,0.432456,1 734 | 0.157834,0.425439,1 735 | 0.18318,0.420175,1 736 | 0.206221,0.479825,1 737 | 0.215438,0.516667,1 738 | 0.226959,0.532456,1 739 | 0.116359,0.525439,1 740 | 0.116359,0.485088,1 741 | 0.137097,0.455263,1 742 | 0.10023,0.444737,1 743 | 0.213134,0.483333,1 744 | 0.261521,0.535965,1 745 | 0.252304,0.553509,1 746 | 0.710829,0.981579,1 747 | 0.662442,0.969298,1 748 | 0.6947,0.986842,1 749 | 0.715438,0.958772,1 750 | 0.37212,0.988596,1 751 | 0.374424,0.967544,1 752 | 
0.415899,0.9710530000000001,1 753 | 0.226959,0.7921049999999999,0 754 | 0.247696,0.834211,0 755 | 0.273041,0.858772,0 756 | 0.291475,0.862281,0 757 | 0.22235,0.813158,0 758 | 0.224654,0.797368,0 759 | 0.203917,0.7745610000000001,0 760 | 0.185484,0.755263,0 761 | 0.206221,0.728947,0 762 | 0.245392,0.723684,0 763 | 0.22235,0.70614,0 764 | 0.226959,0.695614,0 765 | 0.259217,0.681579,0 766 | 0.224654,0.6605259999999999,0 767 | 0.203917,0.648246,0 768 | 0.199309,0.655263,0 769 | 0.233871,0.697368,0 770 | 0.256912,0.727193,0 771 | 0.254608,0.665789,0 772 | 0.266129,0.641228,0 773 | 0.243088,0.635965,0 774 | 0.233871,0.618421,0 775 | 0.240783,0.613158,0 776 | 0.273041,0.625439,0 777 | 0.289171,0.625439,0 778 | 0.302995,0.614912,0 779 | 0.319124,0.600877,0 780 | 0.337558,0.5850880000000001,0 781 | 0.362903,0.548246,0 782 | 0.374424,0.5289469999999999,0 783 | 0.604839,0.844737,0 784 | 0.570276,0.816667,0 785 | 0.563364,0.804386,0 786 | 0.623272,0.814912,0 787 | 0.664747,0.804386,0 788 | 0.664747,0.7833329999999999,0 789 | 0.687788,0.748246,0 790 | 0.717742,0.7166670000000001,0 791 | 0.738479,0.702632,0 792 | 0.740783,0.735965,0 793 | 0.710829,0.737719,0 794 | 0.685484,0.7657890000000001,0 795 | 0.68318,0.776316,0 796 | 0.687788,0.802632,0 797 | 0.687788,0.820175,0 798 | 0.669355,0.837719,0 799 | 0.6555299999999999,0.844737,0 800 | 0.639401,0.818421,0 801 | 0.639401,0.8394740000000001,0 802 | 0.584101,0.8394740000000001,0 803 | 0.563364,0.797368,0 804 | 0.547235,0.772807,0 805 | 0.567972,0.842982,0 806 | 0.581797,0.869298,0 807 | 0.595622,0.871053,0 808 | 0.625576,0.869298,0 809 | 0.648618,0.8570179999999999,0 810 | 0.637097,0.8394740000000001,0 811 | 0.641705,0.804386,0 812 | 0.6670509999999999,0.8570179999999999,0 813 | 0.706221,0.82193,0 814 | 0.961982,0.579825,1 815 | 0.927419,0.592105,1 816 | 0.927419,0.572807,1 817 | 0.948157,0.569298,1 818 | 0.961982,0.55,1 819 | 0.957373,0.557018,1 820 | 0.915899,0.557018,1 821 | 0.915899,0.557018,1 822 | 0.922811,0.5289469999999999,1 823 | 0.925115,0.514912,1 824 | 0.936636,0.499123,1 825 | 0.968894,0.490351,1 826 | 0.998848,0.514912,1 827 | 0.959677,0.542982,1 828 | 0.9550689999999999,0.5289469999999999,1 829 | 0.941244,0.500877,1 830 | 0.918203,0.486842,1 831 | 0.91129,0.481579,1 832 | 0.913594,0.465789,1 833 | 0.906682,0.442982,1 834 | 0.879032,0.42193,1 835 | 0.874424,0.407895,1 836 | 0.927419,0.407895,1 837 | 0.948157,0.411404,1 838 | 0.961982,0.413158,1 839 | 0.9665899999999999,0.425439,1 840 | 0.9665899999999999,0.434211,1 841 | 0.948157,0.434211,1 842 | 0.9343320000000001,0.435965,1 843 | 0.948157,0.469298,1 844 | 0.964286,0.474561,1 845 | 0.989631,0.453509,1 846 | 0.987327,0.435965,1 847 | 0.9665899999999999,0.516667,1 848 | 0.959677,0.557018,1 849 | 0.904378,0.560526,1 850 | 0.915899,0.525439,1 851 | 0.922811,0.49386,1 852 | 0.91129,0.453509,1 853 | 0.890553,0.444737,1 854 | 0.874424,0.507895,1 855 | 0.897465,0.527193,1 856 | 0.913594,0.5377189999999999,1 857 | 0.968894,0.565789,1 858 | 0.985023,0.579825,1 859 | 0.99424,0.516667,1 860 | 0.964286,0.472807,1 861 | 0.975806,0.439474,1 862 | 0.989631,0.425439,1 863 | 0.996544,0.414912,1 864 | -------------------------------------------------------------------------------- /data/UniVariateLinearRegression.txt: -------------------------------------------------------------------------------- 1 | 6.1101,17.592 2 | 5.5277,9.1302 3 | 8.5186,13.662 4 | 7.0032,11.854 5 | 5.8598,6.8233 6 | 8.3829,11.886 7 | 7.4764,4.3483 8 | 8.5781,12 9 | 6.4862,6.5987 10 | 5.0546,3.8166 11 | 5.7107,3.2522 12 | 14.164,15.505 
13 | 5.734,3.1551 14 | 8.4084,7.2258 15 | 5.6407,0.71618 16 | 5.3794,3.5129 17 | 6.3654,5.3048 18 | 5.1301,0.56077 19 | 6.4296,3.6518 20 | 7.0708,5.3893 21 | 6.1891,3.1386 22 | 20.27,21.767 23 | 5.4901,4.263 24 | 6.3261,5.1875 25 | 5.5649,3.0825 26 | 18.945,22.638 27 | 12.828,13.501 28 | 10.957,7.0467 29 | 13.176,14.692 30 | 22.203,24.147 31 | 5.2524,-1.22 32 | 6.5894,5.9966 33 | 9.2482,12.134 34 | 5.8918,1.8495 35 | 8.2111,6.5426 36 | 7.9334,4.5623 37 | 8.0959,4.1164 38 | 5.6063,3.3928 39 | 12.836,10.117 40 | 6.3534,5.4974 41 | 5.4069,0.55657 42 | 6.8825,3.9115 43 | 11.708,5.3854 44 | 5.7737,2.4406 45 | 7.8247,6.7318 46 | 7.0931,1.0463 47 | 5.0702,5.1337 48 | 5.8014,1.844 49 | 11.7,8.0043 50 | 5.5416,1.0179 51 | 7.5402,6.7504 52 | 5.3077,1.8396 53 | 7.4239,4.2885 54 | 7.6031,4.9981 55 | 6.3328,1.4233 56 | 6.3589,-1.4211 57 | 6.2742,2.4756 58 | 5.6397,4.6042 59 | 9.3102,3.9624 60 | 9.4536,5.4141 61 | 8.8254,5.1694 62 | 5.1793,-0.74279 63 | 21.279,17.929 64 | 14.908,12.054 65 | 18.959,17.054 66 | 7.2182,4.8852 67 | 8.2951,5.7442 68 | 10.236,7.7754 69 | 5.4994,1.0173 70 | 20.341,20.992 71 | 10.136,6.6799 72 | 7.3345,4.0259 73 | 6.0062,1.2784 74 | 7.2259,3.3411 75 | 5.0269,-2.6807 76 | 6.5479,0.29678 77 | 7.5386,3.8845 78 | 5.0365,5.7014 79 | 10.274,6.7526 80 | 5.1077,2.0576 81 | 5.7292,0.47953 82 | 5.1884,0.20421 83 | 6.3557,0.67861 84 | 9.7687,7.5435 85 | 6.5159,5.3436 86 | 8.5172,4.2415 87 | 9.1802,6.7981 88 | 6.002,0.92695 89 | 5.5204,0.152 90 | 5.0594,2.8214 91 | 5.7077,1.8451 92 | 7.6366,4.2959 93 | 5.8707,7.2029 94 | 5.3054,1.9869 95 | 8.2934,0.14454 96 | 13.394,9.0551 97 | 5.4369,0.61705 98 | -------------------------------------------------------------------------------- /project/plugins/build.sbt: -------------------------------------------------------------------------------- 1 | resolvers += "sbt-idea-repo" at "http://mpeltonen.github.com/maven" 2 | 3 | libraryDependencies += "com.github.mpeltonen" %% "sbt-idea" % "0.10.0" 4 | -------------------------------------------------------------------------------- /sbt: -------------------------------------------------------------------------------- 1 | java -Dfile.encoding=UTF8 -Xmx1536M -Xss1M -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=256m -jar `dirname $0`/sbt-launch-0.10.0.jar "$@" 2 | -------------------------------------------------------------------------------- /sbt-launch-0.10.0.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/everpeace/ml-examples-by-scalala/5784880ec1d6c41e8044f599138772dfe3b9e438/sbt-launch-0.10.0.jar -------------------------------------------------------------------------------- /src/main/scala/org/everpeace/scalala/sample/KMeansSample.scala: -------------------------------------------------------------------------------- 1 | package org.everpeace.scalala.sample 2 | 3 | import scala.io.Source.fromFile 4 | import scalala.scalar._ 5 | import scalala.tensor.:: 6 | import scalala.tensor.mutable._ 7 | import scalala.tensor.dense._ 8 | import scalala.tensor.sparse._ 9 | import scalala.library.Library._ 10 | import scalala.library.LinearAlgebra._ 11 | import scalala.library.Statistics._ 12 | import scalala.library.Plotting._ 13 | import scalala.operators.Implicits._ 14 | import java.awt.{Paint, Color} 15 | 16 | 17 | /** 18 | * K-Means Sample By Scalala. 
19 | * 20 | * Author: Shingo Omura 21 | */ 22 | 23 | object KMeansSample { 24 | 25 | def main(args: Array[String]): Unit = run 26 | 27 | def run: Unit = { 28 | 29 | // loading sample data 30 | val reg = "(-?[0-9]*\\.[0-9]+)\\,(-?[0-9]*\\.[0-9]+)*".r 31 | val data: Matrix[Double] = DenseMatrix(fromFile("data/KMeans.txt").getLines().toList.flatMap(_ match { 32 | case reg(x1, x2) => Seq((x1.toDouble, x2.toDouble)) 33 | case _ => Seq.empty 34 | }): _*) 35 | 36 | val init_centroids = DenseMatrix((3d, 3d), (6d, 2d), (8d, 5d)) 37 | val max_iters = 10 38 | val kMeansResult = runKMeans(data, init_centroids, max_iters) 39 | 40 | println("\n\nLEARNED CENTROIDS:\n" + kMeansResult.last._2) 41 | println("\n\n") 42 | 43 | // plot data and KMeans result. 44 | for (i <- 0 until kMeansResult.size) { 45 | val idx = kMeansResult(i)._1 46 | val centroids = kMeansResult(i)._2 47 | clf 48 | scatter(centroids(::, 0), centroids(::, 1), circleSize(0.4)(centroids.numRows), {case i => clusterColor(i+1)}:Int~>Paint) 49 | xlabel("x1") 50 | ylabel("x2") 51 | title("K-Means %d-th iteration result.\n large circles indicate centroids.".format(i+1)) 52 | plot.hold = true 53 | scatter(data(::, 0), data(::, 1), circleSize(0.1)(data.numRows), idx2color(idx)) 54 | plot.hold = false 55 | if(i != kMeansResult.size -1){ 56 | print("paused... to display %d-th iteration result, press enter.".format(i+2)) 57 | readLine() 58 | } 59 | } 60 | title("K-Means result after %d iterations.\n large circles indicate centroids.".format(kMeansResult.size)) 61 | 62 | println("\n\nTo finish this program, close the K-Means result window.") 63 | } 64 | 65 | // compute each centroid. 66 | // X's row is each data point. 67 | // idx(.) is a cluster label in 1..K 68 | // returns a matrix C s.t. C(i,::)(i=0..K-1) is the center of cluster (i+1). 69 | def computeCentroids(X: Matrix[Double], idx: Vector[Int], K: Int): Matrix[Double] = { 70 | val centroids = DenseMatrix.zeros[Double](K, X.numCols) 71 | for (k <- 0 until K) { 72 | val cluster_k = X(idx.findAll(_ == (k + 1)).toSeq, ::) 73 | centroids(k, ::) := (sum(cluster_k, Axis.Horizontal) / cluster_k.numRows) 74 | } 75 | centroids 76 | } 77 | 78 | // find closest centroids 79 | // X's row is each data point. 80 | // each row of centroids is the center of a cluster 81 | // returns a vector idx s.t. idx(i) is a label of cluster(1..K) of X(i,::) 82 | def findClosestCentroids(X: Matrix[Double], centroids: Matrix[Double]): Vector[Int] = { 83 | val K = centroids.numRows 84 | val idx = DenseVector.zeros[Int](X.numRows) 85 | for (i <- 0 until X.numRows) { 86 | val distances = Vector.zeros[Double](K) 87 | for (j <- 0 until K) { 88 | val diff: Vector[Double] = X(i, ::) - centroids(j, ::) 89 | distances(j) = diff.dot(diff) 90 | } 91 | idx(i) = (distances.argmin + 1) 92 | } 93 | idx 94 | } 95 | 96 | // run K-means iteratively. 97 | // returns history of index vectors and centroids.
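// A minimal usage sketch (mirroring the call in run above):
//   val history = runKMeans(data, DenseMatrix((3d, 3d), (6d, 2d), (8d, 5d)), 10)
//   val (finalIdx, finalCentroids) = history.last // assignments and centroids after the last iteration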
98 | def runKMeans(X: Matrix[Double], init_centroids: Matrix[Double], max_iters: Int): Seq[(Vector[Int], Matrix[Double])] = { 99 | val K = init_centroids.numRows 100 | var centroids_hist = Seq[Matrix[Double]]() 101 | var idx_hist = Seq[Vector[Int]]() 102 | println("=== start K-Means loop ===") 103 | var centroids = init_centroids 104 | for (i <- 1 to max_iters) { 105 | println("%d/%d : ".format(i, max_iters)) 106 | val idx = findClosestCentroids(X, centroids) 107 | println("idx: " + idx.asRow) 108 | centroids = computeCentroids(X, idx, K) 109 | println("centroids:\n" + centroids + "\n") 110 | centroids_hist = centroids +: centroids_hist 111 | idx_hist = idx +: idx_hist 112 | } 113 | println("=== finish K-Means loop ===") 114 | idx_hist.reverse.zip(centroids_hist.reverse) 115 | } 116 | 117 | val clusterColor: Int => Paint = _ match { 118 | case 1 => Color.YELLOW 119 | case 2 => Color.RED 120 | case 3 => Color.BLUE 121 | case _ => Color.BLACK 122 | } 123 | val idx2color: Vector[Int] => (Int ~> Paint) = v => { case i => clusterColor(v(i)) } 124 | } -------------------------------------------------------------------------------- /src/main/scala/org/everpeace/scalala/sample/MultiVariateLinearRegressionSample.scala: -------------------------------------------------------------------------------- 1 | package org.everpeace.scalala.sample 2 | 3 | import scala.io.Source.fromFile 4 | import scalala.scalar._ 5 | import scalala.tensor.:: 6 | import scalala.tensor.mutable._ 7 | import scalala.tensor.dense._ 8 | import scalala.tensor.sparse._ 9 | import scalala.library.Library._ 10 | import scalala.library.LinearAlgebra._ 11 | import scalala.library.Statistics._ 12 | import scalala.library.Plotting._ 13 | import scalala.operators.Implicits._ 14 | 15 | 16 | /** 17 | * Multi-Variate Linear Regression Sample By Scalala. 18 | * 19 | * Author: Shingo Omura 20 | */ 21 | 22 | object MultiVariateLinearRegressionSample { 23 | 24 | def main(args: Array[String]): Unit = run 25 | 26 | def run: Unit = { 27 | 28 | // loading sample data 29 | val reg = "([0-9]+)\\,([0-9]+)\\,([0-9]+)*".r 30 | val data = DenseMatrix(fromFile("data/MultiVariateLinearRegression.txt").getLines().toList.flatMap(_ match { 31 | case reg(x1, x2, y) => Seq((x1.toDouble, x2.toDouble, y.toDouble)) 32 | case _ => Seq.empty 33 | }): _*) 34 | println("Data Loaded:\n Area\t#BedRooms\tPrice\n" + data) 35 | 36 | // normalize features 37 | var X = data(::, 0 to 1) 38 | val y = data(::, 2) 39 | val norm = normalizeFeatures(X) 40 | print("\n\naverage(Area, #BedRooms) =" + norm._2) 41 | print("Std. Dev.(Area, #BedRooms) =" + norm._3) 42 | 43 | // adding bias term ( X => [1 ; normalized_x1 ; normalized_x2] ) 44 | X = DenseMatrix.horzcat(DenseMatrix.ones[Double](X.numRows, 1), norm._1) 45 | print("Normalized Data (added bias term):\n" + X) 46 | 47 | // learning parameters 48 | val alpha = 0.1d 49 | val num_iters = 100 50 | var initTheta = Vector.zeros[Double](3).asCol 51 | 52 | // learning 53 | val (learnedTheta, histOfCost) = gradientDescent(initTheta, computeCostAndGrad(X, y), alpha, num_iters) 54 | 55 | // display learned result 56 | plot((1 to num_iters).toArray, histOfCost) 57 | xlabel("number of iterations") 58 | ylabel("cost") 59 | title("History of Cost") 60 | print("\nLearned Parameters(Bias,Area,#BedRooms):\t" + learnedTheta.asRow) 61 | println("Learned Cost:\t" + histOfCost(histOfCost.length - 1)) 62 | println("\n\nTo finish this program, close the cost's history window.") 63 | } 64 | 65 |
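// Note: gradientDescent used above is defined in package.scala (listed in the tree, not shown here).
// A minimal sketch of the contract these samples assume: starting from the initial theta, repeat
//   val (cost, grad) = costAndGrad(theta); theta = theta - alpha * grad
// for num_iters iterations, and return the final theta together with the per-iteration cost history.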
66 | // calculating cost and gradient. (cost = (X*theta - y)'(X*theta - y) / 2m) 67 | // X: feature row vector is stored in each row 68 | // y: column vector of targets corresponding to the feature rows. 69 | // theta: parameter column vector 70 | def computeCostAndGrad(X: Matrix[Double], y: VectorCol[Double])(theta: VectorCol[Double]): (Double, VectorCol[Double]) = { 71 | val diff = X * theta - y 72 | val m = y.length 73 | val cost = (diff.t * diff) / (2 * m) 74 | val grad = ((diff.t * X) / m).t // grad = X'(X*theta - y) / m 75 | (cost, grad) 76 | } 77 | 78 | // normalize the features to average = 0, standard deviation = 1.0 79 | // X: one sample per row; each column is a feature. 80 | def normalizeFeatures(X: Matrix[Double]) = { 81 | // calculate μ (average) 82 | val mu = mean(X, Axis.Vertical) 83 | // calculate σ (standard deviation) 84 | val sigma = DenseVector.zeros[Double](X.numCols).asRow 85 | for (i <- 0 until X.numCols) sigma(i) = X(::, i).stddev 86 | 87 | // for each feature update x to (x-μ)/σ 88 | for (i <- 0 until X.numRows) 89 | X(i, ::) := (X(i, ::) :- mu) :/ sigma 90 | (X, mu, sigma) 91 | } 92 | } -------------------------------------------------------------------------------- /src/main/scala/org/everpeace/scalala/sample/RegularizedLogisticRegressionSample.scala: -------------------------------------------------------------------------------- 1 | package org.everpeace.scalala.sample 2 | 3 | import scala.io.Source.fromFile 4 | import scalala.scalar._ 5 | import scalala.tensor.:: 6 | import scalala.tensor.mutable._ 7 | import scalala.tensor.dense._ 8 | import scalala.tensor.sparse._ 9 | import scalala.library.Library._ 10 | import scalala.library.LinearAlgebra._ 11 | import scalala.library.Statistics._ 12 | import scalala.library.Plotting._ 13 | import scalala.operators.Implicits._ 14 | import scala.PartialFunction 15 | import java.awt.{Color, Paint} 16 | 17 | 18 | /** 19 | * Regularized Logistic Regression Sample By Scalala. 20 | * 21 | * Author: Shingo Omura 22 | */ 23 | 24 | object RegularizedLogisticRegressionSample { 25 | 26 | def main(args: Array[String]): Unit = run 27 | 28 | def run: Unit = { 29 | // loading sample data 30 | val reg = "(-?[0-9]*\\.[0-9]+)\\,(-?[0-9]*\\.[0-9]+)\\,([01])*".r 31 | val data: Matrix[Double] = DenseMatrix(fromFile("data/RegularizedLogisticRegression.txt").getLines().toList.flatMap(_ match { 32 | case reg(x1, x2, y) => Seq((x1.toDouble, x2.toDouble, y.toDouble)) 33 | case _ => Seq.empty 34 | }): _*) 35 | println("Data Loaded:\nTest1Score\tTest2Score\tResult(1=accepted/0=rejected)\n" + data) 36 | 37 | // Scalala cannot construct a DenseMatrix from column vectors, only from row vectors. 38 | var X = mapFeatures(data(::, 0), data(::, 1)) 39 | var y = data(::, 2) 40 | 41 | // parameters to learn. 42 | val init_theta = DenseVector.zeros[Double](X.numCols).asCol; 43 | // regularization parameter. 44 | val lambda = 1d; 45 | // gradient descent parameters 46 | val alpha = 5d; 47 | val num_iters = 500; 48 | val (learnedTheta, costHistory) 49 | = gradientDescent(init_theta, 50 | costFunctionAndGrad(X, y, lambda), 51 | alpha, num_iters) 52 | val accr = accuracy(y, predict(learnedTheta)(data(::,0 to 1))) 53 | println("\nTraining Accuracy:%2.2f percent\n\n".format(accr * 100)) 54 |
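// Dimensions, for reference: mapFeatures below emits one term x1^(i-j) * x2^j for every
// 0 <= j <= i <= 6, i.e. 1 + 2 + ... + 7 = 28 features (the first one, x1^0 * x2^0 = 1,
// serves as the bias term), so learnedTheta above has 28 components.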
77 |
78 |
79 |   // compute the sigmoid function value for each element of the column vector.
80 |   // Scalala cannot exp(Vector) (it doesn't define CanExp), so map is used instead.
81 |   def sigmoid(v: VectorCol[Double]): VectorCol[Double] = 1 :/ (1 :+ (-v).map(exp(_)))
82 |
83 |   // compute cost and grad for regularized logistic regression.
84 |   def costFunctionAndGrad(X: Matrix[Double], y: VectorCol[Double], lambda: Double)(theta: VectorCol[Double]): (Double, VectorCol[Double]) = {
85 |     assert(X.numRows == y.length)
86 |     assert(X.numCols == theta.length)
87 |
88 |     val h = sigmoid(X * theta)
89 |     val m = y.length
90 |     // calculate the penalty, excluding the first theta value (bias term)
91 |     val _theta = DenseVector(theta.values.toSeq: _*).asCol
92 |     _theta(0) = 0
93 |     val p = lambda * ((_theta.t * _theta) / (2 * m))
94 |
95 |     // cost = ((-y)' * log(h) - (1 - y)' * log(1 - h)) / m + p
96 |     val cost = (((-y).t * h.map(log(_))) - ((1 :- y).t * (1 :- h).map(log(_)))) / m + p
97 |
98 |     // calculate grads
99 |     // grad = (X' * (h - y) + lambda * _theta) / m
100 |    val grad = ((X.t * (h :- y)) :+ (lambda :* _theta)) / m
101 |
102 |    (cost, grad)
103 |  }
104 |
105 |  // predict whether each sample is accepted (1.0) or rejected (0.0).
106 |  def predict(theta: Vector[Double])(X: Matrix[Double]): Vector[Double] = {
107 |    val mapped = mapFeatures(X(::, 0), X(::, 1))
108 |    sigmoid(mapped * theta.asCol).map(p => if (p >= 0.5) 1.0d else 0.0d)
109 |  }
110 |
111 |  // plot history of cost
112 |  def plotLearningHistory(cost_hist: VectorCol[Double]): Unit = {
113 |    figure(1)
114 |    plot((1 to cost_hist.length).toArray, cost_hist)
115 |    xlabel("number of iterations")
116 |    ylabel("cost")
117 |    title("learning cost history")
118 |  }
119 |
120 |  // plot samples
121 |  def plotSampleData(data: Matrix[Double]): Unit = {
122 |    val posIdx = data(::, 2).findAll(_ == 1.0).toSeq
123 |    val negIdx = data(::, 2).findAll(_ == 0.0).toSeq
124 |    val x1 = data(::, 0)
125 |    val x2 = data(::, 1)
126 |
127 |    val posx1 = x1(posIdx: _*)
128 |    val posx2 = x2(posIdx: _*)
129 |    val acceptedTips = (i: Int) => "ACCEPTED(" + posx1(i).toString + "," + posx2(i).toString + ")"
130 |    scatter(posx1, posx2, circleSize(0.03)(posIdx.length), {
131 |      case _ => Color.BLUE
132 |    }: Int ~> Paint,
133 |    tips = {
134 |      case i: Int => acceptedTips(i)
135 |    }: Int ~> String, name = "accepted")
136 |
137 |    val negx1 = x1(negIdx: _*)
138 |    val negx2 = x2(negIdx: _*)
139 |    val rejectedTip = (i: Int) => "REJECTED(" + negx1(i).toString + "," + negx2(i).toString + ")"
140 |    scatter(negx1, negx2, circleSize(0.03)(negIdx.length), {
141 |      case _ => Color.RED
142 |    }: Int ~> Paint,
143 |    tips = {
144 |      case i: Int => rejectedTip(i)
145 |    }: Int ~> String, name = "rejected")
146 |
147 |    xlabel("Test1 score")
148 |    ylabel("Test2 score")
149 |  }
150 |
151 |  // plot decision boundary.
152 |  // Scalala doesn't have contour, so this searches for the boundary manually.
153 |  def plotDecisionBoundary(data: Matrix[Double], theta: VectorCol[Double]): Unit = {
154 |    plot.hold = true
155 |    plotSampleData(data)
156 |
157 |    // compute decision boundaries
158 |    val x1range = linspace(-1, 1.5, 100)
159 |    val x2range = linspace(-1, 1.5, 100)
160 |    val (bx, by) = computeDecisionBoundary(x1range, x2range, predict(theta))
161 |
162 |    // plot boundary
163 |    scatter(bx, by, circleSize(0.03)(bx.length), {
164 |      case _ => Color.YELLOW
165 |    }: Int ~> Paint)
166 |    title("Learned Decision Boundary\n (blue: accepted, red: rejected, yellow: boundary)")
167 |    plot.hold = false
168 |  }
169 | }
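For intuition on the cost above: sigmoid squashes X*theta into (0, 1), and each sample contributes -y*log(h) - (1-y)*log(1-h) to the unregularized cost. A plain-Scala sketch with made-up numbers (LogisticCostSketch is an invented name, not part of the project):

    object LogisticCostSketch extends App {
      def sigmoid(z: Double) = 1.0 / (1.0 + math.exp(-z))
      println(sigmoid(0.0)) // 0.5, i.e. exactly on the decision boundary
      // a positive sample (y = 1) scored at z = 2
      val (y, h) = (1.0, sigmoid(2.0))
      println(-y * math.log(h) - (1 - y) * math.log(1 - h)) // ~0.127: confident and correct, so small cost
    }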
--------------------------------------------------------------------------------
/src/main/scala/org/everpeace/scalala/sample/SupportVectorMachineWithGaussianKernel.scala:
--------------------------------------------------------------------------------
1 | package org.everpeace.scalala.sample
2 |
3 | import scala.io.Source.fromFile
4 | import scalala.scalar._
5 | import scalala.tensor.::
6 | import scalala.tensor.mutable._
7 | import scalala.tensor.dense._
8 | import scalala.tensor.sparse._
9 | import scalala.library.Library._
10 | import scalala.library.LinearAlgebra._
11 | import scalala.library.Statistics._
12 | import scalala.library.Plotting._
13 | import scalala.operators.Implicits._
14 | import java.awt.{Color, Paint}
15 |
16 |
17 | /**
18 |  * Support Vector Machine With Gaussian Kernel Sample By Scalala.
19 |  *
20 |  * Author: Shingo Omura
21 |  */
22 |
23 | object SupportVectorMachineWithGaussianKernel {
24 |
25 |   def main(args: Array[String]): Unit = run
26 |
27 |   def run: Unit = {
28 |     // loading sample data
29 |     val reg = "(-?[0-9]*\\.[0-9]+)\\,(-?[0-9]*\\.[0-9]+)\\,([01])*".r
30 |     val data: Matrix[Double] = DenseMatrix(fromFile("data/SupportVectorMachineWithGaussianKernel.txt").getLines().toList.flatMap(_ match {
31 |       case reg(x1, x2, y) => Seq((x1.toDouble, x2.toDouble, y.toDouble))
32 |       case _ => Seq.empty
33 |     }): _*)
34 |     println("Data Loaded:\nX-value\tY-value\tResult(1=accepted/0=rejected)\n" + data)
35 |
36 |     // plot sample
37 |     val X = data(::, 0 to 1)
38 |     val y = data(::, 2)
39 |     scatter(X(::, 0), X(::, 1), circleSize(0.01)(X.numRows), y2Color(y))
40 |     xlabel("X-value")
41 |     ylabel("Y-value")
42 |     title("Input data")
43 |
44 |     // learning parameters
45 |     // C: regularization parameter
46 |     // sigma: Gaussian kernel parameter
47 |     val C = 1d
48 |     val sigma = 0.1d
49 |
50 |     // learn svm
51 |     println("\n\npaused... press enter to start learning SVM.")
52 |     readLine
53 |     val model = trainSVM(X, y, C, gaussianKernel(sigma))
54 |     val accr = accuracy(y, predict(model)(X))
55 |     println("\nTraining Accuracy:%2.2f percent\n\n".format(accr * 100))
56 |
57 |     // plotting decision boundary
58 |     println("paused... press enter to plot learning result.")
59 |     readLine
60 |     plotDecisionBoundary(X, y, model)
61 |
62 |     println("\n\nTo finish this program, close the result window.")
63 |   }
64 |
65 |   // Gaussian kernel: exp(-||x1 - x2||^2 / (2 * sigma^2))
66 |   def gaussianKernel(sigma: Double)(x1: Vector[Double], x2: Vector[Double]): Double
67 |   = {
68 |     val _x1 = x1.asCol
69 |     val _x2 = x2.asCol
70 |     exp(-1 * ((_x1 - _x2).t * (_x1 - _x2)) / (2 * sigma * sigma))
71 |   }
72 |
73 |   // SVM Model
74 |   case class Model(X: Matrix[Double], y: Vector[Double], kernelF: (Vector[Double], Vector[Double]) => Double,
75 |                    b: Double, alphas: Vector[Double], w: Vector[Double])
76 |
77 |   // predict by SVM Model
78 |   def predict(model: Model)(X: Matrix[Double]): Vector[Double] = {
79 |     val pred = Vector.zeros[Double](X.numRows)
80 |     val p = Vector.zeros[Double](X.numRows)
81 |     for (i <- 0 until X.numRows) {
82 |       var prediction = 0d
83 |       for (j <- 0 until model.X.numRows) {
84 |         prediction = prediction + model.alphas(j) * model.y(j) * model.kernelF(X(i, ::), model.X(j, ::))
85 |       }
86 |       p(i) = prediction + model.b
87 |     }
88 |     pred(p.findAll(_ >= 0)) := 1.0d
89 |     pred(p.findAll(_ < 0)) := 0.0d
90 |     pred
91 |   }
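A numeric check of gaussianKernel: with sigma = 0.1 (the value used in run above), two points at distance 0.1 give exp(-0.5), and identical points give exactly 1. A plain-Scala sketch with made-up points (KernelSketch is an invented name):

    object KernelSketch extends App {
      def gaussianKernel(sigma: Double)(a: Array[Double], b: Array[Double]) = {
        val sqDist = a.zip(b).map { case (p, q) => (p - q) * (p - q) }.sum
        math.exp(-sqDist / (2 * sigma * sigma))
      }
      val k = gaussianKernel(0.1) _
      println(k(Array(0.0, 0.0), Array(0.1, 0.0))) // exp(-0.01 / 0.02) = exp(-0.5) ~ 0.6065
      println(k(Array(0.3, 0.7), Array(0.3, 0.7))) // identical points -> 1.0
    }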
92 |
93 |   // train SVM.
94 |   // This is a simplified version of the SMO algorithm for training SVMs.
95 |   def trainSVM(X: Matrix[Double], Y: VectorCol[Double], C: Double,
96 |                kernel: (Vector[Double], Vector[Double]) => Double,
97 |                tol: Double = 1e-3, max_passes: Int = 5): Model = {
98 |     val m = X.numRows
99 |     val n = X.numCols
100 |    val Y2 = Vector.vertcat(Y)
101 |    Y2(Y findAll (_ == 0d)) := -1d // remap 0 to -1
102 |    val alphas = Vector.zeros[Double](m)
103 |    var b = 0.0d
104 |    val E = Vector.zeros[Double](m)
105 |    var passes = 0
106 |    var eta = 0.0d
107 |    var L = 0.0d
108 |    var H = 0.0d
109 |
110 |    // generate the kernel matrix
111 |    val K: Matrix[Double] = DenseMatrix.zeros[Double](m, m)
112 |    for (i <- 0 until m; j <- i until m) {
113 |      K(i, j) = kernel(X(i, ::).t, X(j, ::).t)
114 |      K(j, i) = K(i, j) // the matrix is symmetric, so only the upper triangle is computed.
115 |    }
116 |
117 |    print("Training(C=%f) (This may take a few minutes.)\n".format(C))
118 |    var dots = 0
119 |    while (passes < max_passes) {
120 |      var num_alpha_changed = 0
121 |      for (i <- 0 until m) {
122 |        E(i) = b + (alphas :* (Y2 :* K(::, i))).sum - Y2(i)
123 |        if ((Y2(i) * E(i) < -tol && alphas(i) < C) || (Y2(i) * E(i) > tol && alphas(i) > 0)) {
124 |          var j = scala.math.ceil((m - 1) * scala.util.Random.nextDouble()).toInt
125 |          // make sure i != j
126 |          while (j == i) { j = scala.math.ceil((m - 1) * scala.util.Random.nextDouble()).toInt }
127 |
128 |          // Calculate Ej = f(x(j)) - y(j) using (2).
129 |          E(j) = b + (alphas :* (Y2 :* K(::, j))).sum - Y2(j)
130 |
131 |          // Save old alphas
132 |          val alpha_i_old = alphas(i)
133 |          val alpha_j_old = alphas(j)
134 |
135 |          // Compute L and H by (10) or (11).
136 |          if (Y2(i) == Y2(j)) {
137 |            L = scala.math.max(0, alphas(j) + alphas(i) - C)
138 |            H = scala.math.min(C, alphas(j) + alphas(i))
139 |          } else {
140 |            L = scala.math.max(0, alphas(j) - alphas(i))
141 |            H = scala.math.min(C, C + alphas(j) - alphas(i))
142 |          }
143 |
144 |          // Compute eta by (14).
145 |          eta = 2 * K(i, j) - K(i, i) - K(j, j)
146 |
147 |          if (L != H && eta < 0) {
148 |            // Compute and clip new value for alpha j using (12) and (15).
149 |            alphas(j) = alphas(j) - (Y2(j) * (E(i) - E(j))) / eta
150 |
151 |            // Clip
152 |            alphas(j) = scala.math.min(H, alphas(j))
153 |            alphas(j) = scala.math.max(L, alphas(j))
154 |
155 |            // Check if the change in alpha is significant
156 |            if (abs(alphas(j) - alpha_j_old) < tol) {
157 |              // the change is insignificant:
158 |              // restore the old value and continue to the next i.
159 |              alphas(j) = alpha_j_old
160 |            } else {
161 |              // Determine value for alpha i using (16).
162 |              alphas(i) = alphas(i) + Y2(i) * Y2(j) * (alpha_j_old - alphas(j))
163 |
164 |              // Compute b1 and b2 using (17) and (18) respectively (the trailing '-' keeps each subtraction in one expression).
165 |              val b1 = b - E(i) - (Y2(i) * (alphas(i) - alpha_i_old) * K(i, j)) -
166 |                (Y2(j) * (alphas(j) - alpha_j_old) * K(i, j))
167 |              val b2 = b - E(j) - (Y2(i) * (alphas(i) - alpha_i_old) * K(i, j)) -
168 |                (Y2(j) * (alphas(j) - alpha_j_old) * K(j, j))
169 |
170 |              // Compute b by (19).
171 |              if (0 < alphas(i) && alphas(i) < C) {
172 |                b = b1
173 |              } else if (0 < alphas(j) && alphas(j) < C) {
174 |                b = b2
175 |              } else {
176 |                b = (b1 + b2) / 2.0d
177 |              }
178 |
179 |              num_alpha_changed += 1
180 |            }
181 |          }
182 |        }
183 |      }
184 |
185 |      if (num_alpha_changed == 0) {
186 |        passes += 1
187 |      } else {
188 |        passes = 0
189 |      }
190 |
191 |      print(".")
192 |      dots += 1
193 |      if (dots > 78) {
194 |        print("\n")
195 |        dots = 0
196 |      }
197 |    }
198 |    print("Done! \n\n")
199 |
200 |    val _idx = alphas.findAll(_ > 0.0d).toSeq
201 |    val _X = X(_idx, ::)
202 |    val _Y = Y2(_idx)
203 |    val _kernel = kernel
204 |    val _b = b
205 |    val _alphas = alphas(_idx)
206 |    val _w = ((alphas :* Y2).asRow * X).asCol
207 |
208 |    Model(_X, _Y, _kernel, _b, _alphas, _w)
209 |  }
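The heart of each SMO update above is clipping the new alpha_j back into the box [L, H] derived from the constraints (10)/(11). Just that step in isolation, as a plain-Scala sketch; ClipSketch and the values are invented:

    object ClipSketch extends App {
      def clip(a: Double, lo: Double, hi: Double) = math.max(lo, math.min(hi, a))
      val (l, h) = (0.0, 1.0)    // a box [L, H] as produced with C = 1
      println(clip(1.37, l, h))  // 1.0  -- pulled down to H
      println(clip(-0.25, l, h)) // 0.0  -- pulled up to L
      println(clip(0.42, l, h))  // 0.42 -- already feasible, unchanged
    }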
210 |
211 |   def plotDecisionBoundary(X: Matrix[Double], y: Vector[Double], model: Model) = {
212 |     print("Detecting decision boundaries...")
213 |     // compute decision boundary.
214 |     val NUM = 100
215 |     val x1 = linspace(X(::, 0).min, X(::, 0).max, NUM)
216 |     val x2 = linspace(X(::, 1).min, X(::, 1).max, NUM)
217 |     val (bx1, bx2) = computeDecisionBoundary(x1, x2, predict(model))
218 |     print(" Done!\n")
219 |
220 |     // plot input data and detected boundary
221 |     clf
222 |     plot.hold = true
223 |     scatter(X(::, 0), X(::, 1), circleSize(0.01)(X.numRows), y2Color(y))
224 |     scatter(bx1, bx2, circleSize(0.01)(bx1.size), {
225 |       case _ => Color.YELLOW
226 |     }: Int ~> Paint)
227 |     xlabel("X-value")
228 |     ylabel("Y-value")
229 |     title("Learning result by SVM\n blue: accepted, red: rejected, yellow: learned decision boundary")
230 |   }
231 |
232 |   val i2color: Int => Paint = _ match {
233 |     case 1 => Color.BLUE  // accepted
234 |     case 0 => Color.RED   // rejected
235 |     case _ => Color.BLACK // other
236 |   }
237 |   val y2Color: Vector[Double] => (Int ~> Paint) = y => {
238 |     case i => i2color(y(i).toInt)
239 |   }
240 | }
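plotDecisionBoundary delegates to computeDecisionBoundary (in package.scala) to find grid points where the prediction flips between 0 and 1. The same idea in one dimension, as a plain-Scala sketch with a made-up stand-in classifier (BoundaryScanSketch is an invented name):

    object BoundaryScanSketch extends App {
      val xs = (0 to 10).map(_ / 10.0)                    // a 1-D "mesh" of candidate points
      val predict = (x: Double) => if (x < 0.45) 0 else 1 // stand-in classifier
      // a boundary lies between any two adjacent mesh points with different predictions
      val flips = xs.sliding(2).collect {
        case Seq(a, b) if predict(a) != predict(b) => (a, b)
      }.toList
      println(flips) // List((0.4,0.5))
    }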
--------------------------------------------------------------------------------
/src/main/scala/org/everpeace/scalala/sample/UniVariateLinearRegressionSample.scala:
--------------------------------------------------------------------------------
1 | package org.everpeace.scalala.sample
2 |
3 | import scala.io.Source.fromFile
4 | import scalala.scalar._
5 | import scalala.tensor.::
6 | import scalala.tensor.mutable._
7 | import scalala.tensor.dense._
8 | import scalala.tensor.sparse._
9 | import scalala.library.Library._
10 | import scalala.library.LinearAlgebra._
11 | import scalala.library.Statistics._
12 | import scalala.library.Plotting._
13 | import scalala.operators.Implicits._
14 | import java.awt.{Paint, Color}
15 |
16 | /**
17 |  * Uni-Variate Linear Regression Sample By Scalala.
18 |  *
19 |  * Author: Shingo Omura
20 |  */
21 |
22 | object UniVariateLinearRegressionSample {
23 |
24 |   def main(args: Array[String]): Unit = run
25 |
26 |   def run: Unit = {
27 |     // loading sample data
28 |     val reg = "(-?[0-9]*\\.[0-9]+)\\,(-?[0-9]*\\.[0-9]+)*".r
29 |     val data: Matrix[Double] = DenseMatrix(fromFile("data/UniVariateLinearRegression.txt").getLines().toList.flatMap(_ match {
30 |       case reg(x, y) => Seq((x.toDouble, y.toDouble))
31 |       case _ => Seq.empty
32 |     }): _*)
33 |
34 |     plot.hold = true
35 |     scatter(data(::, 0), data(::, 1), circleSize(0.3)(data.numRows), { case _ => Color.BLUE }: Int ~> Paint)
36 |     xlabel("x")
37 |     ylabel("y")
38 |     title("sample data")
39 |
40 |     // add bias term to X
41 |     // Scalala has DenseMatrix(VectorRow*) but not DenseMatrix(VectorCol*), hence the transpose.
42 |     val X = DenseMatrix(DenseVector.ones[Double](data.numRows).asRow, data(::, 0).asRow).t
43 |     val y = data(::, 1)
44 |
45 |     // gradient descent parameters
46 |     val alpha = 0.02d
47 |     val num_iters = 500
48 |     import MultiVariateLinearRegressionSample.computeCostAndGrad
49 |     val (theta, costHist) = gradientDescent(DenseVector.zeros[Double](data.numCols), computeCostAndGrad(X, y), alpha, num_iters)
50 |
51 |     readLine("Learning finished! press enter to display the learned function.")
52 |
53 |     plot(X(::, 1), X * theta, colorcode = "red")
54 |     title("sample data (in blue) and learned function (in red).")
55 |
56 |     readLine("paused... press enter to display the cost history of learning.")
57 |     figure(2)
58 |     plot((1 to num_iters).toArray, costHist)
59 |     xlabel("number of iterations")
60 |     ylabel("cost")
61 |     title("cost history of learning")
62 |
63 |     println("\nTo finish this program, close all chart windows.")
64 |   }
65 |
66 | }
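The whole pipeline above in miniature: batch gradient descent on toy y = 2x data, written in plain Scala without Scalala. With alpha = 0.1 and 500 iterations (all values invented; StepSketch is a made-up name) the parameters converge to roughly (0, 2):

    object StepSketch extends App {
      val xs = Array(1.0, 2.0, 3.0)
      val ys = Array(2.0, 4.0, 6.0) // exactly y = 2x
      var (t0, t1) = (0.0, 0.0)
      val (alpha, m) = (0.1, xs.length)
      for (_ <- 1 to 500) {
        // residuals computed with the current parameters, then both updated together
        val d = xs.zip(ys).map { case (x, y) => t0 + t1 * x - y }
        t0 -= alpha * d.sum / m
        t1 -= alpha * d.zip(xs).map { case (di, xi) => di * xi }.sum / m
      }
      println((t0, t1)) // approximately (0.0, 2.0)
    }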
--------------------------------------------------------------------------------
/src/main/scala/org/everpeace/scalala/sample/package.scala:
--------------------------------------------------------------------------------
1 | package org.everpeace.scalala
2 |
3 | import scala.io.Source.fromFile
4 | import scalala.scalar._
5 | import scalala.tensor.::
6 | import scalala.tensor.mutable._
7 | import scalala.tensor.dense._
8 | import scalala.tensor.sparse._
9 | import scalala.library.Library._
10 | import scalala.library.LinearAlgebra._
11 | import scalala.library.Statistics._
12 | import scalala.library.Plotting._
13 | import scalala.operators.Implicits._
14 | import scala.PartialFunction
15 | import java.awt.{Color, Paint}
16 | import scalala.generic.collection.CanMapValues
17 |
18 | /**
19 |  *
20 |  * @author everpeace _at_ gmail _dot_ com
21 |  * @date 11/12/27
22 |  */
23 |
24 | package object sample {
25 |   type ~>[-A, +B] = PartialFunction[A, B]
26 |   val circleSize = (s: Double) => (n: Int) => DenseVector.fill(n)(s)
27 |
28 |   // gradient descent. returns the optimal theta (parameter column vector) and the history of cost values.
29 |   // initTheta: initial parameter column vector
30 |   // func: takes theta (a parameter column vector) and returns the cost value and gradient vector at theta.
31 |   // alpha: learning rate
32 |   // num_iters: number of iterations
33 |   def gradientDescent(initTheta: VectorCol[Double], func: (VectorCol[Double]) => (Double, VectorCol[Double]), alpha: Double, num_iters: Int): (VectorCol[Double], VectorCol[Double]) = {
34 |     println("=== start gradientDescent loop ===")
35 |     // initialize theta
36 |     val theta = DenseVector.zeros[Double](initTheta.length)
37 |     for (i <- 0 until theta.length) theta(i) = initTheta(i)
38 |     val costHist = DenseVector.zeros[Double](num_iters)
39 |
40 |     for (n <- 0 until num_iters) {
41 |       print((n + 1) + "/" + num_iters + " : ")
42 |       val r = func(theta)
43 |       costHist(n) = r._1
44 |       print("cost = " + r._1 + " theta = " + theta.asRow)
45 |       theta :-= (alpha :* r._2)
46 |     }
47 |
48 |     println("=== finish gradientDescent loop ===")
49 |     (theta, costHist)
50 |   }
51 |
52 |   // construct mesh grid.
53 |   def meshgrid(x1: Vector[Double], x2: Vector[Double]): (Matrix[Double], Matrix[Double]) = {
54 |     val x1Mesh = DenseMatrix.zeros[Double](x2.length, x1.length)
55 |     for (i <- 0 until x2.length) {
56 |       x1Mesh(i, ::) := x1.asRow
57 |     }
58 |     val x2Mesh = DenseMatrix.zeros[Double](x2.length, x1.length)
59 |     for (i <- 0 until x1.length) {
60 |       x2Mesh(::, i) := x2.asCol
61 |     }
62 |     (x1Mesh, x2Mesh)
63 |   }
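What meshgrid produces, shown with plain arrays: x1 is repeated along the rows and x2 along the columns, so (x1Mesh(i)(j), x2Mesh(i)(j)) walks every grid point. A plain-Scala sketch with made-up values (MeshSketch is an invented name):

    object MeshSketch extends App {
      val x1 = Array(0.0, 1.0, 2.0)
      val x2 = Array(10.0, 20.0)
      val x1Mesh = Array.fill(x2.length)(x1.clone)       // each row is a copy of x1
      val x2Mesh = x2.map(v => Array.fill(x1.length)(v)) // each row is one constant x2 value
      x1Mesh.foreach(r => println(r.mkString(" "))) // 0.0 1.0 2.0 (printed twice)
      x2Mesh.foreach(r => println(r.mkString(" "))) // 10.0 10.0 10.0 / 20.0 20.0 20.0
    }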
64 |
65 |   def computeDecisionBoundary(x1: Vector[Double], x2: Vector[Double], predict: Matrix[Double] => Vector[Double]): (Vector[Double], Vector[Double]) = {
66 |     val (x1Mesh, x2Mesh) = meshgrid(x1, x2)
67 |     val decisions = DenseMatrix.zeros[Double](x1Mesh.numRows, x1Mesh.numCols)
68 |
69 |     // compute decisions for all mesh points.
70 |     for (i <- 0 until x1Mesh.numCols) {
71 |       val this_X: Matrix[Double] = DenseMatrix(x1Mesh(::, i).asRow, x2Mesh(::, i).asRow).t
72 |       decisions(::, i) := predict(this_X)
73 |     }
74 |
75 |     // detect boundary: points predicted 0 that have at least one of the eight neighbors predicted 1.
76 |     var bx1 = Seq[Double]()
77 |     var bx2 = Seq[Double]()
78 |     for (i <- 1 until decisions.numRows - 1; j <- 1 until decisions.numCols - 1) {
79 |       if (decisions(i, j) == 0d && (decisions(i - 1, j - 1) == 1d || decisions(i - 1, j) == 1d || decisions(i - 1, j + 1) == 1d
80 |         || decisions(i, j - 1) == 1d || decisions(i, j + 1) == 1d
81 |         || decisions(i + 1, j - 1) == 1d || decisions(i + 1, j) == 1d || decisions(i + 1, j + 1) == 1d)) {
82 |         bx1 = x1Mesh(i, j) +: bx1
83 |         bx2 = x2Mesh(i, j) +: bx2
84 |       }
85 |     }
86 |
87 |     (Vector(bx1: _*), Vector(bx2: _*))
88 |   }
89 |
90 |   def accuracy(y: Vector[Double], pred: Vector[Double]): Double
91 |     = mean((y :== pred).map(if (_) 1.0d else 0.0d))
92 | }
--------------------------------------------------------------------------------
/src/test/scala/dummy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/everpeace/ml-examples-by-scalala/5784880ec1d6c41e8044f599138772dfe3b9e438/src/test/scala/dummy
--------------------------------------------------------------------------------
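Finally, the accuracy helper in package.scala is just the fraction of predictions that match the labels. In plain Scala with made-up labels (AccuracySketch is an invented name):

    object AccuracySketch extends App {
      val y    = Array(1.0, 0.0, 1.0, 1.0)
      val pred = Array(1.0, 0.0, 0.0, 1.0)
      val acc = y.zip(pred).count { case (a, b) => a == b }.toDouble / y.length
      println(acc) // 0.75
    }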