├── .gitignore ├── README.md ├── build.gradle ├── demo.gif ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat └── src └── main ├── kotlin ├── Dashboard.kt ├── PredictorModel.kt ├── TomNeuralNetwork.kt └── Util.kt └── resources └── color_training_set.csv /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | /build/ 3 | /.idea/ 4 | /.gradle/ 5 | *.iml 6 | gradle.properties 7 | secring.gpg 8 | /gradle.properties 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Kotlin Simple Neural Network 2 | 3 | **PLEASE NOTE: This project has been [forked and moved here](https://github.com/thomasnield/kotlin-machine-learning-demos/) to showcase machine learning algorithms beyond neural networks.** 4 | 5 | This is a simple neural network application that suggests a LIGHT or DARK font for a given background color. 6 | 7 | The training/predicting user interface was built with [TornadoFX](https://github.com/edvin/tornadofx). 8 | 9 | ## YouTube Walkthrough 10 | 11 | [![](https://img.youtube.com/vi/tAioWlhKA90/hqdefault.jpg)](https://www.youtube.com/watch?v=tAioWlhKA90) 12 | 13 | ## Featured at KotlinConf 2018 in Amsterdam 14 | 15 | [![](https://img.youtube.com/vi/-zTqtEcnM7A/hqdefault.jpg)](https://youtu.be/-zTqtEcnM7A) 16 | 17 | ## Details 18 | 19 | Currently there are four implementations: 20 | 21 | 1) [Simple RGB formula](https://stackoverflow.com/questions/1855884/determine-font-color-based-on-background-color#1855903) (sketched at the bottom of this README) 22 | 2) My feed-forward brute-force implementation (no backpropagation) 23 | 3) [ojAlgo! Neural Network](http://www.ojalgo.org/) 24 | 4) [DeepLearning4J](https://deeplearning4j.org/) 25 | 26 | For this simple toy example, ojAlgo seems to perform the best, and it is the most lightweight and the simplest to implement. DL4J is definitely more heavyweight (with many dependencies) but is a more robust framework for larger, data-intensive deep learning problems in production. DL4J also has a nice [Kotlin MNIST example](https://github.com/deeplearning4j/dl4j-examples/tree/master/dl4j-examples/src/main/kotlin/org/deeplearning4j/examples/feedforward/mnist). 27 | 28 | Note also that there is now a button to pre-train the model with 1345 categorized colors. 29 | 30 | Tariq Rashid's book [Make Your Own Neural Network](https://www.amazon.com/Make-Your-Own-Neural-Network/dp/1530826608/) is a tremendous resource, as well as [3Blue1Brown's video](https://www.youtube.com/watch?v=aircAruvnKk&list=PLZHQObOWTQDNU6R1_67000Dx_ZCJB-3pi). [Grokking Deep Learning](https://www.manning.com/books/grokking-deep-learning) is probably the most thorough and useful resource when you are ready to deep-dive.
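For reference, the simple RGB formula (option 1) boils down to a perceived-luminance check. Below is a minimal sketch that mirrors the `FORMULAIC` predictor in `PredictorModel.kt`; the function name `suggestFontShade` is only for illustration and is not part of the project.

```kotlin
import javafx.scene.paint.Color

// Perceived luminance of the background, weighting green heaviest (0.299 R + 0.587 G + 0.114 B).
// Bright backgrounds get a DARK font, dim backgrounds get a LIGHT font.
fun suggestFontShade(background: Color): FontShade {
    val luminance = 0.299 * background.red + 0.587 * background.green + 0.114 * background.blue
    return if (luminance > 0.5) FontShade.DARK else FontShade.LIGHT
}
```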
31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | ext.kotlin_version = '1.3.10' 3 | 4 | repositories { 5 | maven { url 'http://repo1.maven.org/maven2' } 6 | } 7 | dependencies { 8 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" 9 | } 10 | } 11 | 12 | 13 | apply plugin: "kotlin" 14 | apply plugin: 'application' 15 | 16 | compileKotlin { 17 | kotlinOptions.jvmTarget= "1.8" 18 | } 19 | 20 | repositories { 21 | maven { url 'http://repo1.maven.org/maven2' } 22 | maven { url 'https://jitpack.io' } 23 | } 24 | 25 | dependencies { 26 | 27 | compile "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version" 28 | 29 | compile 'no.tornado:tornadofx:1.+' 30 | 31 | compile 'org.ojalgo:ojalgo:46.1.0' 32 | compile 'org.deeplearning4j:deeplearning4j-core:1.0.0-beta2' 33 | compile 'org.nd4j:nd4j-native-platform:1.0.0-beta2' 34 | } 35 | 36 | -------------------------------------------------------------------------------- /demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thomasnield/kotlin_simple_neural_network/ba46a4afd728550ab0aa3f2713d8d70e2cc36164/demo.gif -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/thomasnield/kotlin_simple_neural_network/ba46a4afd728550ab0aa3f2713d8d70e2cc36164/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Sat Dec 09 09:03:22 CST 2017 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.0-bin.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Attempt to set APP_HOME 10 | # Resolve links: $0 may be a link 11 | PRG="$0" 12 | # Need this for relative symlinks. 13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS="" 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 
48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? -ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" 
"$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS= 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /src/main/kotlin/Dashboard.kt: -------------------------------------------------------------------------------- 1 | import javafx.application.Application 2 | import javafx.beans.property.ReadOnlyObjectWrapper 3 | import javafx.beans.property.SimpleObjectProperty 4 | import javafx.geometry.Insets 5 | import javafx.geometry.Orientation 6 | import javafx.scene.layout.Background 7 | import javafx.scene.layout.BackgroundFill 8 | import javafx.scene.layout.CornerRadii 9 | import javafx.scene.paint.Color 10 | import javafx.scene.text.FontWeight 11 | import tornadofx.* 12 | 13 | 14 | fun main(args: Array) = Application.launch(MainApp::class.java, *args) 15 | 16 | class MainApp: App(MainView::class) 17 | 18 | class MainView: View() { 19 | 20 | val backgroundColor = SimpleObjectProperty(Color.GRAY) 21 | 22 | fun assignRandomColor() = randomColor() 23 | .also { backgroundColor.set(it) } 24 | 25 | override val root = splitpane { 26 | style = "-fx-font-size: 16pt; " 27 | orientation = Orientation.VERTICAL 28 | 29 | splitpane { 30 | 31 | title = "Light/Dark Text Suggester" 32 | orientation = Orientation.HORIZONTAL 33 | 34 | borderpane { 35 | 36 | top = label("TRAIN") { 37 | style { 38 | textFill = Color.RED 39 | fontWeight = FontWeight.BOLD 40 | } 41 | } 42 | 43 | center = form { 44 | fieldset { 45 | 46 | field("Which looks better?").hbox { 47 | button("DARK") { 48 | textFill = Color.BLACK 49 | useMaxWidth = true 50 | 51 | backgroundProperty().bind( 52 | backgroundColor.select { ReadOnlyObjectWrapper(Background(BackgroundFill(it, CornerRadii.EMPTY, Insets.EMPTY))) } 53 | ) 54 | 55 | setOnAction { 56 | 57 | PredictorModel += CategorizedInput(backgroundColor.get(), FontShade.DARK) 58 | assignRandomColor() 59 | } 60 | } 61 | 62 | button("LIGHT") { 63 | textFill = Color.WHITE 64 | useMaxWidth = true 65 | 66 | backgroundProperty().bind( 67 | backgroundColor.select { ReadOnlyObjectWrapper(Background(BackgroundFill(it, CornerRadii.EMPTY, Insets.EMPTY))) } 68 | ) 69 | 70 | setOnAction { 71 | PredictorModel += CategorizedInput(backgroundColor.get(), FontShade.DARK) 72 | 73 | assignRandomColor() 74 | } 75 | } 76 | } 77 | } 78 | 79 | fieldset { 80 | field("Model") { 81 | combobox(PredictorModel.selectedPredictor) { 82 | 83 | PredictorModel.Predictor.values().forEach { items.add(it) } 84 | } 85 | } 86 | } 87 | 88 | fieldset { 89 | field("Pre-Train") { 90 | button("Train 1345 Colors") { 91 | useMaxWidth = true 92 | setOnAction { 93 | PredictorModel.preTrainData() 94 | isDisable = true 95 | } 96 | } 97 | } 98 | } 99 | } 100 | 101 | } 102 | 103 | borderpane { 104 | 105 | top = label("PREDICT") { 106 | style { 107 | textFill = Color.RED 108 | fontWeight = FontWeight.BOLD 109 | } 110 | } 111 | 112 | center = form { 113 | fieldset { 114 | field("Background") { 115 | colorpicker { 116 | valueProperty().onChange { 117 | backgroundColor.set(it) 118 | } 119 | 120 | customColors.forEach { println(it) } 121 | } 122 | } 123 | field("Result") { 124 | label("LOREM IPSUM") { 125 | backgroundProperty().bind( 126 | backgroundColor.select { ReadOnlyObjectWrapper(Background(BackgroundFill(it, CornerRadii.EMPTY, Insets.EMPTY))) } 127 | ) 128 | 129 | backgroundColor.onChange { 130 | val result = PredictorModel.predict(it!!) 
131 | 132 | text = result.toString() 133 | textFill = result.color 134 | } 135 | 136 | } 137 | } 138 | } 139 | } 140 | } 141 | } 142 | } 143 | } 144 | -------------------------------------------------------------------------------- /src/main/kotlin/PredictorModel.kt: -------------------------------------------------------------------------------- 1 | import javafx.beans.property.SimpleObjectProperty 2 | import javafx.collections.FXCollections 3 | import javafx.scene.paint.Color 4 | import org.deeplearning4j.nn.api.OptimizationAlgorithm 5 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration 6 | import org.deeplearning4j.nn.conf.layers.DenseLayer 7 | import org.deeplearning4j.nn.conf.layers.OutputLayer 8 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork 9 | import org.deeplearning4j.nn.weights.WeightInit 10 | import org.nd4j.linalg.activations.Activation 11 | import org.nd4j.linalg.factory.Nd4j 12 | import org.nd4j.linalg.learning.config.Nesterovs 13 | import org.ojalgo.ann.ArtificialNeuralNetwork 14 | import org.ojalgo.array.Primitive64Array 15 | import java.util.concurrent.ThreadLocalRandom 16 | 17 | object PredictorModel { 18 | 19 | val inputs = FXCollections.observableArrayList<CategorizedInput>() 20 | 21 | val selectedPredictor = SimpleObjectProperty(Predictor.OJALGO_NN) 22 | 23 | fun predict(color: Color) = selectedPredictor.get().predict(color) 24 | 25 | operator fun plusAssign(categorizedInput: CategorizedInput) { 26 | inputs += categorizedInput 27 | } 28 | operator fun plusAssign(categorizedInput: Pair<Color, FontShade>) { 29 | inputs += categorizedInput.let { CategorizedInput(it.first, it.second) } 30 | } 31 | 32 | fun preTrainData() { 33 | 34 | PredictorModel::class.java.getResource("color_training_set.csv").readText().lines() 35 | .asSequence() 36 | .map { s -> s.split(",").map { it.toInt() } } 37 | .map { Color.rgb(it[0], it[1], it[2]) } 38 | .map { CategorizedInput(it, Predictor.FORMULAIC.predict(it)) } 39 | .toList() 40 | .forEach { 41 | inputs += it 42 | } 43 | } 44 | 45 | 46 | enum class Predictor { 47 | 48 | /** 49 | * Uses a simple formula to classify colors as LIGHT or DARK 50 | */ 51 | FORMULAIC { 52 | override fun predict(color: Color) = (1 - (0.299 * color.red + 0.587 * color.green + 0.114 * color.blue)) 53 | .let { if (it < .5) FontShade.DARK else FontShade.LIGHT } 54 | }, 55 | /** 56 | * My implementation from scratch, still a work-in-progress 57 | * I need to implement gradient descent using Koma 58 | */ 59 | TOMS_FEED_FORWARD_NN { 60 | override fun predict(color: Color): FontShade { 61 | 62 | val bruteForceNN = neuralnetwork { 63 | inputlayer(3) 64 | hiddenlayer(3) 65 | outputlayer(2) 66 | } 67 | 68 | val trainingEntries = inputs.asSequence() 69 | .map { 70 | colorAttributes(it.color) to it.fontShade.outputValue 71 | }.asIterable() 72 | 73 | bruteForceNN.trainEntries(trainingEntries) 74 | 75 | val result = bruteForceNN.predictEntry(*colorAttributes(color)) 76 | println("LIGHT: ${result[0]} DARK: ${result[1]}") 77 | 78 | return when { 79 | result[0] > result[1] -> FontShade.LIGHT 80 | else -> FontShade.DARK 81 | } 82 | } 83 | }, 84 | /** 85 | * Uses ojAlgo's artificial neural network API, 86 | * which works really well and is much more lightweight than DL4J 87 | */ 88 | OJALGO_NN { 89 | 90 | override fun predict(color: Color): FontShade { 91 | 92 | val ann = ArtificialNeuralNetwork.builder(3, 3, 2).apply { 93 | 94 | activator(0, ArtificialNeuralNetwork.Activator.RECTIFIER) 95 | activator(1, ArtificialNeuralNetwork.Activator.SOFTMAX) 96 | 97 | rate(.05) 98 |
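// Builder config: a 3-3-2 network (RGB inputs, one rectifier hidden layer, softmax output over LIGHT/DARK),
// trained below with cross-entropy error on every color categorized so far.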
error(ArtificialNeuralNetwork.Error.CROSS_ENTROPY) 99 | 100 | val inputValues = inputs.asSequence().map { Primitive64Array.FACTORY.copy(* colorAttributes(it.color)) } 101 | .toList() 102 | 103 | val outputValues = inputs.asSequence().map { Primitive64Array.FACTORY.copy(*it.fontShade.outputValue) } 104 | .toList() 105 | 106 | train(inputValues, outputValues) 107 | }.get() 108 | 109 | return ann.invoke(Primitive64Array.FACTORY.copy(*colorAttributes(color))).let { 110 | println("${it[0]} ${it[1]}") 111 | if (it[0] > it[1]) FontShade.LIGHT else FontShade.DARK 112 | } 113 | } 114 | }, 115 | 116 | /** 117 | * Uses DeepLearning4J, a heavyweight neural network library that is probably overkill for this toy problem. 118 | * However, DL4J is a good library to use for large real-world projects. 119 | */ 120 | DL4J_NN { 121 | override fun predict(color: Color): FontShade { 122 | 123 | val dl4jNN = NeuralNetConfiguration.Builder() 124 | .weightInit(WeightInit.UNIFORM) 125 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT) 126 | .updater(Nesterovs(.006, .9)) 127 | .l2(1e-4) 128 | .list( 129 | DenseLayer.Builder().nIn(3).nOut(3).activation(Activation.RELU).build(), 130 | OutputLayer.Builder().nIn(3).nOut(2).activation(Activation.SOFTMAX).build() 131 | ).pretrain(false) 132 | .backprop(true) 133 | .build() 134 | .let(::MultiLayerNetwork).apply { init() } 135 | 136 | val examples = inputs.asSequence() 137 | .map { colorAttributes(it.color) } 138 | .toList().toTypedArray() 139 | .let { Nd4j.create(it) } 140 | 141 | val outcomes = inputs.asSequence() 142 | .map { it.fontShade.outputValue } 143 | .toList().toTypedArray() 144 | .let { Nd4j.create(it) } 145 | 146 | 147 | // train for 1000 iterations (epochs) 148 | repeat(1000) { 149 | dl4jNN.fit(examples, outcomes) 150 | } 151 | 152 | // Test the input color and predict it as LIGHT or DARK 153 | val result = dl4jNN.output(Nd4j.create(colorAttributes(color))).toDoubleVector() 154 | 155 | println(result.joinToString(", ")) 156 | 157 | return if (result[0] > result[1]) FontShade.LIGHT else FontShade.DARK 158 | 159 | } 160 | }; 161 | 162 | abstract fun predict(color: Color): FontShade 163 | override fun toString() = name.replace("_", " ") 164 | } 165 | 166 | } 167 | 168 | data class CategorizedInput( 169 | val color: Color, 170 | val fontShade: FontShade 171 | ) 172 | 173 | enum class FontShade(val color: Color, val outputValue: DoubleArray){ 174 | DARK(Color.BLACK, doubleArrayOf(0.0, 1.0)), 175 | LIGHT(Color.WHITE, doubleArrayOf(1.0,0.0)) 176 | } 177 | 178 | // UTILITIES 179 | 180 | fun randomInt(lower: Int, upper: Int) = ThreadLocalRandom.current().nextInt(lower, upper + 1) 181 | 182 | 183 | fun randomColor() = (1..3).asSequence() 184 | .map { randomInt(0,255) } 185 | .toList() 186 | .let { Color.rgb(it[0], it[1], it[2]) } 187 | 188 | fun colorAttributes(c: Color) = doubleArrayOf( 189 | c.red, 190 | c.green, 191 | c.blue 192 | ) 193 | -------------------------------------------------------------------------------- /src/main/kotlin/TomNeuralNetwork.kt: -------------------------------------------------------------------------------- 1 | import org.ojalgo.matrix.BasicMatrix 2 | import tornadofx.* 3 | import java.util.concurrent.ThreadLocalRandom 4 | import kotlin.math.abs 5 | import kotlin.math.exp 6 | 7 | // This is a still a work-in-progress. 
Need to implement Koma-based gradient descent 8 | 9 | fun neuralnetwork(op: NeuralNetworkBuilder.() -> Unit): NeuralNetwork { 10 | val nn = NeuralNetworkBuilder() 11 | nn.op() 12 | return nn.build().also { it.randomize() } 13 | } 14 | 15 | class NeuralNetwork( 16 | inputNodeCount: Int, 17 | hiddenLayerCounts: List, 18 | outputLayerCount: Int 19 | ) { 20 | 21 | 22 | val inputLayer = InputLayer(inputNodeCount) 23 | 24 | val hiddenLayers = hiddenLayerCounts.asSequence() 25 | .map { 26 | CalculatedLayer(it) 27 | }.toList().also { layers -> 28 | layers.withIndex().forEach { (i,layer) -> 29 | layer.feedingLayer = (if (i == 0) inputLayer else layers[i-1]) 30 | } 31 | } 32 | 33 | val outputLayer = CalculatedLayer(outputLayerCount).also { 34 | it.feedingLayer = (if (hiddenLayers.isNotEmpty()) hiddenLayers.last() else inputLayer) 35 | } 36 | 37 | fun randomize() { 38 | hiddenLayers.forEach { it.randomizeWeights() } 39 | outputLayer.randomizeWeights() 40 | } 41 | 42 | fun calculate() { 43 | hiddenLayers.forEach { it.calculate() } 44 | outputLayer.calculate() 45 | } 46 | 47 | val weightMatrices get() = hiddenLayers.asSequence().map { it.weightsMatrix } 48 | .plusElement(outputLayer.weightsMatrix) 49 | .toList() 50 | 51 | val calculatedLayers = hiddenLayers.plusElement(outputLayer) 52 | 53 | 54 | /** 55 | * Input a set of training values for each node 56 | */ 57 | fun trainEntries(inputsAndTargets: Iterable>) { 58 | 59 | // randomize if needed 60 | val entries = inputsAndTargets.toList() 61 | 62 | var lowestError = Int.MAX_VALUE 63 | var bestWeights = weightMatrices 64 | 65 | // calculate new hidden and output node values 66 | (0..10000).asSequence().takeWhile { lowestError > 0 }.forEach { 67 | randomize() 68 | 69 | val totalError = entries.asSequence().map { (input,target) -> 70 | 71 | inputLayer.withIndex().forEach { (i,layer) -> layer.value = input[i] } 72 | calculate() 73 | 74 | 75 | outputLayer.asSequence().map { it.value }.zip(target.asSequence()) 76 | .filter { (calculated, desired) -> 77 | desired == 1.0 && abs(calculated - desired) < .5 || 78 | desired == 0.0 && abs(calculated - desired) > .5 79 | }.count() 80 | }.sum() 81 | 82 | if (entries.count() > 1 && totalError < lowestError) { 83 | println("$totalError < $lowestError") 84 | lowestError = totalError 85 | bestWeights = weightMatrices 86 | } 87 | } 88 | 89 | bestWeights.withIndex().forEach { (i, m) -> 90 | calculatedLayers[i].weightsMatrix = m 91 | } 92 | } 93 | 94 | fun predictEntry(vararg inputValues: Double): DoubleArray { 95 | 96 | 97 | // assign input values to input nodes 98 | inputValues.withIndex().forEach { (i,v) -> inputLayer.nodes[i].value = v } 99 | 100 | // calculate new hidden and output node values 101 | calculate() 102 | return outputLayer.map { it.value }.toDoubleArray() 103 | } 104 | } 105 | 106 | 107 | 108 | // LAYERS 109 | sealed class Layer: Iterable { 110 | abstract val nodes: List 111 | override fun iterator() = nodes.iterator() 112 | } 113 | 114 | /** 115 | * An `InputLayer` belongs to the first layer and accepts the input values for each `InputNode` 116 | */ 117 | class InputLayer(nodeCount: Int): Layer() { 118 | 119 | override val nodes = (0..(nodeCount-1)).asSequence() 120 | .map { InputNode(it) } 121 | .toList() 122 | } 123 | 124 | /** 125 | * A `CalculatedLayer` is used for the hidden and output layers, and is derived off weights and values off each previous layer 126 | */ 127 | class CalculatedLayer(nodeCount: Int): Layer() { 128 | 129 | var feedingLayer: Layer by singleAssign() 130 | 131 | override val nodes 
by lazy { 132 | (0..(nodeCount - 1)).asSequence() 133 | .map { CalculatedNode(it, this) } 134 | .toList() 135 | } 136 | 137 | var weightsMatrix: BasicMatrix = primitivematrix(0,0) 138 | var valuesMatrix: BasicMatrix = primitivematrix(0,0) 139 | 140 | fun randomizeWeights() { 141 | weightsMatrix = primitivematrix(count(), feedingLayer.count()) { 142 | populate { row,col -> randomInitialValue() } 143 | } 144 | } 145 | 146 | fun calculate() { 147 | valuesMatrix = (weightsMatrix * feedingLayer.toPrimitiveMatrix({it.value})).scalarApply { sigmoid(it) } 148 | } 149 | } 150 | 151 | 152 | // NODES 153 | sealed class Node(val index: Int) { 154 | abstract val value: Double 155 | } 156 | 157 | 158 | class InputNode(index: Int): Node(index) { 159 | override var value = randomInitialValue() 160 | } 161 | 162 | 163 | class CalculatedNode(index: Int, 164 | val parentLayer: CalculatedLayer 165 | ): Node(index) { 166 | 167 | override val value get() = parentLayer.valuesMatrix[index.toLong(),0].toDouble() 168 | 169 | } 170 | 171 | fun randomInitialValue() = ThreadLocalRandom.current().nextDouble(-1.0,1.0) 172 | fun sigmoid(x: Number) = 1.0 / (1.0 + exp(-x.toDouble())) 173 | 174 | // BUILDERS 175 | class NeuralNetworkBuilder { 176 | 177 | var input = 0 178 | var hidden = mutableListOf() 179 | var output = 0 180 | 181 | fun inputlayer(nodeCount: Int) { 182 | input = nodeCount 183 | } 184 | 185 | fun hiddenlayer(nodeCount: Int) { 186 | hidden.add(nodeCount) 187 | } 188 | 189 | fun outputlayer(nodeCount: Int) { 190 | output = nodeCount 191 | } 192 | 193 | fun build() = NeuralNetwork(input, hidden, output) 194 | } 195 | -------------------------------------------------------------------------------- /src/main/kotlin/Util.kt: -------------------------------------------------------------------------------- 1 | import org.ojalgo.algebra.Operation 2 | import org.ojalgo.algebra.ScalarOperation 3 | import org.ojalgo.matrix.BasicMatrix 4 | import org.ojalgo.matrix.ComplexMatrix 5 | import org.ojalgo.matrix.PrimitiveMatrix 6 | import org.ojalgo.matrix.RationalMatrix 7 | import org.ojalgo.scalar.ComplexNumber 8 | import org.ojalgo.scalar.RationalNumber 9 | import java.math.BigDecimal 10 | 11 | fun Sequence.toPrimitiveMatrix(vararg selectors: (T) -> N): PrimitiveMatrix { 12 | val items = toList() 13 | 14 | return primitivematrix(items.count(), selectors.count()) { 15 | populate { row, col -> 16 | selectors[col.toInt()](items[row.toInt()]) 17 | } 18 | } 19 | } 20 | 21 | fun Iterable.toPrimitiveMatrix(vararg selectors: (T) -> N): PrimitiveMatrix { 22 | val items = toList() 23 | 24 | return primitivematrix(items.count(), selectors.count()) { 25 | populate { row, col -> 26 | selectors[col.toInt()](items[row.toInt()]) 27 | } 28 | } 29 | } 30 | 31 | 32 | fun Sequence.toComplexMatrix(vararg selectors: (T) -> N): ComplexMatrix { 33 | val items = toList() 34 | 35 | return complexmatrix(items.count(), selectors.count()) { 36 | populate { row, col -> 37 | selectors[col.toInt()](items[row.toInt()]) 38 | } 39 | } 40 | } 41 | fun Iterable.toComplexMatrix(vararg selectors: (T) -> N): ComplexMatrix { 42 | val items = toList() 43 | 44 | return complexmatrix(items.count(), selectors.count()) { 45 | populate { row, col -> 46 | selectors[col.toInt()](items[row.toInt()]) 47 | } 48 | } 49 | } 50 | 51 | 52 | fun Sequence.toRationalMatrix(vararg selectors: (T) -> N): RationalMatrix { 53 | val items = toList() 54 | 55 | return rationalmatrix(items.count(), selectors.count()) { 56 | populate { row, col -> 57 | 
selectors[col.toInt()](items[row.toInt()]) 58 | } 59 | } 60 | } 61 | fun Iterable.toRationalMatrix(vararg selectors: (T) -> N): RationalMatrix { 62 | val items = toList() 63 | 64 | return rationalmatrix(items.count(), selectors.count()) { 65 | populate { row, col -> 66 | selectors[col.toInt()](items[row.toInt()]) 67 | } 68 | } 69 | } 70 | 71 | 72 | fun vectorOf(vararg values: Int) = primitivematrix(values.count(), 1) { 73 | populate { row, col -> values[row.toInt()] } 74 | } 75 | 76 | fun vectorOf(vararg values: Double) = primitivematrix(values.count(), 1) { 77 | populate { row, col -> values[row.toInt()] } 78 | } 79 | 80 | fun vectorOf(vararg values: Long) = primitivematrix(values.count(), 1) { 81 | populate { row, col -> values[row.toInt()] } 82 | } 83 | 84 | fun vectorOf(vararg values: BigDecimal) = rationalmatrix(values.count(), 1) { 85 | populate { row, col -> values[row.toInt()] } 86 | } 87 | 88 | 89 | fun primitivematrix(rows: Int, cols: Int, op: (BasicMatrix.PhysicalBuilder.() -> Unit)? = null) = 90 | PrimitiveMatrix.FACTORY.getBuilder(rows,cols).also { 91 | if (op != null) op(it) 92 | }.build() 93 | 94 | 95 | fun complexmatrix(rows: Int, cols: Int, op: (BasicMatrix.PhysicalBuilder.() -> Unit)? = null) = 96 | ComplexMatrix.FACTORY.getBuilder(rows,cols).also { 97 | if (op != null) op(it) 98 | }.build() 99 | 100 | fun rationalmatrix(rows: Int, cols: Int, op: (BasicMatrix.PhysicalBuilder.() -> Unit)? = null) = 101 | RationalMatrix.FACTORY.getBuilder(rows,cols).also { 102 | if (op != null) op(it) 103 | }.build() 104 | 105 | fun BasicMatrix.PhysicalBuilder.populate(op: (Long,Long) -> Number) = 106 | loopAll { row, col -> set(row, col, op(row,col)) } 107 | 108 | fun BasicMatrix.PhysicalBuilder.setAll(vararg values: Number) { 109 | 110 | var index = 0 111 | 112 | for (r in 0..(countRows()-1)) { 113 | for (c in 0..(countColumns()-1)) { 114 | set(r,c,values[index++]) 115 | } 116 | } 117 | } 118 | 119 | 120 | fun BasicMatrix.scalarApply(op: (Number) -> Number) = primitivematrix(countRows().toInt(), countColumns().toInt()) { 121 | populate { row, col -> op(this@scalarApply[row, col]) } 122 | build() 123 | } 124 | 125 | 126 | operator fun Operation.Addition.plus(t: T) = add(t) 127 | operator fun Operation.Division.div(t: T) = divide(t) 128 | operator fun Operation.Multiplication.times(t: T) = multiply(t) 129 | operator fun Operation.Subtraction.minus(t: T) = subtract(t) 130 | 131 | operator fun ScalarOperation.Addition.plus(number: N) = add(number) 132 | operator fun ScalarOperation.Division.div(number: N) = divide(number) 133 | operator fun ScalarOperation.Multiplication.times(number: N) = multiply(number) 134 | operator fun ScalarOperation.Subtraction.minus(number: N) = subtract(number) 135 | 136 | 137 | 138 | -------------------------------------------------------------------------------- /src/main/resources/color_training_set.csv: -------------------------------------------------------------------------------- 1 | 0,0,0 2 | 0,0,128 3 | 0,0,139 4 | 0,0,205 5 | 0,0,238 6 | 0,0,255 7 | 0,100,0 8 | 0,104,139 9 | 0,128,0 10 | 0,128,128 11 | 0,134,139 12 | 0,139,0 13 | 0,139,139 14 | 0,139,69 15 | 0,154,205 16 | 0,178,238 17 | 0,191,255 18 | 0,197,205 19 | 0,199,140 20 | 0,201,87 21 | 0,205,0 22 | 0,205,102 23 | 0,205,205 24 | 0,206,209 25 | 0,229,238 26 | 0,238,0 27 | 0,238,118 28 | 0,238,238 29 | 0,245,255 30 | 0,250,154 31 | 0,255,0 32 | 0,255,127 33 | 0,255,255 34 | 0,26,128 35 | 0,51,0 36 | 0,51,51 37 | 100,149,237 38 | 10,10,10 39 | 102,102,0 40 | 102,102,102 41 | 102,102,102 42 | 
102,128,230 43 | 102,139,139 44 | 102,153,102 45 | 102,153,153 46 | 102,205,0 47 | 102,205,170 48 | 102,26,51 49 | 102,51,102 50 | 102,77,179 51 | 104,131,139 52 | 104,34,139 53 | 105,105,105 54 | 105,105,105 55 | 105,139,105 56 | 105,139,34 57 | 105,89,205 58 | 106,90,205 59 | 107,107,107 60 | 107,142,35 61 | 108,123,139 62 | 108,166,205 63 | 110,110,110 64 | 110,123,139 65 | 110,139,61 66 | 112,112,112 67 | 112,128,144 68 | 113,113,198 69 | 113,198,113 70 | 115,115,115 71 | 117,117,117 72 | 118,238,0 73 | 118,238,198 74 | 119,136,153 75 | 120,120,120 76 | 121,205,205 77 | 122,103,238 78 | 122,122,122 79 | 122,139,139 80 | 122,197,205 81 | 122,55,139 82 | 123,104,238 83 | 124,205,124 84 | 124,252,0 85 | 125,125,125 86 | 125,158,192 87 | 125,38,205 88 | 126,192,238 89 | 127,127,127 90 | 127,255,0 91 | 127,255,212 92 | 128,0,0 93 | 128,0,128 94 | 128,102,204 95 | 128,128,0 96 | 128,128,105 97 | 128,128,128 98 | 128,128,128 99 | 128,128,26 100 | 128,138,135 101 | 128,153,255 102 | 128,179,128 103 | 128,179,179 104 | 128,51,77 105 | 128,77,128 106 | 130,130,130 107 | 131,111,255 108 | 131,139,131 109 | 131,139,139 110 | 13,13,13 111 | 132,112,255 112 | 132,132,132 113 | 133,133,133 114 | 135,135,135 115 | 135,206,235 116 | 135,206,250 117 | 135,206,255 118 | 135,38,87 119 | 137,104,205 120 | 138,138,138 121 | 138,43,226 122 | 138,51,36 123 | 138,54,15 124 | 139,0,0 125 | 139,0,139 126 | 139,101,8 127 | 139,102,139 128 | 139,105,105 129 | 139,105,20 130 | 139,10,80 131 | 139,115,85 132 | 139,117,0 133 | 139,119,101 134 | 139,121,94 135 | 139,123,139 136 | 139,125,107 137 | 139,125,123 138 | 139,126,102 139 | 139,129,76 140 | 139,131,120 141 | 139,131,134 142 | 139,134,130 143 | 139,134,78 144 | 139,136,120 145 | 139,137,112 146 | 139,137,137 147 | 139,139,0 148 | 139,139,122 149 | 139,139,131 150 | 139,26,26 151 | 139,28,98 152 | 139,34,82 153 | 139,35,35 154 | 139,37,0 155 | 139,54,38 156 | 139,58,58 157 | 139,58,98 158 | 139,62,47 159 | 139,69,0 160 | 139,69,19 161 | 139,71,137 162 | 139,71,38 163 | 139,71,93 164 | 139,76,57 165 | 139,87,66 166 | 139,90,0 167 | 139,90,43 168 | 139,95,101 169 | 139,99,108 170 | 140,140,140 171 | 141,182,205 172 | 141,238,238 173 | 142,142,142 174 | 142,142,56 175 | 142,229,238 176 | 142,56,142 177 | 143,143,143 178 | 143,188,143 179 | 144,238,144 180 | 145,145,145 181 | 145,44,238 182 | 147,112,219 183 | 148,0,211 184 | 148,148,148 185 | 150,150,150 186 | 150,205,205 187 | 151,255,255 188 | 15,15,15 189 | 152,245,255 190 | 152,251,152 191 | 153,0,0 192 | 153,102,0 193 | 153,102,153 194 | 153,128,230 195 | 153,153,0 196 | 153,153,153 197 | 153,153,153 198 | 153,153,51 199 | 153,179,255 200 | 153,204,153 201 | 153,204,204 202 | 153,50,204 203 | 153,51,0 204 | 153,77,0 205 | 153,77,102 206 | 154,192,205 207 | 154,205,50 208 | 154,255,154 209 | 154,50,205 210 | 155,205,155 211 | 155,48,255 212 | 156,102,31 213 | 156,156,156 214 | 158,158,158 215 | 159,121,238 216 | 159,182,205 217 | 160,82,45 218 | 161,161,161 219 | 162,181,205 220 | 162,205,90 221 | 163,163,163 222 | 164,211,238 223 | 165,42,42 224 | 166,166,166 225 | 16,78,139 226 | 168,168,168 227 | 169,169,169 228 | 170,170,170 229 | 171,130,255 230 | 171,171,171 231 | 173,173,173 232 | 173,216,230 233 | 173,255,47 234 | 174,238,238 235 | 176,176,176 236 | 176,196,222 237 | 176,224,230 238 | 176,226,255 239 | 176,23,31 240 | 178,223,238 241 | 178,34,34 242 | 178,58,238 243 | 179,102,128 244 | 179,102,26 245 | 179,128,179 246 | 179,128,26 247 | 179,153,255 248 | 179,179,179 249 | 179,179,179 250 | 179,179,26 
251 | 179,179,77 252 | 179,204,255 253 | 179,230,179 254 | 179,230,230 255 | 179,238,58 256 | 179,26,26 257 | 179,77,26 258 | 180,205,205 259 | 180,238,180 260 | 180,82,205 261 | 181,181,181 262 | 18,18,18 263 | 183,183,183 264 | 184,134,11 265 | 184,184,184 266 | 185,211,238 267 | 186,186,186 268 | 186,85,211 269 | 187,255,255 270 | 188,143,143 271 | 188,210,238 272 | 188,238,104 273 | 189,183,107 274 | 189,189,189 275 | 189,252,201 276 | 191,191,191 277 | 191,239,255 278 | 191,62,255 279 | 192,192,192 280 | 192,255,62 281 | 193,193,193 282 | 193,205,193 283 | 193,205,205 284 | 193,255,193 285 | 194,194,194 286 | 196,196,196 287 | 197,193,170 288 | 198,113,113 289 | 198,226,255 290 | 199,199,199 291 | 199,21,133 292 | 199,97,20 293 | 201,201,201 294 | 20,20,20 295 | 202,225,255 296 | 202,255,112 297 | 204,102,51 298 | 204,128,153 299 | 204,128,51 300 | 204,153,204 301 | 204,153,51 302 | 204,179,255 303 | 204,204,102 304 | 204,204,204 305 | 204,204,204 306 | 204,204,51 307 | 204,230,255 308 | 204,255,204 309 | 204,255,255 310 | 204,51,51 311 | 205,0,0 312 | 205,0,205 313 | 205,102,0 314 | 205,102,29 315 | 205,104,137 316 | 205,104,57 317 | 205,105,201 318 | 0,0,0 319 | 0,0,128 320 | 0,0,139 321 | 0,0,205 322 | 0,0,238 323 | 0,0,255 324 | 0,100,0 325 | 0,104,139 326 | 0,128,0 327 | 0,128,128 328 | 0,134,139 329 | 0,139,0 330 | 0,139,139 331 | 0,139,69 332 | 0,154,205 333 | 0,178,238 334 | 0,191,255 335 | 0,197,205 336 | 0,199,140 337 | 0,201,87 338 | 0,205,0 339 | 0,205,102 340 | 0,205,205 341 | 0,206,209 342 | 0,229,238 343 | 0,238,0 344 | 0,238,118 345 | 0,238,238 346 | 0,245,255 347 | 0,250,154 348 | 0,255,0 349 | 0,255,127 350 | 0,255,255 351 | 0,26,128 352 | 0,51,0 353 | 0,51,51 354 | 100,149,237 355 | 10,10,10 356 | 102,102,0 357 | 102,102,102 358 | 102,102,102 359 | 102,128,230 360 | 102,139,139 361 | 102,153,102 362 | 102,153,153 363 | 102,205,0 364 | 102,205,170 365 | 102,26,51 366 | 102,51,102 367 | 102,77,179 368 | 104,131,139 369 | 104,34,139 370 | 105,105,105 371 | 105,105,105 372 | 105,139,105 373 | 105,139,34 374 | 105,89,205 375 | 106,90,205 376 | 107,107,107 377 | 107,142,35 378 | 108,123,139 379 | 108,166,205 380 | 110,110,110 381 | 110,123,139 382 | 110,139,61 383 | 112,112,112 384 | 112,128,144 385 | 113,113,198 386 | 113,198,113 387 | 115,115,115 388 | 117,117,117 389 | 118,238,0 390 | 118,238,198 391 | 119,136,153 392 | 120,120,120 393 | 121,205,205 394 | 122,103,238 395 | 122,122,122 396 | 122,139,139 397 | 122,197,205 398 | 122,55,139 399 | 123,104,238 400 | 124,205,124 401 | 124,252,0 402 | 125,125,125 403 | 125,158,192 404 | 125,38,205 405 | 126,192,238 406 | 127,127,127 407 | 127,255,0 408 | 127,255,212 409 | 128,0,0 410 | 128,0,128 411 | 128,102,204 412 | 128,128,0 413 | 128,128,105 414 | 128,128,128 415 | 128,128,128 416 | 128,128,26 417 | 128,138,135 418 | 128,153,255 419 | 128,179,128 420 | 128,179,179 421 | 128,51,77 422 | 128,77,128 423 | 130,130,130 424 | 131,111,255 425 | 131,139,131 426 | 131,139,139 427 | 13,13,13 428 | 132,112,255 429 | 132,132,132 430 | 133,133,133 431 | 135,135,135 432 | 135,206,235 433 | 135,206,250 434 | 135,206,255 435 | 135,38,87 436 | 137,104,205 437 | 138,138,138 438 | 138,43,226 439 | 138,51,36 440 | 138,54,15 441 | 139,0,0 442 | 139,0,139 443 | 139,101,8 444 | 139,102,139 445 | 139,105,105 446 | 139,105,20 447 | 139,10,80 448 | 139,115,85 449 | 139,117,0 450 | 139,119,101 451 | 139,121,94 452 | 139,123,139 453 | 139,125,107 454 | 139,125,123 455 | 139,126,102 456 | 139,129,76 457 | 139,131,120 458 | 139,131,134 459 | 
139,134,130 460 | 139,134,78 461 | 139,136,120 462 | 139,137,112 463 | 139,137,137 464 | 139,139,0 465 | 139,139,122 466 | 139,139,131 467 | 139,26,26 468 | 139,28,98 469 | 139,34,82 470 | 139,35,35 471 | 139,37,0 472 | 139,54,38 473 | 139,58,58 474 | 139,58,98 475 | 139,62,47 476 | 139,69,0 477 | 139,69,19 478 | 139,71,137 479 | 139,71,38 480 | 139,71,93 481 | 139,76,57 482 | 139,87,66 483 | 139,90,0 484 | 139,90,43 485 | 139,95,101 486 | 139,99,108 487 | 140,140,140 488 | 141,182,205 489 | 141,238,238 490 | 142,142,142 491 | 142,142,56 492 | 142,229,238 493 | 142,56,142 494 | 143,143,143 495 | 143,188,143 496 | 144,238,144 497 | 145,145,145 498 | 145,44,238 499 | 147,112,219 500 | 148,0,211 501 | 148,148,148 502 | 150,150,150 503 | 150,205,205 504 | 151,255,255 505 | 15,15,15 506 | 152,245,255 507 | 152,251,152 508 | 153,0,0 509 | 153,102,0 510 | 153,102,153 511 | 153,128,230 512 | 153,153,0 513 | 153,153,153 514 | 153,153,153 515 | 153,153,51 516 | 153,179,255 517 | 153,204,153 518 | 153,204,204 519 | 153,50,204 520 | 153,51,0 521 | 153,77,0 522 | 153,77,102 523 | 154,192,205 524 | 154,205,50 525 | 154,255,154 526 | 154,50,205 527 | 155,205,155 528 | 155,48,255 529 | 156,102,31 530 | 156,156,156 531 | 158,158,158 532 | 159,121,238 533 | 159,182,205 534 | 160,82,45 535 | 161,161,161 536 | 162,181,205 537 | 162,205,90 538 | 163,163,163 539 | 164,211,238 540 | 165,42,42 541 | 166,166,166 542 | 16,78,139 543 | 168,168,168 544 | 169,169,169 545 | 170,170,170 546 | 171,130,255 547 | 171,171,171 548 | 173,173,173 549 | 173,216,230 550 | 173,255,47 551 | 174,238,238 552 | 176,176,176 553 | 176,196,222 554 | 176,224,230 555 | 176,226,255 556 | 176,23,31 557 | 178,223,238 558 | 178,34,34 559 | 178,58,238 560 | 179,102,128 561 | 179,102,26 562 | 179,128,179 563 | 179,128,26 564 | 179,153,255 565 | 179,179,179 566 | 179,179,179 567 | 179,179,26 568 | 179,179,77 569 | 179,204,255 570 | 179,230,179 571 | 179,230,230 572 | 179,238,58 573 | 179,26,26 574 | 179,77,26 575 | 180,205,205 576 | 180,238,180 577 | 180,82,205 578 | 181,181,181 579 | 18,18,18 580 | 183,183,183 581 | 184,134,11 582 | 184,184,184 583 | 185,211,238 584 | 186,186,186 585 | 186,85,211 586 | 187,255,255 587 | 188,143,143 588 | 188,210,238 589 | 188,238,104 590 | 189,183,107 591 | 189,189,189 592 | 189,252,201 593 | 191,191,191 594 | 191,239,255 595 | 191,62,255 596 | 192,192,192 597 | 192,255,62 598 | 193,193,193 599 | 193,205,193 600 | 193,205,205 601 | 193,255,193 602 | 194,194,194 603 | 196,196,196 604 | 197,193,170 605 | 198,113,113 606 | 198,226,255 607 | 199,199,199 608 | 199,21,133 609 | 199,97,20 610 | 201,201,201 611 | 20,20,20 612 | 202,225,255 613 | 202,255,112 614 | 204,102,51 615 | 204,128,153 616 | 204,128,51 617 | 204,153,204 618 | 204,153,51 619 | 204,179,255 620 | 204,204,102 621 | 204,204,204 622 | 204,204,204 623 | 204,204,51 624 | 204,230,255 625 | 204,255,204 626 | 204,255,255 627 | 204,51,51 628 | 205,0,0 629 | 205,0,205 630 | 205,102,0 631 | 205,102,29 632 | 205,104,137 633 | 205,104,57 634 | 205,105,201 635 | 205,112,84 636 | 205,129,98 637 | 205,133,0 638 | 205,133,63 639 | 205,140,149 640 | 205,145,158 641 | 205,149,12 642 | 205,150,205 643 | 205,155,155 644 | 205,155,29 645 | 205,16,118 646 | 205,170,125 647 | 205,173,0 648 | 205,175,149 649 | 205,179,139 650 | 205,181,205 651 | 205,183,158 652 | 205,183,181 653 | 205,186,150 654 | 205,190,112 655 | 205,192,176 656 | 205,193,197 657 | 205,197,191 658 | 205,198,115 659 | 205,200,177 660 | 205,201,165 661 | 205,201,201 662 | 205,205,0 663 | 205,205,180 664 | 
205,205,193 665 | 205,38,38 666 | 205,41,144 667 | 205,50,120 668 | 205,51,51 669 | 205,55,0 670 | 205,79,57 671 | 205,85,85 672 | 205,91,69 673 | 205,92,92 674 | 205,96,144 675 | 207,207,207 676 | 208,32,144 677 | 209,209,209 678 | 209,238,238 679 | 209,95,238 680 | 210,105,30 681 | 210,180,140 682 | 211,211,211 683 | 212,212,212 684 | 214,214,214 685 | 216,191,216 686 | 217,217,217 687 | 218,112,214 688 | 218,165,32 689 | 219,112,147 690 | 219,219,219 691 | 220,20,60 692 | 220,220,220 693 | 221,160,221 694 | 222,184,135 695 | 222,222,222 696 | 224,102,255 697 | 224,224,224 698 | 224,238,224 699 | 224,238,238 700 | 224,255,255 701 | 227,168,105 702 | 227,207,87 703 | 227,227,227 704 | 229,229,229 705 | 230,128,77 706 | 230,153,179 707 | 230,153,77 708 | 230,179,204 709 | 230,179,230 710 | 230,179,77 711 | 230,204,255 712 | 230,230,128 713 | 230,230,153 714 | 230,230,179 715 | 230,230,204 716 | 230,230,230 717 | 230,230,250 718 | 230,230,77 719 | 230,77,77 720 | 232,232,232 721 | 23,23,23 722 | 233,150,122 723 | 234,234,234 724 | 235,235,235 725 | 237,145,33 726 | 237,237,237 727 | 238,0,0 728 | 238,0,238 729 | 238,106,167 730 | 238,106,80 731 | 238,118,0 732 | 238,118,33 733 | 238,121,159 734 | 238,121,66 735 | 238,122,233 736 | 238,130,238 737 | 238,130,98 738 | 238,149,114 739 | 238,154,0 740 | 238,154,73 741 | 238,162,173 742 | 238,169,184 743 | 238,173,14 744 | 238,174,238 745 | 238,180,180 746 | 238,180,34 747 | 238,18,137 748 | 238,197,145 749 | 238,201,0 750 | 238,203,173 751 | 238,207,161 752 | 238,210,238 753 | 238,213,183 754 | 238,213,210 755 | 238,216,174 756 | 238,220,130 757 | 238,223,204 758 | 238,224,229 759 | 238,229,222 760 | 238,230,133 761 | 238,232,170 762 | 238,232,205 763 | 238,233,191 764 | 238,233,233 765 | 238,238,0 766 | 238,238,209 767 | 238,238,224 768 | 238,44,44 769 | 238,48,167 770 | 238,58,140 771 | 238,59,59 772 | 238,64,0 773 | 238,92,66 774 | 238,99,99 775 | 240,128,128 776 | 240,230,140 777 | 240,240,240 778 | 240,248,255 779 | 240,255,240 780 | 240,255,255 781 | 24,116,205 782 | 242,242,242 783 | 242,242,242 784 | 244,164,96 785 | 244,244,244 786 | 245,222,179 787 | 245,245,220 788 | 245,245,245 789 | 245,245,245 790 | 245,255,250 791 | 247,247,247 792 | 248,248,255 793 | 250,128,114 794 | 250,235,215 795 | 250,240,230 796 | 250,250,210 797 | 250,250,250 798 | 252,230,201 799 | 252,252,252 800 | 25,25,112 801 | 253,245,230 802 | 255,0,0 803 | 255,0,255 804 | 255,102,102 805 | 255,105,180 806 | 255,106,106 807 | 255,110,180 808 | 255,114,86 809 | 255,125,64 810 | 255,127,0 811 | 255,127,36 812 | 255,127,80 813 | 255,128,0 814 | 255,128,128 815 | 255,130,171 816 | 255,130,71 817 | 255,131,250 818 | 255,140,0 819 | 255,140,105 820 | 255,153,102 821 | 255,153,128 822 | 255,153,153 823 | 255,153,18 824 | 255,160,122 825 | 255,165,0 826 | 255,165,79 827 | 255,174,185 828 | 255,179,102 829 | 255,179,128 830 | 255,179,153 831 | 255,179,179 832 | 255,181,197 833 | 255,182,193 834 | 255,185,15 835 | 255,187,255 836 | 255,192,203 837 | 255,193,193 838 | 255,193,37 839 | 255,20,147 840 | 255,204,102 841 | 255,204,128 842 | 255,204,153 843 | 255,204,179 844 | 255,204,204 845 | 255,204,230 846 | 255,204,255 847 | 255,211,155 848 | 255,215,0 849 | 255,218,185 850 | 255,222,173 851 | 255,225,255 852 | 255,228,181 853 | 255,228,196 854 | 255,228,225 855 | 255,230,102 856 | 255,230,128 857 | 255,230,153 858 | 255,230,179 859 | 255,230,204 860 | 255,231,186 861 | 255,235,205 862 | 255,236,139 863 | 255,239,213 864 | 255,239,219 865 | 255,240,245 866 | 255,245,238 867 | 
255,246,143 868 | 255,248,220 869 | 255,250,205 870 | 255,250,240 871 | 255,250,250 872 | 255,255,0 873 | 255,255,102 874 | 255,255,128 875 | 255,255,153 876 | 255,255,179 877 | 255,255,204 878 | 255,255,224 879 | 255,255,240 880 | 255,255,255 881 | 255,255,255 882 | 255,255,77 883 | 255,48,48 884 | 255,52,179 885 | 255,62,150 886 | 255,64,64 887 | 255,69,0 888 | 255,97,3 889 | 255,99,71 890 | 26,0,104 891 | 26,26,26 892 | 26,26,26 893 | 26,51,153 894 | 26,77,26 895 | 26,77,77 896 | 28,134,238 897 | 28,28,28 898 | 30,144,255 899 | 30,30,30 900 | 31,31,31 901 | 3,168,158 902 | 32,178,170 903 | 3,3,3 904 | 33,33,33 905 | 34,139,34 906 | 36,36,36 907 | 38,38,38 908 | 39,64,139 909 | 40,40,40 910 | 41,36,33 911 | 41,41,41 912 | 43,43,43 913 | 46,139,87 914 | 46,46,46 915 | 47,79,79 916 | 48,128,20 917 | 48,48,48 918 | 50,205,50 919 | 51,0,51 920 | 51,102,102 921 | 51,102,51 922 | 51,161,201 923 | 51,26,128 924 | 51,51,51 925 | 51,51,51 926 | 51,77,179 927 | 54,100,139 928 | 54,54,54 929 | 5,5,5 930 | 56,142,142 931 | 56,56,56 932 | 58,95,205 933 | 59,59,59 934 | 60,179,113 935 | 61,145,64 936 | 61,61,61 937 | 61,89,171 938 | 64,224,208 939 | 64,64,64 940 | 65,105,225 941 | 66,66,66 942 | 67,110,238 943 | 67,205,128 944 | 69,139,0 945 | 69,139,116 946 | 69,69,69 947 | 70,130,180 948 | 71,60,139 949 | 71,71,71 950 | 72,118,255 951 | 72,209,204 952 | 72,61,139 953 | 74,112,139 954 | 74,74,74 955 | 75,0,130 956 | 77,0,26 957 | 77,102,204 958 | 77,128,128 959 | 77,128,77 960 | 77,26,77 961 | 77,51,153 962 | 77,77,77 963 | 77,77,77 964 | 78,238,148 965 | 79,148,205 966 | 79,79,79 967 | 81,81,81 968 | 82,139,139 969 | 82,82,82 970 | 83,134,139 971 | 84,139,84 972 | 84,255,159 973 | 84,84,84 974 | 85,107,47 975 | 85,26,139 976 | 85,85,85 977 | 87,87,87 978 | 8,8,8 979 | 89,89,89 980 | 91,91,91 981 | 92,172,238 982 | 92,92,92 983 | 93,71,139 984 | 94,38,18 985 | 94,94,94 986 | 95,158,160 987 | 96,123,139 988 | 97,97,97 989 | 99,184,255 990 | 99,99,99 991 | 205,129,98 992 | 205,133,0 993 | 205,133,63 994 | 205,140,149 995 | 205,145,158 996 | 205,149,12 997 | 205,150,205 998 | 205,155,155 999 | 205,155,29 1000 | 205,16,118 1001 | 205,170,125 1002 | 205,173,0 1003 | 205,175,149 1004 | 205,179,139 1005 | 205,181,205 1006 | 205,183,158 1007 | 205,183,181 1008 | 205,186,150 1009 | 205,190,112 1010 | 205,192,176 1011 | 205,193,197 1012 | 205,197,191 1013 | 205,198,115 1014 | 205,200,177 1015 | 205,201,165 1016 | 205,201,201 1017 | 205,205,0 1018 | 205,205,180 1019 | 205,205,193 1020 | 205,38,38 1021 | 205,41,144 1022 | 205,50,120 1023 | 205,51,51 1024 | 205,55,0 1025 | 205,79,57 1026 | 205,85,85 1027 | 205,91,69 1028 | 205,92,92 1029 | 205,96,144 1030 | 207,207,207 1031 | 208,32,144 1032 | 209,209,209 1033 | 209,238,238 1034 | 209,95,238 1035 | 210,105,30 1036 | 210,180,140 1037 | 211,211,211 1038 | 212,212,212 1039 | 214,214,214 1040 | 216,191,216 1041 | 217,217,217 1042 | 218,112,214 1043 | 218,165,32 1044 | 219,112,147 1045 | 219,219,219 1046 | 220,20,60 1047 | 220,220,220 1048 | 221,160,221 1049 | 222,184,135 1050 | 222,222,222 1051 | 224,102,255 1052 | 224,224,224 1053 | 224,238,224 1054 | 224,238,238 1055 | 224,255,255 1056 | 227,168,105 1057 | 227,207,87 1058 | 227,227,227 1059 | 229,229,229 1060 | 230,128,77 1061 | 230,153,179 1062 | 230,153,77 1063 | 230,179,204 1064 | 230,179,230 1065 | 230,179,77 1066 | 230,204,255 1067 | 230,230,128 1068 | 230,230,153 1069 | 230,230,179 1070 | 230,230,204 1071 | 230,230,230 1072 | 230,230,250 1073 | 230,230,77 1074 | 230,77,77 1075 | 232,232,232 1076 | 23,23,23 
1077 | 233,150,122 1078 | 234,234,234 1079 | 235,235,235 1080 | 237,145,33 1081 | 237,237,237 1082 | 238,0,0 1083 | 238,0,238 1084 | 238,106,167 1085 | 238,106,80 1086 | 238,118,0 1087 | 238,118,33 1088 | 238,121,159 1089 | 238,121,66 1090 | 238,122,233 1091 | 238,130,238 1092 | 238,130,98 1093 | 238,149,114 1094 | 238,154,0 1095 | 238,154,73 1096 | 238,162,173 1097 | 238,169,184 1098 | 238,173,14 1099 | 238,174,238 1100 | 238,180,180 1101 | 238,180,34 1102 | 238,18,137 1103 | 238,197,145 1104 | 238,201,0 1105 | 238,203,173 1106 | 238,207,161 1107 | 238,210,238 1108 | 238,213,183 1109 | 238,213,210 1110 | 238,216,174 1111 | 238,220,130 1112 | 238,223,204 1113 | 238,224,229 1114 | 238,229,222 1115 | 238,230,133 1116 | 238,232,170 1117 | 238,232,205 1118 | 238,233,191 1119 | 238,233,233 1120 | 238,238,0 1121 | 238,238,209 1122 | 238,238,224 1123 | 238,44,44 1124 | 238,48,167 1125 | 238,58,140 1126 | 238,59,59 1127 | 238,64,0 1128 | 238,92,66 1129 | 238,99,99 1130 | 240,128,128 1131 | 240,230,140 1132 | 240,240,240 1133 | 240,248,255 1134 | 240,255,240 1135 | 240,255,255 1136 | 24,116,205 1137 | 242,242,242 1138 | 242,242,242 1139 | 244,164,96 1140 | 244,244,244 1141 | 245,222,179 1142 | 245,245,220 1143 | 245,245,245 1144 | 245,245,245 1145 | 245,255,250 1146 | 247,247,247 1147 | 248,248,255 1148 | 250,128,114 1149 | 250,235,215 1150 | 250,240,230 1151 | 250,250,210 1152 | 250,250,250 1153 | 252,230,201 1154 | 252,252,252 1155 | 25,25,112 1156 | 253,245,230 1157 | 255,0,0 1158 | 255,0,255 1159 | 255,102,102 1160 | 255,105,180 1161 | 255,106,106 1162 | 255,110,180 1163 | 255,114,86 1164 | 255,125,64 1165 | 255,127,0 1166 | 255,127,36 1167 | 255,127,80 1168 | 255,128,0 1169 | 255,128,128 1170 | 255,130,171 1171 | 255,130,71 1172 | 255,131,250 1173 | 255,140,0 1174 | 255,140,105 1175 | 255,153,102 1176 | 255,153,128 1177 | 255,153,153 1178 | 255,153,18 1179 | 255,160,122 1180 | 255,165,0 1181 | 255,165,79 1182 | 255,174,185 1183 | 255,179,102 1184 | 255,179,128 1185 | 255,179,153 1186 | 255,179,179 1187 | 255,181,197 1188 | 255,182,193 1189 | 255,185,15 1190 | 255,187,255 1191 | 255,192,203 1192 | 255,193,193 1193 | 255,193,37 1194 | 255,20,147 1195 | 255,204,102 1196 | 255,204,128 1197 | 255,204,153 1198 | 255,204,179 1199 | 255,204,204 1200 | 255,204,230 1201 | 255,204,255 1202 | 255,211,155 1203 | 255,215,0 1204 | 255,218,185 1205 | 255,222,173 1206 | 255,225,255 1207 | 255,228,181 1208 | 255,228,196 1209 | 255,228,225 1210 | 255,230,102 1211 | 255,230,128 1212 | 255,230,153 1213 | 255,230,179 1214 | 255,230,204 1215 | 255,231,186 1216 | 255,235,205 1217 | 255,236,139 1218 | 255,239,213 1219 | 255,239,219 1220 | 255,240,245 1221 | 255,245,238 1222 | 255,246,143 1223 | 255,248,220 1224 | 255,250,205 1225 | 255,250,240 1226 | 255,250,250 1227 | 255,255,0 1228 | 255,255,102 1229 | 255,255,128 1230 | 255,255,153 1231 | 255,255,179 1232 | 255,255,204 1233 | 255,255,224 1234 | 255,255,240 1235 | 255,255,255 1236 | 255,255,255 1237 | 255,255,77 1238 | 255,48,48 1239 | 255,52,179 1240 | 255,62,150 1241 | 255,64,64 1242 | 255,69,0 1243 | 255,97,3 1244 | 255,99,71 1245 | 26,0,104 1246 | 26,26,26 1247 | 26,26,26 1248 | 26,51,153 1249 | 26,77,26 1250 | 26,77,77 1251 | 28,134,238 1252 | 28,28,28 1253 | 30,144,255 1254 | 30,30,30 1255 | 31,31,31 1256 | 3,168,158 1257 | 32,178,170 1258 | 3,3,3 1259 | 33,33,33 1260 | 34,139,34 1261 | 36,36,36 1262 | 38,38,38 1263 | 39,64,139 1264 | 40,40,40 1265 | 41,36,33 1266 | 41,41,41 1267 | 43,43,43 1268 | 46,139,87 1269 | 46,46,46 1270 | 47,79,79 1271 | 48,128,20 1272 
| 48,48,48 1273 | 50,205,50 1274 | 51,0,51 1275 | 51,102,102 1276 | 51,102,51 1277 | 51,161,201 1278 | 51,26,128 1279 | 51,51,51 1280 | 51,51,51 1281 | 51,77,179 1282 | 54,100,139 1283 | 54,54,54 1284 | 5,5,5 1285 | 56,142,142 1286 | 56,56,56 1287 | 58,95,205 1288 | 59,59,59 1289 | 60,179,113 1290 | 61,145,64 1291 | 61,61,61 1292 | 61,89,171 1293 | 64,224,208 1294 | 64,64,64 1295 | 65,105,225 1296 | 66,66,66 1297 | 67,110,238 1298 | 67,205,128 1299 | 69,139,0 1300 | 69,139,116 1301 | 69,69,69 1302 | 70,130,180 1303 | 71,60,139 1304 | 71,71,71 1305 | 72,118,255 1306 | 72,209,204 1307 | 72,61,139 1308 | 74,112,139 1309 | 74,74,74 1310 | 75,0,130 1311 | 77,0,26 1312 | 77,102,204 1313 | 77,128,128 1314 | 77,128,77 1315 | 77,26,77 1316 | 77,51,153 1317 | 77,77,77 1318 | 77,77,77 1319 | 78,238,148 1320 | 79,148,205 1321 | 79,79,79 1322 | 81,81,81 1323 | 82,139,139 1324 | 82,82,82 1325 | 83,134,139 1326 | 84,139,84 1327 | 84,255,159 1328 | 84,84,84 1329 | 85,107,47 1330 | 85,26,139 1331 | 85,85,85 1332 | 87,87,87 1333 | 8,8,8 1334 | 89,89,89 1335 | 91,91,91 1336 | 92,172,238 1337 | 92,92,92 1338 | 93,71,139 1339 | 94,38,18 1340 | 94,94,94 1341 | 95,158,160 1342 | 96,123,139 1343 | 97,97,97 1344 | 99,184,255 1345 | 99,99,99 --------------------------------------------------------------------------------