├── Demo_MicrosoftML_0.R
├── README.md
├── R_digit_recognizer_3_V2.R
├── R_leaf_classification_1_V2.R
├── R_leaf_classification_2_V2.R
├── readme.txt
├── test.csv
└── train.csv

/Demo_MicrosoftML_0.R:
--------------------------------------------------------------------------------
###
### SLO_DUG 09.02.2017
### Leaf Classification
### MicrosoftML - Demo
###

# NOTE: Make sure you connect to the 64-bit R engine: C:\Program Files\Microsoft SQL Server\140\R_SERVER
# .libPaths()
# Tools -> Global Options
setwd("C:/DataTK/Kaggle/Leaf_Classification/")

##############################################################

## ---- echo = FALSE, message = FALSE--------------------------------------
#knitr::opts_chunk$set(collapse = TRUE, comment = "#>")
#options(tibble.print_min = 4L, tibble.print_max = 4L)


library(MicrosoftML)
library(RevoScaleR)
library(lattice)


# Read the data into a data frame in memory
mortXdf <- file.path(rxGetOption("sampleDataDir"), "mortDefaultSmall")
mortDF <- rxDataStep(mortXdf)

# Create a logical TRUE/FALSE variable
mortDF$default <- mortDF$default == 1

# Divide the data into train and test data sets
set.seed(37)
isTest <- rnorm(nrow(mortDF)) > 0
mortTest <- mortDF[isTest, ]
mortTrain <- mortDF[!isTest, ]

# Information on the data sets
rxGetInfo(mortTrain, getVarInfo = TRUE)
rxGetInfo(mortTest)


##################################
# Binary type labels
##################################

# Binary formula
binaryFormula <- default ~ creditScore + houseAge + yearsEmploy + ccDebt + year

#-----------------------
# rxLogisticRegression
#-----------------------

logitModel <- rxLogisticRegression(binaryFormula, data = mortTrain)  # add: showTrainingStats = TRUE
summary(logitModel)
logitScore <- rxPredict(logitModel, data = mortTest, extraVarsToWrite = "default")

# Draw the ROC curve
rxRocCurve(actualVarName = "default", predVarNames = "Probability", data = logitScore)
# AUC = 0.90

#-----------------------
# rxFastTrees
#-----------------------

fastTreeModel <- rxFastTrees(binaryFormula, data = mortTrain, numTrees = 75, numLeaves = 10)
summary(fastTreeModel)
fastTreeScore <- rxPredict(fastTreeModel, data = mortTest, extraVarsToWrite = "default")

ftRoc <- rxRoc(actualVarName = "default", predVarNames = "Probability", data = fastTreeScore)
rxAuc(ftRoc)
# [1] 0.9326229

#-----------------------
# rxFastForest
#-----------------------

rxFastForestModel <- rxFastForest(binaryFormula, data = mortTrain, numTrees = 75, numLeaves = 10)
summary(rxFastForestModel)
rxFastForestScore <- rxPredict(rxFastForestModel, data = mortTest, extraVarsToWrite = "default")
# ROC
ffRoc <- rxRoc(actualVarName = "default", predVarNames = "Probability", data = rxFastForestScore)
rxAuc(ffRoc)
# [1] 0.7441262

# ------------------
# rxNeuralNet
# ------------------
rxNeuralNetModel <- rxNeuralNet(binaryFormula, data = mortTrain, numHiddenNodes = 10)
rxNeuralNetScore <- rxPredict(rxNeuralNetModel, data = mortTest, extraVarsToWrite = "default")
rxRocCurve(actualVarName = "default", predVarNames = "Probability",
           data = rxNeuralNetScore, title = "ROC Curve for 'default' using rxNeuralNet")
# AUC 0.76
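# The binary learners above are evaluated one at a time; a minimal optional
# sketch that collects the test-set AUCs into a single table (it only assumes
# the four score frames created above are still in memory):
scores <- list(
    rxLogisticRegression = logitScore,
    rxFastTrees          = fastTreeScore,
    rxFastForest         = rxFastForestScore,
    rxNeuralNet          = rxNeuralNetScore
)
aucs <- sapply(scores, function(s)
    rxAuc(rxRoc(actualVarName = "default", predVarNames = "Probability", data = s)))
data.frame(model = names(aucs), auc = round(aucs, 4), row.names = NULL)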
# ------------------
# rxFastLinear
# ------------------
rxFastLinearModel <- rxFastLinear(binaryFormula, data = mortTrain)
summary(rxFastLinearModel)
rxFastLinearScore <- rxPredict(rxFastLinearModel, data = mortTest, extraVarsToWrite = "default")
rxRocCurve(actualVarName = "default", predVarNames = "Probability",
           data = rxFastLinearScore, title = "ROC Curve for 'default' using rxFastLinear")
# AUC = 0.95


##################################
# Multi-class type labels
##################################

trainRows <- c(1:30, 51:80, 101:130)
testRows <- !(1:150 %in% trainRows)
trainIris <- iris[trainRows, ]
testIris <- iris[testRows, ]
multiFormula <- Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width

#-----------------------
# rxLogisticRegression
#-----------------------

logitModel <- rxLogisticRegression(multiFormula, type = "multiClass", data = trainIris)
logitScore <- rxPredict(logitModel, data = testIris, extraVarsToWrite = "Species")

# Predicted labels
rxCrossTabs(~Species:PredictedLabel, data = logitScore, removeZeroCounts = TRUE)

# ------------------
# rxNeuralNet
# ------------------

rxNeuralNetModel <- rxNeuralNet(
    multiFormula
    ,type = "multiClass"
    ,optimizer = sgd(learningRate = 0.2)
    ,data = trainIris
)

rxNeuralNetScore <- rxPredict(rxNeuralNetModel, data = testIris, extraVarsToWrite = "Species")

# Predicted labels
rxCrossTabs(~Species:PredictedLabel, data = rxNeuralNetScore, removeZeroCounts = TRUE)


##################################
# Regression type label
##################################

# Sample data
DF <- airquality[!is.na(airquality$Ozone), ]
DF$Ozone <- as.numeric(DF$Ozone)
randomSplit <- rnorm(nrow(DF))
trainAir <- DF[randomSplit >= 0, ]
testAir <- DF[randomSplit < 0, ]

# Regression type label formula
airFormula <- Ozone ~ Solar.R + Wind + Temp

# ------------------------
# rxFastTrees
# ------------------------

fastTreeModel <- rxFastTrees(airFormula, type = "regression", data = trainAir)
fastTreeScore <- rxPredict(fastTreeModel, data = testAir, extraVarsToWrite = "Ozone")
rxLinePlot(Score ~ Ozone, type = c("smooth", "p"), data = fastTreeScore,
           title = "rxFastTrees", lineColor = "red")

# ------------------------
# rxFastForest
# ------------------------

rxFastForestModel <- rxFastForest(airFormula, type = "regression", data = trainAir)
rxFastForestScore <- rxPredict(rxFastForestModel, data = testAir, extraVarsToWrite = "Ozone")
rxLinePlot(Score ~ Ozone, type = c("smooth", "p"),
           data = rxFastForestScore, title = "rxFastForest", lineColor = "red")

# ------------------------
# rxNeuralNet
# ------------------------

rxNeuralNetModel <- rxNeuralNet(airFormula, type = "regression", data = trainAir, numHiddenNodes = 8)
rxNeuralNetScore <- rxPredict(rxNeuralNetModel, data = testAir, extraVarsToWrite = "Ozone")

rxLinePlot(Score ~ Ozone, type = c("smooth", "p"), data = rxNeuralNetScore,
           title = "rxNeuralNet", lineColor = "red")
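# The line plots above only give a visual impression; a minimal sketch that
# puts a number on each regression fit (simple RMSE on the held-out testAir
# rows; assumes the three score frames created above are still in memory):
rmse <- function(scored) sqrt(mean((scored$Ozone - scored$Score)^2))
data.frame(
    model = c("rxFastTrees", "rxFastForest", "rxNeuralNet"),
    rmse  = c(rmse(fastTreeScore), rmse(rxFastForestScore), rmse(rxNeuralNetScore))
)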
"red") 205 | 206 | 207 | # ------------------------ 208 | # rxFastLinear with l1Weight and l2Weight 209 | # ------------------------ 210 | 211 | rxFastLinearModel <- rxFastLinear(airFormula, type = "regression", data = trainAir, l2Weight = 0.01) 212 | rxFastLinearScore <- rxPredict(rxFastLinearModel, data = testAir, extraVarsToWrite = "Ozone") 213 | rxLinePlot(Score~Ozone, type = c("smooth", "p"), data = rxFastLinearScore, 214 | title = "rxFastLinear", lineColor = "red") 215 | 216 | 217 | # ------------------ 218 | # rxOneClassSvm 219 | # ------------------ 220 | 221 | # generate some random data 222 | numRows <- 500 223 | normalData <- data.frame( day = 1:numRows) 224 | normalData$pageViews = runif(numRows, min = 10, max = 1000) 225 | normalData$clicks = runif(numRows, min = 0, max = 5) 226 | 227 | testData <- data.frame(day = 1:numRows) 228 | testData$pageViews = runif(numRows, min = 10, max = 1000) 229 | testData$clicks = runif(numRows, min = 0, max = 5) 230 | 231 | outliers <-c(100, 200, 300, 400) 232 | testData$outliers <- FALSE 233 | testData$outliers[outliers] <- TRUE 234 | testData$pageViews[outliers] <- 950 + runif(4, min = 0, max = 50) 235 | testData$clicks[outliers] <- 5 + runif(4, min = 0, max = 1) 236 | 237 | # model train and prediction 238 | modelSvm <- rxOneClassSvm(formula = ~pageViews + clicks,data = normalData) 239 | score1DF <- rxPredict(modelSvm, data = testData,extraVarsToWrite = c("outliers", "day")) 240 | 241 | 242 | rxLinePlot(Score~day, type = c("p"), data = score1DF, 243 | title = "Scores from rxOneClassSvm", 244 | symbolColor = ifelse(score1DF$outliers, "red", "blue")) 245 | 246 | 247 | # -------------------------- 248 | # Sentiment analysis 249 | # -------------------------- 250 | 251 | trainReviews <- as.data.frame(matrix(c( 252 | "TRUE", "This is great", 253 | "FALSE", "I hate it", 254 | "TRUE", "Love it", 255 | "FALSE", "Do not like it", 256 | "TRUE", "Really like it", 257 | "FALSE", "I hate it", 258 | "TRUE", "I like it a lot", 259 | "FALSE", "I kind of hate it", 260 | "TRUE", "I do like it", 261 | "FALSE", "I really hate it", 262 | "TRUE", "It is very good", 263 | "FALSE", "I hate it a bunch", 264 | "TRUE", "I love it a bunch", 265 | "FALSE", "I hate it", 266 | "TRUE", "I like it very much", 267 | "FALSE", "I hate it very much.", 268 | "TRUE", "I really do love it", 269 | "FALSE", "I really do hate it", 270 | "TRUE", "Love it!", 271 | "FALSE", "Hate it!", 272 | "TRUE", "I love it", 273 | "FALSE", "I hate it", 274 | "TRUE", "I love it", 275 | "FALSE", "I hate it", 276 | "TRUE", "I love it"), 277 | ncol = 2, byrow = TRUE, dimnames = list(NULL, c("like", "review"))), 278 | stringsAsFactors = FALSE) 279 | trainReviews$like <- as.logical(trainReviews$like) 280 | 281 | testReviews <- data.frame(review = c( 282 | "This is great", 283 | "I hate it", 284 | "Love it", 285 | "Really like it", 286 | "I hate it", 287 | "I like it a lot", 288 | "I love it", 289 | "I do like it", 290 | "I really hate it", 291 | "I love it"), stringsAsFactors = FALSE) 292 | 293 | 294 | # -- Check the results of sentiment with different Transforms 295 | 296 | # ------------------------------------ 297 | # categorical with rxLogisticRegression 298 | # -------------------------------------- 299 | 300 | outModel1 <- rxLogisticRegression(like~catReview, data = trainReviews, 301 | mlTransforms = list(categorical(vars = c(catReview = "review")))) 302 | summary(outModel1) 303 | scoreOutDF1 <- rxPredict(outModel1, data = testReviews, extraVarsToWrite = "review") 304 | 305 | scoreOutDF1 306 | 307 | 
# -----------------------------------------
# categoricalHash with rxLogisticRegression
# ------------------------------------------

outModel2 <- rxLogisticRegression(like ~ hashReview, data = trainReviews,
    mlTransforms = list(
        categoricalHash(vars = c(hashReview = "review"), invertHash = -1,
                        hashBits = 8)))

summary(outModel2)

scoreOutDF2 <- rxPredict(outModel2, data = testReviews, extraVarsToWrite = "review")
scoreOutDF2

# -------------------------------------------
# featurizeText with rxLogisticRegression
# -------------------------------------------

outModel5 <- rxLogisticRegression(like ~ reviewTran, data = trainReviews,
    mlTransforms = list(featurizeText(vars = c(reviewTran = "review"),
                                      stopwordsRemover = stopwordsDefault(),
                                      keepPunctuations = FALSE)))

summary(outModel5)

scoreOutDF5 <- rxPredict(outModel5, data = testReviews, extraVarsToWrite = "review")
scoreOutDF5

# --------------------------------
# categoricalHash with rxNeuralNet
# --------------------------------

outModel6 <- rxNeuralNet(like ~ hashReview
    ,data = trainReviews
    ,optimizer = sgd(learningRate = 0.1)
    ,mlTransforms = list(
        categoricalHash(vars = c(hashReview = "review"), invertHash = 1, hashBits = 8),
        selectFeatures("hashReview", mode = minCount())))

scoreOutDF6 <- rxPredict(outModel6, data = testReviews, extraVarsToWrite = "review")
scoreOutDF6
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# SLODUG2017
SLODUG2017
--------------------------------------------------------------------------------
/R_digit_recognizer_3_V2.R:
--------------------------------------------------------------------------------
###
### SLO_DUG 09.02.2017
### Digit recognizer
### with MicrosoftML - Neural Net - DNN with convolutions
### Data available: https://www.kaggle.com/c/digit-recognizer/data


# NOTE: Make sure you connect to the 64-bit R engine: C:\Program Files\Microsoft SQL Server\140\R_SERVER
# Tools -> Global Options
setwd("C:/DataTK/Kaggle/Digit_recognition/")

# Load packages
library(MicrosoftML)
library(RevoScaleR)
library(ggplot2)
library(readr)
library(magrittr)
library(dplyr)
library(corrplot)


#----------------------------
## Check the data
#----------------------------

train <- read.csv("C:/DataTK/Kaggle/Digit_recognition/train.csv")
test <- read.csv("C:/DataTK/Kaggle/Digit_recognition/test.csv")
head(train[1:10])

# Create a 28*28 matrix with pixel color values
m <- matrix(unlist(train[10, -1]), nrow = 28, byrow = TRUE)
# Plot that matrix
image(m, col = grey.colors(255))

rotate <- function(x) t(apply(x, 2, rev))  # reverses (rotates) the matrix

# Plot a bunch of images
par(mfrow = c(2, 3))
lapply(1:6,
       function(x) image(
           rotate(matrix(unlist(train[x, -1]), nrow = 28, byrow = TRUE)),
           col = grey.colors(255),
           xlab = train[x, 1]
       )
)

# Reset the plotting layout
par(mfrow = c(1, 1))
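# Before training, a quick look at the class balance (a small optional sketch
# using the already-loaded ggplot2; the Kaggle digits are roughly uniform
# across 0-9):
ggplot(train, aes(x = factor(label))) +
    geom_bar() +
    labs(x = "digit", y = "count", title = "Class balance in train.csv")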
#------------------------------------
#
# MicrosoftML
#
#------------------------------------


# Read data
train_DR <- read.csv("C:/DataTK/Kaggle/Digit_recognition/train.csv")
test_DR <- read.csv("C:/DataTK/Kaggle/Digit_recognition/test.csv")


# Get the data into XDF format
#readPath <- rxGetOption("sampleDataDir")
infile_train <- file.path("C:/DataTK/Kaggle/Digit_recognition/", "train.csv")
infile_test <- file.path("C:/DataTK/Kaggle/Digit_recognition/", "test.csv")

trainDR <- rxImport(infile_train)
trainData <- rxImport(inData = trainDR, outFile = "Digit_train.xdf",
    stringsAsFactors = TRUE, missingValueString = "M", rowsPerRead = 200000, overwrite = TRUE)

testDR <- rxImport(infile_test)
testData <- rxImport(inData = testDR, outFile = "Digit_test.xdf",
    stringsAsFactors = TRUE, missingValueString = "M", rowsPerRead = 200000, overwrite = TRUE)


# Check the imported data
# "Digit_train.xdf"
# "Digit_test.xdf"
rxGetInfo(trainDR, getVarInfo = TRUE)  # has a label variable called "label"
rxGetInfo(testDR, getVarInfo = TRUE)   # does not have a label variable

dataTrain <- rxReadXdf("Digit_train.xdf")
dataTest <- rxReadXdf("Digit_test.xdf")


# NET# language for the DNN definition
# More on NET#: https://docs.microsoft.com/en-us/azure/machine-learning/machine-learning-azure-ml-netsharp-reference-guide
# or: https://blogs.technet.microsoft.com/machinelearning/2015/02/16/neural-nets-in-azure-ml-introduction-to-net/

netDefinition <- '
// Define constants.
const { T = true; F = false; }

// Input layer definition.
input Picture [28, 28];

// First convolutional layer definition.
hidden C1 [5 * 13^2]
from Picture convolve {
    InputShape  = [28, 28];
    UpperPad    = [ 1,  1];
    KernelShape = [ 5,  5];
    Stride      = [ 2,  2];
    MapCount    = 5;
}

// Second convolutional layer definition.
hidden C2 [50, 5, 5]
from C1 convolve {
    InputShape  = [ 5, 13, 13];
    KernelShape = [ 1,  5,  5];
    Stride      = [ 1,  2,  2];
    Sharing     = [ F,  T,  T];
    MapCount    = 10;
}

// Third, fully connected layer definition.
hidden H3 [100]
from C2 all;

// Output layer definition.
output Result [10]
from H3 all;
'
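# The layer shapes in the NET# definition can be sanity-checked with the usual
# convolution arithmetic: out = floor((in + pad - kernel) / stride) + 1.
# A minimal sketch in plain R (no package assumptions):
convOut <- function(input, kernel, stride, pad = 0) {
    floor((input + pad - kernel) / stride) + 1
}
convOut(28, kernel = 5, stride = 2, pad = 1)  # 13 -> C1 is 5 maps of 13x13
convOut(13, kernel = 5, stride = 2)           # 5  -> C2 is 5*10 maps of 5x5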
# Train the neural network

# Download CUDA drivers: https://developer.nvidia.com/cuda-downloads

# Start time
ptm <- proc.time()

# GPU
model_DNN_GPU <- rxNeuralNet(label ~ .
    ,data = dataTrain
    ,type = "multi"
    ,numIterations = 10
    ,normalize = "no"
    ,acceleration = "gpu"  # enable this if you have a CUDA driver
    ,miniBatchSize = 64    # set to 1 if you have CUDA driver problems
    #,netDefinition = readChar(netDefFile, file.info(netDefFile)$size)
    ,netDefinition = netDefinition
    ,optimizer = sgd(learningRate = 0.1, lRateRedRatio = 0.9, lRateRedFreq = 10)
)

# End time
time_MSFTML_NN <- proc.time() - ptm
time_MSFTML_NN <- time_MSFTML_NN[[3]]

# With GPU
# time_MSFTML_NN <- 14.673564
time_MSFTML_NN <- 85.84

# NOTE: the Kaggle test set has no "label" column, so the cross-tab and
# accuracy below only work on labeled data - score a held-out part of
# dataTrain if you want to reproduce them.
DNN_GPU_score <- rxPredict(model_DNN_GPU, dataTest, extraVarsToWrite = "label")
rxCrossTabs(formula = ~F(label):PredictedLabel, data = DNN_GPU_score)

# Accuracy
sum(DNN_GPU_score$label == DNN_GPU_score$PredictedLabel) / nrow(DNN_GPU_score)


# ---------------
# Without GPU
# ---------------

#netDefFile <- system.file("demoScripts/mnist.nn", package = "MicrosoftML")
#source(system.file("extdata/mnist.R", package = "MicrosoftML"))

mnist <- getMnistData(download = TRUE, sampleDataDir = NULL, createDir = TRUE)
mnistTrain <- mnist$mnistTrain
mnistTest <- mnist$mnistTest

# Start time
ptm <- proc.time()

# multiClass with rxNeuralNet
Model_DNN <- rxNeuralNet(Label ~ .
    ,data = mnistTrain
    ,numIterations = 10
    ,normalize = "no"
    ,optimizer = sgd(learningRate = 0.1, lRateRedRatio = 0.9, lRateRedFreq = 10)
    #,netDefinition = readChar(netDefFile, file.info(netDefFile)$size)
    ,netDefinition = netDefinition
    ,type = "multi")

# End time
time_MSFTML_NN_NoGPU <- proc.time() - ptm
time_MSFTML_NN_NoGPU <- time_MSFTML_NN_NoGPU[[3]]

time_MSFTML_NN_NoGPU <- 125.11

Score_DNN <- rxPredict(Model_DNN, mnistTest, extraVarsToWrite = "Label")

rxCrossTabs(formula = ~F(Label):PredictedLabel, data = Score_DNN)

# Show the (micro-)accuracy
sum(Score_DNN$Label == Score_DNN$PredictedLabel) / nrow(Score_DNN)
# [1] 0.9767


#---------------------------------
## Using H2O
#---------------------------------

library(h2o)
localH2O <- h2o.init(max_mem_size = '4g', nthreads = -1)

## MNIST data as H2O frames
# Convert digit labels to factor for classification
train[, 1] <- as.factor(train[, 1])
train_h2o <- as.h2o(train)

test_h2o <- as.h2o(test)

# Start time
ptm <- proc.time()

## Train the model
model_h20 <- h2o.deeplearning(x = 2:785
    ,y = 1                                 # column 1 holds the label
    ,training_frame = train_h2o
    ,activation = "RectifierWithDropout"
    ,input_dropout_ratio = 0.2             # % of inputs dropout
    ,hidden_dropout_ratios = c(0.5, 0.5)   # % for nodes dropout
    ,balance_classes = TRUE
    ,hidden = c(100, 100)                  # two layers of 100 nodes
    ,momentum_stable = 0.99
    ,nesterov_accelerated_gradient = TRUE  # use it for speed
    ,epochs = 15)

# End time
time_h20DL <- proc.time() - ptm
time_h20DL <- time_h20DL[[3]]

time_h20DL <- 142.01

# Confusion matrix on the training data
h2o.confusionMatrix(model_h20)
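# The demo never actually scores the Kaggle test frame with H2O; a minimal
# sketch (run it before h2o.shutdown() below; assumes test_h2o carries the
# same 784 pixel columns the model was trained on):
pred_h2o <- h2o.predict(model_h20, newdata = test_h2o)
head(as.data.frame(pred_h2o))  # the "predict" column holds the class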
# Shut down H2O
h2o.shutdown()


# --------------------
# Comparison
# --------------------

# NOTE: time_xgb_NN comes from the XGBoost section below - run it first.
Compare_NN <- data.frame(
    method = c('ML_NN_GPU', 'ML_NN_Non_GPU', 'H2o', 'XGBoost'),
    TrainTime = c(time_MSFTML_NN, time_MSFTML_NN_NoGPU, time_h20DL, time_xgb_NN)
)

ggplot(Compare_NN, aes(x = method, y = TrainTime, fill = method)) +
    geom_bar(stat = 'identity') +
    ggtitle('Neural network digit recognizer - train time comparison')


# --------------------------------------
# XGBoost
# --------------------------------------

library(readr)
library(ggplot2)
library(caret)
library(Matrix)
library(xgboost)

# Data preparation
TRAIN <- read.csv("../input/train.csv")
TEST <- read.csv("../input/test.csv")
LABEL <- TRAIN$label
TRAIN$label <- NULL
# Drop linear combinations and near-zero-variance pixels
LINCOMB <- findLinearCombos(TRAIN)
TRAIN <- TRAIN[, -LINCOMB$remove]
TEST <- TEST[, -LINCOMB$remove]
NZV <- nearZeroVar(TRAIN, saveMetrics = TRUE)
TRAIN <- TRAIN[, -which(NZV$nzv == TRUE)]
TEST <- TEST[, -which(NZV$nzv == TRUE)]
TRAIN$LABEL <- LABEL


# Define xgb.train parameters
PARAM <- list(
    # General parameters
    booster = "gbtree",          # default
    silent = 0,                  # default
    # Booster parameters
    eta = 0.05,                  # default = 0.30
    gamma = 0,                   # default
    max_depth = 5,               # default = 6
    min_child_weight = 1,        # default
    subsample = 0.70,            # default = 1
    colsample_bytree = 0.95,     # default = 1
    num_parallel_tree = 1,       # default
    lambda = 0,                  # default
    lambda_bias = 0,             # default
    alpha = 0,                   # default
    # Task parameters
    objective = "multi:softmax", # default = "reg:linear"
    num_class = 10,              # default = 0
    base_score = 0.5,            # default
    eval_metric = "merror"       # default = "rmse"
)


# Convert the TRAIN data frame into a design matrix
TRAIN_SMM <- sparse.model.matrix(LABEL ~ ., data = TRAIN)
TRAIN_XGB <- xgb.DMatrix(data = TRAIN_SMM, label = LABEL)


# Start time
ptm <- proc.time()

# Train the xgb model
MODEL <- xgb.train(params = PARAM,
                   data = TRAIN_XGB,
                   nrounds = 50,  # change this to 400
                   verbose = 2,
                   watchlist = list(TRAIN_SMM = TRAIN_XGB)
)

# End time
time_xgb_NN <- proc.time() - ptm
time_xgb_NN <- time_xgb_NN[[3]]

time_xgb_NN <- 251.52


TEST$LABEL <- 0

# Use the trained xgb model ("MODEL") on the test data ("TEST") to predict the response variable ("LABEL")
TEST_SMM <- sparse.model.matrix(LABEL ~ ., data = TEST)
PRED <- predict(MODEL, TEST_SMM)
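# A hedged sketch of turning PRED into a Kaggle submission file (the digit
# recognizer competition expects ImageId,Label columns; with objective
# "multi:softmax", PRED already holds the predicted class 0-9):
SUBMISSION <- data.frame(ImageId = 1:nrow(TEST), Label = PRED)
write.csv(SUBMISSION, "xgb_submission.csv", row.names = FALSE)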
--------------------------------------------------------------------------------
/R_leaf_classification_1_V2.R:
--------------------------------------------------------------------------------
###
### SLO_DUG 09.02.2017
### Leaf Classification
### R - without MicrosoftML and RevoScaleR, using PCA for dimensionality reduction
###


# NOTE: Make sure you connect to the 64-bit R engine: C:\Program Files\Microsoft SQL Server\130\R_SERVER
# Tools -> Global Options
setwd("C:/DataTK/Kaggle/Leaf_Classification/")

# Load packages
library(ggplot2)
library(readr)
library(magrittr)
library(dplyr)
library(e1071)
library(rpart)
library(Metrics)
library(randomForest)
library(Matrix)
library(methods)
library(MLmetrics)
library(rpart.plot)
library(corrplot)
library(xgboost)
library(caret)


# Read data
train <- read.csv("C:/DataTK/Kaggle/Leaf_Classification/train.csv")
test <- read.csv("C:/DataTK/Kaggle/Leaf_Classification/test.csv")

# Drop the id column from the training data; test keeps its id for a submission
train <- train[, -1]

sapply(train, class)

# Check for missing values
Num_NA <- sapply(train, function(y) sum(is.na(y)))
sum(Num_NA)


#-------------------------
# PCA! Reduction for each of the three groups of attributes (margin, shape, texture)
#-------------------------
Margin <- train %>% select(contains("margin"))
pr_Margin <- princomp(Margin)
Shape <- train %>% select(contains('shape'))
pr_shape <- princomp(Shape)
Texture <- train %>% select(contains('texture'))
pr_texture <- princomp(Texture)


# Check the loadings
summary(pr_Margin)
summary(pr_shape)
summary(pr_texture)

# Take the best components
Train_PCA <- data.frame(train$species, pr_Margin$scores[, 1:5], pr_shape$scores[, 1:3], pr_texture$scores[, 1:5])
colnames(Train_PCA) <- c('species', paste0('Com', 1:13))
Test_Margin <- predict(pr_Margin, newdata = test %>% select(contains("margin")))[, 1:5]
Test_Shape <- predict(pr_shape, newdata = test %>% select(contains("shape")))[, 1:3]
Test_Texture <- predict(pr_texture, newdata = test %>% select(contains("texture")))[, 1:5]
Test <- data.frame(Test_Margin, Test_Shape, Test_Texture)
colnames(Test) <- paste0('Com', 1:13)


#-------------------------------------------------------------------------
# Naive Bayes - (1 sec)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

NaivB <- naiveBayes(species ~ ., Train_PCA)
pred_NaivB <- predict(NaivB, newdata = Train_PCA[, 2:14], type = 'raw')
logloss_NaivB <- MultiLogLoss(y_true = Train_PCA[, 1], y_pred = as.matrix(pred_NaivB))

# End time
time_NaivB <- proc.time() - ptm
time_NaivB <- time_NaivB[[3]]


#-------------------------------------------------------------------------
# Classification tree - (2 sec)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

Control <- trainControl(method = 'repeatedcv', number = 10, repeats = 3)
Tree <- train(Train_PCA[, 2:14], Train_PCA[, 1], method = 'rpart', trControl = Control)
pred_Tree <- predict(Tree, newdata = Train_PCA[, 2:14], type = 'prob')
logloss_Tree <- MultiLogLoss(y_true = Train_PCA[, 1], y_pred = as.matrix(pred_Tree))

# End time
time_Tree <- proc.time() - ptm
time_Tree <- time_Tree[[3]]


#-------------------------------------------------------------------------
# Random forest - (2 min)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

Control <- trainControl(method = 'repeatedcv', number = 10, repeats = 3)
rf <- train(Train_PCA[, 2:14], Train_PCA[, 1], method = 'rf', prox = TRUE, allowParallel = TRUE, trControl = Control)
pred_rf <- predict(rf, newdata = Train_PCA[, 2:14], type = 'prob')
logloss_rf <- MultiLogLoss(y_true = Train_PCA[, 1], y_pred = as.matrix(pred_rf))

# End time
time_rf <- proc.time() - ptm
time_rf <- time_rf[[3]]

time_rf <- 185.95
logloss_rf <- 0.1972274
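# caret keeps the resampling results of the tuning run; a quick optional look
# at what the repeated 10-fold CV actually chose (no new packages assumed):
rf$bestTune   # chosen mtry
rf$results    # CV accuracy per candidate value
plot(rf)      # tuning curve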
#-------------------------------------------------------------------------
# Multinomial logit regression - (~ 5 min)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

Control <- trainControl(method = 'repeatedcv', number = 10, repeats = 3)
Grid <- expand.grid(decay = c(0.0001, 0.0000001, 0.00000001))
LG <- train(Train_PCA[, 2:14], Train_PCA[, 1], method = 'multinom', prox = TRUE, allowParallel = TRUE, trControl = Control, tuneGrid = Grid, MaxNWts = 2000)

# Running predictions for the logit regression
pred_LG <- predict(LG, newdata = Train_PCA[, 2:14], type = 'prob')
logloss_LG <- MultiLogLoss(y_true = Train_PCA[, 1], y_pred = as.matrix(pred_LG))

# End time
time_LG <- proc.time() - ptm
time_LG <- time_LG[[3]]

time_LG <- 280.52
logloss_LG <- 0.2146564

#-------------------------------------------------------------------------
# XGBoost - God, please don't run this in a live demo!!! - (30 min - R killer)!!!
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

cv.ctrl <- trainControl(method = "repeatedcv", repeats = 10, number = 3)

xgb.grid <- expand.grid(nrounds = 100,
                        max_depth = seq(6, 10),
                        eta = c(0.01, 0.3, 1),
                        gamma = c(0.0, 0.2, 1),
                        colsample_bytree = c(0.5, 0.8, 1),
                        min_child_weight = 1
)

xgb_tune <- train(species ~ .,
                  data = Train_PCA,
                  method = "xgbTree",
                  trControl = cv.ctrl,
                  tuneGrid = xgb.grid
)

# Predictions for XGBoost
pred_xgb <- predict(xgb_tune, newdata = Train_PCA[, 2:14], type = 'prob')
logloss_xgb <- MultiLogLoss(y_true = Train_PCA[, 1], y_pred = as.matrix(pred_xgb))

# End time
time_xgb <- proc.time() - ptm
time_xgb <- time_xgb[[3]]

# A hack for the SLODUG demo :)
logloss_xgb <- 0.404116
time_xgb <- 18429  # ~300 minutes

# Comparison
MLL_Perf <- data.frame(method = c('NaiveBayes', 'Classification Tree', 'Random Forest', 'Logistic Regression', 'XGBoost'),
                       Multilogloss = c(logloss_NaivB, logloss_Tree, logloss_rf, logloss_LG, logloss_xgb),
                       TrainTime = c(time_NaivB, time_Tree, time_rf, time_LG, time_xgb))

ggplot(MLL_Perf, aes(x = method, y = Multilogloss, fill = method)) +
    geom_bar(stat = 'identity') +
    ggtitle('MultiLog Loss for leaf classification PCA models with e1071/caret/xgboost/randomForest')

# Check times:
MLL_Perf
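# The bar chart above only shows log loss; the comparison table also carries
# training times, so the same ggplot pattern gives the cost side of the
# trade-off (a small optional sketch):
ggplot(MLL_Perf, aes(x = method, y = TrainTime, fill = method)) +
    geom_bar(stat = 'identity') +
    ggtitle('Training time (sec) for the leaf classification PCA models')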
--------------------------------------------------------------------------------
/R_leaf_classification_2_V2.R:
--------------------------------------------------------------------------------
###
### SLO_DUG 09.02.2017
### Leaf Classification
### R - with MicrosoftML and RevoScaleR
###

# NOTE: Make sure you connect to the 64-bit R engine: C:\Program Files\Microsoft SQL Server\140\R_SERVER
# SET: Tools -> Global Options
setwd("C:/DataTK/Kaggle/Leaf_Classification/")
# sessionInfo()
# dfip <- data.frame(installed.packages())
# unique(dfip$LibPath)
# .libPaths()
# rm(dfip)


# Load packages
library(MicrosoftML)
library(RevoScaleR)
library(ggplot2)
library(readr)
library(magrittr)
library(dplyr)
library(corrplot)
library(MLmetrics)


# Read data
train <- read.csv("C:/DataTK/Kaggle/Leaf_Classification/train.csv")
test <- read.csv("C:/DataTK/Kaggle/Leaf_Classification/test.csv")

# Drop the id column - it carries no signal;
# species stays in as the label for the predictions
train <- train[, -1]

# Check the column classes of the train data set
sapply(train, class)

# Check for missing values!
Num_NA <- sapply(train, function(y) sum(is.na(y)))
sum(Num_NA)


# Get the data into XDF format
#readPath <- rxGetOption("sampleDataDir")
infile <- file.path("C:/DataTK/Kaggle/Leaf_Classification/", "train.csv")
trainDF <- rxImport(infile)
trainData <- rxImport(inData = trainDF, outFile = "Leaf_train.xdf",
                      stringsAsFactors = TRUE, missingValueString = "M",
                      rowsPerRead = 200000, overwrite = TRUE)

# Check the imported data
# "Leaf_train.xdf"
# File name: C:\DataTK\Kaggle\Leaf_Classification\Leaf_train.xdf
rxGetInfo(trainData, getVarInfo = TRUE)


# Data preparation
str(trainData)  # xdf data
str(train)      # data frame
names(trainData)

# The 192 predictor names follow a regular pattern, so the formula string can
# be built programmatically instead of spelling out every term by hand
# (the result is identical to the original hand-written formula):
predictors <- c(paste0("margin", 1:64), paste0("shape", 1:64), paste0("texture", 1:64))
formula <- paste("species ~", paste(predictors, collapse = " + "))
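# Optional sanity check on the imported XDF: the 99 species in the Kaggle
# training set should come out roughly balanced (about 10 rows each):
rxSummary(~ species, data = trainData)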
# The same predictor set as a scope string (defined here, not used further below)
scope <- paste(predictors, collapse = " + ")


#-------------------------------------------------------
## Classification and regression decision trees (rxDTree)
#-------------------------------------------------------

# Start time
ptm <- proc.time()

LC_CRT <- rxDTree(formula, data = train, maxDepth = 5, cp = 0.01, xval = 0, blocksPerRead = 200)
pred_CRT <- rxPredict(LC_CRT, data = train, type = 'prob')
logloss_CRT <- MultiLogLoss(y_true = train$species, y_pred = as.matrix(pred_CRT))

# End time
time_CRT <- proc.time() - ptm
time_CRT <- time_CRT[[3]]


#---------------------------------------------
## Stochastic gradient boosting (rxBTrees)
#---------------------------------------------

# DON'T RUN AT THE PRESENTATION
# Start time
ptm <- proc.time()

LC_GBT <- rxBTrees(formula, data = trainData, maxDepth = 5, nTree = 50, lossFunction = "multinomial")
pred_GBT <- rxPredict(LC_GBT, data = train, type = 'prob')
logloss_GBT <- MultiLogLoss(y_true = train$species, y_pred = as.matrix(pred_GBT))

# End time
time_GBT <- proc.time() - ptm
time_GBT <- time_GBT[[3]]

logloss_GBT <- 2.6467047565207934
time_GBT <- 3405.21
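# The ptm / proc.time() pattern repeats for every model; a small refactoring
# sketch that captures the same elapsed-seconds measurement in one line
# (not used by the demo code below):
elapsedSec <- function(expr) system.time(expr)[["elapsed"]]
# e.g.: time_CRT <- elapsedSec(LC_CRT <- rxDTree(formula, data = train, maxDepth = 5))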
#-----------------------------------------------------------
## Classification and regression decision forests (rxDForest)
#-----------------------------------------------------------

# Runs in circa 50 sec
# Start time
ptm <- proc.time()

LC_DF <- rxDForest(formula, data = train, maxDepth = 5, cp = 0.01, xval = 0, blocksPerRead = 200)
pred_DF <- rxPredict(LC_DF, data = train, type = 'prob')
logloss_DF <- MultiLogLoss(y_true = train$species, y_pred = as.matrix(pred_DF[, c(1:99)]))

# End time
time_DF <- proc.time() - ptm
time_DF <- time_DF[[3]]

#-------------------------------------------------------------------------
# MicrosoftML - Multi-class logistic regression (rxLogisticRegression)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

LC_MCLR <- rxLogisticRegression(formula = formula, type = "multiClass", data = train)
pred_MCLR <- rxPredict(LC_MCLR, data = train)
logloss_MCLR <- MultiLogLoss(y_true = train$species, y_pred = as.matrix(pred_MCLR[, c(2:100)]))

# End time
time_MCLR <- proc.time() - ptm
time_MCLR <- time_MCLR[[3]]


#-------------------------------------------------------------------------
# MicrosoftML - Multi-class neural net (rxNeuralNet)
#-------------------------------------------------------------------------

# Start time
ptm <- proc.time()

LC_MNN <- rxNeuralNet(formula = formula, data = train, type = "multiClass")
pred_MNN <- rxPredict(LC_MNN, data = train, extraVarsToWrite = "species")
logloss_MNN <- MultiLogLoss(y_true = train$species, y_pred = as.matrix(pred_MNN[, c(3:101)]))

# End time
time_MNN <- proc.time() - ptm
time_MNN <- time_MNN[[3]]


#-------------------------------------------------------------------------
# Algorithm log loss comparison
#-------------------------------------------------------------------------
MLLoss <- data.frame(
    method = c('Classification Decision Trees', 'Gradient Boosting',
               'Classification Decision Forests', 'Multi-class logistic regression', 'Multi-class Neural Net')
    ,Multilogloss = c(logloss_CRT, logloss_GBT, logloss_DF, logloss_MCLR, logloss_MNN)
    ,TrainTime = c(time_CRT, time_GBT, time_DF, time_MCLR, time_MNN)
)

ggplot(MLLoss, aes(x = method, y = Multilogloss, fill = method)) +
    geom_bar(stat = 'identity') +
    ggtitle('MultiLog Loss for leaf classification models with RevoScaleR/MicrosoftML')

MLLoss


#------------------ END OF FILE


# Make a submission
--------------------------------------------------------------------------------
/readme.txt:
--------------------------------------------------------------------------------
Examples from the Slovenian User Group (SLODUG) meeting, February 2017
--------------------------------------------------------------------------------