├── .Rprofile
├── .gitignore
├── Keras_and_Shiny.Rproj
├── PARAMETERS
├── R
│   ├── .Rprofile
│   ├── .gitignore
│   ├── app.R
│   ├── prepare_model.R
│   └── set_env.R
├── README.md
├── config_templ.txt
├── deployment
│   ├── .gitignore
│   ├── env.lock
│   ├── libs
│   │   └── .gitignore
│   └── sbox
│       └── .gitignore
├── logs
│   └── .gitignore
├── packages
│   ├── .gitignore
│   ├── Application
│   │   ├── .Rprofile
│   │   ├── Application.Rproj
│   │   ├── DESCRIPTION
│   │   ├── NAMESPACE
│   │   ├── NEWS
│   │   ├── R
│   │   │   ├── api_Application.R
│   │   │   ├── package_logger.R
│   │   │   ├── package_validation.R
│   │   │   ├── packages_import.R
│   │   │   ├── server.R
│   │   │   └── ui.R
│   │   └── man
│   │       ├── Application_getLogger.Rd
│   │       ├── assert.Rd
│   │       ├── dt_validate.Rd
│   │       ├── dt_validate_and_groom.Rd
│   │       └── reexports.Rd
│   ├── DataPreparation
│   │   ├── .Rprofile
│   │   ├── DESCRIPTION
│   │   ├── DataPreparation.Rproj
│   │   ├── NAMESPACE
│   │   ├── NEWS
│   │   ├── R
│   │   │   ├── api_DataPreparation.R
│   │   │   ├── package_logger.R
│   │   │   ├── package_validation.R
│   │   │   └── packages_import.R
│   │   └── man
│   │       ├── DataPreparation_getLogger.Rd
│   │       ├── assert.Rd
│   │       ├── dt_validate.Rd
│   │       └── dt_validate_and_groom.Rd
│   └── Modeling
│       ├── .Rprofile
│       ├── DESCRIPTION
│       ├── Modeling.Rproj
│       ├── NAMESPACE
│       ├── NEWS
│       ├── R
│       │   ├── api_Modeling.R
│       │   ├── package_logger.R
│       │   ├── package_validation.R
│       │   └── packages_import.R
│       └── man
│           ├── Modeling_getLogger.Rd
│           ├── assert.Rd
│           ├── dt_validate.Rd
│           └── dt_validate_and_groom.Rd
└── tests
    ├── .Rprofile
    ├── .gitignore
    └── Keras_and_Shiny_Tests.Rproj
/.Rprofile:
--------------------------------------------------------------------------------
1 | source(file.path('R', 'set_env.R'), chdir = TRUE)
2 | options(rsuite.cache_path = "~/.rsuite/cache")
3 | options(rsuite.user_templ_path = "~/.rsuite/templates")
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .Rdata
4 | .Rbuildignore
5 | .Ruserdata
6 | conda
7 | models
8 | config.txt
9 |
--------------------------------------------------------------------------------
/Keras_and_Shiny.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 |
--------------------------------------------------------------------------------
/PARAMETERS:
--------------------------------------------------------------------------------
1 | RSuiteVersion: 0.32.245
2 | RVersion: 3.5
3 | Project: Keras_and_Shiny
4 | Repositories: MRAN[2018-09-05]
5 | Artifacts: config_templ.txt, conda
6 |
--------------------------------------------------------------------------------
/R/.Rprofile:
--------------------------------------------------------------------------------
1 | source('set_env.R', chdir = TRUE)
2 |
--------------------------------------------------------------------------------
/R/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WLOGSolutions/Keras_and_Shiny/ff4e9c0afb75948a926184ce612891fa89b42675/R/.gitignore
--------------------------------------------------------------------------------
/R/app.R:
--------------------------------------------------------------------------------
1 | # Detect proper script_path (you cannot use args yet as they are built with tools in set_env.R)
2 | script_path <- (function() {
3 | args <- commandArgs(trailingOnly = FALSE)
4 | script_path <- dirname(sub("--file=", "", args[grep("--file=", args)]))
5 | if (!length(script_path)) {
6 | return("R")
7 | }
8 | if (grepl("darwin", R.version$os)) {
9 | script_path <- gsub("~\\+~", " ", script_path) # on MacOS ~+~ in a path denotes whitespace
10 | }
11 | return(normalizePath(script_path))
12 | })()
13 |
14 | # Setting .libPaths() to point to libs folder
15 | source(file.path(script_path, "set_env.R"), chdir = T)
16 |
17 | config <- load_config()
18 | args <- args_parser()
19 |
20 | ###############################################################################
21 |
22 | # Force using local Python environment
23 | reticulate::use_python(python = file.path(script_path, "..", "conda"), require = TRUE)
24 | loginfo("Python initialized.")
25 |
26 | library(Application)
27 | runApplication(ui, server, port = 4605)
28 |
--------------------------------------------------------------------------------
/R/prepare_model.R:
--------------------------------------------------------------------------------
1 | # Detect proper script_path (you cannot use args yet as they are built with tools in set_env.R)
2 | script_path <- (function() {
3 | args <- commandArgs(trailingOnly = FALSE)
4 | script_path <- dirname(sub("--file=", "", args[grep("--file=", args)]))
5 | if (!length(script_path)) {
6 | return("R")
7 | }
8 | if (grepl("darwin", R.version$os)) {
9 | script_path <- gsub("~\\+~", " ", script_path) # on MacOS ~+~ in a path denotes whitespace
10 | }
11 | return(normalizePath(script_path))
12 | })()
13 |
14 | # Setting .libPaths() to point to libs folder
15 | source(file.path(script_path, "set_env.R"), chdir = T)
16 |
17 | config <- load_config()
18 | args <- args_parser()
19 |
20 | ###############################################################################
21 |
22 | # Force using local Python environment
23 | reticulate::use_python(python = file.path(script_path, "..", "conda"), require = TRUE)
24 | loginfo("Python initialized.")
25 |
26 | library(DataPreparation)
27 | library(Modeling)
28 |
29 | ### 1. DATA PREPARATION
30 |
31 | # Read the path to data (image files) and training subset fraction from the config
32 | data_path <- normalizePath(config$data_path, winslash = "/")
33 | train_fraction <- as.numeric(config$train_fraction)
34 |
35 | # Read all images from "training" subfolder (a subset for training and validation) and convert them into a single pixel intensity matrix; append labels
36 | subfolder <- "training"
37 | loginfo("Started image processing (%s)...", subfolder)
38 |
39 | trainvalid_data <- getAllImages(data_path, subfolder)
40 |
41 | loginfo("Image processing complete.")
42 |
43 | # Normalize pixel intensities
44 | trainvalid_data$data_tensor <- normalizePixelIntensities(trainvalid_data$data_tensor)
45 |
46 | # Convert the vector of labels into a one-hot encoded label matrix (a requirement for keras)
47 | trainvalid_data$labels <- convertLabels(trainvalid_data$labels)
48 |
49 | # Split data randomly into training and validation subsets
50 | set.seed(1)
51 | trainvalid_data <- splitDataset(trainvalid_data, training_fraction = train_fraction)
52 |
53 | # Record the number of observations in training and validation subsets
54 | nobs_train <- nrow(trainvalid_data$labels$train)
55 | nobs_valid <- nrow(trainvalid_data$labels$valid)
56 |
57 | ### 2. MODEL TRAINING
58 |
59 | # Neural network layer architecture
60 | model <- defineModelArchitecture()
61 |
62 | # Compile the model
63 | model <- compileModel(model)
64 |
65 | # Train the model
66 | loginfo("Model training started...")
67 | tic <- Sys.time()
68 |
69 | model <- trainModel(model, trainvalid_data, epochs = 30, batch_size = 256)
70 |
71 | toc <- Sys.time()
72 | model_created <- toc
73 | ltime <- difftime(toc, tic, "CET", "secs")
74 |
75 | loginfo("Model training complete. Training time: %.1f secs", ltime)
76 |
77 | # Calculate training and validation accuracy
78 | acc_train <- calculateAccuracy(model, trainvalid_data$data_tensor$train, trainvalid_data$labels$train)
79 | acc_valid <- calculateAccuracy(model, trainvalid_data$data_tensor$valid, trainvalid_data$labels$valid)
80 |
81 | ### 3. MODEL TESTING
82 | # Read all images from "testing" subfolder (the test subset) and convert them into a single pixel intensity matrix; append labels
83 | subfolder <- "testing"
84 | loginfo("Started image processing (%s)...", subfolder)
85 |
86 | test_data <- getAllImages(data_path, subfolder)
87 |
88 | loginfo("Image processing complete.")
89 |
90 | # Normalize pixel intensities
91 | test_data$data_tensor <- normalizePixelIntensities(test_data$data_tensor)
92 |
93 | # Convert the vector of labels into a one-hot encoded label matrix
94 | test_data$labels <- convertLabels(test_data$labels)
95 |
96 | # Record the number of observations in test set
97 | nobs_test <- nrow(test_data$labels)
98 |
99 | # Calculate testing accuracy
100 | acc_test <- calculateAccuracy(model, test_data$data_tensor, test_data$labels)
101 |
102 | ### 4. MODEL SAVING
103 | save_path <- file.path(script_path, "..", "models")
104 |
105 | saveModel(model, model_created, save_path)
106 |
107 | loginfo("Number of observations used to build the model: train=%s; valid=%s; test=%s;", nobs_train, nobs_valid, nobs_test)
108 | loginfo("Model accuracy: train=%.4f; valid=%.4f; test=%.4f;", acc_train, acc_valid, acc_test)
109 |
--------------------------------------------------------------------------------
/R/set_env.R:
--------------------------------------------------------------------------------
1 | lib_path <- file.path("..", "libs")
2 | sbox_path <- file.path("..", "sbox")
3 | if (!file.exists(lib_path)) {
4 | lib_path <- file.path("..", "deployment", "libs")
5 | sbox_path <- file.path("..", "deployment", "sbox")
6 | }
7 |
8 | if (!dir.exists(sbox_path)) {
9 | dir.create(sbox_path, recursive = T)
10 | }
11 |
12 | .libPaths(c(normalizePath(sbox_path), normalizePath(lib_path), .libPaths()))
13 |
14 | library(logging)
15 | logging::logReset()
16 | logging::setLevel(level = "FINEST")
17 | logging::addHandler(logging::writeToConsole, level = "INFO")
18 |
19 | log_fpath <- (function() {
20 | log_file <- gsub("-", "_", sprintf("%s.log", Sys.Date()))
21 | log_dir <- normalizePath(file.path("..", "logs"))
22 | fpath <- file.path(log_dir, log_file)
23 | if (file.exists(fpath) && file.access(fpath, 2) == -1) {
24 | fpath <- paste0(fpath, ".", Sys.info()[["user"]])
25 | }
26 | return(fpath)
27 | })()
28 |
29 | log_dir <- normalizePath(file.path("..", "logs"))
30 | if (dir.exists(log_dir)) {
31 | logging::addHandler(logging::writeToFile, level = "FINEST", file = log_fpath)
32 | }
33 |
34 | script_path <- getwd()
35 |
36 | args_parser <- function() {
37 | args <- commandArgs(trailingOnly = FALSE)
38 | list(
39 | get = function(name, required = TRUE, default = NULL) {
40 | prefix <- sprintf("--%s=", name)
41 | value <- sub(prefix, "", args[grep(prefix, args)])
42 |
43 | if (length(value) != 1 || is.null(value)) {
44 | if (required) {
45 | logerror("--%s parameter is required", name)
46 | stop(1)
47 | }
48 | return(default)
49 | }
50 | return(value)
51 | }
52 | )
53 | }
54 |
55 | load_config <- function() {
56 | config_file <- file.path(script_path, "..", "config.txt")
57 | if (!file.exists(config_file)) {
58 | templ_file <- file.path(script_path, "..", "config_templ.txt")
59 | if (!file.exists(templ_file)) {
60 | return(list())
61 | }
62 | file.copy(templ_file, config_file)
63 | }
64 |
65 | config <- read.dcf(config_file)
66 | if ("LogLevel" %in% colnames(config)) {
67 | for (hname in names(logging::getLogger()[["handlers"]])) {
68 | logging::setLevel(config[, "LogLevel"], logging::getHandler(hname))
69 | }
70 | }
71 |
72 | config_lst <- as.list(config)
73 | names(config_lst) <- colnames(config)
74 |
75 | return(config_lst)
76 | }
77 |
--------------------------------------------------------------------------------
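
Note: the helpers above are consumed by the master scripts in `R/`. `load_config()` returns the fields of `config.txt` as a named list, and `args_parser()` returns a `get` closure that extracts `--name=value` style command line arguments. The shipped scripts only read settings from `config.txt`, so the snippet below is just a minimal sketch of how the two could be combined (the `--data_path` argument is purely illustrative):

```r
# Assumes set_env.R has been sourced (as done at the top of app.R and prepare_model.R),
# so load_config(), args_parser() and the logging handlers are already set up.
config <- load_config()
args <- args_parser()

# Illustrative only: let a --data_path=... command line argument
# override the data_path entry taken from config.txt.
data_path <- args$get("data_path", required = FALSE, default = config$data_path)
loginfo("Using data_path: %s", data_path)
```
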
/README.md:
--------------------------------------------------------------------------------
1 | # Showcase: the power of combining [GNU R](https://www.r-project.org/ "GNU R") and [Python](https://www.python.org/ "Python"). How to build a deep learning model for digit recognition using [Keras](https://keras.rstudio.com/) and deploy it as a [Shiny app](https://shiny.rstudio.com/ "Shiny app") with [R Suite](https://rsuite.io).
2 |
3 | ## Table of Contents
4 |
5 |
6 |
7 | - [Introduction](#introduction)
8 | - [Preliminary requirements](#preliminary-requirements)
9 | - [Recreating the case](#recreating-the-case)
10 | - [Additional links](#additional-links)
11 |
12 |
13 |
14 |
15 | ## Introduction ##
16 |
17 | This is a showcase based on the tutorial presented at [ML@Enterprise Forum 2018](https://mlforum.pl/) in Warsaw. It is intended to show how you can take what's best in [GNU R](https://www.r-project.org/ "GNU R") (e.g. [Shiny applications](https://shiny.rstudio.com/)) and in Python (e.g. the [Keras framework](https://keras.io/)) and combine them into a ready-to-deploy package thanks to the [R Suite toolset](https://rsuite.io/).
18 |
19 | By following these instructions, you should be able to:
20 |
21 | 1. Recreate the development environment,
22 | 2. Build a CNN model for digit recognition,
23 | 3. Run a web app that uses the created model to recognize new examples,
24 | 4. Create a deployment package ready for production.
25 |
26 | The instructions show how to recreate the development environment starting from cloning this repository. If you would like to see how such a solution is built from scratch, or just want to see the result (the deployment package), go to the bottom of this document where you will find the relevant links.
27 |
28 | ## Preliminary requirements
29 |
30 | This particular case was developed and built for Windows x64; however, all the tools are also available for Linux and macOS.
31 |
32 | Tools used for development (with versions):
33 |
34 | * [R (for Windows)](https://cran.r-project.org/bin/windows/base/) [3.5.1]
35 | * [RStudio](https://www.rstudio.com/products/rstudio/download/) [1.1.456]
36 | * [R Suite CLI](http://rsuite.io/RSuite_Download.php) [0.32-245]
37 | * [Miniconda](https://conda.io/miniconda.html) [4.5.11]
38 |
39 | To be able to build the models, you will also need to download and unzip the training/validation and test datasets (handwritten MNIST digits).
40 |
41 | You can go either with the full dataset ([download MNIST full dataset](https://s3.eu-central-1.amazonaws.com/wlog-share/keras_and_shiny_showcase/mnist_png_full.zip)), which contains 60k examples for training and 10k for testing, or with a sample ([download MNIST sample](https://s3.eu-central-1.amazonaws.com/wlog-share/keras_and_shiny_showcase/mnist_png.zip)), which has 10k for training and 2.5k for testing.
42 |
43 | Of course, with the full dataset the model will be significantly more accurate, but the time for image processing, training and testing will also be much longer. On my 4-core/8-thread Core i7 I was able to build a model on the full dataset in about 40 minutes, and it had train/val/test accuracy of about 98% / 97% / 97%. With just the sample, model building took 8 minutes but the accuracy was about 94% / 92% / 90%, with exactly the same settings.
44 |
45 | ## Recreating the case
46 |
47 | The first step is to clone or download this repository:
48 |
49 | ```
50 | >git clone https://github.com/WLOGSolutions/Keras_and_Shiny
51 | ```
52 |
53 | Then, we will need to install all external R dependencies of our custom packages (there are three of them: `DataPreparation`, `Modeling`, `Application`). You can check the dependencies in the `DESCRIPTION` file of each package. To install them with R Suite, you just need to call in the console:
54 |
55 |
56 | >cd ...\Keras_and_Shiny
57 | ...\Keras_and_Shiny>rsuite proj depsinst
58 |
59 |
60 | You can also use the `-v` option when calling `depsinst` (or any other R Suite command) - "v" stands for "verbose" and it makes the command print detailed logs of what is executed underneath - in this case you will be able to see which packages are currently being installed. If you don't use `-v`, don't worry if the installation takes a few minutes with no console output - there are plenty of dependencies to download, but at least you don't have to do it manually.
61 |
62 | Next, as we will use the `Keras` framework which requires Python, we need to build a local Python environment inside our project. This entire environment will then be embedded inside the deployment package, so there is no need to install Python on production. To build the Python (conda) environment, which is defined in the `DESCRIPTION` file of the `DataPreparation` package, we call (optionally with `-v`):
63 |
64 |
65 | ...\Keras_and_Shiny>rsuite sysreqs install
66 |
67 |
68 | Having the environment set up (both the R and Python components), we can build the custom packages that contain all the functions used to create and use the model (and also the definitions of the user interface and server logic for the Shiny app). The command is just:
69 |
70 |
71 | ...\Keras_and_Shiny>rsuite proj build
72 |
73 |
74 | Everything is almost ready to run the model building script; we only need to set the path to the folder with the images that we downloaded and unzipped previously (either `mnist_png` or `mnist_png_full`). To do so, we change the `data_path` parameter in `config_templ.txt`, which is placed in the main project folder, e.g.:
75 |
76 |
77 | LogLevel: INFO
78 | data_path: C:\Users\Ja\Documents\Projekty\mnist_png_full
79 | train_fraction: 0.83
80 |
81 |
82 | Now that the master script for model building (`prepare_model.R`) knows where to look for the data, we can run it using the `rscript` command:
83 |
84 |
85 | ...\Keras_and_Shiny>cd R
86 | ...\Keras_and_Shiny\R>rscript prepare_model.R
87 |
88 |
89 | The script will read all images from the given `data_path`, pre-process them, train and test the CNN model, and save the model in HDF5 format into the `models` folder inside the main project folder.
90 |
91 | The model has the following architecture:
92 |
93 | 
94 |
95 | Having a model, we can now run the Shiny application and see what it can do:
96 |
97 |
98 | ...\Keras_and_Shiny\R>rscript app.R
99 |
100 |
101 | In a web browser, under [http://localhost:4605](http://localhost:4605), we should be able to see the application running:
102 |
103 | 
104 |
105 | The app lets you load the HDF5 model that we trained and saved a minute ago, load a sample image (it can be created manually in any graphics editor, remembering that it has to be 28 x 28 pixels and grayscale) and use the model to identify the digit by clicking the `Identify!` button.
106 |
107 | Having tested the solution in the dev environment and seen that it is totally amazing, we can prepare a deployment package. First, we need to lock the dev environment in case we need to recreate it after some time. This forces `rsuite proj depsinst` to install exactly the same versions of R packages as were used before. The project that we cloned from GitHub is already locked (see the `env.lock` file in the `deployment` folder), so it is not necessary to lock it again, but if we were building the project from scratch, we would call:
108 |
109 |
110 | ...\Keras_and_Shiny\R>rsuite proj lock
111 |
112 |
113 | The final step is to build a deployment package, which is as simple as:
114 |
115 |
116 | ...\Keras_and_Shiny\R>rsuite proj zip --version 1.0
117 |
118 |
119 | When developing the project from scratch and keeping it under Git or SVN control, we would not need to provide `--version` manually. Also, if we want to build the package in a specific directory, we can add a path option, e.g. `-p C:\Users\Ja\Desktop\`.
120 |
121 | After the deployment package is built, we can see that it contains all the scripts, our custom packages as binaries, all R dependencies installed, and also the entire conda environment. Now you can unzip and run it on any machine that has the same OS (here: Windows x64) and R installed. You do not have to install or configure anything more on production.
122 |
123 | ## Additional links
124 |
125 | Presentation that shows how this case was built from scratch:
126 |
127 | [Link to the presentation](https://s3.eu-central-1.amazonaws.com/wlog-share/keras_and_shiny_showcase/keras_and_shiny_eng_summary.pdf)
128 |
129 | Already built deployment package for Windows (just download, unzip and have fun :) ):
130 |
131 | [Link to the deployment package](https://s3.eu-central-1.amazonaws.com/wlog-share/keras_and_shiny_showcase/Keras_and_Shiny_1.0x.zip)
132 |
133 |
134 |
--------------------------------------------------------------------------------
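
Note: behind the `Identify!` button the app simply chains a few functions exported by the project's packages (see `packages/Application/R/server.R` and the package APIs further below). A minimal non-Shiny sketch of the same flow, assuming a model saved under `models/` and a 28 x 28 grayscale PNG (both file names below are placeholders):

```r
library(Application)  # loadModel, loadAndPrepareImage and normalizePixelIntensities are re-exported here

model <- loadModel("models/model.h5")                 # placeholder path to the saved HDF5 model
img_mtrx <- loadAndPrepareImage("sample_digit.png")   # placeholder path; yields a 28 x 28 x 1 array

img_tensor <- createTensor(img_mtrx)                  # reshape to 1 x 28 x 28 x 1
img_tensor <- normalizePixelIntensities(img_tensor)   # scale pixel intensities to [0, 1]

predictClass(model, img_tensor)          # e.g. "7"
predictProbabilities(model, img_tensor)  # data frame with per-digit probabilities
```
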
/config_templ.txt:
--------------------------------------------------------------------------------
1 | LogLevel: INFO
2 | data_path:
3 | train_fraction: 0.83
--------------------------------------------------------------------------------
/deployment/.gitignore:
--------------------------------------------------------------------------------
1 | intrepo
2 | docker_cache
3 |
--------------------------------------------------------------------------------
/deployment/env.lock:
--------------------------------------------------------------------------------
1 | Package: abind
2 | Version: 1.4-5
3 |
4 | Package: assertthat
5 | Version: 0.2.0
6 |
7 | Package: base64enc
8 | Version: 0.1-3
9 |
10 | Package: BH
11 | Version: 1.66.0-1
12 |
13 | Package: cli
14 | Version: 1.0.0
15 |
16 | Package: config
17 | Version: 0.3
18 |
19 | Package: crayon
20 | Version: 1.3.4
21 |
22 | Package: digest
23 | Version: 0.6.16
24 |
25 | Package: fansi
26 | Version: 0.3.0
27 |
28 | Package: glue
29 | Version: 1.3.0
30 |
31 | Package: htmltools
32 | Version: 0.3.6
33 |
34 | Package: httpuv
35 | Version: 1.4.5
36 |
37 | Package: jsonlite
38 | Version: 1.5
39 |
40 | Package: keras
41 | Version: 2.2.0
42 |
43 | Package: later
44 | Version: 0.7.4
45 |
46 | Package: lattice
47 | Version: 0.20-35
48 |
49 | Package: logging
50 | Version: 0.7-103
51 |
52 | Package: magrittr
53 | Version: 1.5
54 |
55 | Package: Matrix
56 | Version: 1.2-14
57 |
58 | Package: mime
59 | Version: 0.5
60 |
61 | Package: pillar
62 | Version: 1.3.0
63 |
64 | Package: processx
65 | Version: 3.2.0
66 |
67 | Package: promises
68 | Version: 1.0.1
69 |
70 | Package: ps
71 | Version: 1.1.0
72 |
73 | Package: purrr
74 | Version: 0.2.5
75 |
76 | Package: R6
77 | Version: 2.2.2
78 |
79 | Package: Rcpp
80 | Version: 0.12.18
81 |
82 | Package: reticulate
83 | Version: 1.10
84 |
85 | Package: rlang
86 | Version: 0.2.2
87 |
88 | Package: rstudioapi
89 | Version: 0.7
90 |
91 | Package: shiny
92 | Version: 1.1.0
93 |
94 | Package: sourcetools
95 | Version: 0.1.7
96 |
97 | Package: tensorflow
98 | Version: 1.9
99 |
100 | Package: tfruns
101 | Version: 1.4
102 |
103 | Package: tibble
104 | Version: 1.4.2
105 |
106 | Package: tidyselect
107 | Version: 0.2.4
108 |
109 | Package: utf8
110 | Version: 1.1.4
111 |
112 | Package: whisker
113 | Version: 0.3-2
114 |
115 | Package: xtable
116 | Version: 1.8-3
117 |
118 | Package: yaml
119 | Version: 2.2.0
120 |
121 | Package: zeallot
122 | Version: 0.1.0
123 |
--------------------------------------------------------------------------------
/deployment/libs/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.gitignore
3 |
--------------------------------------------------------------------------------
/deployment/sbox/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.gitignore
3 |
--------------------------------------------------------------------------------
/logs/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 | !.gitignore
3 |
--------------------------------------------------------------------------------
/packages/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WLOGSolutions/Keras_and_Shiny/ff4e9c0afb75948a926184ce612891fa89b42675/packages/.gitignore
--------------------------------------------------------------------------------
/packages/Application/.Rprofile:
--------------------------------------------------------------------------------
1 | source(file.path('..', '..', 'R', 'set_env.R'), chdir = TRUE)
2 |
--------------------------------------------------------------------------------
/packages/Application/Application.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 |
--------------------------------------------------------------------------------
/packages/Application/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: Application
2 | Type: Package
3 | Title: What the package does (short line)
4 | Version: 0.1
5 | Date: 2018-09-17
6 | Author: pchab
7 | Maintainer: Who to complain to
8 | Description: More about what it does (maybe more than one line)
9 | License: What license is it under?
10 | Imports: logging, tensorflow, keras, shiny, DataPreparation, Modeling
11 | RoxygenNote: 6.0.1
12 |
--------------------------------------------------------------------------------
/packages/Application/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | export(Application_getLogger)
4 | export(createTensor)
5 | export(loadAndPrepareImage)
6 | export(loadModel)
7 | export(normalizePixelIntensities)
8 | export(predictClass)
9 | export(predictProbabilities)
10 | export(runApplication)
11 | export(server)
12 | export(ui)
13 | import(DataPreparation)
14 | import(Modeling)
15 | import(keras)
16 | import(logging)
17 | import(shiny)
18 | import(tensorflow)
19 |
--------------------------------------------------------------------------------
/packages/Application/NEWS:
--------------------------------------------------------------------------------
1 | News for Application
2 |
3 | CHANGES IN 0.1 (2018-09-17):
4 | * Initial version
5 |
--------------------------------------------------------------------------------
/packages/Application/R/api_Application.R:
--------------------------------------------------------------------------------
1 | #'
2 | #' Run the app on selected port.
3 | #'
4 | #' @param ui layout of the page
5 | #' @param server server logic function
6 | #' @param port port on localhost to run the app
7 | #'
8 | #' @export
9 | #'
10 | runApplication <- function(ui, server, port) {
11 | app <- shinyApp(ui = ui, server = server)
12 | runApp(app, port = port)
13 | }
14 |
15 | #'
16 | #' Convert to a tensor 1 x 28 x 28 x 1 (to be compatible with model).
17 | #'
18 | #' @param img_mtrx a single image pixel intensity "matrix" (in fact a tensor 28 x 28 x 1 that is the result of loadAndPrepareImage)
19 | #'
20 | #' @return a single image pixel intensity tensor (1 x 28 x 28 x 1)
21 | #'
22 | #' @export
23 | #'
24 | createTensor <- function(img_mtrx) {
25 | img_tensor <- array(dim = c(1, 28, 28, 1))
26 | img_tensor[1, , , ] <- img_mtrx
27 |
28 | return(img_tensor)
29 | }
30 |
31 | #'
32 | #' Use the model to predict class on a new example.
33 | #'
34 | #' @param model Keras model
35 | #' @param data_tensor a single pixel intensity tensor 1 x 28 x 28 x 1
36 | #'
37 | #' @return a class label (digit 0-9 as character)
38 | #'
39 | #' @export
40 | #'
41 | predictClass <- function(model, data_tensor) {
42 | class <- predict_classes(model, data_tensor)
43 |
44 | return(as.character(class))
45 | }
46 |
47 | #'
48 | #' Calculates class probabilities (the output of the softmax layer) and converts them into a data frame.
49 | #'
50 | #' @param model Keras model
51 | #' @param data_tensor a single pixel intensity tensor 1 x 28 x 28 x 1
52 | #'
53 | #' @return a data frame with class probabilities
54 | #'
55 | #' @export
56 | #'
57 | predictProbabilities <- function(model, data_tensor) {
58 | prob <- predict_proba(model, data_tensor)
59 | prob_df <- data.frame(Class = as.character(0:9),
60 | Probability = round(as.vector(prob), 5))
61 |
62 | return(prob_df)
63 | }
64 |
--------------------------------------------------------------------------------
/packages/Application/R/package_logger.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Application
3 | #
4 | # Package logger setup
5 | #----------------------------------------------------------------------------
6 |
7 | .logger_name <- "Application"
8 | .pkg_logger <- logging::getLogger(.logger_name)
9 | .pkg_logger$setLevel("FINEST")
10 |
11 | pkg_loginfo <- function(msg, ...) tryCatch(logging::loginfo(msg, ..., logger = .pkg_logger),
12 | error = function(e) warning(e))
13 | pkg_logdebug <- function(msg, ...) tryCatch(logging::logdebug(msg, ..., logger = .pkg_logger),
14 | error = function(e) warning(e))
15 | pkg_logerror <- function(msg, ...) tryCatch(logging::logerror(msg, ..., logger = .pkg_logger),
16 | error = function(e) warning(e))
17 | pkg_logwarn <- function(msg, ...) tryCatch(logging::logwarn(msg, ..., logger = .pkg_logger),
18 | error = function(e) warning(e))
19 | pkg_logfinest <- function(msg, ...) tryCatch(logging::logfinest(msg, ..., logger = .pkg_logger),
20 | error = function(e) warning(e))
21 |
22 | #'
23 | #' Retrieves Application logger.
24 | #'
25 | #' @return logger object
26 | #'
27 | #' @export
28 | #'
29 | Application_getLogger <- function() {
30 | .pkg_logger
31 | }
32 |
--------------------------------------------------------------------------------
/packages/Application/R/package_validation.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Application
3 | #
4 | # Validation tools
5 | #----------------------------------------------------------------------------
6 |
7 | #'
8 | #' The same as stopifnot but with message.
9 | #'
10 | #' @param cond Condition to be evaluated
11 | #' @param fail_msg Message for stop if condition is FALSE. If not passed, the \code{cond} code itself is used for the message.
12 | #'
13 | assert <- function(cond, fail_msg = NULL, ...) {
14 | if (!cond) {
15 | if (is.null(fail_msg) || missing(fail_msg)) {
16 | fail_msg <- sprintf("Condition failed: %s", deparse(substitute(cond), width.cutoff = 30L))
17 | } else {
18 | fail_msg <- sprintf(fail_msg, ...)
19 | }
20 | stop(fail_msg, call. = FALSE)
21 | }
22 | invisible()
23 | }
24 |
25 | #' Performs validation of the data.table passed
26 | #'
27 | #' @param dt data.table to be validated
28 | #' @param dt_arg name of data.table argument for proper message content
29 | #' @param expected_col_class named list of columns and their expected classes
30 | #'
31 | #' @examples {
32 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
33 | #' dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
34 | #' }
35 | #'
36 | dt_validate <- function(dt, dt_arg, expected_col_class) {
37 | assert(is.data.table(dt),
38 | fail_msg = sprintf("data.table expected for '%s'", dt_arg))
39 |
40 | expected_names <- names(expected_col_class)
41 |
42 | notfound_columns <- setdiff(expected_names, colnames(dt))
43 | assert(!length(notfound_columns),
44 | fail_msg = sprintf("Expected columns not present in the '%s' data.table: %s",
45 | dt_arg,
46 | paste(notfound_columns, collapse = ", ")))
47 |
48 | for (cn in expected_names) {
49 | col_class <- class(dt[, get(cn)])
50 | exp_class <- expected_col_class[[cn]]
51 | assert(col_class == exp_class,
52 | fail_msg = sprintf("Column of type '%s' expected for '%s' in data.table '%s'; '%s' found",
53 | exp_class, cn, dt_arg, col_class))
54 | }
55 | }
56 |
57 | #'
58 | #' Performs validation and grooming of the data.table passed
59 | #'
60 | #' @param dt data.table to be validated and groomed
61 | #' @param dt_arg name of data.table argument for proper message content
62 | #' @param expected_col_class named list of columns and their expected classes
63 | #'
64 | #' @return data.table with only the expected columns, in the order specified by \code{expected_col_class}.
65 | #' @examples {
66 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
67 | #' dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
68 | #' # should output
69 | #' # c2 c1
70 | #' # 1: 2016-04-05 1
71 | #' # 2: 2016-04-05 2
72 | #' # 3: 2016-04-05 3
73 | #' }
74 | #'
75 | dt_validate_and_groom <- function(dt, dt_arg, expected_col_class) {
76 | dt_validate(dt, dt_arg, expected_col_class)
77 |
78 | expected_names <- names(expected_col_class)
79 | res_dt <- dt[, expected_names, with = FALSE] # select only the requested columns
80 | setcolorder(res_dt, expected_names) # set proper column ordering
81 |
82 | return(res_dt)
83 | }
84 |
--------------------------------------------------------------------------------
/packages/Application/R/packages_import.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Application
3 | #
4 | # Global package definitions and imports
5 | #----------------------------------------------------------------------------
6 |
7 | #' @import logging
8 | #' @import tensorflow
9 | #' @import keras
10 | #' @import shiny
11 | #' @importFrom DataPreparation loadAndPrepareImage normalizePixelIntensities
12 | #' @importFrom Modeling loadModel
13 | NULL
14 |
--------------------------------------------------------------------------------
/packages/Application/R/server.R:
--------------------------------------------------------------------------------
1 |
2 | #' @export
3 | server <- function(input, output){
4 | # Shiny app server logic
5 |
6 | model <- reactive({
7 | loadModel(input$model1$datapath)
8 | })
9 | image_matrix <- reactive({
10 | loadAndPrepareImage(input$image1$datapath)
11 | })
12 | image_tensor <- reactive({
13 | image_tensor <- createTensor(image_matrix())
14 | image_tensor <- normalizePixelIntensities(image_tensor)
15 | })
16 | class <- eventReactive(input$action1, {
17 | predictClass(model(), image_tensor())
18 | })
19 | probs <- eventReactive(input$action1, {
20 | predictProbabilities(model(), image_tensor())
21 | })
22 |
23 | output$model_params <- renderPrint({
24 | print(model())
25 | }, width = 170)
26 | output$image_mtrx <- renderPrint({
27 | print(image_matrix()[, , 1])
28 | }, width = 200)
29 | output$pred_class <- renderText({
30 | class()
31 | })
32 | output$pred_probs <- renderTable({
33 | probs_to_print <- probs()
34 | probs_to_print$Probability <- as.character(probs_to_print$Probability)
35 | probs_to_print
36 | })
37 | }
38 |
--------------------------------------------------------------------------------
/packages/Application/R/ui.R:
--------------------------------------------------------------------------------
1 | #' @export
2 | ui <- function() {
3 | # Layout definition for Shiny app
4 |
5 | ui <- fluidPage(
6 | titlePanel("Digit identification"),
7 | sidebarLayout(
8 | sidebarPanel(
9 | fileInput("model1", "Choose HDF5 model file"),
10 | fileInput("image1", "Choose image"),
11 | tags$b("Click to identify a digit"),
12 | tags$br(),
13 | actionButton("action1", "Identify!"),
14 | tags$br(), tags$br(),
15 | tags$b("Identified class:"),
16 | tags$br(),
17 | tags$b(textOutput("pred_class")),
18 | tags$style("#pred_class{color: red; font-size: 30px;}"),
19 | tags$br(),
20 | tags$b("Class probabilities:"),
21 | tags$br(),
22 | tableOutput("pred_probs"),
23 | width = 3
24 | ),
25 | mainPanel(
26 | tags$style(type="text/css",
27 | ".shiny-output-error { visibility: hidden; }",
28 | ".shiny-output-error:before { visibility: hidden; }"),
29 | tags$b("Image as matrix:"),
30 | tags$br(),
31 | verbatimTextOutput("image_mtrx"),
32 | tags$br(),
33 | tags$b("Model architecture:"),
34 | tags$br(),
35 | verbatimTextOutput("model_params")
36 | )
37 | )
38 | )
39 |
40 | return(ui)
41 | }
42 |
--------------------------------------------------------------------------------
/packages/Application/man/Application_getLogger.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_logger.R
3 | \name{Application_getLogger}
4 | \alias{Application_getLogger}
5 | \title{Retrieves Application logger.}
6 | \usage{
7 | Application_getLogger()
8 | }
9 | \value{
10 | logger object
11 | }
12 | \description{
13 | Retrieves Application logger.
14 | }
15 |
--------------------------------------------------------------------------------
/packages/Application/man/assert.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{assert}
4 | \alias{assert}
5 | \title{The same as stopifnot but with a message.}
6 | \usage{
7 | assert(cond, fail_msg = NULL, ...)
8 | }
9 | \arguments{
10 | \item{cond}{Condition to be evaluated}
11 |
12 | \item{fail_msg}{Message for stop if condition is FALSE. If not passed, the \code{cond} code itself is used for the message.}
13 | }
14 | \description{
15 | The same as stopifnot but with message.
16 | }
17 |
--------------------------------------------------------------------------------
/packages/Application/man/dt_validate.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate}
4 | \alias{dt_validate}
5 | \title{Performs validation of the data.table passed}
6 | \usage{
7 | dt_validate(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \description{
17 | Performs validation of the data.table passed
18 | }
19 | \examples{
20 | {
21 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
22 | dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/packages/Application/man/dt_validate_and_groom.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate_and_groom}
4 | \alias{dt_validate_and_groom}
5 | \title{Performs validation and grooming of the data.table passed}
6 | \usage{
7 | dt_validate_and_groom(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated and groomed}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \value{
17 | data.table with only the expected columns, in the order specified by \code{expected_col_class}.
18 | }
19 | \description{
20 | Performs validation and grooming of the data.table passed
21 | }
22 | \examples{
23 | {
24 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
25 | dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
26 | # should output
27 | # c2 c1
28 | # 1: 2016-04-05 1
29 | # 2: 2016-04-05 2
30 | # 3: 2016-04-05 3
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/packages/Application/man/reexports.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/api_Application.R
3 | \docType{import}
4 | \name{reexports}
5 | \alias{reexports}
6 | \alias{loadModel}
7 | \alias{reexports}
8 | \alias{loadAndPrepareImage}
9 | \alias{reexports}
10 | \alias{normalizePixelIntensities}
11 | \title{Objects exported from other packages}
12 | \keyword{internal}
13 | \description{
14 | These objects are imported from other packages. Follow the links
15 | below to see their documentation.
16 |
17 | \describe{
18 | \item{DataPreparation}{\code{\link[DataPreparation]{loadAndPrepareImage}}, \code{\link[DataPreparation]{normalizePixelIntensities}}}
19 |
20 | \item{Modeling}{\code{\link[Modeling]{loadModel}}}
21 | }}
22 |
23 |
--------------------------------------------------------------------------------
/packages/DataPreparation/.Rprofile:
--------------------------------------------------------------------------------
1 | source(file.path('..', '..', 'R', 'set_env.R'), chdir = TRUE)
2 |
--------------------------------------------------------------------------------
/packages/DataPreparation/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: DataPreparation
2 | Type: Package
3 | Title: What the package does (short line)
4 | Version: 0.1
5 | Date: 2018-09-10
6 | Author: pchab
7 | Maintainer: Who to complain to
8 | Description: More about what it does (maybe more than one line)
9 | License: What license is it under?
10 | Imports: logging, tensorflow, keras, abind
11 | SystemRequirements: conda (python=3.6.6 tensorflow=1.10.0 keras=2.2.2 pillow=5.2.0)
12 | RoxygenNote: 6.1.0
13 |
--------------------------------------------------------------------------------
/packages/DataPreparation/DataPreparation.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 |
--------------------------------------------------------------------------------
/packages/DataPreparation/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | export(DataPreparation_getLogger)
4 | export(convertLabels)
5 | export(getAllImages)
6 | export(loadAndPrepareImage)
7 | export(normalizePixelIntensities)
8 | export(splitDataset)
9 | import(abind)
10 | import(keras)
11 | import(logging)
12 | import(tensorflow)
13 |
--------------------------------------------------------------------------------
/packages/DataPreparation/NEWS:
--------------------------------------------------------------------------------
1 | News for DataPreparation
2 |
3 | CHANGES IN 0.1 (2018-09-10):
4 | * Initial version
5 |
--------------------------------------------------------------------------------
/packages/DataPreparation/R/api_DataPreparation.R:
--------------------------------------------------------------------------------
1 | #'
2 | #' Reads the file and converts it into grayscale pixel intensity tensor.
3 | #'
4 | #' @param image_fpath PNG image file path (28 x 28, grayscale).
5 | #'
6 | #' @return tensor with dimensions 28 x 28 x 1.
7 | #'
8 | #' @export
9 | #'
10 | loadAndPrepareImage <- function(image_fpath) {
11 | assert(file.exists(image_fpath), "Invalid image file path.")
12 |
13 | image <- image_load(image_fpath, grayscale = TRUE)
14 | image_array <- image_to_array(image)
15 |
16 | return(image_array)
17 | }
18 |
19 | #'
20 | #' Reads consecutive files, converts them to pixel intensity tensors and appends them to a single tensor which eventually contains all read digits, one after another. Additionally, creates a vector with the respective labels "0" to "9".
21 | #'
22 | #' @param data_path a path to the folder with the data
23 | #' @param subfolder_name one of: "training", "testing"; these subfolders should contain another level of subfolders, labelled 0-9, with the examples (28 x 28 grayscale PNG images) of the respective handwritten digits
24 | #'
25 | #' @return a two-element list: 1) "data_tensor": a 4D tensor with dimensions [number_of_images x 28 x 28 x 1] containing pixel intensities of all numbers and 2) "labels": a character vector containing respective labels
26 | #'
27 | #' @export
28 | #'
29 | getAllImages <- function(data_path, subfolder_name) {
30 | assert(subfolder_name %in% c("training", "testing"),
31 | "subfolder name should be one of: 'training', 'testing'.")
32 |
33 | # initialize an empty array to hold all read images and a vector of labels
34 | data <- array(dim = c(0, 28, 28, 1))
35 | labels <- character(0)
36 |
37 | # a loop over all subfolders "0" to "9"
38 | for (i in 0:9) {
39 | number_folder_path <- file.path(data_path, subfolder_name, i)
40 | image_names <- list.files(number_folder_path, pattern = "*.png")
41 |
42 | n <- length(image_names)
43 | number_data <- array(dim = c(n, 28, 28, 1))
44 | number_labels <- rep(as.character(i), n)
45 |
46 | pkg_loginfo("Number of images with label %d: %d", i, n)
47 |
48 | # a loop over all files in a subfolder
49 | j <- 1
50 | for (image_name in image_names) {
51 | image_fpath <- file.path(number_folder_path, image_name)
52 | number_data[j, , , ] <- loadAndPrepareImage(image_fpath)
53 |
54 | if (j %% 100 == 0)
55 | pkg_loginfo("Processed %d out of %d images (label %d)...", j, n, i)
56 |
57 | j <- j + 1
58 | }
59 |
60 | data <- abind(data, number_data, along = 1)
61 | labels <- c(labels, number_labels)
62 | }
63 |
64 | return(list(data_tensor = data, labels = labels))
65 | }
66 |
67 | #'
68 | #' Normalize intensities to the scale 0-1.
69 | #'
70 | #' @param data_tensor pixel intensities data tensor (values in 0-255)
71 | #'
72 | #' @return normalized pixel intensities data tensor (values in [0, 1])
73 | #'
74 | #' @export
75 | #'
76 | normalizePixelIntensities <- function(data_tensor) {
77 | data_tensor <- data_tensor / 255
78 |
79 | return(data_tensor)
80 | }
81 |
82 | #'
83 | #' Convert a label vector into one-hot encoded label matrix.
84 | #'
85 | #' @param labels A character vector with labels.
86 | #'
87 | #' @return One-hot encoded label matrix.
88 | #'
89 | #' @export
90 | #'
91 | convertLabels <- function(labels) {
92 | labels <- to_categorical(labels, num_classes = 10)
93 |
94 | return(labels)
95 | }
96 |
97 | #'
98 | #' Split the input dataset (both data and labels) into training and validation subsets using the provided fraction parameter to determine the proportion.
99 | #'
100 | #' @param data named list containing the training/validation data tensor ("$data_tensor") and labels ("$labels")
101 | #' @param training_fraction a fraction of dataset to be used as training subset
102 | #'
103 | #' @return named list with training subset and labels and validation subset and labels
104 | #'
105 | #' @export
106 | #'
107 | splitDataset <- function(data, training_fraction = 0.75) {
108 | assert(training_fraction > 0 & training_fraction < 1, "training_fraction has to be between 0 and 1.")
109 |
110 | valid_fraction = 1 - training_fraction
111 | ind <- sample(2, size = dim(data$data_tensor)[1], replace = TRUE, prob = c(training_fraction, valid_fraction))
112 |
113 | train_data <- data$data_tensor[ind == 1, , , , drop = FALSE]
114 | valid_data <- data$data_tensor[ind == 2, , , , drop = FALSE]
115 | train_labels <- data$labels[ind == 1, ]
116 | valid_labels <- data$labels[ind == 2, ]
117 |
118 | return(list(
119 | data_tensor = list(train = train_data, valid = valid_data),
120 | labels = list(train = train_labels, valid = valid_labels)))
121 | }
122 |
--------------------------------------------------------------------------------
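
Note: `getAllImages(data_path, subfolder_name)` expects the unzipped MNIST archive to contain `training` and `testing` subfolders, each with digit subfolders `0`-`9` holding 28 x 28 grayscale PNGs. A condensed sketch of how `prepare_model.R` chains the functions above (the `data_path` value is only a placeholder; in the project it comes from `config.txt`):

```r
library(DataPreparation)

# Expected layout: <data_path>/training/0/*.png ... <data_path>/training/9/*.png
#                  <data_path>/testing/0/*.png  ... <data_path>/testing/9/*.png
data_path <- "C:/data/mnist_png"  # placeholder

trainvalid <- getAllImages(data_path, "training")            # list(data_tensor = n x 28 x 28 x 1, labels = character vector)
trainvalid$data_tensor <- normalizePixelIntensities(trainvalid$data_tensor)
trainvalid$labels <- convertLabels(trainvalid$labels)        # one-hot encode the labels
trainvalid <- splitDataset(trainvalid, training_fraction = 0.83)

dim(trainvalid$data_tensor$train)  # training tensor after the split
```
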
/packages/DataPreparation/R/package_logger.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # DataPreparation
3 | #
4 | # Package logger setup
5 | #----------------------------------------------------------------------------
6 |
7 | .logger_name <- "DataPreparation"
8 | .pkg_logger <- logging::getLogger(.logger_name)
9 | .pkg_logger$setLevel("FINEST")
10 |
11 | pkg_loginfo <- function(msg, ...) tryCatch(logging::loginfo(msg, ..., logger = .pkg_logger),
12 | error = function(e) warning(e))
13 | pkg_logdebug <- function(msg, ...) tryCatch(logging::logdebug(msg, ..., logger = .pkg_logger),
14 | error = function(e) warning(e))
15 | pkg_logerror <- function(msg, ...) tryCatch(logging::logerror(msg, ..., logger = .pkg_logger),
16 | error = function(e) warning(e))
17 | pkg_logwarn <- function(msg, ...) tryCatch(logging::logwarn(msg, ..., logger = .pkg_logger),
18 | error = function(e) warning(e))
19 | pkg_logfinest <- function(msg, ...) tryCatch(logging::logfinest(msg, ..., logger = .pkg_logger),
20 | error = function(e) warning(e))
21 |
22 | #'
23 | #' Retrieves DataPreparation logger.
24 | #'
25 | #' @return logger object
26 | #'
27 | #' @export
28 | #'
29 | DataPreparation_getLogger <- function() {
30 | .pkg_logger
31 | }
32 |
--------------------------------------------------------------------------------
/packages/DataPreparation/R/package_validation.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # DataPreparation
3 | #
4 | # Validation tools
5 | #----------------------------------------------------------------------------
6 |
7 | #'
8 | #' The same as stopifnot but with message.
9 | #'
10 | #' @param cond Condition to be evaluated
11 | #' @param fail_msg Message for stop if condition is FALSE. If not passed, the \code{cond} code itself is used for the message.
12 | #'
13 | assert <- function(cond, fail_msg = NULL, ...) {
14 | if (!cond) {
15 | if (is.null(fail_msg) || missing(fail_msg)) {
16 | fail_msg <- sprintf("Condition failed: %s", deparse(substitute(cond), width.cutoff = 30L))
17 | } else {
18 | fail_msg <- sprintf(fail_msg, ...)
19 | }
20 | stop(fail_msg, call. = FALSE)
21 | }
22 | invisible()
23 | }
24 |
25 | #' Performs validation of the data.table passed
26 | #'
27 | #' @param dt data.table to be validated
28 | #' @param dt_arg name of data.table argument for proper message content
29 | #' @param expected_col_class named list of columns and their expected classes
30 | #'
31 | #' @examples {
32 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
33 | #' dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
34 | #' }
35 | #'
36 | dt_validate <- function(dt, dt_arg, expected_col_class) {
37 | assert(is.data.table(dt),
38 | fail_msg = sprintf("data.table expected for '%s'", dt_arg))
39 |
40 | expected_names <- names(expected_col_class)
41 |
42 | notfound_columns <- setdiff(expected_names, colnames(dt))
43 | assert(!length(notfound_columns),
44 | fail_msg = sprintf("Expected columns not present in the '%s' data.table: %s",
45 | dt_arg,
46 | paste(notfound_columns, collapse = ", ")))
47 |
48 | for (cn in expected_names) {
49 | col_class <- class(dt[, get(cn)])
50 | exp_class <- expected_col_class[[cn]]
51 | assert(col_class == exp_class,
52 | fail_msg = sprintf("Column of type '%s' expected for '%s' in data.table '%s'; '%s' found",
53 | exp_class, cn, dt_arg, col_class))
54 | }
55 | }
56 |
57 | #'
58 | #' Performs validation and grooming of the data.table passed
59 | #'
60 | #' @param dt data.table to be validated and groomed
61 | #' @param dt_arg name of data.table argument for proper message content
62 | #' @param expected_col_class named list of columns and their expected classes
63 | #'
64 | #' @return data.table with only the expected columns, in the order specified by \code{expected_col_class}.
65 | #' @examples {
66 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
67 | #' dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
68 | #' # should output
69 | #' # c2 c1
70 | #' # 1: 2016-04-05 1
71 | #' # 2: 2016-04-05 2
72 | #' # 3: 2016-04-05 3
73 | #' }
74 | #'
75 | dt_validate_and_groom <- function(dt, dt_arg, expected_col_class) {
76 | dt_validate(dt, dt_arg, expected_col_class)
77 |
78 | expected_names <- names(expected_col_class)
79 | res_dt <- dt[, expected_names, with = FALSE] # select only the requested columns
80 | setcolorder(res_dt, expected_names) # set proper column ordering
81 |
82 | return(res_dt)
83 | }
84 |
--------------------------------------------------------------------------------
/packages/DataPreparation/R/packages_import.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # DataPreparation
3 | #
4 | # Global package definitions and imports
5 | #----------------------------------------------------------------------------
6 |
7 | #' @import logging
8 | #' @import tensorflow
9 | #' @import keras
10 | #' @import abind
11 | NULL
12 |
--------------------------------------------------------------------------------
/packages/DataPreparation/man/DataPreparation_getLogger.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_logger.R
3 | \name{DataPreparation_getLogger}
4 | \alias{DataPreparation_getLogger}
5 | \title{Retrieves DataPreparation logger.}
6 | \usage{
7 | DataPreparation_getLogger()
8 | }
9 | \value{
10 | logger object
11 | }
12 | \description{
13 | Retrieves DataPreparation logger.
14 | }
15 |
--------------------------------------------------------------------------------
/packages/DataPreparation/man/assert.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{assert}
4 | \alias{assert}
5 | \title{The same as stopifnot but with message.}
6 | \usage{
7 | assert(cond, fail_msg = NULL, ...)
8 | }
9 | \arguments{
10 | \item{cond}{Condition to be evaluated}
11 |
12 | \item{fail_msg}{Message for stop if condition is FALSE. If not passed, the \code{cond} code itself is used for the message.}
13 | }
14 | \description{
15 | The same as stopifnot but with message.
16 | }
17 |
--------------------------------------------------------------------------------
/packages/DataPreparation/man/dt_validate.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate}
4 | \alias{dt_validate}
5 | \title{Performs validation of the data.table passed}
6 | \usage{
7 | dt_validate(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \description{
17 | Performs validation of the data.table passed
18 | }
19 | \examples{
20 | {
21 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
22 | dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/packages/DataPreparation/man/dt_validate_and_groom.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate_and_groom}
4 | \alias{dt_validate_and_groom}
5 | \title{Performs validation and grooming of the data.table passed}
6 | \usage{
7 | dt_validate_and_groom(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated and groomed}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \value{
17 | data.table with only the expected columns, in the order specified by \code{expected_col_class}.
18 | }
19 | \description{
20 | Performs validation and grooming of the data.table passed
21 | }
22 | \examples{
23 | {
24 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
25 | dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
26 | # should output
27 | # c2 c1
28 | # 1: 2016-04-05 1
29 | # 2: 2016-04-05 2
30 | # 3: 2016-04-05 3
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/packages/Modeling/.Rprofile:
--------------------------------------------------------------------------------
1 | source(file.path('..', '..', 'R', 'set_env.R'), chdir = TRUE)
2 |
--------------------------------------------------------------------------------
/packages/Modeling/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: Modeling
2 | Type: Package
3 | Title: Keras Model Definition, Training and Persistence
4 | Version: 0.1
5 | Date: 2018-09-10
6 | Author: pchab
7 | Maintainer: Who to complain to
8 | Description: Defines, compiles, trains, evaluates, saves and loads the Keras model used by the Keras_and_Shiny project.
9 | License: What license is it under?
10 | Imports: logging, tensorflow, keras
11 | RoxygenNote: 6.1.0
12 |
--------------------------------------------------------------------------------
/packages/Modeling/Modeling.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 |
--------------------------------------------------------------------------------
/packages/Modeling/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | export(Modeling_getLogger)
4 | export(calculateAccuracy)
5 | export(compileModel)
6 | export(defineModelArchitecture)
7 | export(loadModel)
8 | export(saveModel)
9 | export(trainModel)
10 | import(keras)
11 | import(logging)
12 | import(tensorflow)
13 |
--------------------------------------------------------------------------------
/packages/Modeling/NEWS:
--------------------------------------------------------------------------------
1 | News for Modeling
2 |
3 | CHANGES IN 0.1 (2018-09-10):
4 | * Initial version
5 |
--------------------------------------------------------------------------------
/packages/Modeling/R/api_Modeling.R:
--------------------------------------------------------------------------------
1 | #'
2 | #' Prepare model architecture.
3 | #'
4 | #' @return Keras sequential model object with defined layers
5 | #'
6 | #' @export
7 | #'
8 | defineModelArchitecture <- function() {
9 |
10 | model_architecture <- keras_model_sequential() %>%
11 | layer_conv_2d(filters = 20, kernel_size = c(5, 5), strides = c(1, 1), input_shape = c(28, 28, 1)) %>%
12 | layer_max_pooling_2d(pool_size = c(2,2), strides = c(2, 2)) %>%
13 | layer_conv_2d(filters = 50, kernel_size = c(5, 5), strides = c(1, 1)) %>%
14 | layer_max_pooling_2d(pool_size = c(2,2), strides = c(2, 2)) %>%
15 | layer_flatten() %>%
16 | layer_dense(units = 120, activation = "relu") %>%
17 | layer_dense(units = 10, activation = "softmax")
18 |
19 | return (model_architecture)
20 | }
21 |
22 | #'
23 | #' Compile keras model.
24 | #'
25 | #' @param model Keras sequential model object with defined layers
26 | #'
27 | #' @return compiled Keras model
28 | #'
29 | #' @export
30 | #'
31 | compileModel <- function(model) {
32 |
33 | model %>% compile(
34 | optimizer = optimizer_sgd(lr = 0.01),
35 | loss = "categorical_crossentropy",
36 | metrics = "accuracy")
37 |
38 | return(model)
39 | }
40 |
41 |
42 | #'
43 | #' Fit model parameters.
44 | #'
45 | #' @param model compiled keras model
46 | #' @param data training and validation data (output of splitDataset function)
47 | #' @param epochs number of epochs
48 | #' @param batch_size mini-batch size
49 | #'
50 | #' @return fitted keras model
51 | #'
52 | #' @export
53 | #'
54 | trainModel <- function(model, data, epochs = 30, batch_size = 256) {
55 |
56 | model %>% fit(data$data_tensor$train,
57 | data$labels$train,
58 | epochs = epochs,
59 | batch_size = batch_size,
60 | validation_data = list(data$data_tensor$valid, data$labels$valid))
61 |
62 | return(model)
63 | }
64 |
65 | #'
66 | #' Calculate model accuracy.
67 | #'
68 | #' @param model fitted keras model
69 | #' @param data data tensor N x 28 x 28 x 1
70 | #' @param labels label one-hot encoded matrix
71 | #'
72 | #' @return Accuracy as percentage of properly classified examples
73 | #'
74 | #' @export
75 | #'
76 | calculateAccuracy <- function(model, data, labels) {
77 | acc <- evaluate(model, data, labels)$acc
78 |
79 | return(acc)
80 | }
81 |
82 | #'
83 | #' Give the model a name in the format "model_YYYYMMDD_HHMMSS" and save it to the given directory in HDF5 format.
84 | #'
85 | #' @param model keras model
86 | #' @param model_created timestamp when the model was created
87 | #' @param save_path a path to the folder where model should be saved
88 | #'
89 | #' @export
90 | #'
91 | saveModel <- function(model, model_created, save_path) {
92 | dir.create(save_path, showWarnings = FALSE)
93 |
94 | model_name <- paste0("model_", gsub(" ", "_", gsub("-|:", "", as.character(model_created))))
95 | model_fpath <- file.path(save_path, model_name)
96 | keras::save_model_hdf5(model, model_fpath)
97 |
98 | pkg_loginfo("Model '%s' saved.", model_name)
99 | }
100 |
101 | #'
102 | #' Load model from the disk.
103 | #'
104 | #' @param fpath path to the HDF5 file containing the model
105 | #'
106 | #' @return loaded Keras model
107 | #'
108 | #' @export
109 | #'
110 | loadModel <- function(fpath) {
111 | model <- load_model_hdf5(fpath)
112 |
113 | return(model)
114 | }
115 |
--------------------------------------------------------------------------------
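Taken together, api_Modeling.R defines a small train-evaluate-persist pipeline. The following is a minimal orientation sketch of how these exported functions chain, not an excerpt from the project scripts; the `data` object is assumed to carry the data_tensor/labels structure (with train and valid slots) that trainModel() expects from the data-preparation step:

    library(Modeling)

    model <- defineModelArchitecture()      # CNN for 28 x 28 x 1 inputs, 10 output classes
    model <- compileModel(model)            # SGD optimizer, categorical cross-entropy
    model <- trainModel(model, data, epochs = 30, batch_size = 256)

    acc <- calculateAccuracy(model, data$data_tensor$valid, data$labels$valid)

    # Saved as e.g. models/model_20180910_123456 (HDF5), reloadable with loadModel().
    saveModel(model, model_created = Sys.time(), save_path = "models")
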
/packages/Modeling/R/package_logger.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Modeling
3 | #
4 | # Package logger setup
5 | #----------------------------------------------------------------------------
6 |
7 | .logger_name <- "Modeling"
8 | .pkg_logger <- logging::getLogger(.logger_name)
9 | .pkg_logger$setLevel("FINEST")
10 |
11 | pkg_loginfo <- function(msg, ...) tryCatch(logging::loginfo(msg, ..., logger = .pkg_logger),
12 | error = function(e) warning(e))
13 | pkg_logdebug <- function(msg, ...) tryCatch(logging::logdebug(msg, ..., logger = .pkg_logger),
14 | error = function(e) warning(e))
15 | pkg_logerror <- function(msg, ...) tryCatch(logging::logerror(msg, ..., logger = .pkg_logger),
16 | error = function(e) warning(e))
17 | pkg_logwarn <- function(msg, ...) tryCatch(logging::logwarn(msg, ..., logger = .pkg_logger),
18 | error = function(e) warning(e))
19 | pkg_logfinest <- function(msg, ...) tryCatch(logging::logfinest(msg, ..., logger = .pkg_logger),
20 | error = function(e) warning(e))
21 |
22 | #'
23 | #' Retrieves Modeling logger.
24 | #'
25 | #' @return logger object
26 | #'
27 | #' @export
28 | #'
29 | Modeling_getLogger <- function() {
30 | .pkg_logger
31 | }
32 |
--------------------------------------------------------------------------------
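The pkg_log* wrappers above only emit records to the "Modeling" logger; nothing is printed until a handler is attached. A minimal sketch using standard logging-package calls (this wiring is not part of the package itself):

    library(logging)

    basicConfig()                                    # handler on the root logger; records propagate up
    # ...or attach a handler to the package logger directly:
    addHandler(writeToConsole, logger = "Modeling")

    library(Modeling)
    Modeling_getLogger()                             # returns the logger configured above
    # pkg_loginfo() calls inside the package, e.g. the "Model '...' saved." message
    # emitted by saveModel(), now appear on the console.
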
/packages/Modeling/R/package_validation.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Modeling
3 | #
4 | # Validation tools
5 | #----------------------------------------------------------------------------
6 |
7 | #'
8 | #' The same as stopifnot but with a message.
9 | #'
10 | #' @param cond Condition to be evaluated
11 | #' @param fail_msg Message for \code{stop} if the condition is FALSE. If not passed, the \code{cond} code itself is used as the message.
12 | #'
13 | assert <- function(cond, fail_msg = NULL, ...) {
14 | if (!cond) {
15 | if (is.null(fail_msg) || missing(fail_msg)) {
16 | fail_msg <- sprintf("Condition failed: %s", deparse(substitute(cond), width.cutoff = 30L))
17 | } else {
18 | fail_msg <- sprintf(fail_msg, ...)
19 | }
20 | stop(fail_msg, call. = FALSE)
21 | }
22 | invisible()
23 | }
24 |
25 | #' Performs validation of the passed data.table
26 | #'
27 | #' @param dt data.table to be validated
28 | #' @param dt_arg name of data.table argument for proper message content
29 | #' @param expected_col_class named list of columns and their expected classes
30 | #'
31 | #' @examples {
32 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
33 | #' dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
34 | #' }
35 | #'
36 | dt_validate <- function(dt, dt_arg, expected_col_class) {
37 | assert(is.data.table(dt),
38 | fail_msg = sprintf("data.table expected for '%s'", dt_arg))
39 |
40 | expected_names <- names(expected_col_class)
41 |
42 | notfound_columns <- setdiff(expected_names, colnames(dt))
43 | assert(!length(notfound_columns),
44 | fail_msg = sprintf("Expected columns not present in the '%s' data.table: %s",
45 | dt_arg,
46 | paste(notfound_columns, collapse = ", ")))
47 |
48 | for (cn in expected_names) {
49 | col_class <- class(dt[, get(cn)])
50 | exp_class <- expected_col_class[[cn]]
51 | assert(col_class == exp_class,
52 | fail_msg = sprintf("Column of type '%s' expected for '%s' in data.table '%s'; '%s' found",
53 | exp_class, cn, dt_arg, col_class))
54 | }
55 | }
56 |
57 | #'
58 | #' Performs validation and grooming of the passed data.table
59 | #'
60 | #' @param dt data.table to be validated and groomed
61 | #' @param dt_arg name of data.table argument for proper message content
62 | #' @param expected_col_class named list of columns and their expected classes
63 | #'
64 | #' @return data.table with only the expected columns, in the order specified by \code{expected_col_class}.
65 | #' @examples {
66 | #' dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
67 | #' dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
68 | #' # should output
69 | #' # c2 c1
70 | #' # 1: 2016-04-05 1
71 | #' # 2: 2016-04-05 2
72 | #' # 3: 2016-04-05 3
73 | #' }
74 | #'
75 | dt_validate_and_groom <- function(dt, dt_arg, expected_col_class) {
76 | dt_validate(dt, dt_arg, expected_col_class)
77 |
78 | expected_names <- names(expected_col_class)
79 |   res_dt <- dt[, expected_names, with = FALSE] # select only the requested columns
80 | setcolorder(res_dt, expected_names) # set proper column ordering
81 |
82 | return(res_dt)
83 | }
84 |
--------------------------------------------------------------------------------
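For reference, a short sketch of assert()'s two failure modes (it is internal, so this would run from within the package or via Modeling:::assert; the messages shown are what the two sprintf branches above produce):

    x <- -1

    # Explicit message: extra arguments are passed to sprintf(); this call stops with
    # "x must be non-negative, got -1".
    assert(x >= 0, fail_msg = "x must be non-negative, got %s", x)

    # No message: the deparsed condition itself is reported, i.e.
    # "Condition failed: x >= 0".
    assert(x >= 0)
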
/packages/Modeling/R/packages_import.R:
--------------------------------------------------------------------------------
1 | #----------------------------------------------------------------------------
2 | # Modeling
3 | #
4 | # Global package definitions and imports
5 | #----------------------------------------------------------------------------
6 |
7 | #' @import logging
8 | #' @import tensorflow
9 | #' @import keras
10 | NULL
11 |
--------------------------------------------------------------------------------
/packages/Modeling/man/Modeling_getLogger.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_logger.R
3 | \name{Modeling_getLogger}
4 | \alias{Modeling_getLogger}
5 | \title{Retrieves Modeling logger.}
6 | \usage{
7 | Modeling_getLogger()
8 | }
9 | \value{
10 | logger object
11 | }
12 | \description{
13 | Retrieves Modeling logger.
14 | }
15 |
--------------------------------------------------------------------------------
/packages/Modeling/man/assert.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{assert}
4 | \alias{assert}
5 | \title{The same as stopifnot but with a message.}
6 | \usage{
7 | assert(cond, fail_msg = NULL, ...)
8 | }
9 | \arguments{
10 | \item{cond}{Condition to be evaluated}
11 |
12 | \item{fail_msg}{Message for \code{stop} if the condition is FALSE. If not passed, the \code{cond} code itself is used as the message.}
13 | }
14 | \description{
15 | The same as stopifnot but with a message.
16 | }
17 |
--------------------------------------------------------------------------------
/packages/Modeling/man/dt_validate.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate}
4 | \alias{dt_validate}
5 | \title{Performs validation of the passed data.table}
6 | \usage{
7 | dt_validate(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \description{
17 | Performs validation of the passed data.table
18 | }
19 | \examples{
20 | {
21 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
22 | dt_validate(dt, "dt", list(c2 = "Date", c1 = "numeric"))
23 | }
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/packages/Modeling/man/dt_validate_and_groom.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/package_validation.R
3 | \name{dt_validate_and_groom}
4 | \alias{dt_validate_and_groom}
5 | \title{Performs validation and grooming of the passed data.table}
6 | \usage{
7 | dt_validate_and_groom(dt, dt_arg, expected_col_class)
8 | }
9 | \arguments{
10 | \item{dt}{data.table to be validated and groomed}
11 |
12 | \item{dt_arg}{name of data.table argument for proper message content}
13 |
14 | \item{expected_col_class}{named list of columns and their expected classes}
15 | }
16 | \value{
17 | data.table with only the expected columns, in the order specified by \code{expected_col_class}.
18 | }
19 | \description{
20 | Performs validation and grooming of the passed data.table
21 | }
22 | \examples{
23 | {
24 | dt <- data.table(c1= c(1,2,3), c2=c(Sys.Date()), c3=c("Ala & kotek"))
25 | dt_validate_and_groom(dt, "dt", list(c2 = "Date", c1 = "numeric"))
26 | # should output
27 | # c2 c1
28 | # 1: 2016-04-05 1
29 | # 2: 2016-04-05 2
30 | # 3: 2016-04-05 3
31 | }
32 |
33 | }
34 |
--------------------------------------------------------------------------------
/tests/.Rprofile:
--------------------------------------------------------------------------------
1 | source(file.path('..', 'R', 'set_env.R'), chdir = TRUE)
2 |
--------------------------------------------------------------------------------
/tests/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .Rdata
4 | .Ruserdata
5 |
--------------------------------------------------------------------------------
/tests/Keras_and_Shiny_Tests.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 |
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 |
--------------------------------------------------------------------------------