├── .DS_Store ├── .coveralls.yml ├── .esdoc.json ├── .eslintrc ├── .eslintrc.json ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── bundle ├── tensorscript.cjs.js ├── tensorscript.esm.js ├── tensorscript.umd.js ├── tensorscript.umd.min.js ├── tensorscript.web.js └── tensorscript.web.min.js ├── docs ├── API.md ├── CNAME ├── ast │ └── source │ │ ├── classification.mjs.json │ │ ├── deep_learning.mjs.json │ │ ├── deeplearning.mjs.json │ │ ├── logistic_regression.mjs.json │ │ ├── lstm_multivariate_time_series.mjs.json │ │ ├── lstm_time_series.mjs.json │ │ ├── mlr.mjs.json │ │ ├── model_interface.mjs.json │ │ ├── multiple_linear_regression.mjs.json │ │ └── regression.mjs.json ├── badge.svg ├── class │ └── lib │ │ ├── classification.mjs~DeepLearningClassification.html │ │ ├── deep_learning.mjs~BaseNeuralNetwork.html │ │ ├── deeplearning.mjs~BaseNeuralNetwork.html │ │ ├── deeplearning.mjs~DeepLearningClassification.html │ │ ├── deeplearning.mjs~DeepLearningRegression.html │ │ ├── logistic_regression.mjs~LogisticRegression.html │ │ ├── lstm_multivariate_time_series.mjs~LSTMMultivariateTimeSeries.html │ │ ├── lstm_time_series.mjs~LSTMTimeSeries.html │ │ ├── mlr.mjs~MultipleLinearRegression.html │ │ ├── model_interface.mjs~TensorScriptModelInterface.html │ │ ├── multiple_linear_regression.mjs~MultipleLinearRegression.html │ │ └── regression.mjs~DeepLearningRegression.html ├── coverage.json ├── css │ ├── github.css │ ├── identifiers.css │ ├── manual.css │ ├── prettify-tomorrow.css │ ├── search.css │ ├── source.css │ ├── style.css │ └── test.css ├── file │ └── lib │ │ ├── classification.mjs.html │ │ ├── deep_learning.mjs.html │ │ ├── deeplearning.mjs.html │ │ ├── logistic_regression.mjs.html │ │ ├── lstm_multivariate_time_series.mjs.html │ │ ├── lstm_time_series.mjs.html │ │ ├── mlr.mjs.html │ │ ├── model_interface.mjs.html │ │ ├── multiple_linear_regression.mjs.html │ │ └── regression.mjs.html ├── gtm.js ├── identifiers.html ├── image │ ├── 
badge.svg │ ├── esdoc-logo-mini-black.png │ ├── esdoc-logo-mini.png │ ├── github.png │ ├── manual-badge.svg │ └── search.png ├── index.html ├── index.json ├── manual │ ├── CHANGELOG.html │ ├── configuration.html │ ├── example.html │ ├── faq.html │ ├── index.html │ ├── installation.html │ ├── overview.html │ ├── tutorial.html │ └── usage.html ├── script │ ├── inherited-summary.js │ ├── inner-link.js │ ├── manual.js │ ├── patch-for-local.js │ ├── prettify │ │ ├── Apache-License-2.0.txt │ │ └── prettify.js │ ├── pretty-print.js │ ├── search.js │ ├── search_index.js │ └── test-summary.js ├── source.html ├── test-file │ └── test │ │ └── unit │ │ ├── base_neural_network_spec.mjs.html │ │ ├── classification_spec.mjs.html │ │ ├── logistic_regression_spec.mjs.html │ │ ├── lstm_multivariate_time_series_spec.mjs.html │ │ ├── lstm_time_series_spec.mjs.html │ │ ├── math_js_spec.mjs.html │ │ ├── model_interface_spec.mjs.html │ │ ├── multiple_linear_regression_spec.mjs.html │ │ └── regression_spec.mjs.html └── test.html ├── index.js ├── lib ├── classification.js ├── deep_learning.js ├── logistic_regression.js ├── lstm_multivariate_time_series.js ├── lstm_time_series.js ├── model_interface.js ├── multiple_linear_regression.js ├── regression.js └── text_embedding.js ├── manual ├── examples │ ├── ex_classification-iris.mjs │ ├── ex_classification-social.mjs │ ├── ex_nn-portland.mjs │ ├── ex_regression-boston.mjs │ ├── ex_regression-portland.mjs │ └── ex_timeseries-airline.mjs ├── faq.md ├── overview.md └── usage.md ├── package.json ├── renovate.json ├── rollup.config.js └── test ├── .DS_Store ├── mock ├── .DS_Store └── data │ ├── .DS_Store │ ├── airline-sales.csv │ ├── boston_housing_data.csv │ ├── boston_housing_names.txt │ ├── demo.mjs │ ├── international-airline-passengers-no_footer.csv │ ├── iris_data.csv │ ├── iris_names.txt │ ├── pollution.csv │ ├── portland_housing_data.csv │ ├── sample.csv │ └── social_network_ads.csv ├── mock_saved_files ├── .DS_Store └── mlr_model │ ├── 
model.json │ └── weights.bin └── unit ├── base_neural_network_spec.js ├── classification_spec.js ├── logistic_regression_spec.js ├── lstm_multivariate_time_series_spec.js ├── lstm_time_series_spec.js ├── math_js_spec.js ├── model_interface_spec.js ├── multiple_linear_regression_spec.js ├── regression_spec.js └── text_embedding_spec.js /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/.DS_Store -------------------------------------------------------------------------------- /.coveralls.yml: -------------------------------------------------------------------------------- 1 | service_name: travis-pro 2 | repo_token: W7pCj4DTvWJmrm3Hz0YKcrNSOH00Bq3Hd -------------------------------------------------------------------------------- /.esdoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "./lib", 3 | "destination": "./docs", 4 | "plugins": [ 5 | { 6 | "name": "@repetere/esdoc-inject-gtm-plugin", 7 | "option": { 8 | "enable": true, 9 | "id": "UA-112697260-2" 10 | } 11 | }, 12 | { 13 | "name": "esdoc-ecmascript-proposal-plugin", 14 | "option": {"all": true} 15 | }, 16 | { 17 | "name": "esdoc-standard-plugin", 18 | "option":{ 19 | "typeInference": { 20 | "enable": false 21 | }, 22 | "lint": { 23 | "enable": false 24 | }, 25 | "brand": { 26 | "title": "TensorScript", 27 | "description": "Deep Learning Classification, Clustering, Time Series, Regression and Multi-Layered Perceptrons with Tensorflow", 28 | "repository": "https://github.com/repetere/tensorscript", 29 | "site": "https://repetere.github.io/tensorscript", 30 | "author": "https://github.com/repetere", 31 | "image": "https://repetere.ai/favicon.png" 32 | }, 33 | "manual": { 34 | "index": "./README.md", 35 | "asset": "./manual/asset", 36 | "files": [ 37 | "./manual/overview.md", 38 | "./manual/usage.md", 39 | 
"./manual/faq.md", 40 | "./CHANGELOG.md" 41 | ] 42 | }, 43 | "test": { 44 | "source": "./test/", 45 | "interfaces": ["describe", "it", "context", "suite", "test"], 46 | "includes": ["(spec|Spec|test|Test)\\.mjs$"], 47 | "excludes": ["\\.config\\.js$"] 48 | } 49 | } 50 | }, 51 | { 52 | "name": "esdoc-publish-markdown-plugin", 53 | "option": { 54 | "filename":"./API.md" 55 | } 56 | } 57 | ], 58 | "includes": ["\\.mjs$"] 59 | } 60 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint:recommended", 3 | "plugins": [], 4 | "parserOptions": { 5 | "sourceType": "module", 6 | "ecmaVersion": 2017, 7 | "ecmaFeatures": { 8 | "jsx": true 9 | } 10 | }, 11 | "rules": { 12 | "jsx-quotes": ["error", "prefer-double"], 13 | "indent": ["error", 2], 14 | "quotes": ["error", "single"], 15 | "semi": ["warn", "always"], 16 | "comma-dangle": ["warn", "always"], 17 | "comma-spacing": "warn", 18 | "array-bracket-spacing": "warn", 19 | "object-curly-spacing": ["error", "always"], 20 | "eqeqeq": "warn", 21 | "no-cond-assign": ["warn", "always"], 22 | "no-unused-vars": "warn", 23 | "no-console": "warn", 24 | "brace-style": ["warn", "1tbs"], 25 | "strict": ["error", "safe"] 26 | }, 27 | "env": { 28 | "browser": true, 29 | "commonjs": true, 30 | "es6": true, 31 | "node": true, 32 | "mocha": true 33 | } 34 | } -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint:recommended", 3 | "plugins": [], 4 | "parserOptions": { 5 | "sourceType": "module", 6 | "ecmaVersion": 2017, 7 | "ecmaFeatures": { 8 | "jsx": true 9 | } 10 | }, 11 | "rules": { 12 | "jsx-quotes": ["error", "prefer-double"], 13 | "indent": ["error", 2], 14 | "quotes": ["error", "single"], 15 | "semi": ["warn", "always"], 16 | 
"comma-dangle": ["warn", "always"], 17 | "comma-spacing": "warn", 18 | "array-bracket-spacing": "warn", 19 | "object-curly-spacing": ["error", "always"], 20 | "eqeqeq": "warn", 21 | "no-cond-assign": ["warn", "always"], 22 | "no-unused-vars": "warn", 23 | "no-console": "warn", 24 | "brace-style": ["warn", "1tbs"], 25 | "strict": ["error", "safe"] 26 | }, 27 | "env": { 28 | "browser": true, 29 | "commonjs": true, 30 | "es6": true, 31 | "node": true, 32 | "mocha": true 33 | } 34 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | .vscode 3 | .vscode/* 4 | logs 5 | *.log 6 | npm-debug.log* 7 | yarn-debug.log* 8 | yarn-error.log* 9 | package-lock.json 10 | *.package-lock.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | 24 | # nyc test coverage 25 | .nyc_output 26 | 27 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 28 | .grunt 29 | 30 | # Bower dependency directory (https://bower.io/) 31 | bower_components 32 | 33 | # node-waf configuration 34 | .lock-wscript 35 | 36 | # Compiled binary addons (https://nodejs.org/api/addons.html) 37 | build/Release 38 | 39 | # Dependency directories 40 | node_modules/ 41 | jspm_packages/ 42 | 43 | # TypeScript v1 declaration files 44 | typings/ 45 | 46 | # Optional npm cache directory 47 | .npm 48 | 49 | # Optional eslint cache 50 | .eslintcache 51 | 52 | # Optional REPL history 53 | .node_repl_history 54 | 55 | # Output of 'npm pack' 56 | *.tgz 57 | 58 | # Yarn Integrity file 59 | .yarn-integrity 60 | 61 | # dotenv environment variables file 62 | .env 63 | 64 | # next.js build output 65 | .next 66 | 
-------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "10" 4 | - "8" 5 | env: 6 | - CXX=g++-4.8 7 | addons: 8 | apt: 9 | sources: 10 | - ubuntu-toolchain-r-test 11 | packages: 12 | - g++-4.8 13 | 14 | notifications: 15 | irc: 16 | channels: 17 | - "irc.freenode.org#periodicjs" 18 | template: 19 | - "Build by %{author} :" 20 | - "Git Info: %{branch} - %{commit} with the message %{commit_message}" 21 | - "Travis states: %{message}" 22 | - "Build Details: %{build_url}" 23 | - "Change View: %{compare_url}" 24 | - "result = %{result}" 25 | - "elapsed time... %{duration}" 26 | use_notice: true 27 | skip_join: false 28 | 29 | install: 30 | - npm install mocha nyc @std/esm -g 31 | - npm install 32 | # - npm install --skip_app_post_install=true 33 | 34 | branches: 35 | only: 36 | - master 37 | - devel 38 | git: 39 | submodules: false 40 | 41 | script: 42 | - npm set progress=false && npm run test && npm run coverage 43 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 1.4.0 (2019-05-09) 4 | - **Feat** 5 | - Added support for Model Saving and Loading 6 | 7 | ## 1.0.0 (2018-07-30) 8 | 9 | - **Feat** 10 | - First Initial Release 11 | - **Fix** 12 | - N/A 13 | - **Chore** 14 | - Allow for multistep future forecasts in multivariate timeseries. 15 | - Add basic clustering example. 
16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Repetere 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docs/API.md: -------------------------------------------------------------------------------- 1 | # Class 2 | 3 | # Function -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | tensorscript.io -------------------------------------------------------------------------------- /docs/badge.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | document 13 | document 14 | NaN% 15 | NaN% 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/coverage.json: -------------------------------------------------------------------------------- 1 | { 2 | "coverage": "0%", 3 | "expectCount": 0, 4 | "actualCount": 0, 5 | "files": {} 6 | } -------------------------------------------------------------------------------- /docs/css/github.css: -------------------------------------------------------------------------------- 1 | /* github markdown */ 2 | .github-markdown { 3 | font-size: 16px; 4 | } 5 | 6 | .github-markdown h1, 7 | .github-markdown h2, 8 | .github-markdown h3, 9 | .github-markdown h4, 10 | .github-markdown h5 { 11 | margin-top: 1em; 12 | margin-bottom: 16px; 13 | font-weight: bold; 14 | padding: 0; 15 | } 16 | 17 | .github-markdown h1:nth-of-type(1) { 18 | margin-top: 0; 19 | } 20 | 21 | .github-markdown h1 { 22 | font-size: 2em; 23 | padding-bottom: 0.3em; 24 | } 25 | 26 | .github-markdown h2 { 27 | font-size: 1.75em; 28 | padding-bottom: 0.3em; 29 | } 30 | 31 | .github-markdown h3 { 32 | font-size: 1.5em; 33 | } 34 | 35 | .github-markdown h4 { 36 | font-size: 1.25em; 37 | } 38 | 39 | .github-markdown h5 { 40 | font-size: 1em; 41 | } 42 | 43 | .github-markdown ul, .github-markdown ol { 44 | padding-left: 2em; 45 | } 46 | 47 | 
.github-markdown pre > code { 48 | font-size: 0.85em; 49 | } 50 | 51 | .github-markdown table { 52 | margin-bottom: 1em; 53 | border-collapse: collapse; 54 | border-spacing: 0; 55 | } 56 | 57 | .github-markdown table tr { 58 | background-color: #fff; 59 | border-top: 1px solid #ccc; 60 | } 61 | 62 | .github-markdown table th, 63 | .github-markdown table td { 64 | padding: 6px 13px; 65 | border: 1px solid #ddd; 66 | } 67 | 68 | .github-markdown table tr:nth-child(2n) { 69 | background-color: #f8f8f8; 70 | } 71 | 72 | .github-markdown hr { 73 | border-right: 0; 74 | border-bottom: 1px solid #e5e5e5; 75 | border-left: 0; 76 | border-top: 0; 77 | } 78 | 79 | /** badge(.svg) does not have border */ 80 | .github-markdown img:not([src*=".svg"]) { 81 | max-width: 100%; 82 | box-shadow: 1px 1px 1px rgba(0,0,0,0.5); 83 | } 84 | -------------------------------------------------------------------------------- /docs/css/identifiers.css: -------------------------------------------------------------------------------- 1 | .identifiers-wrap { 2 | display: flex; 3 | align-items: flex-start; 4 | } 5 | 6 | .identifier-dir-tree { 7 | background: #fff; 8 | border: solid 1px #ddd; 9 | border-radius: 0.25em; 10 | top: 52px; 11 | position: -webkit-sticky; 12 | position: sticky; 13 | max-height: calc(100vh - 155px); 14 | overflow-y: scroll; 15 | min-width: 200px; 16 | margin-left: 1em; 17 | } 18 | 19 | .identifier-dir-tree-header { 20 | padding: 0.5em; 21 | background-color: #fafafa; 22 | border-bottom: solid 1px #ddd; 23 | } 24 | 25 | .identifier-dir-tree-content { 26 | padding: 0 0.5em 0; 27 | } 28 | 29 | .identifier-dir-tree-content > div { 30 | padding-top: 0.25em; 31 | padding-bottom: 0.25em; 32 | } 33 | 34 | .identifier-dir-tree-content a { 35 | color: inherit; 36 | } 37 | 38 | -------------------------------------------------------------------------------- /docs/css/manual.css: -------------------------------------------------------------------------------- 1 | .github-markdown 
.manual-toc { 2 | padding-left: 0; 3 | } 4 | 5 | .manual-index .manual-cards { 6 | display: flex; 7 | flex-wrap: wrap; 8 | } 9 | 10 | .manual-index .manual-card-wrap { 11 | width: 280px; 12 | padding: 10px 20px 10px 0; 13 | box-sizing: border-box; 14 | } 15 | 16 | .manual-index .manual-card-wrap > h1 { 17 | margin: 0; 18 | font-size: 1em; 19 | font-weight: 600; 20 | padding: 0.2em 0 0.2em 0.5em; 21 | border-radius: 0.1em 0.1em 0 0; 22 | border: none; 23 | } 24 | 25 | .manual-index .manual-card-wrap > h1 span { 26 | color: #555; 27 | } 28 | 29 | .manual-index .manual-card { 30 | height: 200px; 31 | overflow: hidden; 32 | border: solid 1px rgba(230, 230, 230, 0.84); 33 | border-radius: 0 0 0.1em 0.1em; 34 | padding: 8px; 35 | position: relative; 36 | } 37 | 38 | .manual-index .manual-card > div { 39 | transform: scale(0.4); 40 | transform-origin: 0 0; 41 | width: 250%; 42 | } 43 | 44 | .manual-index .manual-card > a { 45 | position: absolute; 46 | top: 0; 47 | left: 0; 48 | width: 100%; 49 | height: 100%; 50 | background: rgba(210, 210, 210, 0.1); 51 | } 52 | 53 | .manual-index .manual-card > a:hover { 54 | background: none; 55 | } 56 | 57 | .manual-index .manual-badge { 58 | margin: 0; 59 | } 60 | 61 | .manual-index .manual-user-index { 62 | margin-bottom: 1em; 63 | border-bottom: solid 1px #ddd; 64 | } 65 | 66 | .manual-root .navigation { 67 | padding-left: 4px; 68 | margin-top: 4px; 69 | } 70 | 71 | .navigation .manual-toc-root > div { 72 | padding-left: 0.25em; 73 | padding-right: 0.75em; 74 | } 75 | 76 | .github-markdown .manual-toc-title a { 77 | color: inherit; 78 | } 79 | 80 | .manual-breadcrumb-list { 81 | font-size: 0.8em; 82 | margin-bottom: 1em; 83 | } 84 | 85 | .manual-toc-title a:hover { 86 | color: #039BE5; 87 | } 88 | 89 | .manual-toc li { 90 | margin: 0.75em 0; 91 | list-style-type: none; 92 | } 93 | 94 | .navigation .manual-toc [class^="indent-h"] a { 95 | color: #666; 96 | } 97 | 98 | .navigation .manual-toc .indent-h1 a { 99 | color: #555; 100 | 
font-weight: 600; 101 | display: block; 102 | } 103 | 104 | .manual-toc .indent-h1 { 105 | display: block; 106 | margin: 0.4em 0 0 0.25em; 107 | padding: 0.2em 0 0.2em 0.5em; 108 | border-radius: 0.1em; 109 | } 110 | 111 | .manual-root .navigation .manual-toc li:not(.indent-h1) { 112 | margin-top: 0.5em; 113 | } 114 | 115 | .manual-toc .indent-h2 { 116 | display: none; 117 | margin-left: 1.5em; 118 | } 119 | .manual-toc .indent-h3 { 120 | display: none; 121 | margin-left: 2.5em; 122 | } 123 | .manual-toc .indent-h4 { 124 | display: none; 125 | margin-left: 3.5em; 126 | } 127 | .manual-toc .indent-h5 { 128 | display: none; 129 | margin-left: 4.5em; 130 | } 131 | 132 | .manual-nav li { 133 | margin: 0.75em 0; 134 | } 135 | -------------------------------------------------------------------------------- /docs/css/prettify-tomorrow.css: -------------------------------------------------------------------------------- 1 | /* Tomorrow Theme */ 2 | /* Original theme - https://github.com/chriskempson/tomorrow-theme */ 3 | /* Pretty printing styles. Used with prettify.js. */ 4 | /* SPAN elements with the classes below are added by prettyprint. 
*/ 5 | /* plain text */ 6 | .pln { 7 | color: #4d4d4c; } 8 | 9 | @media screen { 10 | /* string content */ 11 | .str { 12 | color: #718c00; } 13 | 14 | /* a keyword */ 15 | .kwd { 16 | color: #8959a8; } 17 | 18 | /* a comment */ 19 | .com { 20 | color: #8e908c; } 21 | 22 | /* a type name */ 23 | .typ { 24 | color: #4271ae; } 25 | 26 | /* a literal value */ 27 | .lit { 28 | color: #f5871f; } 29 | 30 | /* punctuation */ 31 | .pun { 32 | color: #4d4d4c; } 33 | 34 | /* lisp open bracket */ 35 | .opn { 36 | color: #4d4d4c; } 37 | 38 | /* lisp close bracket */ 39 | .clo { 40 | color: #4d4d4c; } 41 | 42 | /* a markup tag name */ 43 | .tag { 44 | color: #c82829; } 45 | 46 | /* a markup attribute name */ 47 | .atn { 48 | color: #f5871f; } 49 | 50 | /* a markup attribute value */ 51 | .atv { 52 | color: #3e999f; } 53 | 54 | /* a declaration */ 55 | .dec { 56 | color: #f5871f; } 57 | 58 | /* a variable name */ 59 | .var { 60 | color: #c82829; } 61 | 62 | /* a function name */ 63 | .fun { 64 | color: #4271ae; } } 65 | /* Use higher contrast and text-weight for printable form. 
*/ 66 | @media print, projection { 67 | .str { 68 | color: #060; } 69 | 70 | .kwd { 71 | color: #006; 72 | font-weight: bold; } 73 | 74 | .com { 75 | color: #600; 76 | font-style: italic; } 77 | 78 | .typ { 79 | color: #404; 80 | font-weight: bold; } 81 | 82 | .lit { 83 | color: #044; } 84 | 85 | .pun, .opn, .clo { 86 | color: #440; } 87 | 88 | .tag { 89 | color: #006; 90 | font-weight: bold; } 91 | 92 | .atn { 93 | color: #404; } 94 | 95 | .atv { 96 | color: #060; } } 97 | /* Style */ 98 | /* 99 | pre.prettyprint { 100 | background: white; 101 | font-family: Consolas, Monaco, 'Andale Mono', monospace; 102 | font-size: 12px; 103 | line-height: 1.5; 104 | border: 1px solid #ccc; 105 | padding: 10px; } 106 | */ 107 | 108 | /* Specify class=linenums on a pre to get line numbering */ 109 | ol.linenums { 110 | margin-top: 0; 111 | margin-bottom: 0; } 112 | 113 | /* IE indents via margin-left */ 114 | li.L0, 115 | li.L1, 116 | li.L2, 117 | li.L3, 118 | li.L4, 119 | li.L5, 120 | li.L6, 121 | li.L7, 122 | li.L8, 123 | li.L9 { 124 | /* */ } 125 | 126 | /* Alternate shading for lines */ 127 | li.L1, 128 | li.L3, 129 | li.L5, 130 | li.L7, 131 | li.L9 { 132 | /* */ } 133 | -------------------------------------------------------------------------------- /docs/css/search.css: -------------------------------------------------------------------------------- 1 | /* search box */ 2 | .search-box { 3 | position: absolute; 4 | top: 10px; 5 | right: 50px; 6 | padding-right: 8px; 7 | padding-bottom: 10px; 8 | line-height: normal; 9 | font-size: 12px; 10 | } 11 | 12 | .search-box img { 13 | width: 20px; 14 | vertical-align: top; 15 | } 16 | 17 | .search-input { 18 | display: inline; 19 | visibility: hidden; 20 | width: 0; 21 | padding: 2px; 22 | height: 1.5em; 23 | outline: none; 24 | background: transparent; 25 | border: 1px #0af; 26 | border-style: none none solid none; 27 | vertical-align: bottom; 28 | } 29 | 30 | .search-input-edge { 31 | display: none; 32 | width: 1px; 33 | height: 
5px; 34 | background-color: #0af; 35 | vertical-align: bottom; 36 | } 37 | 38 | .search-result { 39 | position: absolute; 40 | display: none; 41 | height: 600px; 42 | width: 100%; 43 | padding: 0; 44 | margin-top: 5px; 45 | margin-left: 24px; 46 | background: white; 47 | box-shadow: 1px 1px 4px rgb(0,0,0); 48 | white-space: nowrap; 49 | overflow-y: scroll; 50 | } 51 | 52 | .search-result-import-path { 53 | color: #aaa; 54 | font-size: 12px; 55 | } 56 | 57 | .search-result li { 58 | list-style: none; 59 | padding: 2px 4px; 60 | } 61 | 62 | .search-result li a { 63 | display: block; 64 | } 65 | 66 | .search-result li.selected { 67 | background: #ddd; 68 | } 69 | 70 | .search-result li.search-separator { 71 | background: rgb(37, 138, 175); 72 | color: white; 73 | } 74 | 75 | .search-box.active .search-input { 76 | visibility: visible; 77 | transition: width 0.2s ease-out; 78 | width: 300px; 79 | } 80 | 81 | .search-box.active .search-input-edge { 82 | display: inline-block; 83 | } 84 | 85 | -------------------------------------------------------------------------------- /docs/css/source.css: -------------------------------------------------------------------------------- 1 | table.files-summary { 2 | width: 100%; 3 | margin: 10px 0; 4 | border-spacing: 0; 5 | border: 0; 6 | border-collapse: collapse; 7 | text-align: right; 8 | } 9 | 10 | table.files-summary tbody tr:hover { 11 | background: #eee; 12 | } 13 | 14 | table.files-summary td:first-child, 15 | table.files-summary td:nth-of-type(2) { 16 | text-align: left; 17 | } 18 | 19 | table.files-summary[data-use-coverage="false"] td.coverage { 20 | display: none; 21 | } 22 | 23 | table.files-summary thead { 24 | background: #fafafa; 25 | } 26 | 27 | table.files-summary td { 28 | border: solid 1px #ddd; 29 | padding: 4px 10px; 30 | vertical-align: top; 31 | } 32 | 33 | table.files-summary td.identifiers > span { 34 | display: block; 35 | margin-top: 4px; 36 | } 37 | table.files-summary td.identifiers > span:first-child { 
38 | margin-top: 0; 39 | } 40 | 41 | table.files-summary .coverage-count { 42 | font-size: 12px; 43 | color: #aaa; 44 | display: inline-block; 45 | min-width: 40px; 46 | } 47 | 48 | .total-coverage-count { 49 | position: relative; 50 | bottom: 2px; 51 | font-size: 12px; 52 | color: #666; 53 | font-weight: 500; 54 | padding-left: 5px; 55 | } 56 | -------------------------------------------------------------------------------- /docs/css/test.css: -------------------------------------------------------------------------------- 1 | table.test-summary thead { 2 | background: #fafafa; 3 | } 4 | 5 | table.test-summary thead .test-description { 6 | width: 50%; 7 | } 8 | 9 | table.test-summary { 10 | width: 100%; 11 | margin: 10px 0; 12 | border-spacing: 0; 13 | border: 0; 14 | border-collapse: collapse; 15 | } 16 | 17 | table.test-summary thead .test-count { 18 | width: 3em; 19 | } 20 | 21 | table.test-summary tbody tr:hover { 22 | background-color: #eee; 23 | } 24 | 25 | table.test-summary td { 26 | border: solid 1px #ddd; 27 | padding: 4px 10px; 28 | vertical-align: top; 29 | } 30 | 31 | table.test-summary td p { 32 | margin: 0; 33 | } 34 | 35 | table.test-summary tr.test-interface .toggle { 36 | display: inline-block; 37 | float: left; 38 | margin-right: 4px; 39 | cursor: pointer; 40 | font-size: 0.8em; 41 | padding-top: 0.25em; 42 | } 43 | 44 | table.test-summary tr.test-interface .toggle.opened:before { 45 | content: '▼'; 46 | } 47 | 48 | table.test-summary tr.test-interface .toggle.closed:before { 49 | content: '▶'; 50 | } 51 | 52 | table.test-summary .test-target > span { 53 | display: block; 54 | margin-top: 4px; 55 | } 56 | table.test-summary .test-target > span:first-child { 57 | margin-top: 0; 58 | } 59 | -------------------------------------------------------------------------------- /docs/file/lib/classification.mjs.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | lib/classification.mjs | TensorScript 5 | 6 
| 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
21 | Home 22 | Manual 23 | Reference 24 | Source 25 | Test 26 | 33 |
34 | 35 | 49 | 50 |

lib/classification.mjs

51 |
import { BaseNeuralNetwork, } from './deep_learning';
 52 | 
 53 | /**
 54 |  * Deep Learning Classification with Tensorflow
 55 |  * @class DeepLearningClassification
 56 |  * @implements {BaseNeuralNetwork}
 57 |  */
 58 | export class DeepLearningClassification extends BaseNeuralNetwork{
 59 |   /**
 60 |    * @param {{layers:Array<Object>,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters
 61 |    * @param {{model:Object,tf:Object,}} properties - extra instance properties
 62 |    */
 63 |   constructor(options = {}, properties) {
 64 |     const config = Object.assign({
 65 |       layers: [],
 66 |       compile: {
 67 |         loss: 'categoricalCrossentropy',
 68 |         optimizer: 'adam',
 69 |       },
 70 |       fit: {
 71 |         epochs: 100,
 72 |         batchSize: 5,
 73 |       },
 74 |     }, options);
 75 |     super(config, properties);
 76 |     return this;
 77 |   }
 78 |   /**
 79 |    * Adds dense layers to tensorflow classification model
 80 |    * @override 
 81 |    * @param {Array<Array<number>>} x_matrix - independent variables
 82 |    * @param {Array<Array<number>>} y_matrix - dependent variables
 83 |    * @param {Array<Object>} layers - model dense layer parameters
 84 |    */
 85 |   generateLayers(x_matrix, y_matrix, layers) {
 86 |     const xShape = this.getInputShape(x_matrix);
 87 |     const yShape = this.getInputShape(y_matrix);
 88 |     this.yShape = yShape;
 89 |     this.xShape = xShape;
 90 |     const denseLayers = [];
 91 |     if (layers) {
 92 |       denseLayers.push(...layers);
 93 |     } else {
 94 |       denseLayers.push({ units: (xShape[ 1 ] * 2), inputDim: xShape[1],  activation: 'relu', });
 95 |       denseLayers.push({ units: yShape[ 1 ], activation: 'softmax', });
 96 |     }
 97 |     this.layers = denseLayers;
 98 |     denseLayers.forEach(layer => {
 99 |       this.model.add(this.tf.layers.dense(layer));
100 |     });
101 |   }
102 | }
103 | 104 |
105 | 106 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /docs/file/lib/deeplearning.mjs.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | lib/deeplearning.mjs | @tensorscript/ts-deeplearning 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | 17 | Reference 18 | Source 19 | 20 | 27 |
28 | 29 | 40 | 41 |

lib/deeplearning.mjs

42 |
import { TensorScriptModelInterface, } from '@tensorscript/core';
 43 | 
 44 | /**
 45 |  * Deep Learning with Tensorflow
 46 |  * @class BaseNeuralNetwork
 47 |  * @implements {TensorScriptModelInterface}
 48 |  */
 49 | export class BaseNeuralNetwork extends TensorScriptModelInterface {
 50 |   /**
 51 |    * @param {{layers:Array<Object>,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters
 52 |    * @param {{model:Object,tf:Object,}} properties - extra instance properties
 53 |    */
 54 |   constructor(options = {}, properties) {
 55 |     const config = Object.assign({
 56 |       layers: [],
 57 |       compile: {
 58 |         loss: 'meanSquaredError',
 59 |         optimizer: 'adam',
 60 |       },
 61 |       fit: {
 62 |         epochs: 100,
 63 |         batchSize: 5,
 64 |       },
 65 |     }, options);
 66 |     super(config, properties);
 67 |     return this;
 68 |   }
 69 |   /**
 70 |    * Adds dense layers to tensorflow model
 71 |    * @abstract 
 72 |    * @param {Array<Array<number>>} x_matrix - independent variables
 73 |    * @param {Array<Array<number>>} y_matrix - dependent variables
 74 |    * @param {Array<Object>} layers - model dense layer parameters
 75 |    */
 76 |   generateLayers(x_matrix, y_matrix, layers) {
 77 |     throw new ReferenceError('generateLayers method is not implemented');
 78 |   }
 79 |   /**
 80 |    * Asynchronously trains tensorflow model
 81 |    * @override
 82 |    * @param {Array<Array<number>>} x_matrix - independent variables
 83 |    * @param {Array<Array<number>>} y_matrix - dependent variables
 84 |    * @param {Array<Object>} layers - array of model dense layer parameters
 85 |    * @param {Array<Array<number>>} x_text - validation data independent variables
 86 |    * @param {Array<Array<number>>} y_text - validation data dependent variables
 87 |    * @return {Object} returns trained tensorflow model 
 88 |    */
 89 |   async train(x_matrix, y_matrix, layers, x_test, y_test) {
 90 |     const xShape = this.getInputShape(x_matrix);
 91 |     const yShape = this.getInputShape(y_matrix);
 92 |     const xs = this.tf.tensor(x_matrix, xShape);
 93 |     const ys = this.tf.tensor(y_matrix, yShape);
 94 |     this.xShape = xShape;
 95 |     this.yShape = yShape;
 96 |     this.model = this.tf.sequential();
 97 |     this.generateLayers.call(this, x_matrix, y_matrix, layers || this.layers, x_test, y_test);
 98 |     this.model.compile(this.settings.compile);
 99 |     await this.model.fit(xs, ys, this.settings.fit);
100 |     xs.dispose();
101 |     ys.dispose();
102 |     return this.model;
103 |   }
104 |   /**
105 |    * Predicts new dependent variables
106 |    * @override
107 |    * @param {Array<Array<number>>|Array<number>} matrix - new test independent variables
108 |    * @return {{data: Promise}} returns tensorflow prediction 
109 |    */
110 |   calculate(input_matrix) {
111 |     if (!input_matrix || Array.isArray(input_matrix)===false) throw new Error('invalid input matrix');
112 |     const predictionInput = (Array.isArray(input_matrix[ 0 ]))
113 |       ? input_matrix
114 |       : [
115 |         input_matrix,
116 |       ];
117 |     const predictionTensor = this.tf.tensor(predictionInput);
118 |     const prediction = this.model.predict(predictionTensor);
119 |     predictionTensor.dispose();
120 |     return prediction;
121 |   }
122 | }
123 | 124 |
125 | 126 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | -------------------------------------------------------------------------------- /docs/file/lib/mlr.mjs.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | lib/mlr.mjs | @tensorscript/ts-mlr 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | 17 | Reference 18 | Source 19 | 20 | 27 |
28 | 29 | 36 | 37 |

lib/mlr.mjs

38 |
import { TensorScriptModelInterface, } from '@tensorscript/core';
 39 | 
 40 | /**
 41 |  * Multiple Linear Regression with Tensorflow
 42 |  * @class MultipleLinearRegression
 43 |  * @implements {TensorScriptModelInterface}
 44 |  */
 45 | export class MultipleLinearRegression extends TensorScriptModelInterface {
 46 |   /**
 47 |    * @param {Object} options - tensorflow model hyperparameters
 48 |    * @param {Object} customTF - custom tensorflow module: tensorflow / tensorflow-node / tensorflow-node-gpu
 49 |    */
 50 |   constructor(options = {}, customTF) {
 51 |     const config = Object.assign({
 52 |       epochs:500,
 53 |     }, options);
 54 |     super(config, customTF);
 55 |     return this;
 56 |   }
 57 |   /**
 58 |    * Asynchronously trains tensorflow model
 59 |    * @override
 60 |    * @param {Array<Array<number>>} x_matrix - independent variables
 61 |    * @param {Array<Array<number>>} y_matrix - dependent variables
 62 |    * @return {Object} returns trained tensorflow model 
 63 |    */
 64 |   async train(x_matrix, y_matrix) {
 65 |     const xShape = this.getInputShape(x_matrix);
 66 |     const yShape = this.getInputShape(y_matrix);
 67 |     const xs = this.tf.tensor(x_matrix, xShape);
 68 |     const ys = this.tf.tensor(y_matrix, yShape);
 69 |     this.yShape = yShape;
 70 |     this.xShape = xShape;
 71 |     this.model = this.tf.sequential();
 72 |     this.model.add(this.tf.layers.dense({ units: yShape[1], inputShape: [xShape[1],], }));
 73 |     this.model.compile({
 74 |       loss: 'meanSquaredError',
 75 |       optimizer: 'sgd',
 76 |     });
 77 |     await this.model.fit(xs, ys, this.settings);
 78 |     xs.dispose();
 79 |     ys.dispose();
 80 |     return this.model;
 81 |   }
 82 |   /**
 83 |    * Predicts new dependent variables
 84 |    * @override
 85 |    * @param {Array<Array<number>>|Array<number>} matrix - new test independent variables
 86 |    * @return {{data: Promise}} returns tensorflow prediction 
 87 |    */
 88 |   calculate(input_matrix) {
 89 |     const predictionInput = (Array.isArray(input_matrix[ 0 ]))
 90 |       ? input_matrix
 91 |       : [ input_matrix, ];
 92 |     const predictionTensor = this.tf.tensor(predictionInput);
 93 |     const prediction = this.model.predict(predictionTensor);
 94 |     predictionTensor.dispose();
 95 |     return prediction;
 96 |   }
 97 | }
98 | 99 |
100 | 101 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | -------------------------------------------------------------------------------- /docs/file/lib/multiple_linear_regression.mjs.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | lib/multiple_linear_regression.mjs | TensorScript 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
21 | Home 22 | Manual 23 | Reference 24 | Source 25 | Test 26 | 33 |
34 | 35 | 49 | 50 |

lib/multiple_linear_regression.mjs

51 |
import { BaseNeuralNetwork, } from './deep_learning';
 52 | 
 53 | /**
 54 |  * Multiple Linear Regression with Tensorflow
 55 |  * @class MultipleLinearRegression
 56 |  * @implements {BaseNeuralNetwork}
 57 |  */
 58 | export class MultipleLinearRegression extends BaseNeuralNetwork {
 59 |   /**
 60 |    * @param {{layers:Array<Object>,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters
 61 |    * @param {{model:Object,tf:Object,}} properties - extra instance properties
 62 |    */
 63 |   /* istanbul ignore next */
 64 |   constructor(options = {}, properties = {}) {
 65 |     const config = Object.assign({
 66 |       layers: [],
 67 |       compile: {
 68 |         loss: 'meanSquaredError',
 69 |         optimizer: 'sgd',
 70 |       },
 71 |       fit: {
 72 |         epochs: 500,
 73 |         batchSize: 5,
 74 |       },
 75 |     }, options);
 76 |     super(config, properties);
 77 |     return this;
 78 |   }
 79 |   /**
 80 |    * Adds dense layers to tensorflow regression model
 81 |    * @override 
 82 |    * @param {Array<Array<number>>} x_matrix - independent variables
 83 |    * @param {Array<Array<number>>} y_matrix - dependent variables
 84 |    * @param {Array<Object>} layers - model dense layer parameters
 85 |    */
 86 |   generateLayers(x_matrix, y_matrix, layers) {
 87 |     const xShape = this.getInputShape(x_matrix);
 88 |     const yShape = this.getInputShape(y_matrix);
 89 |     const denseLayers = [];
 90 |     if (layers) {
 91 |       denseLayers.push(...layers);
 92 |     } else {
 93 |       denseLayers.push({ units: yShape[1], inputShape: [xShape[1],], });
 94 |     }
 95 |     this.layers = denseLayers;
 96 |     denseLayers.forEach(layer => {
 97 |       this.model.add(this.tf.layers.dense(layer));
 98 |     });
 99 |   }
100 | }
101 | 102 |
103 | 104 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /docs/gtm.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /docs/identifiers.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Reference | TensorScript 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
21 | Home 22 | Manual 23 | Reference 24 | Source 25 | 26 | 33 |
34 | 35 | 41 | 42 |

References

43 | 44 |
45 |
46 | 47 |
48 | 49 | 50 |
51 |
52 | 53 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | -------------------------------------------------------------------------------- /docs/image/badge.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | document 13 | document 14 | @ratio@ 15 | @ratio@ 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/image/esdoc-logo-mini-black.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/docs/image/esdoc-logo-mini-black.png -------------------------------------------------------------------------------- /docs/image/esdoc-logo-mini.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/docs/image/esdoc-logo-mini.png -------------------------------------------------------------------------------- /docs/image/github.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/docs/image/github.png -------------------------------------------------------------------------------- /docs/image/manual-badge.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | manual 13 | manual 14 | @value@ 15 | @value@ 16 | 17 | 18 | -------------------------------------------------------------------------------- /docs/image/search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/docs/image/search.png 
-------------------------------------------------------------------------------- /docs/manual/CHANGELOG.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Manual | TensorScript 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
21 | Home 22 | Manual 23 | Reference 24 | Source 25 | 26 | 33 |
34 | 35 | 77 | 78 |

Changelog

1.4.0 (2019-05-09)

84 |

1.0.0 (2018-07-30)

99 |
100 |
101 | 102 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | -------------------------------------------------------------------------------- /docs/manual/configuration.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Manual | TensorScript 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | Manual 17 | Reference 18 | Source 19 | Test 20 | 27 |
28 | 29 | 85 | 86 |

config

this is the overview

87 |
88 |
89 | 90 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /docs/manual/example.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Manual | TensorScript 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | Manual 17 | Reference 18 | Source 19 | Test 20 | 27 |
28 | 29 | 85 | 86 |

example

this is the overview

87 |
88 |
89 | 90 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /docs/manual/faq.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Manual | TensorScript 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
21 | Home 22 | Manual 23 | Reference 24 | Source 25 | 26 | 33 |
34 | 35 | 77 | 78 |

Frequently Asked Questions

Scaling

90 |
91 |
92 | 93 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | -------------------------------------------------------------------------------- /docs/manual/installation.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Manual | TensorScript 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | Manual 17 | Reference 18 | Source 19 | Test 20 | 27 |
28 | 29 | 85 | 86 |

install

this is the overview

87 |
88 |
89 | 90 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /docs/manual/tutorial.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Manual | TensorScript 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 |
15 | Home 16 | Manual 17 | Reference 18 | Source 19 | Test 20 | 27 |
28 | 29 | 85 | 86 |

tutorial

this is the overview

87 |
88 |
89 | 90 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | -------------------------------------------------------------------------------- /docs/script/inherited-summary.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | function toggle(ev) { 3 | var button = ev.target; 4 | var parent = ev.target.parentElement; 5 | while(parent) { 6 | if (parent.tagName === 'TABLE' && parent.classList.contains('summary')) break; 7 | parent = parent.parentElement; 8 | } 9 | 10 | if (!parent) return; 11 | 12 | var tbody = parent.querySelector('tbody'); 13 | if (button.classList.contains('opened')) { 14 | button.classList.remove('opened'); 15 | button.classList.add('closed'); 16 | tbody.style.display = 'none'; 17 | } else { 18 | button.classList.remove('closed'); 19 | button.classList.add('opened'); 20 | tbody.style.display = 'block'; 21 | } 22 | } 23 | 24 | var buttons = document.querySelectorAll('.inherited-summary thead .toggle'); 25 | for (var i = 0; i < buttons.length; i++) { 26 | buttons[i].addEventListener('click', toggle); 27 | } 28 | })(); 29 | -------------------------------------------------------------------------------- /docs/script/inner-link.js: -------------------------------------------------------------------------------- 1 | // inner link(#foo) can not correctly scroll, because page has fixed header, 2 | // so, I manually scroll. 3 | (function(){ 4 | var matched = location.hash.match(/errorLines=([\d,]+)/); 5 | if (matched) return; 6 | 7 | function adjust() { 8 | window.scrollBy(0, -55); 9 | var el = document.querySelector('.inner-link-active'); 10 | if (el) el.classList.remove('inner-link-active'); 11 | 12 | // ``[ ] . ' " @`` are not valid in DOM id. so must escape these. 
13 | var id = location.hash.replace(/([\[\].'"@$])/g, '\\$1'); 14 | var el = document.querySelector(id); 15 | if (el) el.classList.add('inner-link-active'); 16 | } 17 | 18 | window.addEventListener('hashchange', adjust); 19 | 20 | if (location.hash) { 21 | setTimeout(adjust, 0); 22 | } 23 | })(); 24 | 25 | (function(){ 26 | var els = document.querySelectorAll('[href^="#"]'); 27 | var href = location.href.replace(/#.*$/, ''); // remove existed hash 28 | for (var i = 0; i < els.length; i++) { 29 | var el = els[i]; 30 | el.href = href + el.getAttribute('href'); // because el.href is absolute path 31 | } 32 | })(); 33 | -------------------------------------------------------------------------------- /docs/script/manual.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | var matched = location.pathname.match(/\/(manual\/.*\.html)$/); 3 | if (!matched) return; 4 | 5 | var currentName = matched[1]; 6 | var cssClass = '.navigation .manual-toc li[data-link="' + currentName + '"]'; 7 | var styleText = cssClass + '{ display: block; }\n'; 8 | styleText += cssClass + '.indent-h1 a { color: #039BE5 }'; 9 | var style = document.createElement('style'); 10 | style.textContent = styleText; 11 | document.querySelector('head').appendChild(style); 12 | })(); 13 | -------------------------------------------------------------------------------- /docs/script/patch-for-local.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | if (location.protocol === 'file:') { 3 | var elms = document.querySelectorAll('a[href="./"]'); 4 | for (var i = 0; i < elms.length; i++) { 5 | elms[i].href = './index.html'; 6 | } 7 | } 8 | })(); 9 | -------------------------------------------------------------------------------- /docs/script/pretty-print.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | prettyPrint(); 3 | var lines = 
document.querySelectorAll('.prettyprint.linenums li[class^="L"]'); 4 | for (var i = 0; i < lines.length; i++) { 5 | lines[i].id = 'lineNumber' + (i + 1); 6 | } 7 | 8 | var matched = location.hash.match(/errorLines=([\d,]+)/); 9 | if (matched) { 10 | var lines = matched[1].split(','); 11 | for (var i = 0; i < lines.length; i++) { 12 | var id = '#lineNumber' + lines[i]; 13 | var el = document.querySelector(id); 14 | el.classList.add('error-line'); 15 | } 16 | return; 17 | } 18 | 19 | if (location.hash) { 20 | // ``[ ] . ' " @`` are not valid in DOM id. so must escape these. 21 | var id = location.hash.replace(/([\[\].'"@$])/g, '\\$1'); 22 | var line = document.querySelector(id); 23 | if (line) line.classList.add('active'); 24 | } 25 | })(); 26 | -------------------------------------------------------------------------------- /docs/script/search.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | var searchIndex = window.esdocSearchIndex; 3 | var searchBox = document.querySelector('.search-box'); 4 | var input = document.querySelector('.search-input'); 5 | var result = document.querySelector('.search-result'); 6 | var selectedIndex = -1; 7 | var prevText; 8 | 9 | // active search box and focus when mouse enter on search box. 10 | searchBox.addEventListener('mouseenter', function(){ 11 | searchBox.classList.add('active'); 12 | input.focus(); 13 | }); 14 | 15 | // search with text when key is upped. 
16 | input.addEventListener('keyup', function(ev){ 17 | var text = ev.target.value.toLowerCase(); 18 | if (!text) { 19 | result.style.display = 'none'; 20 | result.innerHTML = ''; 21 | return; 22 | } 23 | 24 | if (text === prevText) return; 25 | prevText = text; 26 | 27 | var html = {class: [], method: [], member: [], function: [], variable: [], typedef: [], external: [], file: [], test: [], testFile: []}; 28 | var len = searchIndex.length; 29 | var kind; 30 | for (var i = 0; i < len; i++) { 31 | var pair = searchIndex[i]; 32 | if (pair[0].indexOf(text) !== -1) { 33 | kind = pair[3]; 34 | html[kind].push('
  • ' + pair[2] + '
  • '); 35 | } 36 | } 37 | 38 | var innerHTML = ''; 39 | for (kind in html) { 40 | var list = html[kind]; 41 | if (!list.length) continue; 42 | innerHTML += '
  • ' + kind + '
  • \n' + list.join('\n'); 43 | } 44 | result.innerHTML = innerHTML; 45 | if (innerHTML) result.style.display = 'block'; 46 | selectedIndex = -1; 47 | }); 48 | 49 | // down, up and enter key are pressed, select search result. 50 | input.addEventListener('keydown', function(ev){ 51 | if (ev.keyCode === 40) { 52 | // arrow down 53 | var current = result.children[selectedIndex]; 54 | var selected = result.children[selectedIndex + 1]; 55 | if (selected && selected.classList.contains('search-separator')) { 56 | var selected = result.children[selectedIndex + 2]; 57 | selectedIndex++; 58 | } 59 | 60 | if (selected) { 61 | if (current) current.classList.remove('selected'); 62 | selectedIndex++; 63 | selected.classList.add('selected'); 64 | } 65 | } else if (ev.keyCode === 38) { 66 | // arrow up 67 | var current = result.children[selectedIndex]; 68 | var selected = result.children[selectedIndex - 1]; 69 | if (selected && selected.classList.contains('search-separator')) { 70 | var selected = result.children[selectedIndex - 2]; 71 | selectedIndex--; 72 | } 73 | 74 | if (selected) { 75 | if (current) current.classList.remove('selected'); 76 | selectedIndex--; 77 | selected.classList.add('selected'); 78 | } 79 | } else if (ev.keyCode === 13) { 80 | // enter 81 | var current = result.children[selectedIndex]; 82 | if (current) { 83 | var link = current.querySelector('a'); 84 | if (link) location.href = link.href; 85 | } 86 | } else { 87 | return; 88 | } 89 | 90 | ev.preventDefault(); 91 | }); 92 | 93 | // select search result when search result is mouse over. 
94 | result.addEventListener('mousemove', function(ev){ 95 | var current = result.children[selectedIndex]; 96 | if (current) current.classList.remove('selected'); 97 | 98 | var li = ev.target; 99 | while (li) { 100 | if (li.nodeName === 'LI') break; 101 | li = li.parentElement; 102 | } 103 | 104 | if (li) { 105 | selectedIndex = Array.prototype.indexOf.call(result.children, li); 106 | li.classList.add('selected'); 107 | } 108 | }); 109 | 110 | // clear search result when body is clicked. 111 | document.body.addEventListener('click', function(ev){ 112 | selectedIndex = -1; 113 | result.style.display = 'none'; 114 | result.innerHTML = ''; 115 | }); 116 | 117 | })(); 118 | -------------------------------------------------------------------------------- /docs/script/search_index.js: -------------------------------------------------------------------------------- 1 | window.esdocSearchIndex = [] -------------------------------------------------------------------------------- /docs/script/test-summary.js: -------------------------------------------------------------------------------- 1 | (function(){ 2 | function toggle(ev) { 3 | var button = ev.target; 4 | var parent = ev.target.parentElement; 5 | while(parent) { 6 | if (parent.tagName === 'TR' && parent.classList.contains('test-interface')) break; 7 | parent = parent.parentElement; 8 | } 9 | 10 | if (!parent) return; 11 | 12 | var direction; 13 | if (button.classList.contains('opened')) { 14 | button.classList.remove('opened'); 15 | button.classList.add('closed'); 16 | direction = 'closed'; 17 | } else { 18 | button.classList.remove('closed'); 19 | button.classList.add('opened'); 20 | direction = 'opened'; 21 | } 22 | 23 | var targetDepth = parseInt(parent.dataset.testDepth, 10) + 1; 24 | var nextElement = parent.nextElementSibling; 25 | while (nextElement) { 26 | var depth = parseInt(nextElement.dataset.testDepth, 10); 27 | if (depth >= targetDepth) { 28 | if (direction === 'opened') { 29 | if (depth === targetDepth) 
nextElement.style.display = ''; 30 | } else if (direction === 'closed') { 31 | nextElement.style.display = 'none'; 32 | var innerButton = nextElement.querySelector('.toggle'); 33 | if (innerButton && innerButton.classList.contains('opened')) { 34 | innerButton.classList.remove('opened'); 35 | innerButton.classList.add('closed'); 36 | } 37 | } 38 | } else { 39 | break; 40 | } 41 | nextElement = nextElement.nextElementSibling; 42 | } 43 | } 44 | 45 | var buttons = document.querySelectorAll('.test-summary tr.test-interface .toggle'); 46 | for (var i = 0; i < buttons.length; i++) { 47 | buttons[i].addEventListener('click', toggle); 48 | } 49 | 50 | var topDescribes = document.querySelectorAll('.test-summary tr[data-test-depth="0"]'); 51 | for (var i = 0; i < topDescribes.length; i++) { 52 | topDescribes[i].style.display = ''; 53 | } 54 | })(); 55 | -------------------------------------------------------------------------------- /docs/source.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Source | TensorScript 5 | 6 | 7 | 8 | 9 | 10 | 11 | 18 | 19 | 20 |
    21 | Home 22 | Manual 23 | Reference 24 | Source 25 | 26 | 33 |
    34 | 35 | 41 | 42 |

    Source 0/0

    43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 |
    FileIdentifierDocumentSizeLinesUpdated
    59 |
    60 | 61 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | export { TensorScriptModelInterface, } from './lib/model_interface.js'; 2 | export { BaseNeuralNetwork, } from './lib/deep_learning.js'; 3 | export { DeepLearningRegression, } from './lib/regression.js'; 4 | export { DeepLearningClassification, } from './lib/classification.js'; 5 | export { LogisticRegression, } from './lib/logistic_regression.js'; 6 | export { MultipleLinearRegression, } from './lib/multiple_linear_regression.js'; 7 | export { LSTMTimeSeries, } from './lib/lstm_time_series.js'; 8 | export { LSTMMultivariateTimeSeries, } from './lib/lstm_multivariate_time_series.js'; 9 | export { TextEmbedding, } from './lib/text_embedding.js'; -------------------------------------------------------------------------------- /lib/classification.js: -------------------------------------------------------------------------------- 1 | import { BaseNeuralNetwork, } from './deep_learning'; 2 | 3 | /** 4 | * Deep Learning Classification with Tensorflow 5 | * @class DeepLearningClassification 6 | * @implements {BaseNeuralNetwork} 7 | */ 8 | export class DeepLearningClassification extends BaseNeuralNetwork{ 9 | /** 10 | * @param {{layers:Array,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters 11 | * @param {{model:Object,tf:Object,}} properties - extra instance properties 12 | */ 13 | constructor(options = {}, properties) { 14 | const config = Object.assign({ 15 | layers: [], 16 | compile: { 17 | loss: 'categoricalCrossentropy', 18 | optimizer: 'adam', 19 | }, 20 | fit: { 21 | epochs: 100, 22 | batchSize: 5, 23 | }, 24 | }, options); 25 | super(config, properties); 26 | return this; 27 | } 28 | /** 29 | * Adds dense layers to tensorflow classification model 30 | * @override 31 | 
* @param {Array>} x_matrix - independent variables 32 | * @param {Array>} y_matrix - dependent variables 33 | * @param {Array} layers - model dense layer parameters 34 | */ 35 | generateLayers(x_matrix, y_matrix, layers) { 36 | const xShape = this.getInputShape(x_matrix); 37 | const yShape = this.getInputShape(y_matrix); 38 | this.yShape = yShape; 39 | this.xShape = xShape; 40 | const denseLayers = []; 41 | if (layers) { 42 | denseLayers.push(...layers); 43 | } else { 44 | denseLayers.push({ units: (xShape[ 1 ] * 2), inputDim: xShape[1], activation: 'relu', }); 45 | denseLayers.push({ units: yShape[ 1 ], activation: 'softmax', }); 46 | } 47 | this.layers = denseLayers; 48 | denseLayers.forEach(layer => { 49 | this.model.add(this.tf.layers.dense(layer)); 50 | }); 51 | } 52 | } -------------------------------------------------------------------------------- /lib/deep_learning.js: -------------------------------------------------------------------------------- 1 | import { TensorScriptModelInterface, } from './model_interface'; 2 | 3 | /** 4 | * Deep Learning with Tensorflow 5 | * @class BaseNeuralNetwork 6 | * @implements {TensorScriptModelInterface} 7 | */ 8 | export class BaseNeuralNetwork extends TensorScriptModelInterface { 9 | /** 10 | * @param {{layers:Array,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters 11 | * @param {{model:Object,tf:Object,}} properties - extra instance properties 12 | */ 13 | constructor(options = {}, properties) { 14 | const config = Object.assign({ 15 | layers: [], 16 | compile: { 17 | loss: 'meanSquaredError', 18 | optimizer: 'adam', 19 | }, 20 | fit: { 21 | epochs: 100, 22 | batchSize: 5, 23 | }, 24 | }, options); 25 | super(config, properties); 26 | return this; 27 | } 28 | /** 29 | * Adds dense layers to tensorflow model 30 | * @abstract 31 | * @param {Array>} x_matrix - independent variables 32 | * @param {Array>} y_matrix - dependent variables 33 | * @param {Array} layers - 
model dense layer parameters 34 | */ 35 | generateLayers(x_matrix, y_matrix, layers) { 36 | throw new ReferenceError('generateLayers method is not implemented'); 37 | } 38 | /** 39 | * Asynchronously trains tensorflow model 40 | * @override 41 | * @param {Array>} x_matrix - independent variables 42 | * @param {Array>} y_matrix - dependent variables 43 | * @param {Array} layers - array of model dense layer parameters 44 | * @param {Array>} x_text - validation data independent variables 45 | * @param {Array>} y_text - validation data dependent variables 46 | * @return {Object} returns trained tensorflow model 47 | */ 48 | async train(x_matrix, y_matrix, layers, x_test, y_test) { 49 | const xShape = this.getInputShape(x_matrix); 50 | const yShape = this.getInputShape(y_matrix); 51 | const xs = this.tf.tensor(x_matrix, xShape); 52 | const ys = this.tf.tensor(y_matrix, yShape); 53 | this.xShape = xShape; 54 | this.yShape = yShape; 55 | if (typeof this.trained==='undefined' || this.trained === false) { 56 | this.model = this.tf.sequential(); 57 | this.generateLayers.call(this, x_matrix, y_matrix, layers || this.layers, x_test, y_test); 58 | this.model.compile(this.settings.compile); 59 | } 60 | await this.model.fit(xs, ys, this.settings.fit); 61 | xs.dispose(); 62 | ys.dispose(); 63 | return this.model; 64 | } 65 | /** 66 | * Predicts new dependent variables 67 | * @override 68 | * @param {Array>|Array} matrix - new test independent variables 69 | * @param {Object} options - model prediction options 70 | * @return {{data: Promise}} returns tensorflow prediction 71 | */ 72 | calculate(input_matrix, options) { 73 | if (!input_matrix || Array.isArray(input_matrix)===false) throw new Error('invalid input matrix'); 74 | const predictionInput = (Array.isArray(input_matrix[ 0 ])) 75 | ? 
input_matrix 76 | : [ 77 | input_matrix, 78 | ]; 79 | const predictionTensor = this.tf.tensor(predictionInput); 80 | const prediction = this.model.predict(predictionTensor, options); 81 | predictionTensor.dispose(); 82 | return prediction; 83 | } 84 | } -------------------------------------------------------------------------------- /lib/logistic_regression.js: -------------------------------------------------------------------------------- 1 | import { BaseNeuralNetwork, } from './deep_learning'; 2 | 3 | /** 4 | * Logistic Regression Classification with Tensorflow 5 | * @class LogisticRegression 6 | * @implements {BaseNeuralNetwork} 7 | */ 8 | export class LogisticRegression extends BaseNeuralNetwork { 9 | /** 10 | * @param {{layers:Array,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters 11 | * @param {{model:Object,tf:Object,}} properties - extra instance properties 12 | */ 13 | constructor(options = {}, properties) { 14 | const config = Object.assign({ 15 | layers: [], 16 | type:'simple', 17 | compile: { 18 | loss: 'meanSquaredError', 19 | optimizer: 'rmsprop', 20 | }, 21 | fit: { 22 | epochs: 100, 23 | batchSize: 5, 24 | }, 25 | }, options); 26 | super(config, properties); 27 | return this; 28 | } 29 | /** 30 | * Adds dense layers to tensorflow classification model 31 | * @override 32 | * @param {Array>} x_matrix - independent variables 33 | * @param {Array>} y_matrix - dependent variables 34 | * @param {Array} layers - model dense layer parameters 35 | * @param {Array>} x_test - validation data independent variables 36 | * @param {Array>} y_test - validation data dependent variables 37 | */ 38 | generateLayers(x_matrix, y_matrix, layers, x_test, y_test) { 39 | const xShape = this.getInputShape(x_matrix); 40 | const yShape = this.getInputShape(y_matrix); 41 | this.yShape = yShape; 42 | this.xShape = xShape; 43 | const denseLayers = []; 44 | if (layers) { 45 | denseLayers.push(...layers); 46 | } else if 
import { BaseNeuralNetwork, } from './deep_learning';

/**
 * Multiple Linear Regression with Tensorflow
 * @class MultipleLinearRegression
 * @implements {BaseNeuralNetwork}
 */
export class MultipleLinearRegression extends BaseNeuralNetwork {
  /**
   * @param {{layers:Array,compile:Object,fit:Object}} options - neural network configuration and tensorflow model hyperparameters
   * @param {{model:Object,tf:Object,}} properties - extra instance properties
   */
  /* istanbul ignore next */
  constructor(options = {}, properties = {}) {
    // Merge caller options over linear-regression defaults:
    // mean-squared-error loss trained with stochastic gradient descent.
    const config = Object.assign({
      layers: [],
      compile: {
        loss: 'meanSquaredError',
        optimizer: 'sgd',
      },
      fit: {
        epochs: 500,
        batchSize: 5,
      },
    }, options);
    super(config, properties);
    // NOTE: constructors return `this` implicitly; the redundant
    // explicit `return this;` has been removed.
  }
  /**
   * Adds dense layers to tensorflow regression model
   * @override
   * @param {Array<Array<number>>} x_matrix - independent variables
   * @param {Array<Array<number>>} y_matrix - dependent variables
   * @param {Array<Object>} layers - model dense layer parameters
   */
  generateLayers(x_matrix, y_matrix, layers) {
    const xShape = this.getInputShape(x_matrix);
    const yShape = this.getInputShape(y_matrix);
    const denseLayers = [];
    if (layers) {
      // Caller supplied explicit layer definitions — use them verbatim.
      denseLayers.push(...layers);
    } else {
      // Single dense layer: one output unit per dependent variable,
      // input width taken from the feature-column count.
      denseLayers.push({ units: yShape[ 1 ], inputShape: [xShape[ 1 ], ], });
    }
    this.layers = denseLayers;
    denseLayers.forEach(layer => {
      this.model.add(this.tf.layers.dense(layer));
    });
  }
}
import { TensorScriptModelInterface, } from './model_interface';
import * as UniversalSentenceEncoder from '@tensorflow-models/universal-sentence-encoder';

// Module-level cache: the USE model and tokenizer are loaded at most once
// per process, no matter how many TextEmbedding instances are created.
let model;
let tokenizer;

/**
 * Text Embedding with Tensorflow Universal Sentence Encoder (USE)
 * @class TextEmbedding
 * @implements {TensorScriptModelInterface}
 */
export class TextEmbedding extends TensorScriptModelInterface {
  /**
   * @param {Object} options - Options for USE
   * @param {{model:Object,tf:Object,}} properties - extra instance properties
   */
  constructor(options = {}, properties) {
    const config = Object.assign({}, options);
    super(config, properties);
    // NOTE: constructors return `this` implicitly; the redundant
    // explicit `return this;` has been removed.
  }
  /**
   * Asynchronously loads Universal Sentence Encoder and tokenizer
   * @override
   * @return {Object} returns loaded UniversalSentenceEncoder model
   */
  async train() {
    // Load model and tokenizer in parallel, substituting an already-resolved
    // promise for whichever one is cached from a previous call.
    const promises = [];
    if (!model) promises.push(UniversalSentenceEncoder.load());
    else promises.push(Promise.resolve(model));
    if (!tokenizer) promises.push(UniversalSentenceEncoder.loadTokenizer());
    else promises.push(Promise.resolve(tokenizer));
    const USE = await Promise.all(promises);
    if (!model) model = USE[ 0 ];
    if (!tokenizer) tokenizer = USE[ 1 ];
    this.model = model;
    this.tokenizer = tokenizer;
    return this.model;
  }
  /**
   * Calculates sentence embeddings
   * @override
   * @param {Array<Array<number>>|Array<number>} input_array - new test independent variables
   * @param {Object} options - model prediction options (currently unused)
   * @return {{data: Promise}} returns tensorflow prediction
   */
  calculate(input_array, options = {}) {
    if (!input_array || Array.isArray(input_array) === false) throw new Error('invalid input array of sentences');
    const embeddings = this.model.embed(input_array);
    return embeddings;
  }
  /**
   * Returns prediction values from tensorflow model
   * @param {Array<String>} input_array - array of sentences to embed
   * @param {Boolean} [options.json=true] - return object instead of typed array
   * @param {Boolean} [options.probability=true] - return real values instead of integers
   * @return {Array<Array<number>>} predicted model values
   */
  async predict(input_array, options = {}) {
    const config = Object.assign({
      json: true,
      probability: true,
    }, options);
    const embeddings = await this.calculate(input_array, options);
    const predictions = await embeddings.data();
    if (config.json === false) {
      return predictions;
    } else {
      // USE produces a 512-dimensional embedding per input sentence.
      const shape = [input_array.length, 512, ];
      // FIX: consult the merged `config` (which carries the probability:true
      // default) instead of raw `options`, so the default merge is not dead code.
      const predictionValues = (config.probability === false)
        ? Array.from(predictions).map(Math.round)
        : Array.from(predictions);
      return this.reshape(predictionValues, shape);
    }
  }
}
44 | ] 45 | */ 46 | const fit = { 47 | epochs: 200, 48 | batchSize:10, 49 | }; 50 | const nnClassification = new DeepLearningClassification({ fit, }, { 51 | // tf - can switch to tensorflow gpu here 52 | }); 53 | console.log('training model'); 54 | await nnClassification.train(x_matrix_train, y_matrix_train); 55 | const estimatesPredictions = await nnClassification.predict(x_matrix_test, { probability: false, }); 56 | /* 57 | estimates = [ 58 | [ 1, 0, 0 ], 59 | [ 1, 0, 0 ], 60 | ... 61 | ] 62 | y_matrix_test = [ 63 | [ 1, 0, 0 ], 64 | [ 1, 0, 0 ], 65 | ... 66 | ] 67 | */ 68 | const estimatedValues = ms.DataSet.reverseColumnMatrix({ vectors: estimatesPredictions, labels: dependentVariables, }); 69 | const actualValues = ms.DataSet.reverseColumnMatrix({ vectors: y_matrix_test, labels: dependentVariables, }); 70 | /* 71 | estimatedValues = [ 72 | { 'plant_Iris-setosa': 1, 'plant_Iris-versicolor': 0, 'plant_Iris-virginica': 0 }, 73 | { 'plant_Iris-setosa': 1, 'plant_Iris-versicolor': 0, 'plant_Iris-virginica': 0 }, 74 | ... 75 | ]; 76 | actualValues = [ 77 | { 'plant_Iris-setosa': 1, 'plant_Iris-versicolor': 0, 'plant_Iris-virginica': 0 }, 78 | { 'plant_Iris-setosa': 1, 'plant_Iris-versicolor': 0, 'plant_Iris-virginica': 0 }, 79 | ... 80 | ]; 81 | */ 82 | const reformattedEstimatesValues = DataSet.oneHotDecoder('plant', { data: estimatedValues, }); 83 | const reformattedActualValues = DataSet.oneHotDecoder('plant', { data: actualValues, }); 84 | /* 85 | reformattedEstimatesValues = [ 86 | { plant: 'Iris-setosa' }, 87 | { plant: 'Iris-setosa' }, 88 | ... 89 | ]; 90 | reformattedActualValues = [ 91 | { plant: 'Iris-setosa' }, 92 | { plant: 'Iris-setosa' }, 93 | ... 94 | ]; 95 | */ 96 | const estimates = ms.DataSet.columnArray('plant', { data: reformattedEstimatesValues, }); 97 | const actuals = ms.DataSet.columnArray('plant', { data: reformattedActualValues, }); 98 | /* 99 | estimates = [ 100 | 'Iris-setosa', 101 | 'Iris-setosa', 102 | ... 
103 | ]; 104 | actuals = [ 105 | 'Iris-setosa', 106 | 'Iris-setosa', 107 | ... 108 | ]; 109 | */ 110 | const CM = ConfusionMatrix.fromLabels(actuals,estimates); 111 | const accuracy = CM.getAccuracy(); 112 | console.log({ accuracy, }); // { accuracy: 0.9111111111111111 } ~ 91% 113 | } 114 | 115 | main(); -------------------------------------------------------------------------------- /manual/examples/ex_classification-social.mjs: -------------------------------------------------------------------------------- 1 | /** 2 | * Node 8+ 3 | * $ node --experimental-modules manual/examples/ex_classification-social.mjs 4 | */ 5 | import * as ms from 'modelscript'; // used for scaling, data manipulation 6 | import { LogisticRegression, } from '../../index.mjs'; 7 | //if running on node 8 | // import tf from '@tensorflow/tfjs-node'; 9 | const ConfusionMatrix = ms.ml.ConfusionMatrix; 10 | 11 | const independentVariables = [ 12 | 'Age', 13 | 'EstimatedSalary', 14 | ]; 15 | const dependentVariables = [ 16 | 'Purchased', 17 | ]; 18 | 19 | async function main() { 20 | /* 21 | CSVData = [ 22 | { 23 | 'User ID': 15709441, 24 | Gender: 'Female', 25 | Age: 35, 26 | EstimatedSalary: 44000, 27 | Purchased: 0 28 | }, 29 | { 30 | 'User ID': 15710257, 31 | Gender: 'Female', 32 | Age: 35, 33 | EstimatedSalary: 25000, 34 | Purchased: 0 35 | }, 36 | ... 
37 | ] 38 | */ 39 | const CSVData = await ms.csv.loadCSV('./test/mock/data/social_network_ads.csv'); 40 | const CSVDataSet = new ms.DataSet(CSVData); 41 | CSVDataSet.fitColumns({ 42 | Age: ['scale', 'standard',], 43 | EstimatedSalary: ['scale', 'standard',], 44 | }); 45 | const testTrainSplit = ms.cross_validation.train_test_split(CSVDataSet.data, { train_size: 0.7, }); 46 | const { train, test, } = testTrainSplit; 47 | const testDataSet = new ms.DataSet(test); 48 | const trainDataSet = new ms.DataSet(train); 49 | const x_matrix_train = trainDataSet.columnMatrix(independentVariables); 50 | const y_matrix_train = trainDataSet.columnMatrix(dependentVariables); 51 | const x_matrix_test = testDataSet.columnMatrix(independentVariables); 52 | const y_matrix_test = testDataSet.columnMatrix(dependentVariables); 53 | /* 54 | train = [ 55 | { 56 | 'User ID': 15715541, 57 | Gender: 'Female', 58 | Age: -0.9210258186650291, 59 | EstimatedSalary: 0.4181457784478519, 60 | Purchased: 0 61 | }, 62 | { 63 | 'User ID': 15622478, 64 | Gender: 'Male', 65 | Age: 0.8914537830579694, 66 | EstimatedSalary: -0.7843074508252975, 67 | Purchased: 0 68 | }, 69 | ... 70 | ] 71 | y_matrix_test = [ 72 | [ 0 ], 73 | [ 0 ], 74 | [ 0 ], 75 | [ 1 ], 76 | ... 77 | ] 78 | */ 79 | const fit = { 80 | epochs: 200, 81 | batchSize:10, 82 | }; 83 | const nnLR = new LogisticRegression({ fit, }, { 84 | // tf - can switch to tensorflow gpu here 85 | }); 86 | console.log('training model'); 87 | await nnLR.train(x_matrix_train, y_matrix_train); 88 | const estimatesPredictions = await nnLR.predict(x_matrix_test, { probability: false, }); 89 | /* 90 | estimates = [ 91 | [ 0 ], 92 | [ 0 ], 93 | ... 94 | ] 95 | y_matrix_test = [ 96 | [ 0 ], 97 | [ 0 ], 98 | ... 
99 | ] 100 | */ 101 | const estimatedValues = CSVDataSet.reverseColumnMatrix({ vectors: estimatesPredictions, labels: dependentVariables, }); 102 | const actualValues = CSVDataSet.reverseColumnMatrix({ vectors: y_matrix_test, labels: dependentVariables, }); 103 | /* 104 | estimatedValues = [ 105 | { Purchased: 0 }, 106 | { Purchased: 0 }, 107 | ... 108 | ]; 109 | actualValues = [ 110 | { Purchased: 0 }, 111 | { Purchased: 0 }, 112 | ... 113 | ]; 114 | */ 115 | const estimates = CSVDataSet.columnArray('Purchased', { data: estimatedValues, }); 116 | const actuals = CSVDataSet.columnArray('Purchased', { data: actualValues, }); 117 | /* 118 | estimates = [ 119 | 0, 120 | 0, 121 | ... 122 | ]; 123 | actuals = [ 124 | 0, 125 | 0, 126 | ... 127 | ]; 128 | */ 129 | const CM = ConfusionMatrix.fromLabels(actuals, estimates); 130 | const accuracy = CM.getAccuracy(); 131 | console.log({ accuracy, }); // { accuracy: 0.8166666666666667 } ~ 82% 132 | 133 | const newScaledPrediction = CSVDataSet.transformObject({ 134 | Age: 35, 135 | EstimatedSalary: 44000, 136 | 'User ID': undefined, 137 | Gender: undefined, 138 | Purchased: undefined, 139 | }); 140 | const newScaledPrediction2 = CSVDataSet.transformObject({ 141 | Age: 18, 142 | EstimatedSalary: 32000, 143 | 'User ID': undefined, 144 | Gender: undefined, 145 | Purchased: undefined, 146 | }); 147 | const newScaledPrediction3 = CSVDataSet.transformObject({ 148 | Age: 39, 149 | EstimatedSalary: 127000, 150 | 'User ID': undefined, 151 | Gender: undefined, 152 | Purchased: undefined, 153 | }); 154 | const predictionData = [ newScaledPrediction, newScaledPrediction2, newScaledPrediction3, ]; 155 | /* 156 | newScaledPrediction = { Age: -0.253270175924977, EstimatedSalary: -0.75497932328205, 'User ID': undefined, Gender: undefined, Purchased: undefined } 157 | inputMatrix = [ [ -0.253270175924977, -0.75497932328205 ] ] 158 | */ 159 | const inputMatrix = CSVDataSet.columnMatrix(independentVariables, predictionData); 160 | const 
newPredictions = await nnLR.predict(inputMatrix, { probability: false, }); 161 | /* newPredictions= [ 162 | [ 0 ], - no (Age 35/EstimatedSalary 44000) 163 | [ 0 ], - no (Age 18/EstimatedSalary 32000) 164 | [ 1 ], - yes (Age 39/EstimatedSalary 127000) 165 | ] 166 | */ 167 | 168 | } 169 | 170 | main(); -------------------------------------------------------------------------------- /manual/examples/ex_regression-portland.mjs: -------------------------------------------------------------------------------- 1 | /** 2 | * Node 8+ 3 | * $ node --experimental-modules manual/examples/ex_regression-boston.mjs 4 | */ 5 | import * as ms from 'modelscript'; // used for scaling, data manipulation 6 | import { MultipleLinearRegression, } from '../../index.mjs'; 7 | //if running on node 8 | // import tf from '@tensorflow/tfjs-node'; 9 | 10 | 11 | const independentVariables = ['sqft', 'bedrooms', ]; 12 | const dependentVariables = ['price',]; 13 | 14 | async function main() { 15 | /* 16 | CSVData = [ 17 | { sqft: 2104, bedrooms: 3, price: 399900 }, 18 | { sqft: 1600, bedrooms: 3, price: 329900 }, 19 | ... 
20 | { sqft: 1203, bedrooms: 3, price: 239500 } 21 | ] 22 | */ 23 | const CSVData = await ms.csv.loadCSV('./test/mock/data/portland_housing_data.csv'); 24 | const CSVDataSet = new ms.DataSet(CSVData); 25 | CSVDataSet.fitColumns({ 26 | sqft: ['scale', 'standard',], 27 | bedrooms: ['scale', 'standard',], 28 | price: ['scale', 'standard',], 29 | }); 30 | // console.log(CSVDataSet.scalers) 31 | const testTrainSplit = ms.cross_validation.train_test_split(CSVDataSet.data, { train_size: 0.7, }); 32 | const { train, test, } = testTrainSplit; 33 | const testDataSet = new ms.DataSet(test); 34 | const trainDataSet = new ms.DataSet(train); 35 | const x_matrix_train = trainDataSet.columnMatrix(independentVariables); 36 | const y_matrix_train = trainDataSet.columnMatrix(dependentVariables); 37 | const x_matrix_test = testDataSet.columnMatrix(independentVariables); 38 | const y_matrix_test = testDataSet.columnMatrix(dependentVariables); 39 | /* 40 | x_train = [ 41 | [2014, 3], 42 | [1600, 3], 43 | ... 44 | ] 45 | y_matrix_test = [ 46 | [399900], 47 | [329900], 48 | ... 49 | ] 50 | */ 51 | const fit = { 52 | epochs: 100, 53 | batchSize:5, 54 | }; 55 | const nnLR = new MultipleLinearRegression({ fit, }, { 56 | // tf - can switch to tensorflow gpu here 57 | }); 58 | console.log('training model'); 59 | await nnLR.train(x_matrix_train, y_matrix_train); 60 | const estimatesPredictions = await nnLR.predict(x_matrix_test); 61 | /* 62 | estimatesPredictions = [ 63 | [ -0.09963418543338776 ], 64 | [ 2.1494181156158447 ], 65 | ... 66 | ] 67 | y_matrix_test = [ 68 | [ -0.0832826930356929 ], 69 | [ 2.874981038969331 ], 70 | ... 71 | ] 72 | */ 73 | const estimatedValues = CSVDataSet.reverseColumnMatrix({ vectors: estimatesPredictions, labels: dependentVariables, }); 74 | const actualValues = CSVDataSet.reverseColumnMatrix({ vectors: y_matrix_test, labels: dependentVariables, }); 75 | /* 76 | estimatedValues = [ 77 | { price: -0.0832826930356929 }, 78 | { price: 2.874981038969331 }, 79 | ... 
80 | ]; 81 | actualValues = [ 82 | { price: -0.097346231341362 }, 83 | { price: 2.206512451171875 }, 84 | ... 85 | ]; 86 | */ 87 | const estimatesDescaled = estimatedValues.map(val=>CSVDataSet.inverseTransformObject(val)); 88 | const actualsDescaled = actualValues.map(val =>CSVDataSet.inverseTransformObject(val)); 89 | /* 90 | estimatesDescaled = [ 91 | { price: 328581.1387223731, 92 | ... 93 | bedrooms:: NaN }, 94 | { price: 610483.8580602541, 95 | ... 96 | bedrooms:: NaN }, 97 | ... 98 | ]; 99 | actuals = [ 100 | { price: 329999,, 101 | ... 102 | bedrooms:: NaN }, 103 | { price: 699900,, 104 | ... 105 | bedrooms:: NaN }, 106 | ... 107 | ]; 108 | */ 109 | 110 | const estimates = ms.DataSet.columnArray('price', { data: estimatesDescaled, }); 111 | const actuals = ms.DataSet.columnArray('price', { data: actualsDescaled, }); 112 | /* 113 | estimates = [ 114 | 328772.6900112897, 115 | 620789.3324962095, 116 | ] 117 | actuals = [ 118 | 329999, 119 | 699900, 120 | ] 121 | */ 122 | 123 | const accuracy = ms.util.rSquared(actuals, estimates); 124 | console.log({ accuracy, }); // { accuracy: 0.8596126619239105 } ~ 86% 125 | 126 | const newScaledPredictions = [ 127 | { sqft: 4215, bedrooms: 4, price: undefined, }, 128 | { sqft: 852, bedrooms: 2, price: undefined, }, 129 | ].map(pred => CSVDataSet.transformObject(pred)); 130 | 131 | /* 132 | newScaledPredictions = [ 133 | { 134 | sqft: 2.78635030976002, 135 | bedrooms: 1.0904165374468842, 136 | price: NaN 137 | }, 138 | { 139 | sqft: -1.4454227371491544, 140 | bedrooms: -1.5377669117840669, 141 | price: NaN 142 | } 143 | ] 144 | inputMatrix = [ 145 | [ 2.78635030976002, 1.0904165374468842 ], 146 | [ -1.4454227371491544, -1.5377669117840669 ] 147 | ], 148 | */ 149 | const inputMatrix = CSVDataSet.columnMatrix(independentVariables, newScaledPredictions); 150 | const newPredictions = await nnLR.predict(inputMatrix); 151 | /* newPredictions= [ [ 2.135737180709839 ], [ -0.9959254860877991 ] ] */ 152 | const newPreds = 
CSVDataSet.reverseColumnMatrix({ vectors: newPredictions, labels: dependentVariables, }); 153 | /* newPreds= [ { price: 2.135737180709839 }, { price: -0.9959254860877991 } ] ] */ 154 | 155 | const newpredsDescaled = newPreds 156 | .map(val => CSVDataSet.inverseTransformObject(val)) 157 | .map(p=>({ price:p.price, })); 158 | /* newpredsDescaled = [ { price: 606373.7003425548 }, { price: 220151.62698092213 } ] */ 159 | 160 | } 161 | 162 | main(); -------------------------------------------------------------------------------- /manual/examples/ex_timeseries-airline.mjs: -------------------------------------------------------------------------------- 1 | /** 2 | * Node 8+ 3 | * $ node --experimental-modules manual/examples/ex_regression-boston.mjs 4 | */ 5 | import * as ms from 'modelscript'; // used for scaling, data manipulation 6 | import { LSTMTimeSeries, } from '../../index.mjs'; 7 | //if running on node 8 | // import tf from '@tensorflow/tfjs-node'; 9 | 10 | 11 | const independentVariables = ['Passengers',]; 12 | const dependentVariables = ['Passengers',]; 13 | 14 | async function main() { 15 | /* 16 | CSVData = [ 17 | { Month: '1949-01', Passengers: 112 }, 18 | { Month: '1949-02', Passengers: 118 }, 19 | { Month: '1949-03', Passengers: 132 }, 20 | { Month: '1949-04', Passengers: 129 }, 21 | ... 22 | ] 23 | after scaling = [ 24 | { Month: '1949-01', Passengers: -1.4028822039369186 }, 25 | { Month: '1949-02', Passengers: -1.3528681653893018 }, 26 | ... 
27 | ] 28 | */ 29 | const CSVData = await ms.csv.loadCSV('./test/mock/data/airline-sales.csv'); 30 | const CSVDataSet = new ms.DataSet(CSVData); 31 | CSVDataSet.fitColumns({ 32 | Passengers: ['scale', 'standard', ], 33 | }); 34 | 35 | const train_size = parseInt(CSVDataSet.data.length * 0.67); 36 | // const test_size = CSVDataSet.data.length - train_size; 37 | const trainDataSet = new ms.DataSet(CSVDataSet.data.slice(0, train_size)); 38 | const testDataSet = new ms.DataSet(CSVDataSet.data.slice(train_size, CSVDataSet.data.length)); 39 | const x_matrix_train = trainDataSet.columnMatrix(independentVariables); 40 | const y_matrix_train = trainDataSet.columnMatrix(dependentVariables); 41 | const x_matrix_test = testDataSet.columnMatrix(independentVariables); 42 | const y_matrix_test = testDataSet.columnMatrix(dependentVariables); 43 | /* 44 | x_train = [ 45 | [ -1.4028822039369186 ], 46 | [ -1.3528681653893018 ], 47 | ... 48 | ] 49 | y_matrix_test = [ 50 | [ 0.2892594335907897 ], 51 | [ 0.17256001031301674 ], 52 | ... 53 | ] 54 | */ 55 | const fit = { 56 | epochs: 200, 57 | batchSize:5, 58 | }; 59 | const lstmTS = new LSTMTimeSeries({ fit, }, { 60 | // tf - can switch to tensorflow gpu here 61 | }); 62 | console.log('training model'); 63 | await lstmTS.train(x_matrix_train); 64 | const estimatesPredictions = await lstmTS.predict(x_matrix_test); 65 | /* 66 | estimatesPredictions = [ 67 | [ 0.3164129853248596 ], 68 | [ 0.22678324580192566 ], 69 | ... 70 | ] 71 | y_matrix_test = [ 72 | [ 0.2892594335907897 ], 73 | [ 0.17256001031301674 ], 74 | ... 75 | ] 76 | */ 77 | const estimatedValues = CSVDataSet.reverseColumnMatrix({ vectors: estimatesPredictions, labels: dependentVariables, }); 78 | const actualValues = CSVDataSet.reverseColumnMatrix({ vectors: y_matrix_test, labels: dependentVariables, }); 79 | /* 80 | estimatedValues = [ 81 | { Passengers: 0.29080596566200256 }, 82 | { Passengers: 0.20487788319587708 }, 83 | ... 
84 | ]; 85 | actualValues = [ 86 | { Passengers: 0.2892594335907897 }, 87 | { Passengers: 0.17256001031301674 }, 88 | ... 89 | ]; 90 | */ 91 | const estimatesDescaled = estimatedValues.map(val=>CSVDataSet.inverseTransformObject(val)); 92 | const actualsDescaled = actualValues.map(val => CSVDataSet.inverseTransformObject(val)); 93 | 94 | /* 95 | estimatesDescaled = [ 96 | { Passengers: 315.18553175661754 }, 97 | { Passengers: 304.87705618118696 }, 98 | ... 99 | ]; 100 | actuals = [ 101 | { Passengers: 315 }, 102 | { Passengers: 301 }, 103 | ... 104 | ]; 105 | */ 106 | 107 | const estimates = ms.DataSet.columnArray('Passengers', { data: estimatesDescaled, }); 108 | const actuals = ms.DataSet.columnArray('Passengers', { data: actualsDescaled, }); 109 | /* 110 | estimates = [ 111 | 328772.6900112897, 112 | 620789.3324962095, 113 | ] 114 | actuals = [ 115 | 329999, 116 | 699900, 117 | ] 118 | */ 119 | 120 | const accuracy = ms.util.rSquared(actuals, estimates); 121 | console.log({ accuracy, }); // { accuracy: 0.0.768662218946419 } ~ 77% 122 | 123 | const newScaledPredictions = [ 124 | { Month: '1960-12', Passengers: estimates[estimates.length -1], }, 125 | ].map(pred => CSVDataSet.transformObject(pred)); 126 | 127 | /* 128 | newScaledPredictions = [ 129 | { Month: '1960-12', Passengers: 0.8113135695457461 } 130 | ] 131 | inputMatrix = [ 132 | [ 0.7591229677200318 ] 133 | ], 134 | */ 135 | const inputMatrix = CSVDataSet.columnMatrix(independentVariables, newScaledPredictions); 136 | const newPredictions = await lstmTS.predict(inputMatrix); 137 | /* newPredictions= [ [ 0.6041761636734009 ] ] */ 138 | const newPreds = CSVDataSet.reverseColumnMatrix({ vectors: newPredictions, labels: dependentVariables, }); 139 | /* newPreds= [ { Passengers: 0.6041761636734009 } ] ] */ 140 | 141 | const newpredsDescaled = newPreds 142 | .map(val => CSVDataSet.inverseTransformObject(val)) 143 | .map(p=>({ Passengers:p.Passengers, })); 144 | /* newpredsDescaled = [ { Passengers: 
352.77940025172586 } ] */ 145 | 146 | } 147 | 148 | main(); -------------------------------------------------------------------------------- /manual/faq.md: -------------------------------------------------------------------------------- 1 | # Frequently Asked Questions 2 | 3 | ## Scaling 4 | 5 | - **How do I scale and descale my inputs?** 6 | - The library is only for model create, data engineering and modeling is typically handled elsewhere 7 | - Check out the regression examples (hint: use [ModelScript](https://repetere.github.io/modelscript/)). 8 | - **How do I cross validate, test/train split, K-fold, etc?** 9 | - (See above) The library is only for model create, data engineering and modeling is typically handled elsewhere 10 | - Check out the code examples (hint: use [ModelScript](https://repetere.github.io/modelscript/)). -------------------------------------------------------------------------------- /manual/overview.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | All of the model building classes are structured similarly to ml.js models. Each model has three shared components regardless if it's a regression, classification or time series model. 4 | 5 | This library was built to quickly implement [Tensorflow](https://js.tensorflow.org) and [Keras](https://js.tensorflow.org/tutorials/import-keras.html) models in JavaScript. 6 | 7 | ## Configuration 8 | 9 | All of the model configuration is managed during creating an instance of the class. All models have `fit` and `compile` properties that are assigned to the instance `settings` property. 
10 | 11 | You can also pass the Tensorflow reference to be used, as a configuration property if you want to use the compiled C++ or GPU via CUDA versions on the `tf` property; 12 | 13 | * `this.settings.compile` is passed to `tensorflow.model.compile` and contains the configuration for optimizers ([`read more`](https://js.tensorflow.org/api/latest/#tf.Model.compile)). 14 | * `this.settings.fit` is passed to `tensorflow.model.fit` and contains the configuration for model fitting ([`read more`](https://js.tensorflow.org/api/latest/#tf.Sequential.fit)) 15 | 16 | ### Model constructor example 17 | 18 | ```javascript 19 | import { MultipleLinearRegression, } from 'tensorscript'; 20 | import tf from '@tensorflow/tfjs-node-gpu'; 21 | 22 | const MLR = new MultipleLinearRegression({ 23 | fit:{ 24 | epochs:500, 25 | batchSize:5, 26 | }, 27 | compile:{ 28 | loss: 'meanSquaredError', 29 | optimizer: 'adam', 30 | }, 31 | },{ 32 | tf, 33 | }); 34 | ``` 35 | 36 | Each constructor takes two parameters `settings` and `properties`. As in the example above, *settings* are used to configure tensorflow objects and properties are used to configure tensorscript (like which version of tensorflow to use). 37 | 38 | ## Training 39 | 40 | All tensorflow models train asynchronously, therefore all tensorscript model train functions are all asynchronous. Always pass dependent and independent variables as matrix inputs. 41 | 42 | Time series models can be trained with only one input matrix. 43 | 44 | ### Training example 45 | 46 | ```javascript 47 | 48 | import { MultipleLinearRegression, } from 'tensorscript'; 49 | 50 | async function main(){ 51 | const MLR = new MultipleLinearRegression(); 52 | const x = [ [1], [2] ]; 53 | const y = [ [3], [4] ]; 54 | await MLR.train(x,y); 55 | } 56 | ``` 57 | 58 | ## Predicting 59 | 60 | All prediction inputs must be the same shape as training inputs. TensorScript provides an asynchronous predict method that converts tensor values to javascript objects. 
If you want the tensor from tensorflow, use the calculate method instead. 61 | 62 | ### Predicting example 63 | 64 | ```javascript 65 | import { MultipleLinearRegression, } from 'tensorscript'; 66 | 67 | async function main(){ 68 | const MLR = new MultipleLinearRegression(); 69 | const x = [ [1], [2] ]; 70 | const y = [ [3], [4] ]; 71 | await MLR.train(x,y); 72 | 73 | await MLR.predict([[3]]) //=> [[5]]; 74 | const tensorPrediction = await MLR.calculate([[3]]) //=> [[5]]; 75 | tensorPrediction.data() 76 | .then(tensors=> /*Typed Array*/) 77 | } 78 | ``` 79 | 80 | ## Examples 81 | 82 | For more examples on how to do feature scaling and more, check out the *[Examples folder](https://github.com/repetere/tensorscript/tree/master/manual/examples)*. -------------------------------------------------------------------------------- /manual/usage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | Usage examples are below 4 | 5 | ## Classification 6 | 7 | - Deep Learning Classification with Iris data set. [link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_classification-iris.mjs) 8 | - Logistic Regression Classification with social media ads. [link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_classification-social.mjs) 9 | 10 | ## Regression 11 | 12 | - Deep Learning Regression with Boston housing data set. [link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_regression-boston.mjs) 13 | - Multiple Linear Regression with Portland housing data set. [link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_regression-portland.mjs) 14 | 15 | ## Time Series 16 | 17 | - Long Short Term Memory with Airline prices. [link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_timeseries-airline.mjs) 18 | 19 | ## Neural Network 20 | 21 | - MLP with dense layers and the Portland housing data. 
[link](https://github.com/repetere/tensorscript/blob/master/manual/examples/ex_nn-portland.mjs) 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tensorscript", 3 | "version": "1.5.1", 4 | "description": "Deep Learning Classification, LSTM Time Series, Regression and Multi-Layered Perceptrons with Tensorflow", 5 | "main": "index.js", 6 | "module": "index.js", 7 | "type": "module", 8 | "browser": "bundle/tensorscript.umd.js", 9 | "web": "bundle/tensorscript.web.js", 10 | "node": "bundle/tensorscript.cjs.js", 11 | "es": "bundle/tensorscript.esm.js", 12 | "scripts": { 13 | "test": "nyc mocha -r @std/esm \"test/**/*.{js,mjs}\"", 14 | "test-file": "nyc mocha -r @std/esm ", 15 | "doc": "esdoc", 16 | "build": "rollup -c && esdoc", 17 | "coverage": "nyc report --reporter=text-lcov | coveralls" 18 | }, 19 | "repository": { 20 | "type": "git", 21 | "url": "git+https://github.com/repetere/tensorscript.git" 22 | }, 23 | "license": "MIT", 24 | "bugs": { 25 | "url": "https://github.com/repetere/tensorscript/issues" 26 | }, 27 | "engines": { 28 | "node": ">=8" 29 | }, 30 | "author": { 31 | "name": "Yaw Etse", 32 | "url": "https://github.com/yawetse" 33 | }, 34 | "keywords": [ 35 | "regression", 36 | "classification", 37 | "supervised", 38 | "learning", 39 | "supervised learning", 40 | "dl", 41 | "deep learning", 42 | "tensorflow", 43 | "machine learning", 44 | "ai" 45 | ], 46 | "homepage": "https://repetere.github.io/tensorscript", 47 | "devDependencies": { 48 | "@babel/core": "^7.4.3", 49 | "@babel/plugin-external-helpers": "^7.2.0", 50 | "@babel/plugin-proposal-class-properties": "^7.4.0", 51 | "@babel/plugin-proposal-export-default-from": "^7.2.0", 52 | "@babel/plugin-proposal-export-namespace-from": "^7.2.0", 53 | "@babel/plugin-syntax-dynamic-import": "^7.2.0", 54 | "@babel/plugin-transform-runtime": "^7.4.3", 55 | 
"@babel/preset-env": "^7.4.4", 56 | "@babel/preset-react": "^7.0.0", 57 | "@babel/runtime": "^7.4.3", 58 | "@repetere/esdoc-inject-gtm-plugin": "^0.1.0", 59 | "@std/esm": "^0.26.0", 60 | "babel-plugin-istanbul": "^5.1.4", 61 | "babel-plugin-replace-imports": "^1.0.2", 62 | "babel-polyfill": "^6.26.0", 63 | "babel-preset-env": "^1.7.0", 64 | "chai": "^4.2.0", 65 | "chai-as-promised": "^7.1.1", 66 | "coveralls": "^3.0.3", 67 | "esdoc": "^1.1.0", 68 | "esdoc-ecmascript-proposal-plugin": "^1.0.0", 69 | "esdoc-inject-script-plugin": "^1.0.0", 70 | "esdoc-publish-markdown-plugin": "^1.1.0", 71 | "esdoc-standard-plugin": "^1.0.0", 72 | "esm": "^3.2.22", 73 | "fs-extra": "^7.0.1", 74 | "mocha": "^6.1.4", 75 | "modelscript": "^2.1.3", 76 | "nyc": "^14.1.0", 77 | "rollup": "^1.11.3", 78 | "rollup-plugin-alias": "^1.5.1", 79 | "rollup-plugin-async": "^1.2.0", 80 | "rollup-plugin-babel": "^4.3.2", 81 | "rollup-plugin-commonjs": "^9.3.4", 82 | "rollup-plugin-node-builtins": "^2.1.2", 83 | "rollup-plugin-node-globals": "^1.4.0", 84 | "rollup-plugin-node-resolve": "^4.2.3", 85 | "rollup-plugin-replace": "^2.1.1", 86 | "rollup-plugin-terser": "^4.0.4", 87 | "rollup-plugin-terser-js": "^1.0.2", 88 | "sinon": "^7.3.2", 89 | "sinon-chai": "^3.3.0" 90 | }, 91 | "dependencies": { 92 | "@tensorflow-models/universal-sentence-encoder": "^1.1.1", 93 | "@tensorflow/tfjs": "^1.2.7", 94 | "@tensorflow/tfjs-node": "^1.2.7", 95 | "lodash.range": "^3.2.0" 96 | }, 97 | "@std/esm": "cjs", 98 | "nyc": { 99 | "extension": [ 100 | ".mjs", 101 | ".js" 102 | ], 103 | "include": [ 104 | "lib/**/*.mjs", 105 | "lib/**/*.js" 106 | ], 107 | "require": [ 108 | "@std/esm" 109 | ] 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ] 5 | } 6 | 
-------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import resolve from 'rollup-plugin-node-resolve'; 2 | import commonjs from 'rollup-plugin-commonjs'; 3 | // import alias from 'rollup-plugin-alias'; 4 | import babel from 'rollup-plugin-babel'; 5 | import replace from 'rollup-plugin-replace'; 6 | // import async from 'rollup-plugin-async'; 7 | import builtins from 'rollup-plugin-node-builtins'; 8 | import globals from 'rollup-plugin-node-globals'; 9 | import pkg from './package.json'; 10 | import { terser, } from 'rollup-plugin-terser'; 11 | 12 | const plugins = [ 13 | replace({ 14 | 'process.env.NODE_ENV': JSON.stringify('development'), 15 | }), 16 | resolve({ 17 | preferBuiltins: true, 18 | }), // so Rollup can find `ms` 19 | builtins({ 20 | }), 21 | commonjs({ 22 | namedExports: { 23 | 'node_modules/lodash.range/index.js': [ 'default', ], 24 | 'node_modules/@tensorflow/tfjs/dist/tf.esm.js': [ 'default' ], 25 | }, 26 | }), // so Rollup can convert `ms` to an ES module 27 | babel({ 28 | runtimeHelpers: true, 29 | externalHelpers: true, 30 | // exclude: 'node_modules/@babel/runtime/**', 31 | exclude: 'node_modules/@babel/runtime/helpers/typeof.js', 32 | 'presets': [ 33 | [ '@babel/env', {}, ], 34 | ], 35 | plugins: [ 36 | [ 37 | '@babel/transform-runtime', 38 | // { useESModules: output.format !== 'cjs' } 39 | ], 40 | [ 'babel-plugin-replace-imports', { 41 | 'test': /tensorflow\/tfjs-node$/, 42 | 'replacer': 'tensorflow/tfjs', 43 | }, ], 44 | [ 45 | '@babel/plugin-proposal-export-namespace-from', 46 | ], 47 | ], 48 | // exclude: 'node_modules/**', // only transpile our source code 49 | }), 50 | 51 | globals({ 52 | }), 53 | 54 | ]; 55 | 56 | const minifiedPlugins = plugins.concat([ 57 | terser({ 58 | sourcemap: true 59 | }), 60 | ]); 61 | 62 | export default [ 63 | // browser-friendly UMD build 64 | { 65 | input: 'index.js', 66 | 
output: [ 67 | { 68 | exports: 'named', 69 | file: pkg.browser, 70 | name: 'tensorscript', 71 | format: 'umd', 72 | }, 73 | { 74 | exports: 'named', 75 | file: pkg.web, 76 | name: 'tensorscript', 77 | format: 'iife', 78 | }, 79 | ], 80 | plugins, 81 | }, 82 | { 83 | input: 'index.js', 84 | output: [ 85 | { 86 | exports: 'named', 87 | file: pkg.browser.replace('.js','.min.js'), 88 | name: 'tensorscript', 89 | format: 'umd', 90 | }, 91 | { 92 | exports: 'named', 93 | file: pkg.web.replace('.js','.min.js'), 94 | name: 'tensorscript', 95 | format: 'iife', 96 | }, 97 | ], 98 | plugins:minifiedPlugins, 99 | }, 100 | 101 | // CommonJS (for Node) and ES module (for bundlers) build. 102 | // (We could have three entries in the configuration array 103 | // instead of two, but it's quicker to generate multiple 104 | // builds from a single configuration where possible, using 105 | // an array for the `output` option, where we can specify 106 | // `file` and `format` for each target) 107 | { 108 | input: 'index.js', 109 | external: [ 110 | '@tensorflow/tfjs', 111 | '@tensorflow/tfjs-node', 112 | '@tensorflow-models/universal-sentence-encoder', 113 | 'lodash.range', 114 | // 'lodash.rangeright' 115 | ], 116 | output: [ 117 | { 118 | exports: 'named', 119 | file: pkg.node, 120 | name: 'tensorscript', 121 | format: 'cjs', 122 | }, 123 | { 124 | exports: 'named', 125 | file: pkg.es, 126 | name: 'tensorscript', 127 | format: 'es', 128 | }, 129 | ], 130 | }, 131 | ]; 132 | -------------------------------------------------------------------------------- /test/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/test/.DS_Store -------------------------------------------------------------------------------- /test/mock/.DS_Store: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/test/mock/.DS_Store -------------------------------------------------------------------------------- /test/mock/data/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/test/mock/data/.DS_Store -------------------------------------------------------------------------------- /test/mock/data/airline-sales.csv: -------------------------------------------------------------------------------- 1 | "Month","Passengers" 2 | "1949-01",112 3 | "1949-02",118 4 | "1949-03",132 5 | "1949-04",129 6 | "1949-05",121 7 | "1949-06",135 8 | "1949-07",148 9 | "1949-08",148 10 | "1949-09",136 11 | "1949-10",119 12 | "1949-11",104 13 | "1949-12",118 14 | "1950-01",115 15 | "1950-02",126 16 | "1950-03",141 17 | "1950-04",135 18 | "1950-05",125 19 | "1950-06",149 20 | "1950-07",170 21 | "1950-08",170 22 | "1950-09",158 23 | "1950-10",133 24 | "1950-11",114 25 | "1950-12",140 26 | "1951-01",145 27 | "1951-02",150 28 | "1951-03",178 29 | "1951-04",163 30 | "1951-05",172 31 | "1951-06",178 32 | "1951-07",199 33 | "1951-08",199 34 | "1951-09",184 35 | "1951-10",162 36 | "1951-11",146 37 | "1951-12",166 38 | "1952-01",171 39 | "1952-02",180 40 | "1952-03",193 41 | "1952-04",181 42 | "1952-05",183 43 | "1952-06",218 44 | "1952-07",230 45 | "1952-08",242 46 | "1952-09",209 47 | "1952-10",191 48 | "1952-11",172 49 | "1952-12",194 50 | "1953-01",196 51 | "1953-02",196 52 | "1953-03",236 53 | "1953-04",235 54 | "1953-05",229 55 | "1953-06",243 56 | "1953-07",264 57 | "1953-08",272 58 | "1953-09",237 59 | "1953-10",211 60 | "1953-11",180 61 | "1953-12",201 62 | "1954-01",204 63 | "1954-02",188 64 | "1954-03",235 65 | "1954-04",227 66 | "1954-05",234 67 | "1954-06",264 68 | "1954-07",302 69 | "1954-08",293 70 | "1954-09",259 71 | "1954-10",229 72 | "1954-11",203 73 | 
"1954-12",229 74 | "1955-01",242 75 | "1955-02",233 76 | "1955-03",267 77 | "1955-04",269 78 | "1955-05",270 79 | "1955-06",315 80 | "1955-07",364 81 | "1955-08",347 82 | "1955-09",312 83 | "1955-10",274 84 | "1955-11",237 85 | "1955-12",278 86 | "1956-01",284 87 | "1956-02",277 88 | "1956-03",317 89 | "1956-04",313 90 | "1956-05",318 91 | "1956-06",374 92 | "1956-07",413 93 | "1956-08",405 94 | "1956-09",355 95 | "1956-10",306 96 | "1956-11",271 97 | "1956-12",306 98 | "1957-01",315 99 | "1957-02",301 100 | "1957-03",356 101 | "1957-04",348 102 | "1957-05",355 103 | "1957-06",422 104 | "1957-07",465 105 | "1957-08",467 106 | "1957-09",404 107 | "1957-10",347 108 | "1957-11",305 109 | "1957-12",336 110 | "1958-01",340 111 | "1958-02",318 112 | "1958-03",362 113 | "1958-04",348 114 | "1958-05",363 115 | "1958-06",435 116 | "1958-07",491 117 | "1958-08",505 118 | "1958-09",404 119 | "1958-10",359 120 | "1958-11",310 121 | "1958-12",337 122 | "1959-01",360 123 | "1959-02",342 124 | "1959-03",406 125 | "1959-04",396 126 | "1959-05",420 127 | "1959-06",472 128 | "1959-07",548 129 | "1959-08",559 130 | "1959-09",463 131 | "1959-10",407 132 | "1959-11",362 133 | "1959-12",405 134 | "1960-01",417 135 | "1960-02",391 136 | "1960-03",419 137 | "1960-04",461 138 | "1960-05",472 139 | "1960-06",535 140 | "1960-07",622 141 | "1960-08",606 142 | "1960-09",508 143 | "1960-10",461 144 | "1960-11",390 145 | "1960-12",432 -------------------------------------------------------------------------------- /test/mock/data/boston_housing_names.txt: -------------------------------------------------------------------------------- 1 | 1. Title: Boston Housing Data 2 | 3 | 2. Sources: 4 | (a) Origin: This dataset was taken from the StatLib library which is 5 | maintained at Carnegie Mellon University. 6 | (b) Creator: Harrison, D. and Rubinfeld, D.L. 'Hedonic prices and the 7 | demand for clean air', J. Environ. Economics & Management, 8 | vol.5, 81-102, 1978. 
9 | (c) Date: July 7, 1993 10 | 11 | 3. Past Usage: 12 | - Used in Belsley, Kuh & Welsch, 'Regression diagnostics ...', Wiley, 13 | 1980. N.B. Various transformations are used in the table on 14 | pages 244-261. 15 | - Quinlan,R. (1993). Combining Instance-Based and Model-Based Learning. 16 | In Proceedings on the Tenth International Conference of Machine 17 | Learning, 236-243, University of Massachusetts, Amherst. Morgan 18 | Kaufmann. 19 | 20 | 4. Relevant Information: 21 | 22 | Concerns housing values in suburbs of Boston. 23 | 24 | 5. Number of Instances: 506 25 | 26 | 6. Number of Attributes: 13 continuous attributes (including "class" 27 | attribute "MEDV"), 1 binary-valued attribute. 28 | 29 | 7. Attribute Information: 30 | 31 | 1. CRIM per capita crime rate by town 32 | 2. ZN proportion of residential land zoned for lots over 33 | 25,000 sq.ft. 34 | 3. INDUS proportion of non-retail business acres per town 35 | 4. CHAS Charles River dummy variable (= 1 if tract bounds 36 | river; 0 otherwise) 37 | 5. NOX nitric oxides concentration (parts per 10 million) 38 | 6. RM average number of rooms per dwelling 39 | 7. AGE proportion of owner-occupied units built prior to 1940 40 | 8. DIS weighted distances to five Boston employment centres 41 | 9. RAD index of accessibility to radial highways 42 | 10. TAX full-value property-tax rate per $10,000 43 | 11. PTRATIO pupil-teacher ratio by town 44 | 12. B 1000(Bk - 0.63)^2 where Bk is the proportion of blacks 45 | by town 46 | 13. LSTAT % lower status of the population 47 | 14. MEDV Median value of owner-occupied homes in $1000's 48 | 49 | 8. Missing Attribute Values: None. 
50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /test/mock/data/demo.mjs: -------------------------------------------------------------------------------- 1 | // import ms from 'modelscript'; 2 | // import { LSTMMultivariateTimeSeries, } from '../../../lib/lstm_multivariate_time_series.mjs'; 3 | // import range from 'lodash.range'; 4 | 5 | // const independentVariables = ['i1', 'i2', 'i3', 'i4', 'i5', 'i6', 'i7', 'i8', ]; 6 | // const dependentVariables = ['o1',]; 7 | 8 | // const ds = [ 9 | // [10, 20, 30, 40, 50, 60, 70, 80, 90,], 10 | // [11, 21, 31, 41, 51, 61, 71, 81, 91,], 11 | // [12, 22, 32, 42, 52, 62, 72, 82, 92,], 12 | // [13, 23, 33, 43, 53, 63, 73, 83, 93,], 13 | // [14, 24, 34, 44, 54, 64, 74, 84, 94,], 14 | // [15, 25, 35, 45, 55, 65, 75, 85, 95,], 15 | // [16, 26, 36, 46, 56, 66, 76, 86, 96,], 16 | // [17, 27, 37, 47, 57, 67, 77, 87, 97,], 17 | // [18, 28, 38, 48, 58, 68, 78, 88, 98,], 18 | // [19, 29, 39, 49, 59, 69, 79, 89, 99,], 19 | // ]; 20 | 21 | 22 | // function seriesToSupervised(data, n_in = 1, n_out = 1) { 23 | // // let n_vars = data[ 0 ].length; 24 | // let cols = []; 25 | // // let names = []; 26 | // // input sequence (t-n, ... 
t-1) 27 | // for (let x in data) { 28 | // x = parseInt(x); 29 | // let maxIndex = x + n_in + n_out; 30 | // if (maxIndex > data.length) break; 31 | // cols[ x ] = []; 32 | // // console.log({ x,maxIndex }); 33 | // for (let i in range(n_in)) { 34 | // i = parseInt(i); 35 | // cols[ x ].push(...data[x+i]); 36 | // // console.log({ i, cols, }); 37 | // } 38 | // for (let j in range(n_out)){ 39 | // j = parseInt(j); 40 | // cols[ x ].push(...data[ x+j+n_in ]); 41 | // // console.log({ j, cols, }); 42 | // } 43 | // } 44 | // return cols; 45 | // } 46 | // function drop(data,columns) { 47 | // return data.reduce((cols, row, i) => { 48 | // cols[ i ] = []; 49 | // row.forEach((col, idx) => { 50 | // if (columns.indexOf(idx)===-1) { 51 | // cols[ i ].push(col); 52 | // } 53 | // }); 54 | // return cols; 55 | // }, []); 56 | // } 57 | 58 | // function getDropableColumns(features, n_in, n_out) { 59 | // const cols = features + 1; 60 | // const total_cols = cols * n_in + cols * n_out; 61 | // // console.log({ cols, total_cols }); 62 | // return range(total_cols - cols +1, total_cols); 63 | // } 64 | 65 | // // const n_in = 1; //lookbacks 66 | // // const n_out = 1; 67 | // // const series = seriesToSupervised(ds, n_in, n_out); 68 | // // const dropped = getDropableColumns(8, n_in, n_out); 69 | // // // const droppedColumns = drop(series, [ 10, 11, 12, 13, 14, 15, 16, 17, ]); 70 | // // const droppedColumns = drop(series, dropped); 71 | // // const y = ms.util.pivotVector(droppedColumns)[ droppedColumns[0].length - 1 ]; 72 | // // const x = drop(droppedColumns, [ droppedColumns[ 0 ].length - 1 ]); 73 | // // console.log({ series, dropped, droppedColumns, y, x, }); 74 | 75 | 76 | // function scaleColumnMap(columnName) { 77 | // return { 78 | // name: columnName, 79 | // options: { 80 | // strategy: 'scale', 81 | // scaleOptions: { 82 | // strategy:'standard', 83 | // }, 84 | // }, 85 | // }; 86 | // } 87 | 88 | 89 | 90 | // async function mainTS() { 91 | // const csvData = 
await ms.csv.loadCSV('./test/mock/data/sample.csv'); 92 | // const DataSet = new ms.DataSet(csvData); 93 | // const columns = dependentVariables.concat(independentVariables); 94 | // const scaledData = DataSet.fitColumns({ 95 | // columns: columns.map(scaleColumnMap), 96 | // returnData: true, 97 | // }); 98 | 99 | // const train_size = parseInt(DataSet.data.length * 0.67); 100 | // const test_size = DataSet.data.length - train_size; 101 | // // console.log({ train_size, test_size }); 102 | 103 | // const train_x_data = DataSet.data.slice(0, train_size); 104 | // const test_x_data = DataSet.data.slice(train_size, DataSet.data.length); 105 | // const trainDataSet = new ms.DataSet(train_x_data); 106 | // const testDataSet = new ms.DataSet(test_x_data); 107 | // const x_matrix = trainDataSet.columnMatrix(columns); 108 | // const x_matrix_test = testDataSet.columnMatrix(columns); 109 | // // console.log({ csvData, scaledData, }); 110 | 111 | // const TSTSONE = new LSTMMultivariateTimeSeries({ 112 | // lookback: 1, 113 | // features: 8, 114 | // }); 115 | // const TSTS = new LSTMMultivariateTimeSeries({ 116 | // lookback: 2, 117 | // features: 8, 118 | // }); 119 | // const evals = [ 120 | // // { 121 | // // model: TSTS, 122 | // // modelname: 'TSTS', 123 | // // }, 124 | // { 125 | // model: TSTSONE, 126 | // modelname: 'TSTSONE', 127 | // }, 128 | // ]; 129 | // for (let i in evals) { 130 | // let preddata = evals[ i ]; 131 | // const testData = preddata.model.getTimeseriesDataSet(x_matrix_test); 132 | // const m = await preddata.model.train(x_matrix,undefined,undefined,testData.x_matrix,testData.y_matrix); 133 | // // const preInputShape = LSTMTimeSeries.getInputShape(preddata.input); 134 | // // // const predictions = await preddata.model.predict(preddata.input); 135 | // // const predictions = await preddata.model.predict(x_matrix_test); 136 | // const predictions = await preddata.model.predict(testData.x_matrix); 137 | // const predictions_unscaled = 
predictions.map(pred => [DataSet.scalers.get('o1').descale(pred[ 0 ]),]); 138 | // const actuals_unscaled = testData.y_matrix.map(act => [DataSet.scalers.get('o1').descale(act[ 0 ]),]); 139 | // // let results = ms.DataSet.reverseColumnMatrix({ 140 | // // vectors: predictions_unscaled, 141 | // // labels: dependentVariables, 142 | // // }); 143 | // console.log({ 144 | // // model: preddata.modelname, 145 | // predictions, 146 | // actuals_unscaled, 147 | // predictions_unscaled, 148 | // // // results, 149 | // accuracy: (ms.util.rSquared( 150 | // ms.util.pivotVector(actuals_unscaled)[ 0 ], //actuals, 151 | // ms.util.pivotVector(predictions_unscaled)[ 0 ], //estimates, 152 | // ) * 100).toFixed(2)+'%', 153 | // }); 154 | // } 155 | // console.log('built network'); 156 | // } 157 | // mainTS(); -------------------------------------------------------------------------------- /test/mock/data/international-airline-passengers-no_footer.csv: -------------------------------------------------------------------------------- 1 | "Month","Passengers" 2 | "1949-01",112 3 | "1949-02",118 4 | "1949-03",132 5 | "1949-04",129 6 | "1949-05",121 7 | "1949-06",135 8 | "1949-07",148 9 | "1949-08",148 10 | "1949-09",136 11 | "1949-10",119 12 | "1949-11",104 13 | "1949-12",118 14 | "1950-01",115 15 | "1950-02",126 16 | "1950-03",141 17 | "1950-04",135 18 | "1950-05",125 19 | "1950-06",149 20 | "1950-07",170 21 | "1950-08",170 22 | "1950-09",158 23 | "1950-10",133 24 | "1950-11",114 25 | "1950-12",140 26 | "1951-01",145 27 | "1951-02",150 28 | "1951-03",178 29 | "1951-04",163 30 | "1951-05",172 31 | "1951-06",178 32 | "1951-07",199 33 | "1951-08",199 34 | "1951-09",184 35 | "1951-10",162 36 | "1951-11",146 37 | "1951-12",166 38 | "1952-01",171 39 | "1952-02",180 40 | "1952-03",193 41 | "1952-04",181 42 | "1952-05",183 43 | "1952-06",218 44 | "1952-07",230 45 | "1952-08",242 46 | "1952-09",209 47 | "1952-10",191 48 | "1952-11",172 49 | "1952-12",194 50 | "1953-01",196 51 | 
"1953-02",196 52 | "1953-03",236 53 | "1953-04",235 54 | "1953-05",229 55 | "1953-06",243 56 | "1953-07",264 57 | "1953-08",272 58 | "1953-09",237 59 | "1953-10",211 60 | "1953-11",180 61 | "1953-12",201 62 | "1954-01",204 63 | "1954-02",188 64 | "1954-03",235 65 | "1954-04",227 66 | "1954-05",234 67 | "1954-06",264 68 | "1954-07",302 69 | "1954-08",293 70 | "1954-09",259 71 | "1954-10",229 72 | "1954-11",203 73 | "1954-12",229 74 | "1955-01",242 75 | "1955-02",233 76 | "1955-03",267 77 | "1955-04",269 78 | "1955-05",270 79 | "1955-06",315 80 | "1955-07",364 81 | "1955-08",347 82 | "1955-09",312 83 | "1955-10",274 84 | "1955-11",237 85 | "1955-12",278 86 | "1956-01",284 87 | "1956-02",277 88 | "1956-03",317 89 | "1956-04",313 90 | "1956-05",318 91 | "1956-06",374 92 | "1956-07",413 93 | "1956-08",405 94 | "1956-09",355 95 | "1956-10",306 96 | "1956-11",271 97 | "1956-12",306 98 | "1957-01",315 99 | "1957-02",301 100 | "1957-03",356 101 | "1957-04",348 102 | "1957-05",355 103 | "1957-06",422 104 | "1957-07",465 105 | "1957-08",467 106 | "1957-09",404 107 | "1957-10",347 108 | "1957-11",305 109 | "1957-12",336 110 | "1958-01",340 111 | "1958-02",318 112 | "1958-03",362 113 | "1958-04",348 114 | "1958-05",363 115 | "1958-06",435 116 | "1958-07",491 117 | "1958-08",505 118 | "1958-09",404 119 | "1958-10",359 120 | "1958-11",310 121 | "1958-12",337 122 | "1959-01",360 123 | "1959-02",342 124 | "1959-03",406 125 | "1959-04",396 126 | "1959-05",420 127 | "1959-06",472 128 | "1959-07",548 129 | "1959-08",559 130 | "1959-09",463 131 | "1959-10",407 132 | "1959-11",362 133 | "1959-12",405 134 | "1960-01",417 135 | "1960-02",391 136 | "1960-03",419 137 | "1960-04",461 138 | "1960-05",472 139 | "1960-06",535 140 | "1960-07",622 141 | "1960-08",606 142 | "1960-09",508 143 | "1960-10",461 144 | "1960-11",390 145 | "1960-12",432 -------------------------------------------------------------------------------- /test/mock/data/iris_data.csv: 
-------------------------------------------------------------------------------- 1 | sepal_length_cm,sepal_width_cm,petal_length_cm,petal_width_cm,plant 2 | 5.1,3.5,1.4,0.2,Iris-setosa 3 | 4.9,3.0,1.4,0.2,Iris-setosa 4 | 4.7,3.2,1.3,0.2,Iris-setosa 5 | 4.6,3.1,1.5,0.2,Iris-setosa 6 | 5.0,3.6,1.4,0.2,Iris-setosa 7 | 5.4,3.9,1.7,0.4,Iris-setosa 8 | 4.6,3.4,1.4,0.3,Iris-setosa 9 | 5.0,3.4,1.5,0.2,Iris-setosa 10 | 4.4,2.9,1.4,0.2,Iris-setosa 11 | 4.9,3.1,1.5,0.1,Iris-setosa 12 | 5.4,3.7,1.5,0.2,Iris-setosa 13 | 4.8,3.4,1.6,0.2,Iris-setosa 14 | 4.8,3.0,1.4,0.1,Iris-setosa 15 | 4.3,3.0,1.1,0.1,Iris-setosa 16 | 5.8,4.0,1.2,0.2,Iris-setosa 17 | 5.7,4.4,1.5,0.4,Iris-setosa 18 | 5.4,3.9,1.3,0.4,Iris-setosa 19 | 5.1,3.5,1.4,0.3,Iris-setosa 20 | 5.7,3.8,1.7,0.3,Iris-setosa 21 | 5.1,3.8,1.5,0.3,Iris-setosa 22 | 5.4,3.4,1.7,0.2,Iris-setosa 23 | 5.1,3.7,1.5,0.4,Iris-setosa 24 | 4.6,3.6,1.0,0.2,Iris-setosa 25 | 5.1,3.3,1.7,0.5,Iris-setosa 26 | 4.8,3.4,1.9,0.2,Iris-setosa 27 | 5.0,3.0,1.6,0.2,Iris-setosa 28 | 5.0,3.4,1.6,0.4,Iris-setosa 29 | 5.2,3.5,1.5,0.2,Iris-setosa 30 | 5.2,3.4,1.4,0.2,Iris-setosa 31 | 4.7,3.2,1.6,0.2,Iris-setosa 32 | 4.8,3.1,1.6,0.2,Iris-setosa 33 | 5.4,3.4,1.5,0.4,Iris-setosa 34 | 5.2,4.1,1.5,0.1,Iris-setosa 35 | 5.5,4.2,1.4,0.2,Iris-setosa 36 | 4.9,3.1,1.5,0.1,Iris-setosa 37 | 5.0,3.2,1.2,0.2,Iris-setosa 38 | 5.5,3.5,1.3,0.2,Iris-setosa 39 | 4.9,3.1,1.5,0.1,Iris-setosa 40 | 4.4,3.0,1.3,0.2,Iris-setosa 41 | 5.1,3.4,1.5,0.2,Iris-setosa 42 | 5.0,3.5,1.3,0.3,Iris-setosa 43 | 4.5,2.3,1.3,0.3,Iris-setosa 44 | 4.4,3.2,1.3,0.2,Iris-setosa 45 | 5.0,3.5,1.6,0.6,Iris-setosa 46 | 5.1,3.8,1.9,0.4,Iris-setosa 47 | 4.8,3.0,1.4,0.3,Iris-setosa 48 | 5.1,3.8,1.6,0.2,Iris-setosa 49 | 4.6,3.2,1.4,0.2,Iris-setosa 50 | 5.3,3.7,1.5,0.2,Iris-setosa 51 | 5.0,3.3,1.4,0.2,Iris-setosa 52 | 7.0,3.2,4.7,1.4,Iris-versicolor 53 | 6.4,3.2,4.5,1.5,Iris-versicolor 54 | 6.9,3.1,4.9,1.5,Iris-versicolor 55 | 5.5,2.3,4.0,1.3,Iris-versicolor 56 | 6.5,2.8,4.6,1.5,Iris-versicolor 57 | 
5.7,2.8,4.5,1.3,Iris-versicolor 58 | 6.3,3.3,4.7,1.6,Iris-versicolor 59 | 4.9,2.4,3.3,1.0,Iris-versicolor 60 | 6.6,2.9,4.6,1.3,Iris-versicolor 61 | 5.2,2.7,3.9,1.4,Iris-versicolor 62 | 5.0,2.0,3.5,1.0,Iris-versicolor 63 | 5.9,3.0,4.2,1.5,Iris-versicolor 64 | 6.0,2.2,4.0,1.0,Iris-versicolor 65 | 6.1,2.9,4.7,1.4,Iris-versicolor 66 | 5.6,2.9,3.6,1.3,Iris-versicolor 67 | 6.7,3.1,4.4,1.4,Iris-versicolor 68 | 5.6,3.0,4.5,1.5,Iris-versicolor 69 | 5.8,2.7,4.1,1.0,Iris-versicolor 70 | 6.2,2.2,4.5,1.5,Iris-versicolor 71 | 5.6,2.5,3.9,1.1,Iris-versicolor 72 | 5.9,3.2,4.8,1.8,Iris-versicolor 73 | 6.1,2.8,4.0,1.3,Iris-versicolor 74 | 6.3,2.5,4.9,1.5,Iris-versicolor 75 | 6.1,2.8,4.7,1.2,Iris-versicolor 76 | 6.4,2.9,4.3,1.3,Iris-versicolor 77 | 6.6,3.0,4.4,1.4,Iris-versicolor 78 | 6.8,2.8,4.8,1.4,Iris-versicolor 79 | 6.7,3.0,5.0,1.7,Iris-versicolor 80 | 6.0,2.9,4.5,1.5,Iris-versicolor 81 | 5.7,2.6,3.5,1.0,Iris-versicolor 82 | 5.5,2.4,3.8,1.1,Iris-versicolor 83 | 5.5,2.4,3.7,1.0,Iris-versicolor 84 | 5.8,2.7,3.9,1.2,Iris-versicolor 85 | 6.0,2.7,5.1,1.6,Iris-versicolor 86 | 5.4,3.0,4.5,1.5,Iris-versicolor 87 | 6.0,3.4,4.5,1.6,Iris-versicolor 88 | 6.7,3.1,4.7,1.5,Iris-versicolor 89 | 6.3,2.3,4.4,1.3,Iris-versicolor 90 | 5.6,3.0,4.1,1.3,Iris-versicolor 91 | 5.5,2.5,4.0,1.3,Iris-versicolor 92 | 5.5,2.6,4.4,1.2,Iris-versicolor 93 | 6.1,3.0,4.6,1.4,Iris-versicolor 94 | 5.8,2.6,4.0,1.2,Iris-versicolor 95 | 5.0,2.3,3.3,1.0,Iris-versicolor 96 | 5.6,2.7,4.2,1.3,Iris-versicolor 97 | 5.7,3.0,4.2,1.2,Iris-versicolor 98 | 5.7,2.9,4.2,1.3,Iris-versicolor 99 | 6.2,2.9,4.3,1.3,Iris-versicolor 100 | 5.1,2.5,3.0,1.1,Iris-versicolor 101 | 5.7,2.8,4.1,1.3,Iris-versicolor 102 | 6.3,3.3,6.0,2.5,Iris-virginica 103 | 5.8,2.7,5.1,1.9,Iris-virginica 104 | 7.1,3.0,5.9,2.1,Iris-virginica 105 | 6.3,2.9,5.6,1.8,Iris-virginica 106 | 6.5,3.0,5.8,2.2,Iris-virginica 107 | 7.6,3.0,6.6,2.1,Iris-virginica 108 | 4.9,2.5,4.5,1.7,Iris-virginica 109 | 7.3,2.9,6.3,1.8,Iris-virginica 110 | 6.7,2.5,5.8,1.8,Iris-virginica 111 
| 7.2,3.6,6.1,2.5,Iris-virginica 112 | 6.5,3.2,5.1,2.0,Iris-virginica 113 | 6.4,2.7,5.3,1.9,Iris-virginica 114 | 6.8,3.0,5.5,2.1,Iris-virginica 115 | 5.7,2.5,5.0,2.0,Iris-virginica 116 | 5.8,2.8,5.1,2.4,Iris-virginica 117 | 6.4,3.2,5.3,2.3,Iris-virginica 118 | 6.5,3.0,5.5,1.8,Iris-virginica 119 | 7.7,3.8,6.7,2.2,Iris-virginica 120 | 7.7,2.6,6.9,2.3,Iris-virginica 121 | 6.0,2.2,5.0,1.5,Iris-virginica 122 | 6.9,3.2,5.7,2.3,Iris-virginica 123 | 5.6,2.8,4.9,2.0,Iris-virginica 124 | 7.7,2.8,6.7,2.0,Iris-virginica 125 | 6.3,2.7,4.9,1.8,Iris-virginica 126 | 6.7,3.3,5.7,2.1,Iris-virginica 127 | 7.2,3.2,6.0,1.8,Iris-virginica 128 | 6.2,2.8,4.8,1.8,Iris-virginica 129 | 6.1,3.0,4.9,1.8,Iris-virginica 130 | 6.4,2.8,5.6,2.1,Iris-virginica 131 | 7.2,3.0,5.8,1.6,Iris-virginica 132 | 7.4,2.8,6.1,1.9,Iris-virginica 133 | 7.9,3.8,6.4,2.0,Iris-virginica 134 | 6.4,2.8,5.6,2.2,Iris-virginica 135 | 6.3,2.8,5.1,1.5,Iris-virginica 136 | 6.1,2.6,5.6,1.4,Iris-virginica 137 | 7.7,3.0,6.1,2.3,Iris-virginica 138 | 6.3,3.4,5.6,2.4,Iris-virginica 139 | 6.4,3.1,5.5,1.8,Iris-virginica 140 | 6.0,3.0,4.8,1.8,Iris-virginica 141 | 6.9,3.1,5.4,2.1,Iris-virginica 142 | 6.7,3.1,5.6,2.4,Iris-virginica 143 | 6.9,3.1,5.1,2.3,Iris-virginica 144 | 5.8,2.7,5.1,1.9,Iris-virginica 145 | 6.8,3.2,5.9,2.3,Iris-virginica 146 | 6.7,3.3,5.7,2.5,Iris-virginica 147 | 6.7,3.0,5.2,2.3,Iris-virginica 148 | 6.3,2.5,5.0,1.9,Iris-virginica 149 | 6.5,3.0,5.2,2.0,Iris-virginica 150 | 6.2,3.4,5.4,2.3,Iris-virginica 151 | 5.9,3.0,5.1,1.8,Iris-virginica 152 | 153 | -------------------------------------------------------------------------------- /test/mock/data/iris_names.txt: -------------------------------------------------------------------------------- 1 | 1. Title: Iris Plants Database 2 | Updated Sept 21 by C.Blake - Added discrepency information 3 | 4 | 2. Sources: 5 | (a) Creator: R.A. Fisher 6 | (b) Donor: Michael Marshall (MARSHALL%PLU@io.arc.nasa.gov) 7 | (c) Date: July, 1988 8 | 9 | 3. 
Past Usage: 10 | - Publications: too many to mention!!! Here are a few. 11 | 1. Fisher,R.A. "The use of multiple measurements in taxonomic problems" 12 | Annual Eugenics, 7, Part II, 179-188 (1936); also in "Contributions 13 | to Mathematical Statistics" (John Wiley, NY, 1950). 14 | 2. Duda,R.O., & Hart,P.E. (1973) Pattern Classification and Scene Analysis. 15 | (Q327.D83) John Wiley & Sons. ISBN 0-471-22361-1. See page 218. 16 | 3. Dasarathy, B.V. (1980) "Nosing Around the Neighborhood: A New System 17 | Structure and Classification Rule for Recognition in Partially Exposed 18 | Environments". IEEE Transactions on Pattern Analysis and Machine 19 | Intelligence, Vol. PAMI-2, No. 1, 67-71. 20 | -- Results: 21 | -- very low misclassification rates (0% for the setosa class) 22 | 4. Gates, G.W. (1972) "The Reduced Nearest Neighbor Rule". IEEE 23 | Transactions on Information Theory, May 1972, 431-433. 24 | -- Results: 25 | -- very low misclassification rates again 26 | 5. See also: 1988 MLC Proceedings, 54-64. Cheeseman et al's AUTOCLASS II 27 | conceptual clustering system finds 3 classes in the data. 28 | 29 | 4. Relevant Information: 30 | --- This is perhaps the best known database to be found in the pattern 31 | recognition literature. Fisher's paper is a classic in the field 32 | and is referenced frequently to this day. (See Duda & Hart, for 33 | example.) The data set contains 3 classes of 50 instances each, 34 | where each class refers to a type of iris plant. One class is 35 | linearly separable from the other 2; the latter are NOT linearly 36 | separable from each other. 37 | --- Predicted attribute: class of iris plant. 38 | --- This is an exceedingly simple domain. 39 | --- This data differs from the data presented in Fishers article 40 | (identified by Steve Chadwick, spchadwick@espeedaz.net ) 41 | The 35th sample should be: 4.9,3.1,1.5,0.2,"Iris-setosa" 42 | where the error is in the fourth feature. 
43 | The 38th sample: 4.9,3.6,1.4,0.1,"Iris-setosa" 44 | where the errors are in the second and third features. 45 | 46 | 5. Number of Instances: 150 (50 in each of three classes) 47 | 48 | 6. Number of Attributes: 4 numeric, predictive attributes and the class 49 | 50 | 7. Attribute Information: 51 | 1. sepal length in cm 52 | 2. sepal width in cm 53 | 3. petal length in cm 54 | 4. petal width in cm 55 | 5. class: 56 | -- Iris Setosa 57 | -- Iris Versicolour 58 | -- Iris Virginica 59 | 60 | 8. Missing Attribute Values: None 61 | 62 | Summary Statistics: 63 | Min Max Mean SD Class Correlation 64 | sepal length: 4.3 7.9 5.84 0.83 0.7826 65 | sepal width: 2.0 4.4 3.05 0.43 -0.4194 66 | petal length: 1.0 6.9 3.76 1.76 0.9490 (high!) 67 | petal width: 0.1 2.5 1.20 0.76 0.9565 (high!) 68 | 69 | 9. Class Distribution: 33.3% for each of 3 classes. 70 | -------------------------------------------------------------------------------- /test/mock/data/portland_housing_data.csv: -------------------------------------------------------------------------------- 1 | sqft,bedrooms,price 2 | 2104,3,399900 3 | 1600,3,329900 4 | 2400,3,369000 5 | 1416,2,232000 6 | 3000,4,539900 7 | 1985,4,299900 8 | 1534,3,314900 9 | 1427,3,198999 10 | 1380,3,212000 11 | 1494,3,242500 12 | 1940,4,239999 13 | 2000,3,347000 14 | 1890,3,329999 15 | 4478,5,699900 16 | 1268,3,259900 17 | 2300,4,449900 18 | 1320,2,299900 19 | 1236,3,199900 20 | 2609,4,499998 21 | 3031,4,599000 22 | 1767,3,252900 23 | 1888,2,255000 24 | 1604,3,242900 25 | 1962,4,259900 26 | 3890,3,573900 27 | 1100,3,249900 28 | 1458,3,464500 29 | 2526,3,469000 30 | 2200,3,475000 31 | 2637,3,299900 32 | 1839,2,349900 33 | 1000,1,169900 34 | 2040,4,314900 35 | 3137,3,579900 36 | 1811,4,285900 37 | 1437,3,249900 38 | 1239,3,229900 39 | 2132,4,345000 40 | 4215,4,549000 41 | 2162,4,287000 42 | 1664,2,368500 43 | 2238,3,329900 44 | 2567,4,314000 45 | 1200,3,299000 46 | 852,2,179900 47 | 1852,4,299900 48 | 1203,3,239500 
-------------------------------------------------------------------------------- /test/mock/data/sample.csv: -------------------------------------------------------------------------------- 1 | o1, i1, i2, i3, i4, i5, i6, i7, i8, 2 | 10, 20, 30, 40, 50, 60, 70, 80, 90, 3 | 11, 21, 31, 41, 51, 61, 71, 81, 91, 4 | 12, 22, 32, 42, 52, 62, 72, 82, 92, 5 | 13, 23, 33, 43, 53, 63, 73, 83, 93, 6 | 14, 24, 34, 44, 54, 64, 74, 84, 94, 7 | 15, 25, 35, 45, 55, 65, 75, 85, 95, 8 | 16, 26, 36, 46, 56, 66, 76, 86, 96, 9 | 17, 27, 37, 47, 57, 67, 77, 87, 97, 10 | 18, 28, 38, 48, 58, 68, 78, 88, 98, 11 | 19, 29, 39, 49, 59, 69, 79, 89, 99, 12 | 19, 29, 39, 49, 59, 69, 79, 89, 99, 13 | 18, 28, 38, 48, 58, 68, 78, 88, 98, 14 | 17, 27, 37, 47, 57, 67, 77, 87, 97, 15 | 16, 26, 36, 46, 56, 66, 76, 86, 96, 16 | 15, 25, 35, 45, 55, 65, 75, 85, 95, 17 | 14, 24, 34, 44, 54, 64, 74, 84, 94, 18 | 13, 23, 33, 43, 53, 63, 73, 83, 93, 19 | 12, 22, 32, 42, 52, 62, 72, 82, 92, 20 | 11, 21, 31, 41, 51, 61, 71, 81, 91, 21 | 10, 20, 30, 40, 50, 60, 70, 80, 90, 22 | 10, 20, 30, 40, 50, 60, 70, 80, 90, 23 | 11, 21, 31, 41, 51, 61, 71, 81, 91, 24 | 12, 22, 32, 42, 52, 62, 72, 82, 92, 25 | 13, 23, 33, 43, 53, 63, 73, 83, 93, 26 | 14, 24, 34, 44, 54, 64, 74, 84, 94, -------------------------------------------------------------------------------- /test/mock_saved_files/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/test/mock_saved_files/.DS_Store -------------------------------------------------------------------------------- /test/mock_saved_files/mlr_model/model.json: -------------------------------------------------------------------------------- 1 | 
{"modelTopology":{"class_name":"Sequential","config":[{"class_name":"Dense","config":{"units":1,"activation":"linear","use_bias":true,"kernel_initializer":{"class_name":"VarianceScaling","config":{"scale":1,"mode":"fan_avg","distribution":"normal","seed":null}},"bias_initializer":{"class_name":"Zeros","config":{}},"kernel_regularizer":null,"bias_regularizer":null,"activity_regularizer":null,"kernel_constraint":null,"bias_constraint":null,"name":"dense_Dense1","trainable":true,"batch_input_shape":[null,2],"dtype":"float32"}}],"keras_version":"tfjs-layers 1.1.2","backend":"tensor_flow.js"},"weightsManifest":[{"paths":["weights.bin"],"weights":[{"name":"dense_Dense1/kernel","shape":[2,1],"dtype":"float32"},{"name":"dense_Dense1/bias","shape":[1],"dtype":"float32"}]}]} -------------------------------------------------------------------------------- /test/mock_saved_files/mlr_model/weights.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/repetere/tensorscript/8ec5126a3b27d938c77158145f9bce47ffdb108d/test/mock_saved_files/mlr_model/weights.bin -------------------------------------------------------------------------------- /test/unit/base_neural_network_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { BaseNeuralNetwork, } from '../../index.js'; 8 | const expect = chai.expect; 9 | const independentVariables = [ 10 | 'CRIM', 11 | 'ZN', 12 | 'INDUS', 13 | 'CHAS', 14 | 'NOX', 15 | 'RM', 16 | 'AGE', 17 | 'DIS', 18 | 'RAD', 19 | 'TAX', 20 | 'PTRATIO', 21 | 'B', 22 | 'LSTAT', 23 | ]; 24 | const dependentVariables = [ 25 | 'MEDV', 26 | ]; 27 | const columns = independentVariables.concat(dependentVariables); 28 | let housingDataCSV; 29 | let 
DataSet; 30 | 31 | chai.use(sinonChai); 32 | chai.use(chaiAsPromised); 33 | function scaleColumnMap(columnName) { 34 | return { 35 | name: columnName, 36 | options: { 37 | strategy: 'scale', 38 | scaleOptions: { 39 | strategy:'standard', 40 | }, 41 | }, 42 | }; 43 | } 44 | /** @test {BaseNeuralNetwork} */ 45 | describe('BaseNeuralNetwork', function () { 46 | this.timeout(10000); 47 | before(async function () { 48 | housingDataCSV = await ms.csv.loadCSV('./test/mock/data/boston_housing_data.csv'); 49 | DataSet = new ms.DataSet(housingDataCSV); 50 | DataSet.fitColumns({ 51 | columns: columns.map(scaleColumnMap), 52 | returnData:false, 53 | }); 54 | }); 55 | /** @test {BaseNeuralNetwork#constructor} */ 56 | describe('constructor', () => { 57 | it('should export a named module class', () => { 58 | const MLR = new BaseNeuralNetwork(); 59 | const MLRConfigured = new BaseNeuralNetwork({ test: 'prop', }); 60 | expect(BaseNeuralNetwork).to.be.a('function'); 61 | expect(MLR).to.be.instanceOf(BaseNeuralNetwork); 62 | expect(MLRConfigured.settings.test).to.eql('prop'); 63 | }); 64 | }); 65 | /** @test {BaseNeuralNetwork#generateLayers} */ 66 | describe('generateLayers', () => { 67 | it('should throw an error if generateLayers method is not implemented', () => { 68 | class NN extends BaseNeuralNetwork{ 69 | generateLayers(x, y, layers) { 70 | return true; 71 | } 72 | } 73 | const TS = new BaseNeuralNetwork(); 74 | const TSNN = new NN(); 75 | expect(TS.generateLayers).to.be.a('function'); 76 | expect(TS.generateLayers.bind(null)).to.throw('generateLayers method is not implemented'); 77 | expect(TSNN.generateLayers).to.be.a('function'); 78 | expect(TSNN.generateLayers.bind(null)).to.be.ok; 79 | }); 80 | }); 81 | /** @test {BaseNeuralNetwork#train} */ 82 | describe('train', () => { 83 | it('should train a NN', async function () { 84 | const NN = new BaseNeuralNetwork(); 85 | const x = []; 86 | const y = []; 87 | const layers = []; 88 | const tf = { 89 | tensor: () => ({ 90 | 
dispose: () => { }, 91 | }), 92 | sequential: () => ({ 93 | compile: () => true, 94 | fit: () => true, 95 | }), 96 | }; 97 | const settings = {}; 98 | function getInputShape() { } 99 | function generateLayers() { } 100 | const trainedModel = await NN.train.call({ 101 | getInputShape, 102 | generateLayers, 103 | tf, 104 | settings, 105 | }, x, y, layers); 106 | const trainedModel2 = await NN.train.call({ 107 | getInputShape, 108 | generateLayers, 109 | tf, 110 | settings, 111 | layers:[], 112 | }, x, y); 113 | expect(trainedModel).to.be.an('object'); 114 | expect(trainedModel2).to.be.an('object'); 115 | }); 116 | }); 117 | /** @test {BaseNeuralNetwork#calculate} */ 118 | describe('calculate', () => { 119 | it('should throw an error if input is invalid', () => { 120 | const NN = new BaseNeuralNetwork(); 121 | expect(NN.calculate).to.be.a('function'); 122 | expect(NN.calculate.bind()).to.throw(/invalid input matrix/); 123 | expect(NN.calculate.bind(null, 'invalid')).to.throw(/invalid input matrix/); 124 | }); 125 | it('should train a NN', async function () { 126 | const NN = new BaseNeuralNetwork(); 127 | const x = [1, 2, 3, ]; 128 | const x2 = [[1, 2, 3, ], [1, 2, 3, ], ]; 129 | const tf = { 130 | tensor: () => ({ 131 | dispose: () => { }, 132 | }), 133 | sequential: () => ({ 134 | compile: () => true, 135 | fit: () => true, 136 | }), 137 | }; 138 | const model = { 139 | predict: () => true, 140 | }; 141 | const prediction = NN.calculate.call({ 142 | tf, 143 | model, 144 | }, x); 145 | const prediction2 = NN.calculate.call({ 146 | tf, 147 | model, 148 | }, x2); 149 | expect(prediction).to.be.true; 150 | expect(prediction2).to.be.true; 151 | }); 152 | }); 153 | }); -------------------------------------------------------------------------------- /test/unit/classification_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | // import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | 
import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { DeepLearningClassification, } from '../../index.js'; 8 | const expect = chai.expect; 9 | const independentVariables = [ 10 | 'sepal_length_cm', 11 | 'sepal_width_cm', 12 | 'petal_length_cm', 13 | 'petal_width_cm', 14 | ]; 15 | const dependentVariables = [ 16 | 'plant_Iris-setosa', 17 | 'plant_Iris-versicolor', 18 | 'plant_Iris-virginica', 19 | ]; 20 | const columns = independentVariables.concat(dependentVariables); 21 | let housingDataCSV; 22 | let DataSet; 23 | let x_matrix; 24 | let y_matrix; 25 | let nnClassification; 26 | let nnClassificationModel; 27 | const fit = { 28 | epochs: 100, 29 | batchSize: 5, 30 | }; 31 | const encodedAnswers = { 32 | 'Iris-setosa': [1, 0, 0, ], 33 | 'Iris-versicolor': [0, 1, 0, ], 34 | 'Iris-virginica': [0, 0, 1, ], 35 | }; 36 | const input_x = [ 37 | [5.1, 3.5, 1.4, 0.2, ], 38 | [6.3,3.3,6.0,2.5, ], 39 | [5.6, 3.0, 4.5, 1.5, ], 40 | [5.0, 3.2, 1.2, 0.2, ], 41 | [4.5, 2.3, 1.3, 0.3, ], 42 | ]; 43 | chai.use(sinonChai); 44 | chai.use(chaiAsPromised); 45 | function scaleColumnMap(columnName) { 46 | return { 47 | name: columnName, 48 | options: { 49 | strategy: 'scale', 50 | scaleOptions: { 51 | strategy:'standard', 52 | }, 53 | }, 54 | }; 55 | } 56 | /** @test {DeepLearningClassification} */ 57 | describe('DeepLearningClassification', function () { 58 | this.timeout(120000); 59 | before(async function () { 60 | /** 61 | * encodedData = [ 62 | * { sepal_length_cm: 5.1, 63 | sepal_width_cm: 3.5, 64 | petal_length_cm: 1.4, 65 | petal_width_cm: 0.2, 66 | plant: 'Iris-setosa', 67 | 'plant_Iris-setosa': 1, 68 | 'plant_Iris-versicolor': 0, 69 | 'plant_Iris-virginica': 0 }, 70 | ... 
71 | { sepal_length_cm: 5.9, 72 | sepal_width_cm: 3, 73 | petal_length_cm: 4.2, 74 | petal_width_cm: 1.5, 75 | plant: 'Iris-versicolor', 76 | 'plant_Iris-setosa': 0, 77 | 'plant_Iris-versicolor': 1, 78 | 'plant_Iris-virginica': 0 }, 79 | ]; 80 | */ 81 | housingDataCSV = await ms.csv.loadCSV('./test/mock/data/iris_data.csv'); 82 | DataSet = new ms.DataSet(housingDataCSV); 83 | // DataSet.fitColumns({ 84 | // columns: columns.map(scaleColumnMap), 85 | // returnData:false, 86 | // }); 87 | const encodedData = DataSet.fitColumns({ 88 | columns: [ 89 | { 90 | name: 'plant', 91 | options: { 92 | strategy: 'onehot', 93 | }, 94 | }, 95 | ], 96 | returnData:true, 97 | }); 98 | x_matrix = DataSet.columnMatrix(independentVariables); 99 | y_matrix = DataSet.columnMatrix(dependentVariables); 100 | /* 101 | x_matrix = [ 102 | [ 5.1, 3.5, 1.4, 0.2 ], 103 | [ 4.9, 3, 1.4, 0.2 ], 104 | [ 4.7, 3.2, 1.3, 0.2 ], 105 | ... 106 | ]; 107 | y_matrix = [ 108 | [ 1, 0, 0 ], 109 | [ 1, 0, 0 ], 110 | [ 1, 0, 0 ], 111 | ... 
112 | ] 113 | */ 114 | // console.log({ x_matrix, y_matrix, }); 115 | 116 | nnClassification = new DeepLearningClassification({ fit, }); 117 | nnClassificationModel = await nnClassification.train(x_matrix, y_matrix); 118 | }); 119 | /** @test {DeepLearningClassification#constructor} */ 120 | describe('constructor', () => { 121 | it('should export a named module class', () => { 122 | const NN = new DeepLearningClassification(); 123 | const NNConfigured = new DeepLearningClassification({ test: 'prop', }); 124 | expect(DeepLearningClassification).to.be.a('function'); 125 | expect(NN).to.be.instanceOf(DeepLearningClassification); 126 | expect(NNConfigured.settings.test).to.eql('prop'); 127 | }); 128 | }); 129 | /** @test {DeepLearningClassification#generateLayers} */ 130 | describe('generateLayers', () => { 131 | it('should generate a classification network', async () => { 132 | const predictions = await nnClassification.predict(input_x); 133 | const answers = await nnClassification.predict(input_x, { 134 | probability:false, 135 | }); 136 | const shape = nnClassification.getInputShape(predictions); 137 | // console.log('nnClassification.layers', nnClassification.layers); 138 | // console.log({ 139 | // predictions_unscaled, 140 | // predictions, 141 | // shape, 142 | // }); 143 | 144 | // const probabilities = ms.DataSet.reverseColumnMatrix({ 145 | // vectors: predictions, 146 | // labels: dependentVariables, 147 | // }); 148 | // const results = ms.DataSet.reverseColumnMatrix({ 149 | // vectors: answers, 150 | // labels: dependentVariables, 151 | // }); 152 | // console.log({ 153 | // predictions, 154 | // // probabilities, 155 | // answers, 156 | // // results, 157 | // shape, 158 | // }); 159 | expect(predictions).to.have.lengthOf(input_x.length); 160 | expect(nnClassification.layers).to.have.lengthOf(2); 161 | expect(shape).to.eql([5, 3,]); 162 | expect(answers[ 0 ]).to.eql(encodedAnswers[ 'Iris-setosa' ]); 163 | // expect(answers[ 1 ]).to.eql(encodedAnswers[ 
'Iris-virginica' ]); 164 | // expect(answers[ 2 ]).to.eql(encodedAnswers[ 'Iris-versicolor' ]); 165 | // expect(answers[ 3 ]).to.eql(encodedAnswers[ 'Iris-setosa' ]); 166 | // expect(answers[ 4 ]).to.eql(encodedAnswers[ 'Iris-setosa' ]); 167 | return true; 168 | }); 169 | it('should generate a network from layers', async () => { 170 | const nnClassificationCustom = new DeepLearningClassification({ layerPreference:'custom', fit, }); 171 | await nnClassificationCustom.train(x_matrix, y_matrix, nnClassification.layers); 172 | expect(nnClassificationCustom.layers).to.have.lengthOf(2); 173 | }); 174 | }); 175 | }); -------------------------------------------------------------------------------- /test/unit/logistic_regression_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | // import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { LogisticRegression, } from '../../index.js'; 8 | 9 | const expect = chai.expect; 10 | const independentVariables = [ 11 | 'Age', 12 | 'EstimatedSalary', 13 | ]; 14 | const dependentVariables = [ 15 | 'Purchased', 16 | ]; 17 | let CSVData; 18 | let DataSet; 19 | let x_matrix; 20 | let y_matrix; 21 | let nnLR; 22 | let nnLRClass; 23 | let nnLRReg; 24 | let nnLRModel; 25 | let nnLRClassModel; 26 | let nnLRRegModel; 27 | const encodedAnswers = { 28 | 'yes': [1,], 29 | 'no': [0,], 30 | }; 31 | const fit = { 32 | epochs: 10, 33 | batchSize: 5, 34 | }; 35 | const input_x = [ 36 | [-0.062482849427819266, 0.30083326827486173, ], //0 37 | [0.7960601198093905, -1.1069168538010206, ], //1 38 | [0.7960601198093905, 0.12486450301537644, ], //0 39 | [0.4144854668150751, -0.49102617539282206, ], //0 40 | [0.3190918035664962, 0.5061301610775946, ], //1 41 | ]; 42 | chai.use(sinonChai); 43 | chai.use(chaiAsPromised); 44 | function 
scaleColumnMap(columnName) { 45 | return { 46 | name: columnName, 47 | options: { 48 | strategy: 'scale', 49 | scaleOptions: { 50 | strategy:'standard', 51 | }, 52 | }, 53 | }; 54 | } 55 | /** @test {LogisticRegression} */ 56 | describe('LogisticRegression', function () { 57 | this.timeout(120000); 58 | before(async function () { 59 | CSVData = await ms.csv.loadCSV('./test/mock/data/social_network_ads.csv'); 60 | DataSet = new ms.DataSet(CSVData); 61 | const scaledData = DataSet.fitColumns({ 62 | columns: independentVariables.map(scaleColumnMap), 63 | returnData:true, 64 | }); 65 | /* 66 | scaledData = [ 67 | { 'User ID': 15624510, 68 | Gender: 'Male', 69 | Age: -1.7795687879022388, 70 | EstimatedSalary: -1.4881825118632386, 71 | Purchased: 0 }, 72 | { 'User ID': 15810944, 73 | Gender: 'Male', 74 | Age: -0.253270175924977, 75 | EstimatedSalary: -1.458854384319991, 76 | Purchased: 0 }, 77 | ... 78 | ]; 79 | */ 80 | x_matrix = DataSet.columnMatrix(independentVariables); 81 | y_matrix = DataSet.columnMatrix(dependentVariables); 82 | /* 83 | x_matrix = [ 84 | [ -1.7795687879022388, -1.4881825118632386 ], 85 | [ -0.253270175924977, -1.458854384319991 ], 86 | ... 87 | ]; 88 | y_matrix = [ 89 | [ 0 ], 90 | [ 0 ], 91 | ... 
92 | ] 93 | */ 94 | // console.log({ x_matrix, y_matrix, }); 95 | 96 | nnLR = new LogisticRegression({ fit, }); 97 | nnLRClass = new LogisticRegression({ type: 'class', fit, }); 98 | nnLRReg = new LogisticRegression({ type: 'l1l2', fit, }); 99 | const models = await Promise.all([ 100 | nnLR.train(x_matrix, y_matrix), 101 | nnLRClass.train(x_matrix, y_matrix), 102 | nnLRReg.train(x_matrix, y_matrix), 103 | ]); 104 | nnLRModel = models[ 0 ]; 105 | nnLRClassModel = models[ 1 ]; 106 | nnLRRegModel = models[ 2 ]; 107 | }); 108 | /** @test {LogisticRegression#constructor} */ 109 | describe('constructor', () => { 110 | it('should export a named module class', () => { 111 | const NN = new LogisticRegression(); 112 | const NNConfigured = new LogisticRegression({ test: 'prop', }); 113 | expect(LogisticRegression).to.be.a('function'); 114 | expect(NN).to.be.instanceOf(LogisticRegression); 115 | expect(NNConfigured.settings.test).to.eql('prop'); 116 | }); 117 | }); 118 | /** @test {LogisticRegression#generateLayers} */ 119 | describe('generateLayers', () => { 120 | it('should generate a classification network', async () => { 121 | const predictions = await nnLR.predict(input_x); 122 | const answers = await nnLR.predict(input_x, { 123 | probability:false, 124 | }); 125 | const shape = nnLR.getInputShape(predictions); 126 | 127 | expect(predictions).to.have.lengthOf(input_x.length); 128 | expect(nnLR.layers).to.have.lengthOf(1); 129 | expect(shape).to.eql([5, 1, ]); 130 | // expect(answers[ 0 ]).to.eql(encodedAnswers[ 'Iris-setosa' ]); 131 | return true; 132 | }); 133 | it('should generate a network from layers', async () => { 134 | const nnLRCustom = new LogisticRegression({ type:'custom', fit, }); 135 | await nnLRCustom.train(x_matrix, y_matrix, nnLR.layers); 136 | expect(nnLRCustom.layers).to.have.lengthOf(1); 137 | }); 138 | // it('should validate trainning data', async () => { 139 | // const nnLRCustom = new LogisticRegression({ type:'custom', fit, }); 140 | // await 
nnLRCustom.train(x_matrix, y_matrix, nnLR.layers, x_matrix, y_matrix); 141 | // expect(nnLRCustom.layers).to.have.lengthOf(1); 142 | // }); 143 | }); 144 | }); -------------------------------------------------------------------------------- /test/unit/math_js_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | import { TensorScriptModelInterface, size, flatten, } from '../../lib/model_interface.js'; 3 | import assert from 'assert'; 4 | const expect = chai.expect; 5 | const array = { 6 | reshape: TensorScriptModelInterface.reshape, 7 | flatten, 8 | }; 9 | const reshape = array.reshape; 10 | describe('util.array', function () { 11 | /** @test {../../lib/model_interface.mjs~size} */ 12 | describe('size', function () { 13 | it('should calculate the size of a scalar', function () { 14 | assert.deepEqual(size(2), []); 15 | assert.deepEqual(size('string'), []); 16 | }); 17 | 18 | it('should calculate the size of a 1-dimensional array', function () { 19 | assert.deepEqual(size([]), [0, ]); 20 | assert.deepEqual(size([1, ]), [1, ]); 21 | assert.deepEqual(size([1, 2, 3, ]), [3, ]); 22 | }); 23 | 24 | it('should calculate the size of a 2-dimensional array', function () { 25 | assert.deepEqual(size([[], ]), [1, 0, ]); 26 | assert.deepEqual(size([[], [], ]), [2, 0, ]); 27 | assert.deepEqual(size([[1, 2, ], [3, 4, ], ]), [2, 2, ]); 28 | assert.deepEqual(size([[1, 2, 3, ], [4, 5, 6, ], ]), [2, 3, ]); 29 | }); 30 | 31 | it('should calculate the size of a 3-dimensional array', function () { 32 | assert.deepEqual(size([[[], ], ]), [1, 1, 0, ]); 33 | assert.deepEqual(size([[[], [], ], ]), [1, 2, 0, ]); 34 | assert.deepEqual(size([[[], [], ], [[], [], ], ]), [2, 2, 0, ]); 35 | assert.deepEqual(size([[[1, ], [2, ], ], [[3, ], [4, ], ], ]), [2, 2, 1, ]); 36 | assert.deepEqual(size([[[1, 2, ], [3, 4, ], ], [[5, 6, ], [7, 8, ], ], ]), [2, 2, 2, ]); 37 | assert.deepEqual(size([ 38 | [[1, 2, 3, 4, ], [5, 6, 7, 8, ], ], 
39 | [[1, 2, 3, 4, ], [5, 6, 7, 8, ], ], 40 | [[1, 2, 3, 4, ], [5, 6, 7, 8, ], ], 41 | ]), [3, 2, 4, ]); 42 | }); 43 | 44 | it('should not validate whether all dimensions match', function () { 45 | assert.deepEqual(size([[1, 2, ], [3, 4, 5, ], ]), [2, 2, ]); 46 | }); 47 | }); 48 | /** @test {../../lib/model_interface.mjs~reshape} */ 49 | describe('reshape', function () { 50 | it('should reshape a 1 dimensional array into a 2 dimensional array', function () { 51 | const a = [1, 2, 3, 4, 5, 6, 7, 8, ]; 52 | 53 | assert.deepEqual( 54 | reshape(a, [2, 4, ]), 55 | [[1, 2, 3, 4, ], 56 | [5, 6, 7, 8, ], ] 57 | ); 58 | assert.deepEqual( 59 | reshape(a, [4, 2, ]), 60 | [[1, 2, ], 61 | [3, 4, ], 62 | [5, 6, ], 63 | [7, 8, ], ] 64 | ); 65 | assert.deepEqual( 66 | reshape(a, [1, 8, ]), 67 | [[1, 2, 3, 4, 5, 6, 7, 8, ], ] 68 | ); 69 | assert.deepEqual( 70 | reshape(a, [1, 1, 8, ]), 71 | [[[1, 2, 3, 4, 5, 6, 7, 8, ], ], ] 72 | ); 73 | }); 74 | 75 | it('should reshape a 2 dimensional array into a 1 dimensional array', function () { 76 | const a = [ 77 | [0, 1, ], 78 | [2, 3, ], 79 | ]; 80 | 81 | assert.deepEqual( 82 | reshape(a, [4, ]), 83 | [0, 1, 2, 3, ] 84 | ); 85 | }); 86 | 87 | it('should reshape a 3 dimensional array', function () { 88 | const a = [[[1, 2, ], 89 | [3, 4, ], ], 90 | 91 | [[5, 6, ], 92 | [7, 8, ], ], ]; 93 | 94 | assert.deepEqual( 95 | reshape(a, [8, ]), 96 | [1, 2, 3, 4, 5, 6, 7, 8, ] 97 | ); 98 | 99 | assert.deepEqual( 100 | reshape(a, [2, 4, ]), 101 | [[1, 2, 3, 4, ], 102 | [5, 6, 7, 8, ], ] 103 | ); 104 | }); 105 | 106 | it('should throw an error when reshaping to a dimension with length 0', function () { 107 | assert.throws(function () { 108 | reshape([1, 2, ], [0, 2, ]); 109 | }, /DimensionError/); 110 | assert.throws(function () { 111 | reshape([1, 2, ], [2, 0, ]); 112 | }, /DimensionError/); 113 | }); 114 | 115 | it('should throw an error when reshaping a non-empty array to an empty array', function () { 116 | assert.throws(function () { 117 | 
reshape([1, ], []); 118 | }, /DimensionError/); 119 | assert.throws(function () { 120 | reshape([1, 2, ], []); 121 | }, /DimensionError/); 122 | }); 123 | 124 | it('should throw an error when reshaping to a size that differs from the original', function () { 125 | const a = [1, 2, 3, 4, 5, 6, 7, 8, 9, ]; 126 | 127 | assert.deepEqual( 128 | reshape(a, [3, 3, ]), 129 | [[1, 2, 3, ], 130 | [4, 5, 6, ], 131 | [7, 8, 9, ], ] 132 | ); 133 | assert.throws(function () { 134 | reshape(a, [3, 2, ]); 135 | }, /DimensionError/); 136 | assert.throws(function () { 137 | reshape(a, [2, 3, ]); 138 | }, /DimensionError/); 139 | assert.throws(function () { 140 | reshape(a, [3, 3, 3, ]); 141 | }, /DimensionError/); 142 | assert.throws(function () { 143 | reshape(a, [3, 4, ]); 144 | }, /DimensionError/); 145 | assert.throws(function () { 146 | reshape(a, [4, 3, ]); 147 | }, /DimensionError/); 148 | }); 149 | 150 | it('should throw an error in case of wrong type of arguments', function () { 151 | assert.throws(function () { 152 | reshape([], 2); 153 | }, /Array expected/); 154 | assert.throws(function () { 155 | reshape(2); 156 | }, /Array expected/); 157 | }); 158 | }); 159 | /** @test {../../lib/model_interface.mjs~flatten} */ 160 | describe('flatten', function () { 161 | it('should flatten a scalar', function () { 162 | assert.deepEqual(array.flatten(1), 1); 163 | }); 164 | 165 | it('should flatten a 1 dimensional array', function () { 166 | assert.deepEqual(array.flatten([1, 2, 3, ]), [1, 2, 3, ]); 167 | }); 168 | 169 | it('should flatten a 2 dimensional array', function () { 170 | assert.deepEqual(array.flatten([[1, 2, ], [3, 4, ], ]), [1, 2, 3, 4, ]); 171 | }); 172 | 173 | it('should flatten a 3 dimensional array', function () { 174 | assert.deepEqual(array.flatten([[[1, 2, ], [3, 4, ], ], [[5, 6, ], [7, 8, ], ], ]), [1, 2, 3, 4, 5, 6, 7, 8, ]); 175 | }); 176 | 177 | it('should return a new array', function () { 178 | const input = [3, 2, 1, ]; 179 | const flat = 
array.flatten(input); 180 | flat.sort(); 181 | assert.deepEqual(input, [3, 2, 1, ]); 182 | }); 183 | }); 184 | }); -------------------------------------------------------------------------------- /test/unit/multiple_linear_regression_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { MultipleLinearRegression, } from '../../index.js'; 8 | const expect = chai.expect; 9 | const independentVariables = ['sqft', 'bedrooms', ]; 10 | const dependentVariables = ['price',]; 11 | let housingDataCSV; 12 | let input_x; 13 | let DataSet; 14 | let x_matrix; 15 | let y_matrix; 16 | let trainedMLR; 17 | let trainedMLRModel; 18 | 19 | chai.use(sinonChai); 20 | chai.use(chaiAsPromised); 21 | function scaleColumnMap(columnName) { 22 | return { 23 | name: columnName, 24 | options: { 25 | strategy: 'scale', 26 | scaleOptions: { 27 | strategy:'standard', 28 | }, 29 | }, 30 | }; 31 | } 32 | /** @test {MultipleLinearRegression} */ 33 | describe('MultipleLinearRegression', function () { 34 | this.timeout(20000); 35 | before(async function () { 36 | housingDataCSV = await ms.csv.loadCSV('./test/mock/data/portland_housing_data.csv'); 37 | /* 38 | housingdataCSV = [ 39 | { sqft: 2104, bedrooms: 3, price: 399900 }, 40 | { sqft: 1600, bedrooms: 3, price: 329900 }, 41 | ... 
42 | { sqft: 1203, bedrooms: 3, price: 239500 } 43 | ] 44 | */ 45 | DataSet = new ms.DataSet(housingDataCSV); 46 | DataSet.fitColumns({ 47 | columns: independentVariables.concat(dependentVariables).map(scaleColumnMap), 48 | returnData:false, 49 | }); 50 | x_matrix = DataSet.columnMatrix(independentVariables); 51 | y_matrix = DataSet.columnMatrix(dependentVariables); 52 | // const y_vector = ms.util.pivotVector(y_matrix)[ 0 ];// not used but just illustrative 53 | /* x_matrix = [ 54 | [2014, 3], 55 | [1600, 3], 56 | ] 57 | y_matrix = [ 58 | [399900], 59 | [329900], 60 | ] 61 | y_vector = [ 399900, 329900] 62 | */ 63 | trainedMLR = new MultipleLinearRegression({ 64 | fit: { 65 | epochs: 100, 66 | batchSize: 5, 67 | }, 68 | }); 69 | trainedMLRModel = await trainedMLR.train(x_matrix, y_matrix); 70 | input_x = [ 71 | [ 72 | DataSet.scalers.get('sqft').scale(4215), 73 | DataSet.scalers.get('bedrooms').scale(4), 74 | ], //549000 75 | [ 76 | DataSet.scalers.get('sqft').scale(852), 77 | DataSet.scalers.get('bedrooms').scale(2), 78 | ], //179900 79 | ]; 80 | return true; 81 | }); 82 | /** @test {MultipleLinearRegression#constructor} */ 83 | describe('constructor', () => { 84 | it('should export a named module class', () => { 85 | const MLR = new MultipleLinearRegression({ 86 | fit: { 87 | epochs: 200, 88 | batchSize: 5, 89 | }, 90 | }); 91 | const MLRConfigured = new MultipleLinearRegression({ test: 'prop', }, {}); 92 | expect(MultipleLinearRegression).to.be.a('function'); 93 | expect(MLR).to.be.instanceOf(MultipleLinearRegression); 94 | expect(MLRConfigured.settings.test).to.eql('prop'); 95 | }); 96 | }); 97 | /** @test {MultipleLinearRegression#generateLayers} */ 98 | describe('generateLayers', () => { 99 | it('should generate a classification network', async () => { 100 | const predictions = await trainedMLR.predict(input_x); 101 | const shape = trainedMLR.getInputShape(predictions); 102 | // console.log('nnLR.layers', nnLR.layers); 103 | // console.log({ 104 | // 
predictions, 105 | // shape, 106 | // }); 107 | expect(predictions).to.have.lengthOf(input_x.length); 108 | expect(trainedMLR.layers).to.have.lengthOf(1); 109 | const descaledPredictions = predictions.map(DataSet.scalers.get('price').descale); 110 | expect(descaledPredictions[ 0 ]).to.be.closeTo(630000, 20000); 111 | expect(descaledPredictions[ 1 ]).to.be.closeTo(190000, 10000); 112 | return true; 113 | }); 114 | it('should generate a network from layers', async () => { 115 | const nnLRCustom = new MultipleLinearRegression({ 116 | type: 'custom', 117 | fit: { 118 | epochs: 10, 119 | batchSize: 5, 120 | }, 121 | }); 122 | await nnLRCustom.train(x_matrix, y_matrix, trainedMLR.layers); 123 | expect(nnLRCustom.layers).to.have.lengthOf(1); 124 | return true; 125 | }); 126 | }); 127 | }); -------------------------------------------------------------------------------- /test/unit/regression_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | // import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { DeepLearningRegression, } from '../../index.js'; 8 | const expect = chai.expect; 9 | const independentVariables = [ 10 | 'CRIM', 11 | 'ZN', 12 | 'INDUS', 13 | 'CHAS', 14 | 'NOX', 15 | 'RM', 16 | 'AGE', 17 | 'DIS', 18 | 'RAD', 19 | 'TAX', 20 | 'PTRATIO', 21 | 'B', 22 | 'LSTAT', 23 | ]; 24 | const dependentVariables = [ 25 | 'MEDV', 26 | ]; 27 | const columns = independentVariables.concat(dependentVariables); 28 | let housingDataCSV; 29 | let DataSet; 30 | let x_matrix; 31 | let y_matrix; 32 | let nnRegressionDeep; 33 | let nnRegressionWide; 34 | let nnRegressionDeepModel; 35 | let nnRegressionWideModel; 36 | const fit = { 37 | epochs: 10, 38 | batchSize: 5, 39 | }; 40 | const input_x = [ 41 | [-0.41936692921321594, 0.2845482693404666, -1.2866362317172035, 
-0.272329067679207, -0.1440748547324509, 0.4132629204530747, -0.119894767215809, 0.1400749839795629, -0.981871187861867, -0.6659491794887338, -1.457557967289609, 0.4406158949991029, -1.074498970343932,], 42 | [-0.41692666996409716, -0.4872401872268264, -0.5927943782429392, -0.272329067679207, -0.7395303607434242, 0.1940823874370036, 0.3668034264326209, 0.5566090495704026, -0.8670244885881488, -0.9863533804386945, -0.3027944997494681, 0.4406158949991029, -0.49195252491856634,], 43 | ]; 44 | 45 | chai.use(sinonChai); 46 | chai.use(chaiAsPromised); 47 | function scaleColumnMap(columnName) { 48 | return { 49 | name: columnName, 50 | options: { 51 | strategy: 'scale', 52 | scaleOptions: { 53 | strategy:'standard', 54 | }, 55 | }, 56 | }; 57 | } 58 | /** @test {DeepLearningRegression} */ 59 | describe('DeepLearningRegression', function () { 60 | this.timeout(120000); 61 | before(async function () { 62 | /* 63 | housingdataCSV = [ 64 | { CRIM: 0.00632, ZN: 18, INDUS: 2.31, CHAS: 0, NOX: 0.538, RM: 6.575, AGE: 65.2, DIS: 4.09, RAD: 1, TAX: 296, PTRATIO: 15.3, B: 396.9, LSTAT: 4.98, MEDV: 24 }, 65 | { CRIM: 0.02731, ZN: 0, INDUS: 7.07, CHAS: 0, NOX: 0.469, RM: 6.421, AGE: 78.9, DIS: 4.9671, RAD: 2, TAX: 242, PTRATIO: 17.8, B: 396.9, LSTAT: 9.14, MEDV: 21.6 }, 66 | ... 
67 | ] 68 | */ 69 | housingDataCSV = await ms.csv.loadCSV('./test/mock/data/boston_housing_data.csv'); 70 | DataSet = new ms.DataSet(housingDataCSV); 71 | DataSet.fitColumns({ 72 | columns: columns.map(scaleColumnMap), 73 | returnData:false, 74 | }); 75 | x_matrix = DataSet.columnMatrix(independentVariables); 76 | y_matrix = DataSet.columnMatrix(dependentVariables); 77 | /* x_matrix = [ 78 | [ -0.41936692921321594, 0.2845482693404666, -1.2866362317172035, -0.272329067679207, -0.1440748547324509, 0.4132629204530747, -0.119894767215809, 0.1400749839795629, -0.981871187861867, -0.6659491794887338, -1.457557967289609, 0.4406158949991029, -1.074498970343932 ], 79 | [ -0.41692666996409716, -0.4872401872268264, -0.5927943782429392, -0.272329067679207, -0.7395303607434242, 0.1940823874370036, 0.3668034264326209, 0.5566090495704026, -0.8670244885881488, -0.9863533804386945, -0.3027944997494681, 0.4406158949991029, -0.49195252491856634 ] 80 | ... 81 | ]; 82 | y_matrix = [ 83 | [ 0.15952778852449556 ], 84 | [ -0.1014239172731213 ], 85 | ... 86 | ] 87 | const y_vector = ms.util.pivotVector(y_matrix)[ 0 ];// not used but just illustrative 88 | y_vector = [ 0.15952778852449556, 89 | -0.1014239172731213, ... 
] 90 | */ 91 | 92 | nnRegressionDeep = new DeepLearningRegression({ layerPreference:'deep', fit, }); 93 | nnRegressionWide = new DeepLearningRegression({ layerPreference: 'wide', fit, }); 94 | const models = await Promise.all([ 95 | nnRegressionDeep.train(x_matrix, y_matrix), 96 | nnRegressionWide.train(x_matrix, y_matrix), 97 | ]); 98 | nnRegressionDeepModel = models[ 0 ]; 99 | nnRegressionWideModel = models[ 1 ]; 100 | return true; 101 | }); 102 | /** @test {DeepLearningRegression#constructor} */ 103 | describe('constructor', () => { 104 | it('should export a named module class', () => { 105 | const NN = new DeepLearningRegression(); 106 | const NNConfigured = new DeepLearningRegression({ test: 'prop', }); 107 | expect(DeepLearningRegression).to.be.a('function'); 108 | expect(NN).to.be.instanceOf(DeepLearningRegression); 109 | expect(NNConfigured.settings.test).to.eql('prop'); 110 | }); 111 | }); 112 | /** @test {DeepLearningRegression#generateLayers} */ 113 | describe('generateLayers', () => { 114 | it('should generate a deep network', async () => { 115 | const predictions = await nnRegressionDeep.predict(input_x); 116 | const predictions_unscaled = predictions.map(pred=>DataSet.scalers.get('MEDV').descale(pred[0])); 117 | const shape = nnRegressionDeep.getInputShape(predictions); 118 | // console.log('nnRegressionDeep.layers', nnRegressionDeep.layers); 119 | // console.log({ 120 | // predictions_unscaled, 121 | // predictions, 122 | // shape, 123 | // }); 124 | expect(predictions).to.have.lengthOf(input_x.length); 125 | expect(nnRegressionDeep.layers).to.have.lengthOf(3); 126 | expect(shape).to.eql([2, 1,]); 127 | expect(predictions_unscaled[ 0 ]).to.be.closeTo(24, 15); 128 | expect(predictions_unscaled[ 0 ]).to.be.closeTo(21, 15); 129 | }); 130 | it('should generate a wide network', async () => { 131 | const predictions = await nnRegressionWide.predict(input_x); 132 | const predictions_unscaled = 
predictions.map(pred=>DataSet.scalers.get('MEDV').descale(pred[0])); 133 | const shape = nnRegressionWide.getInputShape(predictions); 134 | // console.log('nnRegressionWide.layers', nnRegressionWide.layers); 135 | expect(predictions).to.have.lengthOf(input_x.length); 136 | expect(nnRegressionWide.layers).to.have.lengthOf(2); 137 | expect(shape).to.eql([2, 1,]); 138 | expect(predictions_unscaled[ 0 ]).to.be.closeTo(24, 15); 139 | expect(predictions_unscaled[ 0 ]).to.be.closeTo(21, 15); 140 | }); 141 | it('should generate a network from layers', async () => { 142 | const nnRegressionCustom = new DeepLearningRegression({ layerPreference:'custom', fit, }); 143 | await nnRegressionCustom.train(x_matrix, y_matrix, nnRegressionWide.layers); 144 | expect(nnRegressionCustom.layers).to.have.lengthOf(2); 145 | }); 146 | }); 147 | }); -------------------------------------------------------------------------------- /test/unit/text_embedding_spec.js: -------------------------------------------------------------------------------- 1 | import chai from 'chai'; 2 | import sinon from 'sinon'; 3 | import * as ms from 'modelscript'; 4 | import sinonChai from 'sinon-chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import 'babel-polyfill'; 7 | import { TextEmbedding, } from '../../index.js'; 8 | 9 | const expect = chai.expect; 10 | let housingDataCSV; 11 | let DataSet; 12 | 13 | chai.use(sinonChai); 14 | chai.use(chaiAsPromised); 15 | 16 | /** @test {TextEmbedding} */ 17 | describe('TextEmbedding', function () { 18 | this.timeout(10000); 19 | before(async function () { 20 | // housingDataCSV = await ms.csv.loadCSV('./test/mock/data/boston_housing_data.csv'); 21 | // DataSet = new ms.DataSet(housingDataCSV); 22 | // DataSet.fitColumns({ 23 | // columns: columns.map(scaleColumnMap), 24 | // returnData:false, 25 | // }); 26 | return true; 27 | }); 28 | /** @test {TextEmbedding#constructor} */ 29 | describe('constructor', () => { 30 | it('should export a named module class', () 
=> { 31 | const TE = new TextEmbedding(); 32 | const TEConfigured = new TextEmbedding({ test: 'prop', }); 33 | expect(TextEmbedding).to.be.a('function'); 34 | expect(TE).to.be.instanceOf(TextEmbedding); 35 | expect(TEConfigured.settings.test).to.eql('prop'); 36 | }); 37 | }); 38 | /** @test {TextEmbedding#train} */ 39 | describe('train', () => { 40 | it('should Load and Return Universal Sentence Encoder and Tokenizer', async function () { 41 | const NN = new TextEmbedding(); 42 | const trainedModel = await NN.train(); 43 | expect(trainedModel).to.be.an('object'); 44 | // expect(trainedModel).to.be.instanceOf(UniversalSentenceEncoder); 45 | // expect(trainedModel2).to.be.an('object'); 46 | }); 47 | }); 48 | /** @test {TextEmbedding#calculate} */ 49 | describe('calculate', () => { 50 | it('should throw an error if input is invalid', () => { 51 | const NN = new TextEmbedding(); 52 | expect(NN.calculate).to.be.a('function'); 53 | expect(NN.calculate.bind()).to.throw(/invalid input array of sentences/); 54 | expect(NN.calculate.bind(null, 'invalid')).to.throw(/invalid input array of sentences/); 55 | }); 56 | it('should train a TextEmbedder', async function () { 57 | const TextEmbedder = new TextEmbedding(); 58 | await TextEmbedder.train(); 59 | const sentences = [ 60 | 'Hello.', 61 | 'How are you?', 62 | ]; 63 | const predictions = await TextEmbedder.predict(sentences); 64 | const tokens = await TextEmbedder.tokenizer.encode('Hello, how are you?'); 65 | expect(tokens).to.be.an('array').that.includes.members([341, 4125, 8, 140, 31, 19, 54, ]); 66 | expect(predictions).to.be.an('array'); 67 | expect(predictions).to.have.lengthOf(2); 68 | expect(predictions[0]).to.have.lengthOf(512); 69 | }); 70 | }); 71 | }); --------------------------------------------------------------------------------