├── .gitignore
├── .travis.yml
├── example
│   ├── mnist.run.js
│   └── mnist.js
├── test
│   └── index.test.js
├── package.json
├── lib
│   ├── index.js
│   └── keras.js
├── LICENSE
└── README.md

/.gitignore:
--------------------------------------------------------------------------------
build/
node_modules/
tensorflow/
protobuf/
coverage/
package-lock.json
*.DS_Store
.editorconfig
*.h5

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
dist: trusty
sudo: false
language: node_js
node_js:
  - "14"
install:
  - npm install
script:
  - npm test

--------------------------------------------------------------------------------
/example/mnist.run.js:
--------------------------------------------------------------------------------
const tf = require('../');

const dataset = tf.keras.datasets.mnist();
const model = tf.keras.models.load(__dirname + '/mnist.h5');
model.summary();

--------------------------------------------------------------------------------
/test/index.test.js:
--------------------------------------------------------------------------------
'use strict';

const assert = require('assert');
const tf = require('../');

test('index', () => {
  console.log(tf.version);
  console.log(tf.constant([100, 200, 300]));
});

test('keras.datasets', () => {
  const dataset = tf.keras.datasets.mnist();
  console.log(dataset);
});

test('keras.layers and model', () => {
  const model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten({
      input_shape: [28, 28]
    }),
    tf.keras.layers.Dense(128, {
      activation: 'relu'
    }),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10)
  ]);
  console.log(model.summary());
  console.log(model.toString('json'));
});

--------------------------------------------------------------------------------
/example/mnist.js:
--------------------------------------------------------------------------------
const tf = require('../');

const dataset = tf.keras.datasets.mnist();
const model = tf.keras.models.Sequential([
  tf.keras.layers.Flatten({
    input_shape: [28, 28]
  }),
  tf.keras.layers.Dense(128, {
    activation: 'relu'
  }),
  tf.keras.layers.Dropout(0.2),
  tf.keras.layers.Dense(10)
]);
model.summary();

const loss_fn = tf.keras.losses.SparseCategoricalCrossentropy({ from_logits: true });
model.compile({
  optimizer: 'adam',
  loss: loss_fn,
  metrics: [ 'accuracy' ],
});
console.log('compiled model');

model.fit(dataset.train.x, dataset.train.y, { epochs: 5 });
console.log('train done');

model.evaluate(dataset.test.x, dataset.test.y, { verbose: 2 });
model.save(__dirname + '/mnist.h5');

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "tensorflow2",
  "version": "2.0.0",
  "description": "TensorFlow Node.js provides idiomatic JavaScript language bindings and a high-level API for Node.js users.",
  "main": "lib/index.js",
  "scripts": {
    "test": "jest",
    "test-cov": "npm run test -- --coverage && npm install codecov && ./node_modules/.bin/codecov",
    "clean": "rm -rf ./tensorflow ./protobuf && node-gyp clean",
    "postinstall": "npx bip install tensorflow"
  },
  "repository": {
    "type": "git",
    "url": "git+ssh://git@github.com/yorkie/tensorflow-nodejs.git"
  },
  "keywords": [
    "tensorflow"
  ],
  "author": "Yorkie Liu",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/yorkie/tensorflow-nodejs/issues"
  },
  "homepage": "https://github.com/yorkie/tensorflow-nodejs#readme",
  "dependencies": {
    "@pipcook/boa": "^1.2.0"
  },
  "devDependencies": {
    "jest": "20.0.4"
  }
}

--------------------------------------------------------------------------------
/lib/index.js:
--------------------------------------------------------------------------------
'use strict';

const boa = require('@pipcook/boa');
const pytf = boa.import('tensorflow');

module.exports = {

  /**
   * Returns the TensorFlow version information.
   */
  get version() {
    const pyver = pytf.version;
    return {
      COMPILER_VERSION: pyver.COMPILER_VERSION,
      GIT_VERSION: pyver.GIT_VERSION,
      GRAPH_DEF_VERSION: pyver.GRAPH_DEF_VERSION,
      GRAPH_DEF_VERSION_MIN_CONSUMER: pyver.GRAPH_DEF_VERSION_MIN_CONSUMER,
      GRAPH_DEF_VERSION_MIN_PRODUCER: pyver.GRAPH_DEF_VERSION_MIN_PRODUCER,
      VERSION: pyver.VERSION
    };
  },

  /**
   * Creates a constant tensor from a tensor-like object.
   * @param {*} val
   * @param {*} dtype
   * @param {*} shape
   * @param {*} name
   */
  constant(val, dtype, shape, name = 'Const') {
    return pytf.constant(val, dtype, shape, name);
  },

  /**
   * Implementation of the Keras API meant to be a high-level API for TensorFlow.
   */
  keras: require('./keras'),
};

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2017 Yazhong Liu

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# TensorFlow for Node.js

| NPM | Dependency | Build | Coverage |
|-----|------------|-------|----------|
|[![NPM version][npm-image]][npm-url]|[![Dependency Status][david-image]][david-url]|[![Build Status][travis-image]][travis-url]|[![Coverage][coveralls-image]][coveralls-url]|

[npm-image]: https://img.shields.io/npm/v/tensorflow2.svg?style=flat-square
[npm-url]: https://npmjs.org/package/tensorflow2
[travis-image]: https://img.shields.io/travis/yorkie/tensorflow-nodejs.svg?style=flat-square
[travis-url]: https://travis-ci.org/yorkie/tensorflow-nodejs
[david-image]: http://img.shields.io/david/yorkie/tensorflow-nodejs.svg?style=flat-square
[david-url]: https://david-dm.org/yorkie/tensorflow-nodejs
[coveralls-image]: https://img.shields.io/codecov/c/github/yorkie/tensorflow-nodejs.svg?style=flat-square
[coveralls-url]: https://codecov.io/github/yorkie/tensorflow-nodejs?branch=master

This library wraps the [TensorFlow][] Python library for Node.js developers. It is powered by
[@pipcook/boa](https://github.com/alibaba/pipcook/blob/master/docs/manual/intro-to-boa.md).

**Notice:** This project is still under active development and is not guaranteed to have a
stable API. This is especially true because the underlying TensorFlow C API has not yet been
stabilized either.

## Installation

```sh
$ npm install tensorflow2 --save
```

## Usage

```js
const tf = require('tensorflow2');

// load the mnist dataset.
const dataset = tf.keras.datasets.mnist();
// {
//   train: { x: [Getter], y: [Getter] },
//   test: { x: [Getter], y: [Getter] }
// }

// create the model.
const model = tf.keras.models.Sequential([
  tf.keras.layers.Flatten({
    input_shape: [28, 28]
  }),
  tf.keras.layers.Dense(128, {
    activation: 'relu'
  }),
  tf.keras.layers.Dropout(0.2),
  tf.keras.layers.Dense(10)
]);
model.summary();

// compile the model.
const loss_fn = tf.keras.losses.SparseCategoricalCrossentropy({ from_logits: true });
model.compile({
  optimizer: 'adam',
  loss: loss_fn,
  metrics: [ 'accuracy' ],
});

// train the model.
model.fit(dataset.train.x, dataset.train.y, { epochs: 5 });

// save the model.
model.save('your-model.h5');
```

See [example/mnist.js](./example/mnist.js) for a complete example.

## Tests

```sh
$ npm test
```

## License

[MIT](./LICENSE) licensed © 2020

[TensorFlow]: http://tensorflow.org

--------------------------------------------------------------------------------
/lib/keras.js:
--------------------------------------------------------------------------------
'use strict';

const boa = require('@pipcook/boa');
const { tuple } = boa.builtins();
const { keras } = boa.import('tensorflow');

/**
 * Model groups layers into an object with training and inference features.
 */
class Model {
  /**
   * @param {*} pymodel the Python model.
   */
  constructor(pymodel) {
    this.pymodel = pymodel;
  }
  /**
   * Compile the model.
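   * Options are passed to the underlying Python `compile()` as keyword arguments
   * via `boa.kwargs`.
   * @example
   * // usage sketch (mirrors example/mnist.js; `loss_fn` is a Keras loss instance):
   * model.compile({ optimizer: 'adam', loss: loss_fn, metrics: [ 'accuracy' ] });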
   * @param {object} opts
   * @param {string} opts.optimizer
   * @param {*} opts.loss
   * @param {*} opts.metrics
   */
  compile(opts) {
    return this.pymodel.compile(boa.kwargs(opts));
  }
  /**
   * Train the model with tensors x and y.
   * @param {*} x
   * @param {*} y
   * @param {*} opts
   * @param {*} opts.epochs
   */
  fit(x, y, opts) {
    return this.pymodel.fit(x, y, boa.kwargs(opts));
  }
  /**
   * Evaluate the model.
   * @param {*} x
   * @param {*} y
   * @param {*} opts
   * @param {number} opts.verbose
   */
  evaluate(x, y, opts) {
    return this.pymodel.evaluate(x, y, boa.kwargs(opts));
  }
  /**
   * Generates output predictions for the input samples.
   * @param {*} x
   * @param {*} opts
   */
  predict(x, opts) {
    return this.pymodel.predict(x, boa.kwargs(opts));
  }
  /**
   * Save the model.
   * @param {*} pathname
   */
  save(pathname) {
    return this.pymodel.save(pathname);
  }
  /**
   * Output the summary of this model.
   */
  summary() {
    return this.pymodel.summary();
  }
  /**
   * Set whether the model is trainable.
   */
  setTrainable(val) {
    this.pymodel.trainable = !!val;
  }
  /**
   * Returns a JSON/YAML string containing the network configuration.
   * @param {*} format
   */
  toString(format = 'json') {
    if (format === 'yaml') {
      return this.pymodel.to_yaml();
    } else {
      return this.pymodel.to_json();
    }
  }
}

class Dataset {
  constructor(data) {
    this._x = data[0];
    this._y = data[1];
  }
  get x() {
    return this._x;
  }
  get y() {
    return this._y;
  }
}

module.exports = {

  /**
   * Keras Applications are canned architectures with pre-trained weights.
   */
  applications: {
    /**
     * Instantiates the MobileNet architecture.
     * @param {*} opts
     */
    MobileNet(opts) {
      const pymodel = opts ?
        keras.applications.MobileNet(boa.kwargs(opts)) :
        keras.applications.MobileNet();
      return new Model(pymodel);
    },
    /**
     * Instantiates the ResNet50 architecture.
     * @param {*} opts
     */
    ResNet50(opts) {
      const pymodel = opts ?
        keras.applications.ResNet50(boa.kwargs(opts)) :
        keras.applications.ResNet50();
      return new Model(pymodel);
    },
    /**
     * Instantiates the Inception-ResNet v2 architecture.
     * @param {*} opts
     */
    InceptionResNetV2(opts) {
      const pymodel = opts ?
        keras.applications.InceptionResNetV2(boa.kwargs(opts)) :
        keras.applications.InceptionResNetV2();
      return new Model(pymodel);
    },
    /**
     * Instantiates the Inception v3 architecture.
     * @param {*} opts
     */
    InceptionV3(opts) {
      const pymodel = opts ?
        keras.applications.InceptionV3(boa.kwargs(opts)) :
        keras.applications.InceptionV3();
      return new Model(pymodel);
    }
  },

  /**
   * datasets
   */
  datasets: {
    /**
     * Loads the MNIST dataset.
     * @param {string} path path where to cache the dataset locally (relative to ~/.keras/datasets).
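     * @returns {{train: Dataset, test: Dataset}} train/test splits whose `x`/`y` getters expose the loaded arrays.
     * @example
     * // usage sketch (mirrors example/mnist.js; this module is exposed as `tf.keras`):
     * const dataset = tf.keras.datasets.mnist();
     * model.fit(dataset.train.x, dataset.train.y, { epochs: 5 });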
     */
    mnist(path) {
      let dataset;
      if (typeof path === 'string') {
        const opts = boa.kwargs({ path });
        dataset = keras.datasets.mnist.load_data(opts);
      } else {
        dataset = keras.datasets.mnist.load_data();
      }
      const [ train, test ] = dataset;
      return {
        train: new Dataset(train),
        test: new Dataset(test),
      };
    },
    /**
     * Loads the Fashion-MNIST dataset.
     * @param {*} path
     */
    fashion_mnist(path) {
      let dataset;
      if (typeof path === 'string') {
        const opts = boa.kwargs({ path });
        dataset = keras.datasets.fashion_mnist.load_data(opts);
      } else {
        dataset = keras.datasets.fashion_mnist.load_data();
      }
      const [ train, test ] = dataset;
      return {
        train: new Dataset(train),
        test: new Dataset(test),
      };
    },
    /**
     * Loads the IMDB dataset.
     * @param {*} path
     * @param {*} opts
     */
    imdb(path, opts) {
      let dataset;
      if (typeof path === 'string') {
        const kwargs = boa.kwargs(Object.assign({ path }, opts));
        dataset = keras.datasets.imdb.load_data(kwargs);
      } else {
        dataset = keras.datasets.imdb.load_data();
      }
      const [ train, test ] = dataset;
      return {
        train: new Dataset(train),
        test: new Dataset(test),
      };
    },
  },

  /**
   * models
   */
  models: {
    /**
     * Loads a model saved via `model.save()`.
     */
    load: keras.models.load_model,
    /**
     * Sequential model.
     */
    Sequential(layers, name) {
      const pymodel = keras.models.Sequential(layers, name);
      return new Model(pymodel);
    }
  },

  /**
   * layers
   */
  layers: {

    /**
     * Just your regular densely-connected NN layer.
     * @param {*} units
     * @param {*} opts
     */
    Dense(units, opts) {
      if (!opts) {
        return keras.layers.Dense(units);
      }
      return keras.layers.Dense(units, boa.kwargs(opts));
    },
    /**
     * Applies Dropout to the input.
     */
    Dropout: keras.layers.Dropout,
    /**
     * Turns positive integers (indexes) into dense vectors of fixed size.
     * @param {*} inputDim
     * @param {*} outputDim
     * @param {*} opts
     */
    Embedding(inputDim, outputDim, opts) {
      if (!opts) {
        return keras.layers.Embedding(inputDim, outputDim);
      }
      return keras.layers.Embedding(inputDim, outputDim, boa.kwargs(opts));
    },
    /**
     * Flattens the input. Does not affect the batch size.
     * @param {*} opts
     */
    Flatten(opts) {
      if (!opts) {
        return keras.layers.Flatten();
      }
      if (opts.input_shape) {
        opts.input_shape = tuple(opts.input_shape);
      }
      return keras.layers.Flatten(boa.kwargs(opts));
    },
    /**
     * Long Short-Term Memory layer - Hochreiter 1997.
     * @param {*} units
     * @param {*} opts
     */
    LSTM(units, opts) {
      if (!opts) {
        return keras.layers.LSTM(units);
      }
      return keras.layers.LSTM(units, boa.kwargs(opts));
    },
    /**
     * Base class for recurrent layers.
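     * The `cell` argument is forwarded as-is to `keras.layers.RNN`.
     * @example
     * // hypothetical sketch -- `cell` is assumed to be a Keras cell object obtained
     * // from the underlying Python API (this module does not wrap cells):
     * const rnn = tf.keras.layers.RNN(cell, { return_sequences: true });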
     * @param {*} cell
     * @param {*} opts
     */
    RNN(cell, opts) {
      if (!opts) {
        return keras.layers.RNN(cell);
      }
      return keras.layers.RNN(cell, boa.kwargs(opts));
    }
  },

  /**
   * losses
   */
  losses: {
    SparseCategoricalCrossentropy(opts = {}) {
      return keras.losses.SparseCategoricalCrossentropy(boa.kwargs(opts));
    },
  },

  /**
   * utils
   */
  utils: {
    /**
     * Converts a Keras model to dot format and saves it to a file.
     * @param {Model} model
     * @param {*} opts
     */
    plot(model, opts = {}) {
      if (!(model instanceof Model)) {
        throw new TypeError('must be a Model');
      }
      return keras.utils.plot_model(model.pymodel, boa.kwargs(opts));
    }
  }
};

--------------------------------------------------------------------------------