├── src
│   ├── flux-metadata.json
│   ├── __version__.py
│   ├── icon.png
│   ├── favicon.ico
│   ├── __init__.py
│   ├── bigdl-metadata.json
│   ├── dl4j-metadata.json
│   ├── uff-metadata.json
│   ├── paddle-metadata.json
│   ├── base.js
│   ├── tar.js
│   ├── mlnet-metadata.json
│   ├── flux.js
│   ├── gzip.js
│   ├── bson.js
│   ├── view-grapher.css
│   ├── view-sidebar.css
│   ├── server.py
│   └── numpy.js
├── setup
│   ├── icon.ico
│   ├── icon.icns
│   ├── background.png
│   ├── background@2x.png
│   ├── version.js
│   ├── background.svg
│   ├── notarize.js
│   ├── cask.js
│   ├── winget.js
│   └── icon.svg
├── .github
│   ├── logo.png
│   ├── screenshot.png
│   └── workflows
│       ├── build.yml
│       └── publish.yml
├── .vscode
│   ├── settings.json
│   └── launch.json
├── .gitignore
├── setup.cfg
├── tools
│   ├── update_icon
│   ├── ncnn
│   ├── torch
│   ├── chainer
│   ├── darknet
│   ├── tengine
│   ├── update_icon_preview.html
│   ├── coreml-script.py
│   ├── mediapipe
│   ├── dl4j
│   ├── mlnet-script.py
│   ├── mxnet
│   ├── uff
│   ├── mlnet
│   ├── bigdl
│   ├── mxnet-script.py
│   ├── paddle
│   ├── caffe
│   ├── cntk
│   ├── uff.proto
│   ├── coreml
│   ├── keras
│   ├── sklearn
│   ├── mnn
│   ├── armnn
│   ├── pytorch
│   ├── pytorch-script.py
│   ├── tflite
│   ├── update_pbjs.js
│   ├── onnx
│   ├── tf
│   ├── caffe2-script.py
│   ├── onnx-script.py
│   └── sklearn-script.py
├── test
│   └── .vscode
│       └── launch.json
├── .eslintrc.json
├── DEVELOPMENT.md
├── LICENSE
├── package.json
├── electron-builder.yml
├── README.md
├── Makefile
└── setup.py
/src/flux-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | ]
--------------------------------------------------------------------------------
/src/__version__.py:
--------------------------------------------------------------------------------
1 | __version__ = '0.0.0'
--------------------------------------------------------------------------------
/setup/icon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/setup/icon.ico
--------------------------------------------------------------------------------
/src/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/src/icon.png
--------------------------------------------------------------------------------
/.github/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/.github/logo.png
--------------------------------------------------------------------------------
/setup/icon.icns:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/setup/icon.icns
--------------------------------------------------------------------------------
/src/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/src/favicon.ico
--------------------------------------------------------------------------------
/setup/background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/setup/background.png
--------------------------------------------------------------------------------
/.github/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/.github/screenshot.png
--------------------------------------------------------------------------------
/setup/background@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/setup/background@2x.png
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "files.exclude": {
3 | "dist": true,
4 | "node_modules": true,
5 | "third_party": true
6 | }
7 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .DS_Store?
3 | .nyc_output/*
4 | coverage/*
5 | dist/*
6 | node_modules/*
7 | third_party/*
8 | test/data/*
9 | package-lock.json
10 | *.pyc
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [build]
2 | build-base = ./dist
3 | build-lib = ./dist/lib
4 |
5 | [bdist_wheel]
6 | universal=1
7 | dist-dir = ./dist/dist
8 |
9 | [egg_info]
10 | egg_base = ./dist
11 |
--------------------------------------------------------------------------------
/setup/version.js:
--------------------------------------------------------------------------------
1 |
2 | const fs = require('fs');
3 |
4 | const packageManifestFile = process.argv[2];
5 | const packageManifest = JSON.parse(fs.readFileSync(packageManifestFile, 'utf-8'));
6 | packageManifest.version = Array.from((parseInt(packageManifest.version.split('.').join(''), 10) + 1).toString()).join('.');
7 | fs.writeFileSync(packageManifestFile, JSON.stringify(packageManifest, null, 4) + '\n', 'utf-8');
8 |
--------------------------------------------------------------------------------
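The bump logic in `setup/version.js` above is terse: it strips the dots out of the version, increments the resulting integer, and re-inserts a dot between every digit. A minimal Python sketch of the same arithmetic, assuming single-digit version components (the only case this trick handles):

```python
# Same arithmetic as setup/version.js, assuming single-digit components.
def bump(version: str) -> str:
    number = int(version.replace('.', ''))  # "4.3.2" -> 432
    return '.'.join(str(number + 1))        # 433 -> "4.3.3"

assert bump('4.3.2') == '4.3.3'
assert bump('4.3.9') == '4.4.0'  # rolls over into the next component
```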
/setup/background.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/setup/background.svg
--------------------------------------------------------------------------------
/tools/update_icon:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | root=$(cd $(dirname ${0})/..; pwd)
6 | setup=${root}/setup
7 | src=${root}/src
8 | icon=${root}/build/icon-update
9 |
10 | mkdir -p ${icon}
11 | pushd ${icon} > /dev/null
12 | npm install --silent --no-save icon-convert
13 | npx icon-convert --types ico,icns,png512 --out ${setup} ${setup}/icon.svg
14 | mv ${setup}/icon_512x512.png ${src}/icon.png
15 | popd > /dev/null
16 |
17 | rm -rf ${icon}
--------------------------------------------------------------------------------
/test/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // Use IntelliSense to learn about possible attributes.
3 | // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "type": "node",
9 | "request": "launch",
10 | "name": "Launch Program",
11 | "program": "${workspaceFolder}/test.js"
12 | }
13 | ]
14 | }
--------------------------------------------------------------------------------
/tools/ncnn:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "ncnn clean"
12 | rm -rf "./third_party/src/ncnn"
13 | }
14 |
15 | sync() {
16 | bold "ncnn sync"
17 | [ -d "./third_party/src/ncnn" ] || git clone --quiet https://github.com/Tencent/ncnn.git "./third_party/src/ncnn"
18 | pushd "./third_party/src/ncnn" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | while [ "$#" != 0 ]; do
24 | command="$1" && shift
25 | case "${command}" in
26 | "clean") clean;;
27 | "sync") sync;;
28 | esac
29 | done
30 |
--------------------------------------------------------------------------------
/tools/torch:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "torch clean"
12 | rm -rf "./third_party/src/torch"
13 | }
14 |
15 | sync() {
16 | bold "torch sync"
17 | [ -d "./third_party/src/torch" ] || git clone --quiet https://github.com/torch/torch7.git "./third_party/src/torch"
18 | pushd "./third_party/src/torch" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | while [ "$#" != 0 ]; do
24 | command="$1" && shift
25 | case "${command}" in
26 | "clean") clean;;
27 | "sync") sync;;
28 | esac
29 | done
30 |
--------------------------------------------------------------------------------
/tools/chainer:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "chainer clean"
12 | rm -rf "./third_party/src/chainer"
13 | }
14 |
15 | sync() {
16 | bold "chainer sync"
17 | [ -d "./third_party/src/chainer" ] || git clone --quiet https://github.com/chainer/chainer.git "./third_party/src/chainer"
18 | pushd "./third_party/src/chainer" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | while [ "$#" != 0 ]; do
24 | command="$1" && shift
25 | case "${command}" in
26 | "clean") clean;;
27 | "sync") sync;;
28 | esac
29 | done
30 |
--------------------------------------------------------------------------------
/tools/darknet:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "darknet clean"
12 | rm -rf "./third_party/src/darknet"
13 | }
14 |
15 | sync() {
16 | bold "darknet sync"
17 | [ -d "./third_party/src/darknet" ] || git clone --quiet https://github.com/AlexeyAB/darknet.git "./third_party/src/darknet"
18 | pushd "./third_party/src/darknet" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | while [ "$#" != 0 ]; do
24 | command="$1" && shift
25 | case "${command}" in
26 | "clean") clean;;
27 | "sync") sync;;
28 | esac
29 | done
30 |
--------------------------------------------------------------------------------
/tools/tengine:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "tengine clean"
12 | rm -rf ./third_party/src/tengine
13 | }
14 |
15 | sync() {
16 | bold "tengine sync"
17 | [ -d "./third_party/src/tengine" ] || git clone --quiet --branch master https://github.com/OAID/Tengine.git "./third_party/src/tengine"
18 | pushd "./third_party/src/tengine" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | while [ "$#" != 0 ]; do
24 | command="$1" && shift
25 | case "${command}" in
26 | "clean") clean;;
27 | "sync") sync;;
28 | esac
29 | done
30 |
--------------------------------------------------------------------------------
/tools/update_icon_preview.html:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shinh/netron/master/tools/update_icon_preview.html
--------------------------------------------------------------------------------
/setup/notarize.js:
--------------------------------------------------------------------------------
1 |
2 | const notarize = require('electron-notarize');
3 |
4 | exports.default = function (context) {
5 | if (process.platform === 'darwin' && context.electronPlatformName === 'darwin') {
6 | const config = context.packager.info.options.config;
7 | if (process.env.CSC_IDENTITY_AUTO_DISCOVERY !== 'false' && (!config || !config.mac || config.mac.identity !== null)) {
8 | return notarize.notarize({
9 | appBundleId: context.packager.info.config.appId,
10 | appPath: context.appOutDir + '/' + context.packager.appInfo.productFilename + '.app',
11 | appleApiKey: process.env.API_KEY_ID,
12 | appleApiIssuer: process.env.API_KEY_ISSUER_ID
13 | });
14 | }
15 | }
16 | };
--------------------------------------------------------------------------------
/tools/coreml-script.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import os
4 | import sys
5 |
6 | def convert():
7 | file = sys.argv[2]
8 | base, extension = os.path.splitext(file)
9 | if extension == '.h5':
10 | import coremltools
11 | coreml_model = coremltools.converters.keras.convert(file)
12 | coreml_model.save(base + '.mlmodel')
13 | elif extension == '.pkl':
14 | import coremltools
15 | import sklearn
16 | sklearn_model = sklearn.externals.joblib.load(file)
17 | coreml_model = coremltools.converters.sklearn.convert(sklearn_model)
18 | coreml_model.save(base + '.mlmodel')
19 |
20 | if __name__ == '__main__':
21 | command_table = { 'convert': convert }
22 | command = sys.argv[1]
23 | command_table[command]()
24 |
--------------------------------------------------------------------------------
/tools/mediapipe:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
6 |
7 | bold() {
8 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
9 | }
10 |
11 | clean() {
12 | bold "mediapipe clean"
13 | rm -rf "./third_party/src/mediapipe"
14 | }
15 |
16 | sync() {
17 | bold "mediapipe sync"
18 | [ -d "./third_party/src/mediapipe" ] || git clone --quiet https://github.com/google/mediapipe.git "./third_party/src/mediapipe"
19 | pushd "./third_party/src/mediapipe" > /dev/null
20 | git pull --quiet --prune
21 | popd > /dev/null
22 | }
23 |
24 | while [ "$#" != 0 ]; do
25 | command="$1" && shift
26 | case "${command}" in
27 | "clean") clean;;
28 | "sync") sync;;
29 | "schema") schema;;
30 | esac
31 | done
32 |
--------------------------------------------------------------------------------
/tools/dl4j:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 | function exit_trap() {
6 | popd > /dev/null
7 | }
8 | trap exit_trap EXIT
9 |
10 | bold() {
11 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
12 | }
13 |
14 | clean() {
15 | bold "deeplearning4j clean"
16 | rm -rf "./third_party/src/dl4j"
17 | }
18 |
19 | sync() {
20 | bold "deeplearning4j sync"
21 | [ -d "./third_party/src/dl4j" ] || git clone --quiet https://github.com/eclipse/deeplearning4j.git "./third_party/src/dl4j"
22 | pushd "./third_party/src/dl4j" > /dev/null
23 | git pull --quiet --prune
24 | popd > /dev/null
25 | }
26 |
27 | while [ "$#" != 0 ]; do
28 | command="$1" && shift
29 | case "${command}" in
30 | "clean") clean;;
31 | "sync") sync;;
32 | esac
33 | done
34 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "es6": true,
4 | "browser": true,
5 | "node": true
6 | },
7 | "extends": "eslint:recommended",
8 | "parserOptions": {
9 | "ecmaVersion": 2015,
10 | "sourceType": "module"
11 | },
12 | "rules": {
13 | "brace-style": [ "error", "stroustrup", { "allowSingleLine": true } ],
14 | "indent": [ "error", 4 ],
15 | "linebreak-style": "off",
16 | "semi": 2,
17 | "no-trailing-spaces": "error"
18 | },
19 | "overrides": [
20 | { "files": ["src/*-proto.js"], "rules": { "brace-style": "off", "no-trailing-spaces": "off" } },
21 | { "files": ["src/*-schema.js"], "rules": { "indent": "off", "semi": "off" } }
22 | ],
23 | "globals": {
24 | "flatbuffers": "readonly",
25 | "protobuf": "readonly"
26 | }
27 | }
--------------------------------------------------------------------------------
/tools/mlnet-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 | from __future__ import print_function
4 |
5 | import io
6 | import json
7 | import os
8 | import re
9 | import sys
10 |
11 | def metadata():
12 | json_file = os.path.join(os.path.dirname(__file__), '../src/mlnet-metadata.json')
13 | json_data = open(json_file).read()
14 | json_root = json.loads(json_data)
15 | manifest_file = os.path.join(os.path.dirname(__file__), '../third_party/src/mlnet/test/BaselineOutput/Common/EntryPoints/core_manifest.json')
16 | manifest_data = open(manifest_file).read()
17 | manifest_root = json.loads(manifest_data)
18 | schema_map = {}
19 | # for manifest in manifest_root['EntryPoints']:
20 | # print(manifest['Name'])
21 |
22 | if __name__ == '__main__':
23 | command_table = { 'metadata': metadata }
24 | command = sys.argv[1]
25 | command_table[command]()
26 |
--------------------------------------------------------------------------------
/tools/mxnet:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "mxnet clean"
12 | rm -rf "./third_party/src/mxnet"
13 | }
14 |
15 | sync() {
16 | bold "mxnet sync"
17 | [ -d "./third_party/src/mxnet" ] || git clone --quiet --recursive https://github.com/apache/incubator-mxnet.git "./third_party/src/mxnet"
18 | pushd "./third_party/src/mxnet" > /dev/null
19 | git pull --quiet --prune
20 | git submodule sync --quiet
21 | git submodule update --quiet --init --recursive
22 | popd > /dev/null
23 | }
24 |
25 | metadata() {
26 | bold "mxnet metadata"
27 | # python ./tools/mxnet-script.py
28 | }
29 |
30 | while [ "$#" != 0 ]; do
31 | command="$1" && shift
32 | case "${command}" in
33 | "clean") clean;;
34 | "sync") sync;;
35 | "metadata") metadata;;
36 | esac
37 | done
38 |
--------------------------------------------------------------------------------
/tools/uff:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "torch clean"
12 | rm -rf "./third_party/src/torch"
13 | }
14 |
15 | sync() {
16 | bold "uff sync"
17 | [ -d "./third_party/src/tensorrt" ] || git clone --quiet https://github.com/NVIDIA/TensorRT.git "./third_party/src/tensorrt"
18 | pushd "./third_party/src/tensorrt" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | schema() {
24 | bold "uff schema"
25 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case --decode-text -r uff -o ./src/uff-proto.js ./tools/uff.proto
26 | }
27 |
28 | while [ "$#" != 0 ]; do
29 | command="$1" && shift
30 | case "${command}" in
31 | "sync") sync;;
32 | "schema") schema;;
33 | esac
34 | done
35 |
--------------------------------------------------------------------------------
/DEVELOPMENT.md:
--------------------------------------------------------------------------------
1 | # How to Develop Netron
2 |
3 | Netron can run as either an [Electron](https://electronjs.org) app or a Python web server.
4 |
5 | ## Develop the Electron app
6 |
7 | To start the Electron app, install [Node.js](https://nodejs.org) and run:
8 |
9 | ```bash
10 | git clone https://github.com/lutzroeder/netron.git
11 | cd netron
12 | npm install
13 | npx electron .
14 | ```
15 |
16 | To debug the Electron app, use [Visual Studio Code](https://code.visualstudio.com) and install the [Debugger for Chrome](https://marketplace.visualstudio.com/items?itemName=msjsdiag.debugger-for-chrome) extension. Open the `./netron` root folder and press `F5`. To attach the debugger to a render process, select the `Debug` tab and choose `Debug Renderer Process` before launching.
17 |
18 | ## Develop the Python server
19 |
20 | To build and launch the Python server run:
21 |
22 | ```bash
23 | git clone https://github.com/lutzroeder/netron.git
24 | cd netron
25 | npm install
26 | python setup.py build
27 | export PYTHONPATH=dist/lib:${PYTHONPATH}
28 | python -c "import netron; netron.start()"
29 | ```
30 |
--------------------------------------------------------------------------------
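Once built, the `netron` package exposes `start`, `stop`, `serve` and `wait` (see `src/__init__.py`). A short sketch of serving a model directly from Python, assuming a local `model.onnx`; the arguments mirror the `serve(...)` call in `src/__init__.py`:

```python
import netron

# Serve a model file and open a browser tab; the port/host defaults match the CLI flags.
netron.serve('model.onnx', None, log=False, browse=True, port=8080, host='localhost')
netron.wait()  # block until the server shuts down
```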
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.2.0",
3 | "configurations": [
4 | {
5 | "name": "Launch Program",
6 | "type": "node",
7 | "request": "launch",
8 | "program": "${workspaceFolder}/src/app.js",
9 | "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron",
10 | "windows": {
11 | "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron.cmd"
12 | }
13 | },
14 | {
15 | "name": "Debug Renderer Process",
16 | "type": "chrome",
17 | "request": "launch",
18 | "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron",
19 | "windows": {
20 | "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/electron.cmd"
21 | },
22 | "runtimeArgs": [
23 | "${workspaceRoot}",
24 | "--enable-logging",
25 | "--remote-debugging-port=9222"
26 | ],
27 | "webRoot": "${workspaceRoot}"
28 | }
29 | ]
30 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Lutz Roeder
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/tools/mlnet:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | clean() {
16 | bold "mlnet clean"
17 | rm -rf "./third_party/src/mlnet"
18 | }
19 |
20 | sync() {
21 | bold "mlnet sync"
22 | [ -d "./third_party/src/mlnet" ] || git clone --quiet --recursive https://github.com/dotnet/machinelearning.git "./third_party/src/mlnet"
23 | pushd "./third_party/src/mlnet" > /dev/null
24 | git pull --quiet --prune
25 | git submodule sync --quiet
26 | git submodule update --quiet --init --recursive
27 | popd > /dev/null
28 | }
29 |
30 | metadata() {
31 | bold "mlnet metadata"
32 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
33 | ${python} ./tools/mlnet-script.py metadata
34 | }
35 |
36 | while [ "$#" != 0 ]; do
37 | command="$1" && shift
38 | case "${command}" in
39 | "clean") clean;;
40 | "sync") sync;;
41 | "metadata") metadata;;
42 | esac
43 | done
44 |
--------------------------------------------------------------------------------
/tools/bigdl:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "bigdl clean"
12 | rm -rf ./third_party/src/bigdl
13 | }
14 |
15 | sync() {
16 | bold "bigdl sync"
17 | [ -d "./third_party/src/bigdl" ] || git clone --quiet --recursive https://github.com/intel-analytics/BigDL.git "./third_party/src/bigdl"
18 | pushd "./third_party/src/bigdl" > /dev/null
19 | git pull --quiet --prune
20 | git submodule sync --quiet
21 | git submodule update --quiet --init --recursive
22 | popd > /dev/null
23 | }
24 |
25 | schema() {
26 | bold "bigdl schema"
27 | [[ $(grep -U $'\x0D' ./src/bigdl-proto.js) ]] && crlf=1
28 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case -r bigdl -o ./src/bigdl-proto.js ./third_party/src/bigdl/spark/dl/src/main/resources/serialization/bigdl.proto
29 | if [[ -n ${crlf} ]]; then
30 | unix2dos --quiet --newfile ./src/bigdl-proto.js ./src/bigdl-proto.js
31 | fi
32 | }
33 |
34 | while [ "$#" != 0 ]; do
35 | command="$1" && shift
36 | case "${command}" in
37 | "clean") clean;;
38 | "sync") sync;;
39 | "schema") schema;;
40 | esac
41 | done
42 |
--------------------------------------------------------------------------------
/tools/mxnet-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 | from __future__ import print_function
4 |
5 | import io
6 | import json
7 | import os
8 | import pydoc
9 | import sys
10 |
11 | json_file = os.path.join(os.path.dirname(__file__), '../src/mxnet-metadata.json')
12 | json_data = open(json_file).read()
13 | json_root = json.loads(json_data)
14 |
15 | for entry in json_root:
16 | name = entry['name']
17 | schema = entry['schema']
18 | class_name = 'mxnet.symbol.' + name
19 | class_definition = pydoc.locate(class_name)
20 | if not class_definition:
21 | print('NOT FOUND: ' + class_name)
22 | # raise Exception('\'' + class_name + '\' not found.')
23 | else:
24 | docstring = class_definition.__doc__
25 | if docstring:
26 | schema['description'] = docstring
27 | # if not docstring:
28 | # print('NO DOCSTRING: ' + class_name)
29 | # raise Exception('\'' + class_name + '\' missing __doc__.')
30 | # print(docstring)
31 |
32 | with io.open(json_file, 'w', newline='') as fout:
33 | json_data = json.dumps(json_root, sort_keys=True, indent=2)
34 | for line in json_data.splitlines():
35 | line = line.rstrip()
36 | if sys.version_info[0] < 3:
37 | line = unicode(line)
38 | fout.write(line)
39 | fout.write('\n')
40 |
41 |
--------------------------------------------------------------------------------
/tools/paddle:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "paddle clean"
12 | rm -rf "./third_party/src/paddle"
13 | }
14 |
15 | sync() {
16 | bold "paddle sync"
17 | [ -d "./third_party/src/paddle" ] || git clone --quiet --recursive https://github.com/PaddlePaddle/Paddle.git "./third_party/src/paddle"
18 | pushd "./third_party/src/paddle" > /dev/null
19 | git pull --quiet --prune
20 | git submodule sync --quiet
21 | git submodule update --quiet --init --recursive
22 | popd > /dev/null
23 | }
24 |
25 | schema() {
26 | bold "paddle schema"
27 | [[ $(grep -U $'\x0D' ./src/paddle-proto.js) ]] && crlf=1
28 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case -r paddle -o ./src/paddle-proto.js ./third_party/src/paddle/paddle/fluid/framework/framework.proto
29 | if [[ -n ${crlf} ]]; then
30 | unix2dos --quiet --newfile ./src/paddle-proto.js ./src/paddle-proto.js
31 | fi
32 | }
33 |
34 | while [ "$#" != 0 ]; do
35 | command="$1" && shift
36 | case "${command}" in
37 | "clean") clean;;
38 | "sync") sync;;
39 | "schema") schema;;
40 | esac
41 | done
42 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | from .server import start
3 | from .server import stop
4 | from .server import wait
5 | from .server import serve
6 | from .__version__ import __version__
7 |
8 | import argparse
9 | import sys
10 | import os
11 |
12 | def main():
13 | parser = argparse.ArgumentParser(description='Viewer for neural network, deep learning and machine learning models.')
14 | parser.add_argument('file', metavar='MODEL_FILE', help='model file to serve', nargs='?', default=None)
15 | parser.add_argument('-v', '--version', help="print version", action='store_true')
16 | parser.add_argument('-b', '--browse', help='launch web browser', action='store_true')
17 | parser.add_argument('-p', '--port', help='port to serve (default: 8080)', type=int, default=8080)
18 | parser.add_argument('--host', help="host to serve (default: 'localhost')", default='localhost')
19 | parser.add_argument('--log', help='log details to console', action='store_true')
20 | args = parser.parse_args()
21 | if args.file and not os.path.exists(args.file):
22 | print("Model file '" + args.file + "' does not exist.")
23 | sys.exit(2)
24 | if args.version:
25 | print(__version__)
26 | sys.exit(0)
27 | serve(args.file, None, log=args.log, browse=args.browse, port=args.port, host=args.host)
28 | wait()
29 | sys.exit(0)
30 |
31 | if __name__ == '__main__':
32 | main()
33 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "netron",
3 | "productName": "Netron",
4 | "author": {
5 | "name": "Lutz Roeder",
6 | "email": "lutzroeder@users.noreply.github.com",
7 | "url": "https://www.lutzroeder.com"
8 | },
9 | "version": "4.3.2",
10 | "description": "Visualizer for neural network, deep learning and machine learning models",
11 | "license": "MIT",
12 | "repository": "lutzroeder/netron",
13 | "main": "src/app.js",
14 | "scripts": {
15 | "start": "[ -d node_modules ] || npm install && npx electron .",
16 | "start_server": "[ -d node_modules ] || npm install && python setup.py --quiet build && PYTHONPATH=./dist/lib python -c 'import netron; netron.main()' $@",
17 | "postinstall": "electron-builder install-app-deps",
18 | "postuninstall": "electron-builder install-app-deps"
19 | },
20 | "dependencies": {
21 | "d3": "5.16.0",
22 | "dagre": "0.8.5",
23 | "electron-updater": "4.3.1",
24 | "flatbuffers": "1.12.0",
25 | "long": "4.0.0",
26 | "marked": "1.1.0",
27 | "pako": "1.0.11",
28 | "protobufjs": "github:lutzroeder/protobuf.js",
29 | "universal-analytics": "0.4.20"
30 | },
31 | "devDependencies": {
32 | "electron": "9.0.3",
33 | "electron-builder": "22.7.0",
34 | "electron-notarize": "1.0.0",
35 | "eslint": "7.2.0",
36 | "xmldom": "0.3.0"
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 |
2 | name: Build
3 |
4 | on:
5 | push:
6 | branches: [ '**' ]
7 | tags-ignore: [ '**' ]
8 |
9 | jobs:
10 | build:
11 | runs-on: ${{ matrix.os }}
12 |
13 | strategy:
14 | matrix:
15 | os: [ macos-latest, ubuntu-latest, windows-latest ]
16 |
17 | steps:
18 | - name: Check out Git repository
19 | uses: actions/checkout@v1
20 |
21 | - name: Install Node.js
22 | uses: actions/setup-node@v1
23 | with:
24 | node-version: 12
25 |
26 | - name: Install Python
27 | uses: actions/setup-python@v2
28 | with:
29 | python-version: 3.7
30 |
31 | - name: Install npm packages
32 | run: make install
33 |
34 | - name: ESLint
35 | run: make lint
36 |
37 | - name: Build Python Server
38 | run: make build_python
39 |
40 | - name: Build Electron
41 | shell: bash
42 | run: |
43 | case "${{ matrix.os }}" in
44 | macos*)
45 | CSC_IDENTITY_AUTO_DISCOVERY=false npx electron-builder --mac --publish never -c.mac.identity=null
46 | ;;
47 | ubuntu*)
48 | npx electron-builder --linux appimage --publish never
49 | npx electron-builder --linux snap --publish never
50 | ;;
51 | windows*)
52 | npx electron-builder --win --publish never
53 | ;;
54 | esac
55 |
--------------------------------------------------------------------------------
/tools/caffe:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
6 |
7 | bold() {
8 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
9 | }
10 |
11 | clean() {
12 | bold "caffe clean"
13 | rm -rf "./third_party/src/caffe"
14 | }
15 |
16 | sync() {
17 | bold "caffe sync"
18 | [ -d "./third_party/src/caffe" ] || git clone --quiet https://github.com/BVLC/caffe.git "./third_party/src/caffe"
19 | pushd "./third_party/src/caffe" > /dev/null
20 | git pull --quiet --prune
21 | popd > /dev/null
22 | }
23 |
24 | schema() {
25 | bold "caffe schema"
26 | [[ $(grep -U $'\x0D' ./src/caffe-proto.js) ]] && crlf=1
27 | sed 's/required float min = 1;/optional float min = 1;/g;s/required float max = 2;/optional float max = 2;/g' < ./third_party/src/caffe/src/caffe/proto/caffe.proto >./tools/caffe.proto
28 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case --decode-text -r caffe -o ./src/caffe-proto.js ./tools/caffe.proto
29 | rm ./tools/caffe.proto
30 | node ./tools/update_pbjs.js array ./src/caffe-proto.js data float 1
31 | if [[ -n ${crlf} ]]; then
32 | unix2dos --quiet --newfile ./src/caffe-proto.js ./src/caffe-proto.js
33 | fi
34 | }
35 |
36 | while [ "$#" != 0 ]; do
37 | command="$1" && shift
38 | case "${command}" in
39 | "clean") clean;;
40 | "sync") sync;;
41 | "schema") schema;;
42 | esac
43 | done
44 |
--------------------------------------------------------------------------------
/tools/cntk:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "cntk clean"
12 | rm -rf "./third_party/src/cntk"
13 | }
14 |
15 | sync() {
16 | bold "cntk sync"
17 | case ${OSTYPE} in
18 | linux*|darwin*)
19 | [ -d "./third_party/src/cntk" ] || git clone --quiet --recursive https://github.com/Microsoft/CNTK.git "./third_party/src/cntk"
20 | pushd "./third_party/src/cntk" > /dev/null
21 | git pull --quiet --prune
22 | git submodule sync --quiet
23 | git submodule update --quiet --init --recursive
24 | popd > /dev/null
25 | ;;
26 | *)
27 | [ -d "./third_party/src/cntk" ] || git clone --quiet https://github.com/Microsoft/CNTK.git "./third_party/src/cntk"
28 | pushd "./third_party/src/cntk" > /dev/null
29 | git pull --quiet --prune
30 | popd > /dev/null
31 | ;;
32 | esac
33 | }
34 |
35 | schema() {
36 | bold "cntk schema"
37 | [[ $(grep -U $'\x0D' ./src/cntk-proto.js) ]] && crlf=1
38 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case -r cntk -o ./src/cntk-proto.js ./third_party/src/cntk/Source/CNTKv2LibraryDll/proto/CNTK.proto
39 | node ./tools/update_pbjs.js array ./src/cntk-proto.js value float 1
40 | if [[ -n ${crlf} ]]; then
41 | unix2dos --quiet --newfile ./src/cntk-proto.js ./src/cntk-proto.js
42 | fi
43 | }
44 |
45 | while [ "$#" != 0 ]; do
46 | command="$1" && shift
47 | case "${command}" in
48 | "clean") clean;;
49 | "sync") sync;;
50 | "schema") schema;;
51 | esac
52 | done
53 |
--------------------------------------------------------------------------------
/tools/uff.proto:
--------------------------------------------------------------------------------
1 |
2 | syntax = "proto2";
3 |
4 | package uff;
5 |
6 | message MetaGraph {
7 | optional int64 version = 1;
8 | optional int64 descriptor_core_version = 2;
9 | repeated Descriptor descriptors = 3;
10 | repeated Graph graphs = 4;
11 | repeated KeyValuePair referenced_data = 5;
12 | }
13 |
14 | message Descriptor {
15 | required string id = 1;
16 | required int64 version = 2;
17 | }
18 |
19 | message Graph {
20 | optional string id = 1;
21 | repeated Node nodes = 2;
22 | }
23 |
24 | message Node {
25 | required string id = 1;
26 | repeated string inputs = 2;
27 | required string operation = 3;
28 | repeated KeyValuePair fields = 4;
29 | repeated KeyValuePair extra_fields = 5;
30 | }
31 |
32 | message KeyValuePair {
33 | required string key = 1;
34 | required Value value = 2;
35 | }
36 |
37 | message Value {
38 | oneof type {
39 | string s = 1;
40 | ListString s_list = 2;
41 | double d = 3;
42 | ListDouble d_list = 4;
43 | bool b = 5;
44 | bool b_list = 6;
45 | int64 i = 7;
46 | ListInt i_list = 8;
47 | bytes blob = 9;
48 | string ref = 100;
49 | DataType dtype = 101;
50 | ListDataType dtype_list = 102;
51 | DimOrders dim_orders = 103;
52 | ListDimOrders dim_orders_list = 104;
53 | }
54 | }
55 |
56 | enum DataType {
57 | DT_INT8 = 0x10008;
58 | DT_INT16 = 0x10010;
59 | DT_INT32 = 0x10020;
60 | DT_INT64 = 0x10040;
61 | DT_FLOAT16 = 0x20010;
62 | DT_FLOAT32 = 0x20020;
63 | }
64 |
65 | message DimOrder {
66 | required int64 key = 1;
67 | required ListInt value = 2;
68 | }
69 |
70 | message DimOrders {
71 | repeated DimOrder orders = 1;
72 | }
73 |
74 | message ListString {
75 | repeated string val = 1;
76 | }
77 |
78 | message ListInt {
79 | repeated int64 val = 1;
80 | }
81 |
82 | message ListDouble {
83 | repeated double val = 1;
84 | }
85 |
86 | message ListDataType {
87 | repeated DataType val = 1;
88 | }
89 |
90 | message ListDimOrders {
91 | repeated DimOrders val = 1;
92 | }
93 |
--------------------------------------------------------------------------------
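For orientation, the nesting this schema describes: a `MetaGraph` holds `graphs`, each `Graph` holds `nodes`, and each `Node` carries an `id`, its `inputs`, an `operation` name, and `KeyValuePair` fields. A hypothetical two-node graph rendered as plain Python data, with illustrative names and values only:

```python
# Hypothetical UFF content mirroring the message nesting in tools/uff.proto.
meta_graph = {
    'version': 1,
    'graphs': [{
        'id': 'main',
        'nodes': [
            {'id': 'input', 'inputs': [], 'operation': 'Input',
             'fields': {'dtype': 'DT_FLOAT32'}},  # DataType enum value
            {'id': 'conv1', 'inputs': ['input', 'conv1/kernel'],
             'operation': 'Conv', 'fields': {}},
        ],
    }],
}
```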
/tools/coreml:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | venv() {
12 | env_dir=./third_party/env/coremltools
13 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
14 | case "${OSTYPE}" in
15 | msys*) source ${env_dir}/Scripts/activate;;
16 | *) source ${env_dir}/bin/activate;;
17 | esac
18 | ${python} -m pip install --quiet --upgrade pip
19 | }
20 |
21 | bold() {
22 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
23 | }
24 |
25 | clean() {
26 | bold "coreml clean"
27 | rm -rf "./third_party/env/coremltools"
28 | rm -rf "./third_party/src/coremltools"
29 | }
30 |
31 | sync() {
32 | bold "coreml sync"
33 | [ -d "./third_party/src/coremltools" ] || git clone --quiet https://github.com/apple/coremltools.git "./third_party/src/coremltools"
34 | pushd "./third_party/src/coremltools" > /dev/null
35 | git pull --quiet --prune
36 | popd > /dev/null
37 | }
38 |
39 | schema() {
40 | bold "coreml schema"
41 | [[ $(grep -U $'\x0D' ./src/coreml-proto.js) ]] && crlf=1
42 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case -r coreml -o ./src/coreml-proto.js ./third_party/src/coremltools/mlmodel/format/Model.proto
43 | node ./tools/update_pbjs.js array ./src/coreml-proto.js floatValue float 2
44 | if [[ -n ${crlf} ]]; then
45 | unix2dos --quiet --newfile ./src/coreml-proto.js ./src/coreml-proto.js
46 | fi
47 | }
48 |
49 | convert() {
50 | bold "coreml convert"
51 | venv
52 | ${python} -m pip install --quiet --upgrade six numpy protobuf
53 | ${python} -m pip install --quiet ./third_party/src/coremltools
54 | ${python} -m pip install --quiet onnx
55 | ${python} -m pip install --quiet scikit-learn
56 | ${python} ./tools/coreml-script.py convert ${1}
57 | deactivate
58 | }
59 |
60 | while [ "$#" != 0 ]; do
61 | command="$1" && shift
62 | case "${command}" in
63 | "clean") clean;;
64 | "sync") sync;;
65 | "install") install;;
66 | "schema") schema;;
67 | "convert") convert ${1} && shift;;
68 | esac
69 | done
70 |
--------------------------------------------------------------------------------
/tools/keras:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | venv() {
16 | env_dir=./third_party/env/keras
17 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
18 | case "${OSTYPE}" in
19 | msys*) source ${env_dir}/Scripts/activate;;
20 | *) source ${env_dir}/bin/activate;;
21 | esac
22 | ${python} -m pip install --quiet --upgrade pip setuptools
23 | }
24 |
25 | clean() {
26 | bold "keras clean"
27 | rm -rf "./third_party/env/keras"
28 | rm -rf "./third_party/src/keras"
29 | }
30 |
31 | sync() {
32 | bold "keras sync"
33 | [ -d "./third_party/src/keras" ] || git clone --quiet https://github.com/keras-team/keras.git "./third_party/src/keras"
34 | pushd "./third_party/src/keras" > /dev/null
35 | git pull --quiet --prune
36 | popd > /dev/null
37 | }
38 |
39 | install() {
40 | bold "keras install"
41 | case "${OSTYPE}" in
42 | msys*) [ ! -z "$(choco list --local-only --exact --limit-output vcredist140)" ] || choco install --yes vcredist140 > /dev/null;;
43 | esac
44 | venv
45 | ${python} -m pip install --quiet --upgrade tensorflow
46 | ${python} -m pip install --quiet ./third_party/src/keras
47 | deactivate
48 | }
49 |
50 | metadata() {
51 | bold "keras metadata"
52 | [[ $(grep -U $'\x0D' ./src/keras-metadata.json) ]] && crlf=1
53 | venv
54 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
55 | ${python} ./tools/keras-script.py metadata
56 | deactivate
57 | if [[ -n ${crlf} ]]; then
58 | unix2dos --quiet --newfile ./src/keras-metadata.json ./src/keras-metadata.json
59 | fi
60 | }
61 |
62 | zoo() {
63 | bold "keras zoo"
64 | venv
65 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
66 | ${python} ./tools/keras-script.py zoo
67 | deactivate
68 | }
69 |
70 | while [ "$#" != 0 ]; do
71 | command="$1" && shift
72 | case "${command}" in
73 | "clean") clean;;
74 | "sync") sync;;
75 | "install") install;;
76 | "metadata") metadata;;
77 | "zoo") zoo;;
78 | esac
79 | done
80 |
--------------------------------------------------------------------------------
/electron-builder.yml:
--------------------------------------------------------------------------------
1 | appId: com.lutzroeder.netron
2 | productName: Netron
3 | files:
4 | - 'src/**/*'
5 | directories:
6 | buildResources: ./setup
7 | fileAssociations:
8 | - name: "Arm NN Model"
9 | ext: armnn
10 | - name: "CNTK Model"
11 | ext: cmf
12 | - name: "CNTK Model"
13 | ext: dnn
14 | - name: "Keras Model"
15 | ext: h5
16 | - name: "Keras Model"
17 | ext: hd5
18 | - name: "Keras Model"
19 | ext: hdf5
20 | - name: "Keras Model"
21 | ext: keras
22 | - name: "TensorFlow Lite Model"
23 | ext: lite
24 | - name: "MXNet Model"
25 | ext: mar
26 | - name: "TensorFlow Meta Graph"
27 | ext: meta
28 | - name: "Core ML Model"
29 | ext: mlmodel
30 | - name: "MNN Model"
31 | ext: mnn
32 | - name: "Model"
33 | ext: model
34 | - name: "Barracuda Model"
35 | ext: nn
36 | - name: "ONNX Model"
37 | ext: onnx
38 | - name: "Protocol Buffer"
39 | ext: pb
40 | - name: "Text Protocol Buffer"
41 | ext: pbtxt
42 | - name: "Text Protocol Buffer"
43 | ext: prototxt
44 | - name: "Caffe Model"
45 | ext: caffemodel
46 | - name: "NCNN Model"
47 | ext: param
48 | - name: "PyTorch Model"
49 | ext: pth
50 | - name: "PyTorch Model"
51 | ext: pt
52 | - name: "Torch Model"
53 | ext: t7
54 | - name: "TensorFlow Lite Model"
55 | ext: tflite
56 | - name: "TensorFlow Lite Model"
57 | ext: tfl
58 | - name: "Tengine"
59 | ext: tmfile
60 | - name: "UFF Model"
61 | ext: uff
62 | afterSign: "./setup/notarize.js"
63 | publish:
64 | - provider: github
65 | releaseType: release
66 | mac:
67 | category: public.app-category.developer-tools
68 | darkModeSupport: true
69 | hardenedRuntime: true
70 | gatekeeperAssess: false
71 | target:
72 | - dmg
73 | - zip
74 | linux:
75 | target:
76 | - AppImage
77 | - snap
78 | snap:
79 | plugs:
80 | - default
81 | - removable-media
82 | publish:
83 | - provider: snapStore
84 | channels:
85 | - stable
86 | win:
87 | target:
88 | - nsis
89 | verifyUpdateCodeSignature: false
90 | dmg:
91 | iconSize: 160
92 | contents:
93 | - x: 180
94 | y: 170
95 | - x: 480
96 | y: 170
97 | type: link
98 | path: /Applications
99 | nsis:
100 | perMachine: true
101 |
--------------------------------------------------------------------------------
/src/bigdl-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "Dropout",
4 | "schema": {
5 | "category": "Dropout"
6 | }
7 | },
8 | {
9 | "name": "Linear",
10 | "schema": {
11 | "category": "Layer",
12 | "inputs": [
13 | { "name": "inputs" },
14 | { "name": "weight" },
15 | { "name": "bias" }
16 | ]
17 | }
18 | },
19 | {
20 | "name": "NormalizeScale",
21 | "schema": {
22 | "category": "Normalization",
23 | "inputs": [
24 | { "name": "inputs" },
25 | { "name": "w" }
26 | ]
27 | }
28 | },
29 | {
30 | "name": "ReLU",
31 | "schema": {
32 | "category": "Activation"
33 | }
34 | },
35 | {
36 | "name": "Scale",
37 | "schema": {
38 | "category": "Layer",
39 | "inputs": [
40 | { "name": "inputs" },
41 | { "name": "weight" },
42 | { "name": "bias" }
43 | ]
44 | }
45 | },
46 | {
47 | "name": "SoftMax",
48 | "schema": {
49 | "category": "Activation"
50 | }
51 | },
52 | {
53 | "name": "SpatialBatchNormalization",
54 | "schema": {
55 | "category": "Normalization"
56 | }
57 | },
58 | {
59 | "name": "SpatialConvolution",
60 | "schema": {
61 | "category": "Layer",
62 | "inputs": [
63 | { "name": "inputs" },
64 | { "name": "weight" },
65 | { "name": "bias" }
66 | ]
67 | }
68 | },
69 | {
70 | "name": "SpatialCrossMapLRN",
71 | "schema": {
72 | "category": "Normalization"
73 | }
74 | },
75 | {
76 | "name": "SpatialDilatedConvolution",
77 | "schema": {
78 | "category": "Layer",
79 | "inputs": [
80 | { "name": "inputs" },
81 | { "name": "weight" },
82 | { "name": "bias" }
83 | ]
84 | }
85 | },
86 | {
87 | "name": "SpatialAveragePooling",
88 | "schema": {
89 | "category": "Pool"
90 | }
91 | },
92 | {
93 | "name": "SpatialMaxPooling",
94 | "schema": {
95 | "category": "Pool"
96 | }
97 | },
98 | {
99 | "name": "Transpose",
100 | "schema": {
101 | "category": "Shape"
102 | }
103 | },
104 | {
105 | "name": "InferReshape",
106 | "schema": {
107 | "category": "Shape"
108 | }
109 | }
110 | ]
--------------------------------------------------------------------------------
/tools/sklearn:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | venv() {
16 | env_dir=./third_party/env/scikit-learn
17 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
18 | case "${OSTYPE}" in
19 | msys*) source ${env_dir}/Scripts/activate;;
20 | *) source ${env_dir}/bin/activate;;
21 | esac
22 | ${python} -m pip install --quiet --upgrade pip
23 | }
24 |
25 | git_sync() {
26 | [ -d "./third_party/src/${1}" ] || git clone --quiet --recursive ${2} "./third_party/src/${1}"
27 | pushd "./third_party/src/${1}" > /dev/null
28 | git pull --quiet --prune
29 | git submodule sync --quiet
30 | git submodule update --quiet --init --recursive
31 | popd > /dev/null
32 | }
33 |
34 | clean() {
35 | bold "sklearn clean"
36 | rm -rf "./third_party/env/scikit-learn"
37 | rm -rf "./third_party/src/numpy"
38 | rm -rf "./third_party/src/scikit-learn"
39 | rm -rf "./third_party/src/lightgbm"
40 | rm -rf "./third_party/src/xgboost"
41 | }
42 |
43 | sync() {
44 | bold "sklearn sync"
45 | git_sync numpy https://github.com/numpy/numpy.git
46 | git_sync scikit-learn https://github.com/scikit-learn/scikit-learn.git
47 | git_sync lightgbm https://github.com/Microsoft/LightGBM.git
48 | git_sync xgboost https://github.com/dmlc/xgboost.git
49 | }
50 |
51 | install() {
52 | bold "sklearn install"
53 | venv
54 | ${python} -m pip install --quiet six cython pytest flake8 numpy scipy pylint astroid isort
55 | ${python} -m pip install --quiet --pre -f https://sklearn-nightly.scdn8.secure.raxcdn.com scikit-learn
56 | # ${python} -m pip install --quiet ./third_party/src/scikit-learn
57 | deactivate
58 | }
59 |
60 | metadata() {
61 | bold "sklearn metadata"
62 | [[ $(grep -U $'\x0D' ./src/sklearn-metadata.json) ]] && crlf=1
63 | venv
64 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
65 | ${python} ./tools/sklearn-script.py
66 | deactivate
67 | if [[ -n ${crlf} ]]; then
68 | unix2dos --quiet --newfile ./src/sklearn-metadata.json ./src/sklearn-metadata.json
69 | fi
70 | }
71 |
72 | while [ "$#" != 0 ]; do
73 | command="$1" && shift
74 | case "${command}" in
75 | "clean") clean;;
76 | "sync") sync;;
77 | "install") install;;
78 | "metadata") metadata;;
79 | esac
80 | done
81 |
--------------------------------------------------------------------------------
/src/dl4j-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "Dense",
4 | "schema": {
5 | "category": "Layer",
6 | "attributes": [
7 | ]
8 | }
9 | },
10 | {
11 | "name": "Output",
12 | "schema": {
13 | "category": "Layer",
14 | "attributes": [
15 | ]
16 | }
17 | },
18 | {
19 | "name": "Convolution",
20 | "schema": {
21 | "category": "Layer",
22 | "attributes": [
23 | { "name": "dilation" },
24 | { "name": "kernelSize" },
25 | { "name": "padding" }
26 | ]
27 | }
28 | },
29 | {
30 | "name": "SeparableConvolution2D",
31 | "schema": {
32 | "category": "Layer",
33 | "attributes": [
34 | ]
35 | }
36 | },
37 | {
38 | "name": "BatchNormalization",
39 | "schema": {
40 | "category": "Normalization",
41 | "attributes": [
42 | { "name": "eps" },
43 | { "name": "gamma" },
44 | { "name": "decay" }
45 | ]
46 | }
47 | },
48 | {
49 | "name": "Sigmoid",
50 | "schema": {
51 | "category": "Activation",
52 | "attributes": [
53 | ]
54 | }
55 | },
56 | {
57 | "name": "LReLU",
58 | "schema": {
59 | "category": "Activation",
60 | "attributes": [
61 | ]
62 | }
63 | },
64 | {
65 | "name": "ReLU",
66 | "schema": {
67 | "category": "Activation",
68 | "attributes": [
69 | ]
70 | }
71 | },
72 | {
73 | "name": "TanH",
74 | "schema": {
75 | "category": "Activation",
76 | "attributes": [
77 | ]
78 | }
79 | },
80 | {
81 | "name": "Softmax",
82 | "schema": {
83 | "category": "Activation",
84 | "attributes": [
85 | ]
86 | }
87 | },
88 | {
89 | "name": "Merge",
90 | "schema": {
91 | "category": "Tensor",
92 | "attributes": [
93 | ]
94 | }
95 | },
96 | {
97 | "name": "Upsampling2D",
98 | "schema": {
99 | "category": "Layer",
100 | "attributes": [
101 | ]
102 | }
103 | },
104 | {
105 | "name": "Dropout",
106 | "schema": {
107 | "category": "Dropout",
108 | "attributes": [
109 | ]
110 | }
111 | },
112 | {
113 | "name": "GlobalPooling",
114 | "schema": {
115 | "category": "Pool",
116 | "attributes": [
117 | ]
118 | }
119 | },
120 | {
121 | "name": "Subsampling",
122 | "schema": {
123 | "category": "Layer",
124 | "attributes": [
125 | ]
126 | }
127 | }
128 | ]
--------------------------------------------------------------------------------
/src/uff-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "Activation",
4 | "schema": {
5 | "category": "Activation",
6 | "inputs": [
7 | { "name": "input" }
8 | ]
9 | }
10 | },
11 | {
12 | "name": "Binary",
13 | "schema": {
14 | "inputs": [
15 | { "name": "x" },
16 | { "name": "y" }
17 | ]
18 | }
19 | },
20 | {
21 | "name": "Unary",
22 | "schema": {
23 | "inputs": [
24 | { "name": "input" }
25 | ]
26 | }
27 | },
28 | {
29 | "name": "Conv",
30 | "schema": {
31 | "category": "Layer",
32 | "inputs": [
33 | { "name": "input" },
34 | { "name": "kernel" }
35 | ]
36 | }
37 | },
38 | {
39 | "name": "FullyConnected",
40 | "schema": {
41 | "category": "Layer",
42 | "inputs": [
43 | { "name": "input" },
44 | { "name": "weights" }
45 | ]
46 | }
47 | },
48 | {
49 | "name": "Reshape",
50 | "schema": {
51 | "category": "Shape",
52 | "inputs": [
53 | { "name": "input" },
54 | { "name": "shape" }
55 | ]
56 | }
57 | },
58 | {
59 | "name": "StridedSlice",
60 | "schema": {
61 | "category": "Tensor",
62 | "inputs": [
63 | { "name": "input" },
64 | { "name": "begin" },
65 | { "name": "end" },
66 | { "name": "strides" }
67 | ]
68 | }
69 | },
70 | {
71 | "name": "Squeeze",
72 | "schema": {
73 | "category": "Transform"
74 | }
75 | },
76 | {
77 | "name": "BatchNorm",
78 | "schema": {
79 | "category": "Normalization",
80 | "inputs": [
81 | { "name": "input" },
82 | { "name": "gamma" },
83 | { "name": "beta" },
84 | { "name": "moving_mean" },
85 | { "name": "moving_variance" }
86 | ]
87 | }
88 | },
89 | {
90 | "name": "Pool",
91 | "schema": {
92 | "category": "Pool",
93 | "inputs": [
94 | { "name": "input" }
95 | ]
96 | }
97 | },
98 | {
99 | "name": "_MaxPool",
100 | "schema": {
101 | "category": "Pool",
102 | "inputs": [
103 | { "name": "input" }
104 | ]
105 | }
106 | },
107 | {
108 | "name": "Concat",
109 | "schema": {
110 | "category": "Tensor"
111 | }
112 | },
113 | {
114 | "name": "Flatten",
115 | "schema": {
116 | "category": "Shape"
117 | }
118 | },
119 | {
120 | "name": "GatherV2",
121 | "schema": {
122 | "category": "Data",
123 | "inputs": [
124 | { "name": "input" },
125 | { "name": "indices" }
126 | ]
127 | }
128 | }
129 | ]
--------------------------------------------------------------------------------
/tools/mnn:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "mnn clean"
12 | rm -rf "./third_party/src/mnn"
13 | }
14 |
15 | sync() {
16 | bold "mnn sync"
17 | [ -d "./third_party/src/mnn" ] || git clone --quiet https://github.com/alibaba/MNN.git "./third_party/src/mnn"
18 | pushd "./third_party/src/mnn" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | schema() {
24 | bold "mnn schema"
25 | case "${OSTYPE}" in
26 | linux*)
27 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
28 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
29 | if [ ! -f "${flatc_dir}/flatc" ]; then
30 | mkdir -p "${flatc_dir}"
31 | pushd "${flatc_dir}" > /dev/null
32 | curl -sL https://github.com/google/flatbuffers/archive/${flatc_version}.tar.gz | tar zx --strip-components 1
33 | cmake -G "Unix Makefiles" . &> /dev/null
34 | make > /dev/null
35 | popd > /dev/null
36 | fi
37 | export PATH=${flatc_dir}:${PATH}
38 | ;;
39 | darwin*)
40 | brew list flatbuffers > /dev/null 2>&1 || brew install flatbuffers > /dev/null
41 | ;;
42 | msys*)
43 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
44 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
45 | if [ ! -f "${flatc_dir}/flatc.exe" ]; then
46 | mkdir -p "${flatc_dir}"
47 | pushd "${flatc_dir}" > /dev/null
48 | curl -sL -O https://github.com/google/flatbuffers/releases/download/${flatc_version}/flatc_windows.zip
49 | unzip flatc_windows.zip > /dev/null
50 | popd > /dev/null
51 | fi
52 | export PATH=${flatc_dir}:${PATH}
53 | ;;
54 | esac
55 | [[ $(grep -U $'\x0D' ./src/mnn-schema.js) ]] && crlf=1
56 | flatc --no-js-exports --gen-all -o ./tools/. --js ./third_party/src/mnn/schema/default/MNN.fbs
57 | mv ./tools/MNN_generated.js ./src/mnn-schema.js
58 | cat <<EOT >> ./src/mnn-schema.js
59 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
60 | module.exports = { mnn_schema: MNN };
61 | }
62 | EOT
63 | if [[ -n ${crlf} ]]; then
64 | unix2dos --quiet --newfile ./src/mnn-schema.js ./src/mnn-schema.js
65 | fi
66 | }
67 |
68 | while [ "$#" != 0 ]; do
69 | command="$1" && shift
70 | case "${command}" in
71 | "clean") clean;;
72 | "sync") sync;;
73 | "schema") schema;;
74 | esac
75 | done
76 |
--------------------------------------------------------------------------------
/tools/armnn:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | bold() {
7 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
8 | }
9 |
10 | clean() {
11 | bold "armnn clean"
12 | rm -rf ./third_party/src/armnn
13 | }
14 |
15 | sync() {
16 | bold "armnn sync"
17 | [ -d "./third_party/src/armnn" ] || git clone --quiet --branch master https://github.com/ARM-software/armnn.git "./third_party/src/armnn"
18 | pushd "./third_party/src/armnn" > /dev/null
19 | git pull --quiet --prune
20 | popd > /dev/null
21 | }
22 |
23 | schema() {
24 | bold "armnn schema"
25 | case "${OSTYPE}" in
26 | linux*)
27 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
28 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
29 | if [ ! -f "${flatc_dir}/flatc" ]; then
30 | mkdir -p "${flatc_dir}"
31 | pushd "${flatc_dir}" > /dev/null
32 | curl -sL https://github.com/google/flatbuffers/archive/${flatc_version}.tar.gz | tar zx --strip-components 1
33 | cmake -G "Unix Makefiles" . &> /dev/null
34 | make > /dev/null
35 | popd > /dev/null
36 | fi
37 | export PATH=${flatc_dir}:${PATH}
38 | ;;
39 | darwin*)
40 | brew list flatbuffers > /dev/null 2>&1 || brew install flatbuffers > /dev/null
41 | ;;
42 | msys*)
43 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
44 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
45 | if [ ! -f "${flatc_dir}/flatc.exe" ]; then
46 | mkdir -p "${flatc_dir}"
47 | pushd "${flatc_dir}" > /dev/null
48 | curl -sL -O https://github.com/google/flatbuffers/releases/download/${flatc_version}/flatc_windows.zip
49 | unzip flatc_windows.zip > /dev/null
50 | popd > /dev/null
51 | fi
52 | export PATH=${flatc_dir}:${PATH}
53 | ;;
54 | esac
55 | [[ $(grep -U $'\x0D' ./src/armnn-schema.js) ]] && crlf=1
56 | flatc --no-js-exports -o ./tools/. --js ./third_party/src/armnn/src/armnnSerializer/ArmnnSchema.fbs
57 | mv ./tools/ArmnnSchema_generated.js ./src/armnn-schema.js
58 | cat <<EOT >> ./src/armnn-schema.js
59 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
60 | module.exports = { armnn_schema: armnnSerializer };
61 | }
62 | EOT
63 | if [[ -n ${crlf} ]]; then
64 | unix2dos --quiet --newfile ./src/armnn-schema.js ./src/armnn-schema.js
65 | fi
66 | }
67 |
68 | while [ "$#" != 0 ]; do
69 | command="$1" && shift
70 | case "${command}" in
71 | "clean") clean;;
72 | "sync") sync;;
73 | "schema") schema;;
74 | esac
75 | done
76 |
--------------------------------------------------------------------------------
/setup/cask.js:
--------------------------------------------------------------------------------
1 |
2 |
3 | const fs = require('fs');
4 | const http = require('http');
5 | const https = require('https');
6 | const crypto = require('crypto');
7 |
8 | const packageManifestFile = process.argv[2];
9 | const caskFile = process.argv[3];
10 |
11 | const request = (url, timeout) => {
12 | return new Promise((resolve, reject) => {
13 | const httpModule = url.split(':').shift() === 'https' ? https : http;
14 | const req = httpModule.get(url, (response) => {
15 | if (response.statusCode === 200) {
16 | let data = [];
17 | let position = 0;
18 | response.on('data', (chunk) => {
19 | data.push(chunk);
20 | position += chunk.length;
21 | process.stdout.write(' ' + position + ' bytes\r');
22 | });
23 | response.on('error', (err) => {
24 | reject(err);
25 | });
26 | response.on('end', () => {
27 | resolve(Buffer.concat(data));
28 | });
29 | }
30 | else if (response.statusCode === 302) {
31 | request(response.headers.location).then((data) => {
32 | resolve(data);
33 | }).catch((err) => {
34 | reject(err);
35 | });
36 | }
37 | else {
38 | const err = new Error("The web request failed with status code " + response.statusCode + " at '" + url + "'.");
39 | err.type = 'error';
40 | err.url = url;
41 | err.status = response.statusCode;
42 | reject(err);
43 | }
44 | }).on("error", (err) => {
45 | reject(err);
46 | });
47 | if (timeout) {
48 | req.setTimeout(timeout, () => {
49 | req.abort();
50 | const err = new Error("The web request timed out at '" + url + "'.");
51 | err.type = 'timeout';
52 | err.url = url;
53 | reject(err);
54 | });
55 | }
56 | });
57 | };
58 |
59 | const packageManifest = JSON.parse(fs.readFileSync(packageManifestFile, 'utf-8'));
60 | const name = packageManifest.name;
61 | const version = packageManifest.version;
62 | const productName = packageManifest.productName;
63 | const repository = 'https://github.com/' + packageManifest.repository;
64 | const url = repository + '/releases/download/v#{version}/' + productName + '-#{version}-mac.zip';
65 |
66 | request(url.replace(/#{version}/g, version)).then((data) => {
67 | const sha256 = crypto.createHash('sha256').update(data).digest('hex').toLowerCase();
68 | const lines = [
69 | "cask '" + name + "' do",
70 | " version '" + version + "'",
71 | " sha256 '" + sha256 + "'",
72 | "",
73 | ' url "' + url + '"',
74 | " appcast '" + repository + "/releases.atom'",
75 | " name '" + productName + "'",
76 | " homepage '" + repository + "'",
77 | "",
78 | " auto_updates true",
79 | "",
80 | " app '" + productName + ".app'",
81 | "end",
82 | ""
83 | ];
84 | fs.writeFileSync(caskFile, lines.join('\n'));
85 |
86 | }).catch((err) => {
87 | console.log(err.message);
88 | });
89 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 |
2 | name: Publish
3 |
4 | on:
5 | push:
6 | tags: [ 'v**' ]
7 |
8 | jobs:
9 | publish:
10 | runs-on: ${{ matrix.os }}
11 |
12 | strategy:
13 | matrix:
14 | os: [ macos-latest, ubuntu-latest, windows-latest ]
15 |
16 | steps:
17 | - name: Check out Git repository
18 | uses: actions/checkout@v1
19 |
20 | - name: Git credentials
21 | run: |
22 | git config --global user.name ${{ secrets.PUBLISH_USER_NAME }}
23 | git config --global user.email ${{ secrets.PUBLISH_USER_EMAIL }}
24 |
25 | - name: Install Node.js
26 | uses: actions/setup-node@v1
27 | with:
28 | node-version: 12
29 |
30 | - name: Install Python
31 | uses: actions/setup-python@v2
32 | with:
33 | python-version: 3.7
34 |
35 | - name: Install npm packages
36 | run: make install
37 |
38 | - name: ESLint
39 | run: make lint
40 |
41 | - name: Publish Electron
42 | shell: bash
43 | env:
44 | GITHUB_TOKEN: ${{ secrets.PUBLISH_GITHUB_TOKEN }}
45 | API_KEY_ID: ${{ secrets.API_KEY_ID }}
46 | API_KEY_ISSUER_ID: ${{ secrets.API_KEY_ISSUER_ID }}
47 | CSC_LINK: ${{ secrets.CSC_LINK }}
48 | CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
49 | run: |
50 | case "${{ matrix.os }}" in
51 | macos*)
52 | mkdir -p ~/.private_keys
53 | echo '${{ secrets.API_KEY }}' > ~/.private_keys/AuthKey_${{ secrets.API_KEY_ID }}.p8
54 | npx electron-builder --mac --publish always
55 | ;;
56 | ubuntu*)
57 | sudo snap install snapcraft --classic
58 | mkdir -p ~/.private_keys
59 | echo '${{ secrets.SNAPCRAFT_TOKEN }}' > ~/.private_keys/snapcraft_token.txt
60 | snapcraft login --with ~/.private_keys/snapcraft_token.txt
61 | npx electron-builder --linux appimage --publish always
62 | npx electron-builder --linux snap --publish always
63 | ;;
64 | windows*)
65 | unset CSC_LINK;
66 | unset CSC_KEY_PASSWORD;
67 | npx electron-builder --win --publish always
68 | ;;
69 | esac
70 |
71 | - if: startsWith(matrix.os, 'ubuntu')
72 | name: Publish Python Server
73 | env:
74 | TWINE_USERNAME: __token__
75 | TWINE_PASSWORD: ${{ secrets.pypi_api_token }}
76 | run: make publish_python
77 |
78 | - if: startsWith(matrix.os, 'ubuntu')
79 | name: Publish GitHub Pages
80 | env:
81 | GITHUB_TOKEN: ${{ secrets.PUBLISH_GITHUB_TOKEN }}
82 | GITHUB_USER: ${{ secrets.PUBLISH_GITHUB_USER }}
83 | run: make publish_github_pages
84 |
85 | - if: startsWith(matrix.os, 'macos')
86 | name: Publish cask
87 | env:
88 | GITHUB_TOKEN: ${{ secrets.PUBLISH_GITHUB_TOKEN }}
89 | GITHUB_USER: ${{ secrets.PUBLISH_GITHUB_USER }}
90 | run: make publish_cask
91 |
92 | - if: startsWith(matrix.os, 'windows')
93 | name: Publish winget
94 | env:
95 | GITHUB_TOKEN: ${{ secrets.PUBLISH_GITHUB_TOKEN }}
96 | GITHUB_USER: ${{ secrets.PUBLISH_GITHUB_USER }}
97 | run: make publish_winget
98 |
--------------------------------------------------------------------------------
/src/paddle-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "conv2d",
4 | "schema": {
5 | "category": "Layer",
6 | "attributes": [
7 | { "name": "workspace_size_MB", "default": 4096 },
8 | { "name": "fuse_residual_connection", "default": false },
9 | { "name": "fuse_eltwise", "default": false },
10 | { "name": "fuse_relu", "default": false },
11 | { "name": "data_format", "default": "AnyLayout" },
12 | { "name": "groups", "default": 1 },
13 | { "name": "paddings", "default": [0, 0] },
14 | { "name": "dilations", "default": [1, 1] },
15 | { "name": "strides", "default": [1, 1] }
16 | ]
17 | }
18 | },
19 | {
20 | "name": "depthwise_conv2d",
21 | "schema": {
22 | "category": "Layer",
23 | "attributes": [
24 | { "name": "workspace_size_MB", "default": 4096 },
25 | { "name": "fuse_residual_connection", "default": false },
26 | { "name": "data_format", "default": "AnyLayout" },
27 | { "name": "groups", "default": 1 },
28 | { "name": "fuse_relu", "default": false }
29 | ]
30 | }
31 | },
32 | {
33 | "name": "relu",
34 | "schema": {
35 | "category": "Activation"
36 | }
37 | },
38 | {
39 | "name": "softmax",
40 | "schema": {
41 | "category": "Activation",
42 | "attributes": [
43 | { "name": "data_format", "default": "AnyLayout" }
44 | ]
45 | }
46 | },
47 | {
48 | "name": "batch_norm",
49 | "schema": {
50 | "category": "Normalization",
51 | "attributes": [
52 | { "name": "momentum", "default": 0.8999999761581421 },
53 | { "name": "epsilon", "default": 9.999999747378752e-06 },
54 | { "name": "fuse_with_relu", "default": false },
55 | { "name": "data_layout", "default": "NCHW" }
56 | ]
57 | }
58 | },
59 | {
60 | "name": "pool2d",
61 | "schema": {
62 | "category": "Pool",
63 | "attributes": [
64 | { "name": "data_format", "default": "AnyLayout" },
65 | { "name": "ceil_mode", "default": false },
66 | { "name": "global_pooling", "default": false },
67 | { "name": "exclusive", "default": true },
68 | { "name": "pooling_type", "default": "max" },
69 | { "name": "paddings", "default": [0, 0] }
70 | ]
71 | }
72 | },
73 | {
74 | "name": "elementwise_add",
75 | "schema": {
76 | "attributes": [
77 | { "name": "axis", "default": -1 }
78 | ]
79 | }
80 | },
81 | {
82 | "name": "concat",
83 | "schema": {
84 | "category": "Tensor"
85 | }
86 | },
87 | {
88 | "name": "reshape",
89 | "schema": {
90 | "category": "Shape"
91 | }
92 | },
93 | {
94 | "name": "reshape2",
95 | "schema": {
96 | "category": "Shape"
97 | }
98 | },
99 | {
100 | "name": "lrn",
101 | "schema": {
102 | "category": "Normalization",
103 | "attributes": [
104 | { "name": "alpha", "default": 9.999999747378752e-05 },
105 | { "name": "beta", "default": 0.75 },
106 | { "name": "k", "default": 1 }
107 | ]
108 | }
109 | },
110 | {
111 | "name": "pad2d",
112 | "schema": {
113 | "category": "Tensor"
114 | }
115 | }
116 | ]
117 |
--------------------------------------------------------------------------------
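A rough sketch of how a viewer consumes a metadata table like the one above (a hypothetical standalone snippet, not part of the repository; the real name-to-schema lookup lives in the src/*.js loaders, see flux.Metadata further below):

const fs = require('fs');

// Index the metadata entries by operator name, mirroring the map the
// loaders build from 'name' to 'schema'.
const items = JSON.parse(fs.readFileSync('./src/paddle-metadata.json', 'utf-8'));
const schemas = new Map(items.map((item) => [ item.name, item.schema ]));

console.log(schemas.get('conv2d').category); // 'Layer'
console.log(schemas.get('pool2d').attributes.find((a) => a.name === 'pooling_type').default); // 'max'
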
/tools/pytorch:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | venv() {
16 | env_dir=./third_party/env/pytorch
17 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
18 | case "${OSTYPE}" in
19 | msys*) source ${env_dir}/Scripts/activate;;
20 | *) source ${env_dir}/bin/activate;;
21 | esac
22 | ${python} -m pip install --quiet --upgrade pip
23 | }
24 |
25 | clean() {
26 | bold "pytorch clean"
27 | rm -rf "./third_party/env/pytorch"
28 | rm -rf "./third_party/src/pytorch"
29 | rm -rf "./third_party/src/torchvision"
30 | }
31 |
32 | sync() {
33 | bold "pytorch sync"
34 | [ -d "./third_party/src/pytorch" ] || git clone --quiet --recursive https://github.com/pytorch/pytorch.git "./third_party/src/pytorch"
35 | pushd "./third_party/src/pytorch" > /dev/null
36 | git pull --quiet --prune
37 | git submodule sync --quiet
38 | git submodule update --quiet --init --recursive
39 | popd > /dev/null
40 | [ -d "./third_party/src/torchvision" ] || git clone --quiet --recursive https://github.com/pytorch/vision.git "./third_party/src/torchvision"
41 | pushd "./third_party/src/torchvision" > /dev/null
42 | git pull --quiet --prune
43 | popd > /dev/null
44 | }
45 |
46 | install() {
47 | bold "pytorch install"
48 | venv
49 | ${python} -m pip install --quiet --upgrade future protobuf scipy
50 | ${python} -m pip install --quiet --upgrade --pre torch torchvision -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
51 | deactivate
52 | }
53 |
54 | schema() {
55 | bold "caffe2 schema"
56 | [[ $(grep -U $'\x0D' ./src/caffe2-proto.js) ]] && crlf=1
57 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case --decode-text -r caffe2 -o ./src/caffe2-proto.js ./third_party/src/pytorch/caffe2/proto/caffe2.proto
58 | node ./tools/update_pbjs.js enumeration ./src/caffe2-proto.js floats float 1
59 | if [[ -n ${crlf} ]]; then
60 | unix2dos --quiet --newfile ./src/caffe2-proto.js ./src/caffe2-proto.js
61 | fi
62 | }
63 |
64 | metadata() {
65 | [[ $(grep -U $'\x0D' ./src/pytorch-metadata.json) ]] && crlf=1
66 | venv
67 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
68 | bold "pytorch metadata"
69 | ${python} ./tools/pytorch-script.py metadata
70 | bold "caffe2 metadata"
71 | ${python} ./tools/caffe2-script.py metadata
72 | deactivate
73 | if [[ -n ${crlf} ]]; then
74 | unix2dos --quiet --newfile ./src/pytorch-metadata.json ./src/pytorch-metadata.json
75 | unix2dos --quiet --newfile ./src/caffe2-metadata.json ./src/caffe2-metadata.json
76 | fi
77 | }
78 |
79 | zoo() {
80 | bold "pytorch zoo"
81 | venv
82 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
83 | ${python} ./tools/pytorch-script.py zoo
84 | deactivate
85 | }
86 |
87 | while [ "$#" != 0 ]; do
88 | command="$1" && shift
89 | case "${command}" in
90 | "clean") clean;;
91 | "sync") sync;;
92 | "install") install;;
93 | "schema") schema;;
94 | "metadata") metadata;;
95 | "zoo") zoo;;
96 | esac
97 | done
98 |
--------------------------------------------------------------------------------
/tools/pytorch-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 | from __future__ import print_function
4 |
5 | import io
6 | import json
7 | import pydoc
8 | import os
9 | import re
10 | import sys
11 |
12 | def metadata():
13 | json_file = os.path.join(os.path.dirname(__file__), '../src/pytorch-metadata.json')
14 | json_data = open(json_file).read()
15 | json_root = json.loads(json_data)
16 |
17 | schema_map = {}
18 |
19 | for entry in json_root:
20 | name = entry['name']
21 | schema = entry['schema']
22 | schema_map[name] = schema
23 |
24 | for entry in json_root:
25 | name = entry['name']
26 | schema = entry['schema']
27 | if 'package' in schema:
28 | class_name = schema['package'] + '.' + name
29 | # print(class_name)
30 | class_definition = pydoc.locate(class_name)
31 | if not class_definition:
32 | raise Exception('\'' + class_name + '\' not found.')
33 | docstring = class_definition.__doc__
34 | if not docstring:
35 | raise Exception('\'' + class_name + '\' missing __doc__.')
36 | # print(docstring)
37 |
38 | with io.open(json_file, 'w', newline='') as fout:
39 | json_data = json.dumps(json_root, sort_keys=True, indent=2)
40 | for line in json_data.splitlines():
41 | line = line.rstrip()
42 | if sys.version_info[0] < 3:
43 | line = unicode(line)
44 | fout.write(line)
45 | fout.write('\n')
46 |
47 | def download_torchvision_model(name, input):
48 | folder = os.path.expandvars('${test}/data/pytorch')
49 | if not os.path.exists(folder):
50 | os.makedirs(folder)
51 | base = folder + '/' + name.split('.')[-1]
52 | model = pydoc.locate(name)(pretrained=True)
53 | import torch
54 | torch.save(model, base + '.pkl.pth', _use_new_zipfile_serialization=False)
55 | torch.save(model, base + '.zip.pth', _use_new_zipfile_serialization=True)
56 | model.eval()
57 | torch.jit.script(model).save(base + '.pt')
58 | traced_model = torch.jit.trace(model, torch.rand(input))
59 | torch.jit.save(traced_model, base + '_traced.pt')
60 |
61 | def zoo():
62 | if not os.environ.get('test'):
63 | os.environ['test'] = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../test'))
64 | download_torchvision_model('torchvision.models.alexnet', [ 1, 3, 299, 299 ])
65 | download_torchvision_model('torchvision.models.densenet161', [ 1, 3, 224, 224 ])
66 | download_torchvision_model('torchvision.models.inception_v3', [ 1, 3, 299, 299 ])
67 | download_torchvision_model('torchvision.models.mobilenet_v2', [ 1, 3, 224, 224 ])
68 | # download_torchvision_model('torchvision.models.resnet18', [ 1, 3, 224, 224 ])
69 | download_torchvision_model('torchvision.models.resnet101', [ 1, 3, 224, 224 ])
70 | download_torchvision_model('torchvision.models.shufflenet_v2_x1_0', [ 1, 3, 224, 224 ])
71 | download_torchvision_model('torchvision.models.squeezenet1_1', [ 1, 3, 224, 224 ])
72 | download_torchvision_model('torchvision.models.video.r3d_18', [ 1, 3, 4, 112, 112 ])
73 | # download_torchvision_model('torchvision.models.vgg11_bn', [ 1, 3, 224, 224 ])
74 | # download_torchvision_model('torchvision.models.vgg16', [ 1, 3, 224, 224 ])
75 |
76 | if __name__ == '__main__':
77 | command_table = { 'metadata': metadata, 'zoo': zoo }
78 | command = sys.argv[1]
79 | command_table[command]()
80 |
--------------------------------------------------------------------------------
/src/base.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 |
3 | var base = base || {};
4 |
5 | if (typeof window !== 'undefined' && typeof window.Long != 'undefined') {
6 | window.long = { Long: window.Long };
7 | }
8 |
9 | if (!DataView.prototype.getFloat16) {
10 | DataView.prototype.getFloat16 = function(byteOffset, littleEndian) {
11 | const value = this.getUint16(byteOffset, littleEndian);
12 | const e = (value & 0x7C00) >> 10;
13 | let f = value & 0x03FF;
14 | if (e == 0) {
15 | f = 0.00006103515625 * (f / 1024);
16 | }
17 | else if (e == 0x1F) {
18 | f = f ? NaN : Infinity;
19 | }
20 | else {
21 | f = DataView.__float16_pow[e] * (1 + (f / 1024));
22 | }
23 | return value & 0x8000 ? -f : f;
24 | };
25 | DataView.__float16_pow = {
26 | 1: 1/16384, 2: 1/8192, 3: 1/4096, 4: 1/2048, 5: 1/1024, 6: 1/512, 7: 1/256, 8: 1/128,
27 | 9: 1/64, 10: 1/32, 11: 1/16, 12: 1/8, 13: 1/4, 14: 1/2, 15: 1, 16: 2,
28 | 17: 4, 18: 8, 19: 16, 20: 32, 21: 64, 22: 128, 23: 256, 24: 512,
29 | 25: 1024, 26: 2048, 27: 4096, 28: 8192, 29: 16384, 30: 32768, 31: 65536
30 | };
31 | }
32 |
33 | if (!DataView.prototype.setFloat16) {
34 | DataView.prototype.setFloat16 = function(byteOffset, value, littleEndian) {
35 | DataView.__float16_float[0] = value;
36 | value = DataView.__float16_int[0];
37 | const s = (value >>> 16) & 0x8000;
38 | const e = (value >>> 23) & 0xff;
39 | const f = value & 0x7fffff;
40 | const v = s | DataView.__float16_base[e] | (f >> DataView.__float16_shift[e]);
41 | this.setUint16(byteOffset, v, littleEndian);
42 | };
43 | DataView.__float16_float = new Float32Array(1);
44 | DataView.__float16_int = new Uint32Array(DataView.__float16_float.buffer, 0, DataView.__float16_float.length);
45 | DataView.__float16_base = new Uint32Array(256);
46 | DataView.__float16_shift = new Uint32Array(256);
47 | for (let i = 0; i < 256; ++i) {
48 | let e = i - 127;
49 | if (e < -27) {
50 | DataView.__float16_base[i] = 0x0000;
51 | DataView.__float16_shift[i] = 24;
52 | }
53 | else if (e < -14) {
54 | DataView.__float16_base[i] = 0x0400 >> -e - 14;
55 | DataView.__float16_shift[i] = -e - 1;
56 | }
57 | else if (e <= 15) {
58 | DataView.__float16_base[i] = e + 15 << 10;
59 | DataView.__float16_shift[i] = 13;
60 | }
61 | else if (e < 128) {
62 | DataView.__float16_base[i] = 0x7c00;
63 | DataView.__float16_shift[i] = 24;
64 | }
65 | else {
66 | DataView.__float16_base[i] = 0x7c00;
67 | DataView.__float16_shift[i] = 13;
68 | }
69 | }
70 | }
71 |
72 | if (!DataView.prototype.getBits) {
73 | DataView.prototype.getBits = function(offset, bits /*, signed */) {
74 | offset = offset * bits;
75 | const available = (this.byteLength << 3) - offset;
76 | if (bits > available) {
77 | throw new RangeError();
78 | }
79 | let value = 0;
80 | let index = 0;
81 | while (index < bits) {
82 | const remainder = offset & 7;
83 | const size = Math.min(bits - index, 8 - remainder);
84 | value <<= size;
85 | value |= (this.getUint8(offset >> 3) >> (8 - size - remainder)) & ~(0xff << size);
86 | offset += size;
87 | index += size;
88 | }
89 | return value;
90 | };
91 | }
92 |
93 |
--------------------------------------------------------------------------------
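A quick round-trip check of the polyfills above (a minimal sketch; it assumes ./src/base.js has been evaluated in the same process so the DataView prototype extensions are installed):

require('./src/base'); // side effect: installs getFloat16, setFloat16 and getBits

const view = new DataView(new ArrayBuffer(2));
view.setFloat16(0, 0.5, true); // 0.5 encodes as sign 0, exponent 14, fraction 0
console.log(view.getUint16(0, true).toString(16)); // '3800'
console.log(view.getFloat16(0, true)); // 0.5

const bits = new DataView(new Uint8Array([ 0xb4 ]).buffer); // 0b10110100
console.log(bits.getBits(0, 2)); // 2, the first 2-bit group (0b10)
console.log(bits.getBits(1, 2)); // 3, the second 2-bit group (0b11)
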
/src/tar.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 |
3 | var tar = tar || {};
4 |
5 | tar.Archive = class {
6 |
7 | constructor(buffer) {
8 | this._entries = [];
9 | const reader = new tar.Reader(buffer, 0, buffer.length);
10 | while (reader.peek()) {
11 | this._entries.push(new tar.Entry(reader));
12 | if (reader.match(512, 0)) {
13 | break;
14 | }
15 | }
16 | }
17 |
18 | get entries() {
19 | return this._entries;
20 | }
21 | };
22 |
23 | tar.Entry = class {
24 |
25 | constructor(reader) {
26 | const header = reader.bytes(512);
27 | reader.skip(-512);
28 | let sum = 0;
29 | for (let i = 0; i < header.length; i++) {
30 | sum += (i >= 148 && i < 156) ? 32 : header[i];
31 | }
32 | this._name = reader.string(100);
33 | reader.string(8); // file mode
34 | reader.string(8); // owner
35 | reader.string(8); // group
36 | const size = parseInt(reader.string(12).trim(), 8); // size
37 | reader.string(12); // timestamp
38 | const checksum = parseInt(reader.string(8).trim(), 8); // checksum
39 | if (isNaN(checksum) || sum != checksum) {
40 | throw new tar.Error('Invalid tar archive.');
41 | }
42 | reader.string(1); // link indicator
43 | reader.string(100); // name of linked file
44 | reader.bytes(255);
45 | this._data = reader.bytes(size);
46 | reader.bytes(((size % 512) != 0) ? (512 - (size % 512)) : 0);
47 | }
48 |
49 | get name() {
50 | return this._name;
51 | }
52 |
53 | get data() {
54 | return this._data;
55 | }
56 | };
57 |
58 | tar.Reader = class {
59 |
60 | constructor(buffer) {
61 | this._buffer = buffer;
62 | this._position = 0;
63 | this._end = buffer.length;
64 | }
65 |
66 | skip(offset) {
67 | this._position += offset;
68 | if (this._position > this._buffer.length) {
69 | throw new tar.Error('Expected ' + (this._position - this._buffer.length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
70 | }
71 | }
72 |
73 | peek() {
74 | return this._position < this._end;
75 | }
76 |
77 | match(size, value) {
78 | if (this._position + size <= this._end) {
79 | if (this._buffer.subarray(this._position, this._position + size).every((c) => c == value)) {
80 | this._position += size;
81 | return true;
82 | }
83 | }
84 | return false;
85 | }
86 |
87 | bytes(size) {
88 | const position = this._position;
89 | this.skip(size);
90 | return this._buffer.subarray(position, this._position);
91 | }
92 |
93 | string(size) {
94 | const buffer = this.bytes(size);
95 | let position = 0;
96 | let str = '';
97 | for (let i = 0; i < size; i++) {
98 | let c = buffer[position++];
99 | if (c == 0) {
100 | break;
101 | }
102 | str += String.fromCharCode(c);
103 | }
104 | return str;
105 | }
106 | };
107 |
108 | tar.Error = class extends Error {
109 | constructor(message) {
110 | super(message);
111 | this.name = 'tar Error';
112 | }
113 | };
114 |
115 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
116 | module.exports.Archive = tar.Archive;
117 | }
--------------------------------------------------------------------------------
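Usage of the tar module is straightforward (hedged example; model.tar is a hypothetical input file):

const fs = require('fs');
const tar = require('./src/tar');

// tar.Entry validates each 512-byte header checksum while the archive is
// being parsed, so a corrupt file fails fast with tar.Error.
const archive = new tar.Archive(fs.readFileSync('model.tar'));
for (const entry of archive.entries) {
    console.log(entry.name + ' (' + entry.data.length + ' bytes)');
}
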
/src/mlnet-metadata.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "name": "ImageLoaderTransform",
4 | "schema": {
5 | "description": "Load images from files.",
6 | "attributes": [
7 | { "name": "ImageFolder", "type": "string", "description": "Folder where to search for images" }
8 | ]
9 | }
10 | },
11 | {
12 | "name": "ImageScalerTransform",
13 | "schema": {
14 | "description": "Scales an image to specified dimensions using one of the three scale types: isotropic with padding, isotropic with cropping or anisotropic. In case of isotropic padding, transparent color is used to pad resulting image.",
15 | "attributes": [
16 | { "name": "Width" },
17 | { "name": "Height" },
18 | { "name": "Resizing", "type": "ImageResizingTransformer.ResizingKind" },
19 | { "name": "Anchor", "type": "ImageResizingTransformer.Anchor" }
20 | ]
21 | }
22 | },
23 | {
24 | "name": "ImagePixelExtractor",
25 | "schema": {
26 | "description": "Scales an image to specified dimensions using one of the three scale types: isotropic with padding, isotropic with cropping or anisotropic. In case of isotropic padding, transparent color is used to pad resulting image.",
27 | "attributes": [
28 | { "name": "ColorsToExtract", "type": "ImagePixelExtractingTransformer.ColorBits" },
29 | { "name": "OrderOfExtraction", "type": "ImagePixelExtractingTransformer.ColorsOrder" },
30 | { "name": "Planes", "type": "uint8" },
31 | { "name": "OutputAsFloatArray", "type": "boolean" },
32 | { "name": "OffsetImage", "type": "float32" },
33 | { "name": "ScaleImage", "type": "float32" },
34 | { "name": "InterleavePixelColors", "type": "boolean" }
35 | ]
36 | }
37 | },
38 | {
39 | "name": "TensorFlowTransform",
40 | "schema": {
41 | "description": "Transforms the data using the TensorFlow model.",
42 | "attributes": [
43 | { "name": "IsFrozen", "type": "boolean" },
44 | { "name": "AddBatchDimensionInput", "type": "boolean" }
45 | ]
46 | }
47 | },
48 | {
49 | "name": "TextNormalizerTransform",
50 | "schema": {
51 | "description": "A text normalization transform that allows normalizing text case, removing diacritical marks, punctuation marks and/or numbers. The transform operates on text input as well as vector of tokens/text (vector of ReadOnlyMemory).",
52 | "attributes": [
53 | { "name": "CaseMode", "type": "TextNormalizingTransformer.CaseMode" },
54 | { "name": "KeepDiacritics", "type": "boolean" },
55 | { "name": "KeepPunctuations", "type": "boolean" },
56 | { "name": "KeepNumbers", "type": "boolean" }
57 | ]
58 | }
59 | },
60 | {
61 | "name": "CharToken",
62 | "schema": {
63 | "description": "Character-oriented tokenizer where text is considered a sequence of characters.",
64 | "attributes": [
65 | { "name": "UseMarkerChars", "type": "boolean" },
66 | { "name": "IsSeparatorStartEnd", "type": "boolean" }
67 | ]
68 | }
69 | },
70 | {
71 | "name": "ConcatTransform",
72 | "schema": {
73 | "category": "Tensor",
74 | "description": "Concatenates one or more columns of the same item type."
75 | }
76 | },
77 | {
78 | "name": "CopyTransform",
79 | "schema": {
80 | "category": "Tensor",
81 | "description": "Duplicates columns from the dataset."
82 | }
83 | },
84 | {
85 | "name": "SSAModel",
86 | "schema": {
87 | "attributes": [
88 | { "name": "UseMarkerChars", "type": "boolean" }
89 | ]
90 | }
91 | }
92 | ]
--------------------------------------------------------------------------------
/setup/winget.js:
--------------------------------------------------------------------------------
1 |
2 | const crypto = require('crypto');
3 | const fs = require('fs');
4 | const http = require('http');
5 | const https = require('https');
6 | const path = require('path');
7 |
8 | const packageManifestFile = process.argv[2];
9 | const manifestDir = process.argv[3];
10 |
11 | const request = (url, timeout) => {
12 | return new Promise((resolve, reject) => {
13 | const httpModule = url.split(':').shift() === 'https' ? https : http;
14 | const req = httpModule.get(url, (response) => {
15 | if (response.statusCode === 200) {
16 | let data = [];
17 | let position = 0;
18 | response.on('data', (chunk) => {
19 | data.push(chunk);
20 | position += chunk.length;
21 | process.stdout.write(' ' + position + ' bytes\r');
22 | });
23 | response.on('error', (err) => {
24 | reject(err);
25 | });
26 | response.on('end', () => {
27 | resolve(Buffer.concat(data));
28 | });
29 | }
30 | else if (response.statusCode === 302) {
31 | request(response.headers.location).then((data) => {
32 | resolve(data);
33 | }).catch((err) => {
34 | reject(err);
35 | });
36 | }
37 | else {
38 | const err = new Error("The web request failed with status code " + response.statusCode + " at '" + url + "'.");
39 | err.type = 'error';
40 | err.url = url;
41 | err.status = response.statusCode;
42 | reject(err);
43 | }
44 | }).on("error", (err) => {
45 | reject(err);
46 | });
47 | if (timeout) {
48 | req.setTimeout(timeout, () => {
49 | req.abort();
50 | const err = new Error("The web request timed out at '" + url + "'.");
51 | err.type = 'timeout';
52 | err.url = url;
53 | reject(err);
54 | });
55 | }
56 | });
57 | };
58 |
59 | const packageManifest = JSON.parse(fs.readFileSync(packageManifestFile, 'utf-8'));
60 | const name = packageManifest.name;
61 | const version = packageManifest.version;
62 | const productName = packageManifest.productName;
63 | const publisher = packageManifest.author.name;
64 | const repository = packageManifest.repository;
65 | const url = 'https://github.com/' + repository + '/releases/download/v' + version + '/' + productName + '-Setup-' + version + '.exe';
66 |
67 | request(url).then((data) => {
68 | const sha256 = crypto.createHash('sha256').update(data).digest('hex').toUpperCase();
69 | const lines = [
70 | 'Id: ' + publisher.replace(' ', '') + '.' + productName,
71 | 'Version: ' + version,
72 | 'Name: ' + productName,
73 | 'Publisher: ' + publisher,
74 | 'AppMoniker: ' + name,
75 | 'Description: ' + packageManifest.description,
76 | 'License: Copyright (c) ' + publisher,
77 | 'Homepage: ' + 'https://github.com/' + repository,
78 | 'Installers:',
79 | ' - Arch: x86',
80 | ' InstallerType: nullsoft',
81 | ' Url: ' + url,
82 | ' Sha256: ' + sha256
83 | ];
84 | const productDir = path.join(manifestDir, publisher.replace(' ', ''), productName);
85 | for (const file of fs.readdirSync(productDir)) {
86 | const versionFile = path.join(productDir, file);
87 | if (fs.lstatSync(versionFile).isFile()) {
88 | fs.unlinkSync(versionFile);
89 | }
90 | }
91 | const manifestFile = path.join(productDir, version + '.yaml');
92 | fs.writeFileSync(manifestFile, lines.join('\n'));
93 | }).catch((err) => {
94 | console.log(err.message);
95 | });
96 |
--------------------------------------------------------------------------------
/tools/tflite:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | venv() {
16 | env_dir=./third_party/env/tflite
17 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
18 | case "${OSTYPE}" in
19 | msys*) source ${env_dir}/Scripts/activate;;
20 | *) source ${env_dir}/bin/activate;;
21 | esac
22 | ${python} -m pip install --quiet --upgrade pip
23 | }
24 |
25 | clean() {
26 | bold "tflite clean"
27 | rm -rf "./third_party/env/tflite"
28 | rm -rf "./third_party/src/tensorflow"
29 | }
30 |
31 | sync() {
32 | bold "tflite sync"
33 | [ -d "./third_party/src/tensorflow" ] || git clone --quiet --recursive https://github.com/tensorflow/tensorflow.git "./third_party/src/tensorflow"
34 | pushd "./third_party/src/tensorflow" > /dev/null
35 | git pull --quiet --prune
36 | git submodule sync --quiet
37 | git submodule update --quiet --init --recursive
38 | popd > /dev/null
39 | }
40 |
41 | schema() {
42 | bold "tflite schema"
43 | case "${OSTYPE}" in
44 | linux*)
45 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
46 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
47 | if [ ! -f "${flatc_dir}/flatc" ]; then
48 | mkdir -p "${flatc_dir}"
49 | pushd "${flatc_dir}" > /dev/null
50 | curl -sL https://github.com/google/flatbuffers/archive/${flatc_version}.tar.gz | tar zx --strip-components 1
51 | cmake -G "Unix Makefiles" . &> /dev/null
52 | make > /dev/null
53 | popd > /dev/null
54 | fi
55 | export PATH=${flatc_dir}:${PATH}
56 | ;;
57 | darwin*)
58 | brew list flatbuffers > /dev/null 2>&1 || brew install flatbuffers > /dev/null
59 | ;;
60 | msys*)
61 | flatc_version=$(curl -s https://api.github.com/repos/google/flatbuffers/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"')
62 | flatc_dir=./third_party/bin/flatbuffers/${flatc_version}
63 | if [ ! -f "${flatc_dir}/flatc.exe" ]; then
64 | mkdir -p "${flatc_dir}"
65 | pushd "${flatc_dir}" > /dev/null
66 | curl -sL -O https://github.com/google/flatbuffers/releases/download/${flatc_version}/flatc_windows.zip
67 | unzip flatc_windows.zip > /dev/null
68 | popd > /dev/null
69 | fi
70 | export PATH=${flatc_dir}:${PATH}
71 | ;;
72 | esac
73 | [[ $(grep -U $'\x0D' ./src/tflite-schema.js) ]] && crlf=1
74 | sed 's/namespace tflite;/namespace tflite_schema;/g' < ./third_party/src/tensorflow/tensorflow/lite/schema/schema.fbs > ./tools/tflite_schema.fbs
75 | sed 's/namespace tflite;/namespace tflite_metadata_schema;/g' < ./third_party/src/tensorflow/tensorflow/lite/experimental/support/metadata/metadata_schema.fbs > ./tools/tflite_metadata_schema.fbs
76 | flatc --no-js-exports --js ./tools/tflite_schema.fbs
77 | flatc --no-js-exports --js ./tools/tflite_metadata_schema.fbs
78 | mv ./tflite_schema_generated.js ./src/tflite-schema.js
79 | cat ./tflite_metadata_schema_generated.js >> ./src/tflite-schema.js
80 | cat <<EOT >> ./src/tflite-schema.js
81 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
82 | module.exports = { tflite_schema: tflite_schema, tflite_metadata_schema: tflite_metadata_schema };
83 | }
84 | EOT
85 | rm ./tools/tflite_schema.fbs
86 | rm ./tools/tflite_metadata_schema.fbs
87 | rm ./tflite_metadata_schema_generated.js
88 | if [[ -n ${crlf} ]]; then
89 | unix2dos --quiet --newfile ./src/tflite-schema.js ./src/tflite-schema.js
90 | fi
91 | }
92 |
93 | while [ "$#" != 0 ]; do
94 | command="$1" && shift
95 | case "${command}" in
96 | "clean") clean;;
97 | "sync") sync;;
98 | "install") install;;
99 | "schema") schema;;
100 | esac
101 | done
102 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | 
3 |
4 | Netron is a viewer for neural network, deep learning and machine learning models.
5 |
6 | Netron supports **ONNX** (`.onnx`, `.pb`, `.pbtxt`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`, `predict_net.pbtxt`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), **ncnn** (`.param`) and **TensorFlow Lite** (`.tflite`).
7 |
8 | Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **Arm NN** (`.armnn`), **Barracuda** (`.nn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **MediaPipe** (`.pbtxt`), **ML.NET** (`.zip`), **MNN** (`.mnn`), **OpenVINO** (`.xml`), **PaddlePaddle** (`.zip`, `__model__`), **scikit-learn** (`.pkl`), **Tengine** (`.tmfile`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).
9 |
10 | 
11 |
12 | ## Install
13 |
14 | **macOS**: [**Download**](https://github.com/lutzroeder/netron/releases/latest) the `.dmg` file or run `brew cask install netron`
15 |
16 | **Linux**: [**Download**](https://github.com/lutzroeder/netron/releases/latest) the `.AppImage` file or run `snap install netron`
17 |
18 | **Windows**: [**Download**](https://github.com/lutzroeder/netron/releases/latest) the `.exe` installer or run `winget install netron`
19 |
20 | **Browser**: [**Start**](https://www.lutzroeder.com/ai/netron) the browser version.
21 |
22 | **Python Server**: Run `pip install netron` and `netron [FILE]` or `import netron; netron.start('[FILE]')`.
23 |
24 | ## Models
25 |
26 | Sample model files to download or open using the browser version:
27 |
28 | * **ONNX**: [squeezenet](https://raw.githubusercontent.com/onnx/tutorials/master/tutorials/assets/squeezenet.onnx) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/onnx/tutorials/master/tutorials/assets/squeezenet.onnx)]
29 | * **CoreML**: [exermote](https://raw.githubusercontent.com/Lausbert/Exermote/master/ExermoteInference/ExermoteCoreML/ExermoteCoreML/Model/Exermote.mlmodel) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/Lausbert/Exermote/master/ExermoteInference/ExermoteCoreML/ExermoteCoreML/Model/Exermote.mlmodel)]
30 | * **Darknet**: [yolo](https://raw.githubusercontent.com/AlexeyAB/darknet/master/cfg/yolo.cfg) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/AlexeyAB/darknet/master/cfg/yolo.cfg)]
31 | * **Keras**: [mobilenet](https://raw.githubusercontent.com/aio-libs/aiohttp-demos/master/demos/imagetagger/tests/data/mobilenet.h5) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/aio-libs/aiohttp-demos/master/demos/imagetagger/tests/data/mobilenet.h5)]
32 | * **MXNet**: [inception_v3](https://raw.githubusercontent.com/soeaver/mxnet-model/master/cls/inception/inception_v3-symbol.json) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/soeaver/mxnet-model/master/cls/inception/inception_v3-symbol.json)]
33 | * **TensorFlow**: [chessbot](https://raw.githubusercontent.com/srom/chessbot/master/model/chessbot.pb) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/srom/chessbot/master/model/chessbot.pb)]
34 | * **TensorFlow Lite**: [hair_segmentation](https://raw.githubusercontent.com/google/mediapipe/master/mediapipe/models/hair_segmentation.tflite) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/google/mediapipe/master/mediapipe/models/hair_segmentation.tflite)]
35 | * **TorchScript**: [traced_online_pred_layer](https://raw.githubusercontent.com/ApolloAuto/apollo/master/modules/prediction/data/traced_online_pred_layer.pt) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/ApolloAuto/apollo/master/modules/prediction/data/traced_online_pred_layer.pt)]
36 | * **Caffe**: [mobilenet_v2](https://raw.githubusercontent.com/shicai/MobileNet-Caffe/master/mobilenet_v2.caffemodel) [[open](https://lutzroeder.github.io/netron?url=https://raw.githubusercontent.com/shicai/MobileNet-Caffe/master/mobilenet_v2.caffemodel)]
37 |
--------------------------------------------------------------------------------
/tools/update_pbjs.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 | /* eslint "indent": [ "error", 4, { "SwitchCase": 1 } ] */
3 | /* eslint "no-console": off */
4 |
5 | const fs = require('fs');
6 | const process = require('process');
7 |
8 | const pattern = process.argv[2];
9 | const file = process.argv[3];
10 | const variable = process.argv[4];
11 | const type = process.argv[5];
12 | const count = parseInt(process.argv[6]);
13 |
14 | let arrayType = '';
15 | let dataViewMethod = '';
16 | let shift = 0;
17 |
18 | switch (type) {
19 | case 'float':
20 | arrayType = 'Float32Array';
21 | dataViewMethod = 'getFloat32';
22 | shift = '2';
23 | break;
24 | case 'double':
25 | arrayType = 'Float64Array';
26 | dataViewMethod = 'getFloat64';
27 | shift = '3';
28 | break;
29 | default:
30 | console.log('ERROR: Type is not supported.');
31 | process.exit(1);
32 | break;
33 | }
34 |
35 | const source = fs.readFileSync(file, 'utf-8');
36 |
37 | let search = '';
38 | let replace = '';
39 |
40 | switch (pattern) {
41 | case 'array':
42 | search = `if ((tag & 7) === 2) {
43 | var end2 = reader.uint32() + reader.pos;
44 | while (reader.pos < end2)
45 | message.$(variable).push(reader.$(type)());
46 | } else`;
47 | replace = `if ((tag & 7) === 2) {
48 | var end2 = reader.uint32() + reader.pos;
49 | if (message.$(variable).length == 0 && (end2 - reader.pos) > 1048576) {
50 | var $(variable)Length = end2 - reader.pos;
51 | var $(variable)View = new DataView(reader.buf.buffer, reader.buf.byteOffset + reader.pos, $(variable)Length);
52 | $(variable)Length = $(variable)Length >>> $(shift);
53 | var $(variable) = new $(arrayType)($(variable)Length);
54 | for (var i = 0; i < $(variable)Length; i++) {
55 | $(variable)[i] = $(variable)View.$(dataViewMethod)(i << $(shift), true);
56 | }
57 | message.$(variable) = $(variable);
58 | reader.pos = end2;
59 | }
60 | else {
61 | while (reader.pos < end2)
62 | message.$(variable).push(reader.$(type)());
63 | }
64 | } else`;
65 | break;
66 |
67 | case 'enumeration':
68 | search = `if (!(message.$(variable) && message.$(variable).length))
69 | message.$(variable) = [];
70 | if ((tag & 7) === 2) {
71 | var end2 = reader.uint32() + reader.pos;
72 | while (reader.pos < end2)
73 | message.$(variable).push(reader.$(type)());
74 | } else
75 | message.$(variable).push(reader.$(type)());
76 | break;`;
77 |
78 | replace = `if (!(message.$(variable) && message.$(variable).length)) {
79 | if (message.$(variable) != -1) {
80 | message.$(variable) = [];
81 | message.$(variable)Count = 0;
82 | }
83 | }
84 | if (message.$(variable)Count < 1000000) {
85 | if ((tag & 7) === 2) {
86 | var end2 = reader.uint32() + reader.pos;
87 | while (reader.pos < end2) {
88 | message.$(variable).push(reader.$(type)());
89 | message.$(variable)Count++;
90 | }
91 | }
92 | else {
93 | message.$(variable).push(reader.$(type)());
94 | message.$(variable)Count++;
95 | }
96 | }
97 | else {
98 | message.$(variable) = -1;
99 | if ((tag & 7) === 2) {
100 | var endx = reader.uint32() + reader.pos;
101 | while (reader.pos < endx)
102 | reader.$(type)();
103 | }
104 | else {
105 | reader.$(type)();
106 | }
107 | }
108 | break;`;
109 | break;
110 |
111 | default:
112 | console.log('ERROR: Unknown pattern.');
113 | process.exit(1);
114 | }
115 |
116 | search = search.split('$(variable)').join(variable);
117 | search = search.split('$(type)').join(type);
118 |
119 | replace = replace.split('$(variable)').join(variable);
120 | replace = replace.split('$(type)').join(type);
121 | replace = replace.split('$(arrayType)').join(arrayType);
122 | replace = replace.split('$(dataViewMethod)').join(dataViewMethod);
123 | replace = replace.split('$(shift)').join(shift);
124 |
125 | for (let i = 0; i < 8; i++) {
126 |
127 | search = search.split('\n').map((line) => ' ' + line).join('\n');
128 | replace = replace.split('\n').map((line) => ' ' + line).join('\n');
129 |
130 | const parts = source.split(search);
131 | if (parts.length == (count + 1)) {
132 | const target = parts.join(replace);
133 | fs.writeFileSync(file, target, 'utf-8');
134 | process.exit(0);
135 | }
136 | }
137 |
138 | console.log('ERROR: Replace failed.');
139 | process.exit(1);
140 |
--------------------------------------------------------------------------------
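For reference, the pytorch tool earlier in this dump drives this patcher as follows (the positional arguments are pattern, file, variable, type and expected match count; a hedged sketch of the equivalent call from Node):

const { execFileSync } = require('child_process');

// Apply the 'enumeration' rewrite to the generated caffe2 reader: decoding
// of the repeated 'floats' field is capped at one million values. The
// script exits non-zero unless the search text matches exactly once.
execFileSync('node', [ './tools/update_pbjs.js', 'enumeration', './src/caffe2-proto.js', 'floats', 'float', '1' ]);
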
/src/flux.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 | /* eslint "indent": [ "error", 4, { "SwitchCase": 1 } ] */
3 |
4 | // Experimental
5 |
6 | var flux = flux || {};
7 |
8 | flux.ModelFactory = class {
9 |
10 | match(context) {
11 | const identifier = context.identifier;
12 | const extension = identifier.split('.').pop().toLowerCase();
13 | if (extension === 'bson') {
14 | return true;
15 | }
16 | return false;
17 | }
18 |
19 | open(context, host) {
20 | return host.require('./bson').then((bson) => {
21 | let model = null;
22 | const identifier = context.identifier;
23 | try {
24 | const reader = new bson.Reader(context.buffer);
25 | const root = reader.read();
26 | const obj = flux.ModelFactory._backref(root, root);
27 | model = obj.model;
28 | if (!model) {
29 | throw new flux.Error('File does not contain Flux model.');
30 | }
31 | }
32 | catch (error) {
33 | const message = error && error.message ? error.message : error.toString();
34 | throw new flux.Error(message.replace(/\.$/, '') + " in '" + identifier + "'.");
35 | }
36 | return flux.Metadata.open(host).then((metadata) => {
37 | try {
38 | return new flux.Model(metadata, model);
39 | }
40 | catch (error) {
41 | const message = error && error.message ? error.message : error.toString();
42 | throw new flux.Error(message.replace(/\.$/, '') + " in '" + identifier + "'.");
43 | }
44 | });
45 | });
46 | }
47 |
48 | static _backref(obj, root) {
49 | if (Array.isArray(obj)) {
50 | for (let i = 0; i < obj.length; i++) {
51 | obj[i] = flux.ModelFactory._backref(obj[i], root);
52 | }
53 | }
54 | else if (obj === Object(obj)) {
55 | if (obj.tag == 'backref' && obj.ref) {
56 | if (!root._backrefs[obj.ref - 1]) {
57 | throw new flux.Error("Invalid backref '" + obj.ref + "'.");
58 | }
59 | obj = root._backrefs[obj.ref - 1];
60 | }
61 | for (const key of Object.keys(obj)) {
62 | if (obj !== root || key !== '_backrefs') {
63 | obj[key] = flux.ModelFactory._backref(obj[key], root);
64 | }
65 | }
66 | }
67 | return obj;
68 | }
69 | };
70 |
71 | flux.Model = class {
72 |
73 | constructor(/* root */) {
74 | this._format = 'Flux';
75 | this._graphs = [];
76 | }
77 |
78 | get format() {
79 | return this._format;
80 | }
81 |
82 | get graphs() {
83 | return this._graphs;
84 | }
85 | };
86 |
87 | flux.Metadata = class {
88 |
89 | static open(host) {
90 | if (flux.Metadata._metadata) {
91 | return Promise.resolve(flux.Metadata._metadata);
92 | }
93 | return host.request(null, 'flux-metadata.json', 'utf-8').then((data) => {
94 | flux.Metadata._metadata = new flux.Metadata(data);
95 | return flux.Metadata._metadata;
96 | }).catch(() => {
97 | flux.Metadata._metadata = new flux.Metadata(null);
99 | return flux.Metadata._metadata;
99 | });
100 | }
101 |
102 | constructor(data) {
103 | this._map = {};
104 | this._attributeCache = {};
105 | if (data) {
106 | const items = JSON.parse(data);
107 | if (items) {
108 | for (const item of items) {
109 | if (item.name && item.schema) {
110 | this._map[item.name] = item.schema;
111 | }
112 | }
113 | }
114 | }
115 | }
116 |
117 | type(name) {
118 | return this._map[name] || null;
119 | }
120 |
121 | attribute(type, name) {
122 | let map = this._attributeCache[type];
123 | if (!map) {
124 | map = {};
125 | const schema = this.type(type);
126 | if (schema && schema.attributes && schema.attributes.length > 0) {
127 | for (const attribute of schema.attributes) {
128 | map[attribute.name] = attribute;
129 | }
130 | }
131 | this._attributeCache[type] = map;
132 | }
133 | return map[name] || null;
134 | }
135 | };
136 |
137 | flux.Error = class extends Error {
138 |
139 | constructor(message) {
140 | super(message);
141 | this.name = 'Flux Error';
142 | }
143 | };
144 |
145 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
146 | module.exports.ModelFactory = flux.ModelFactory;
147 | }
--------------------------------------------------------------------------------
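To illustrate the back-reference pass in flux.ModelFactory._backref above (a contrived sketch that reaches into the internal static helper purely for illustration, not real Flux BSON data): any node of the form { tag: 'backref', ref: n } is replaced in place by the n-th entry, 1-based, of the root's _backrefs table.

const flux = require('./src/flux');

const root = {
    _backrefs: [ { name: 'shared' } ],
    model: { layer: { tag: 'backref', ref: 1 } }
};
flux.ModelFactory._backref(root, root);
console.log(root.model.layer.name); // 'shared'
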
/Makefile:
--------------------------------------------------------------------------------
1 |
2 | .PHONY: test
3 |
4 | build: clean lint build_python build_electron
5 |
6 | publish: clean lint publish_electron publish_python publish_github_pages publish_cask publish_winget
7 |
8 | install:
9 | @[ -d node_modules ] || npm install
10 |
11 | clean:
12 | rm -rf ./dist
13 | rm -rf ./node_modules
14 | rm -rf ./package-lock.json
15 |
16 | reset: clean
17 | rm -rf ./third_party
18 |
19 | update: install
20 | @./tools/armnn sync schema
21 | @./tools/bigdl sync schema
22 | @./tools/caffe sync schema
23 | @./tools/coreml sync schema
24 | @./tools/chainer sync
25 | @./tools/cntk sync schema
26 | @./tools/darknet sync
27 | @./tools/dl4j sync
28 | @./tools/keras sync install metadata
29 | @./tools/mediapipe sync
30 | @./tools/mlnet sync metadata
31 | @./tools/mnn sync schema
32 | @./tools/mxnet sync metadata
33 | @./tools/ncnn sync
34 | @./tools/onnx sync install schema metadata
35 | @./tools/paddle sync schema
36 | @./tools/pytorch sync install schema metadata
37 | @./tools/sklearn sync install metadata
38 | @./tools/tengine sync
39 | @./tools/tf sync install schema metadata
40 | @./tools/tflite sync schema
41 | @./tools/torch sync
42 | @./tools/uff sync schema
43 |
44 | build_python: install
45 | python -m pip install --user wheel
46 | python ./setup.py build --version bdist_wheel
47 |
48 | build_electron: install
49 | CSC_IDENTITY_AUTO_DISCOVERY=false npx electron-builder --mac --publish never
50 | npx electron-builder --win --publish never
51 | npx electron-builder --linux appimage --publish never
52 | npx electron-builder --linux snap --publish never
53 |
54 | lint: install
55 | npx eslint src/*.js test/*.js setup/*.js tools/*.js
56 |
57 | test: install
58 | node ./test/test.js
59 |
60 | start: install
61 | npx electron .
62 |
63 | publish_python: build_python
64 | python -m pip install --user twine
65 | python -m twine upload --non-interactive --skip-existing --verbose dist/dist/*
66 |
67 | publish_electron: install
68 | npx electron-builder --mac --publish always
69 | npx electron-builder --win --publish always
70 | npx electron-builder --linux appimage --publish always
71 | npx electron-builder --linux snap --publish always
72 |
73 | publish_github_pages: build_python
74 | @rm -rf ./dist/gh-pages
75 | @git clone --depth=1 https://x-access-token:$(GITHUB_TOKEN)@github.com/$(GITHUB_USER)/netron.git --branch gh-pages ./dist/gh-pages 2>&1 > /dev/null
76 | @rm -rf ./dist/gh-pages/*
77 | @cp -R ./dist/lib/netron/* ./dist/gh-pages/
78 | @rm -rf ./dist/gh-pages/*.py*
79 | @git -C ./dist/gh-pages add --all
80 | @git -C ./dist/gh-pages commit --amend --no-edit
81 | @git -C ./dist/gh-pages push --force origin gh-pages
82 |
83 | publish_cask:
84 | @curl -s -H "Authorization: token $(GITHUB_TOKEN)" https://api.github.com/repos/Homebrew/homebrew-cask/forks -d '' 2>&1 > /dev/null
85 | @rm -rf ./dist/homebrew-cask
86 | @git clone --depth=1 https://x-access-token:$(GITHUB_TOKEN)@github.com/$(GITHUB_USER)/homebrew-cask.git ./dist/homebrew-cask
87 | @node ./setup/cask.js ./package.json ./dist/homebrew-cask/Casks/netron.rb
88 | @git -C ./dist/homebrew-cask add --all
89 | @git -C ./dist/homebrew-cask commit -m "Update $$(node -pe "require('./package.json').productName") to $$(node -pe "require('./package.json').version")"
90 | @git -C ./dist/homebrew-cask push
91 | @curl -H "Authorization: token $(GITHUB_TOKEN)" https://api.github.com/repos/Homebrew/homebrew-cask/pulls -d "{\"title\":\"Add $$(node -pe "require('./package.json').name") $$(node -pe "require('./package.json').version")\",\"base\":\"master\",\"head\":\"$(GITHUB_USER):master\",\"body\":\"\"}" 2>&1 > /dev/null
92 | @rm -rf ./dist/homebrew-cask
93 | @curl -s -H "Authorization: token $(GITHUB_TOKEN)" -X "DELETE" https://api.github.com/repos/$(GITHUB_USER)/homebrew-cask 2>&1 > /dev/null
94 |
95 | publish_winget:
96 | @curl -s -H "Authorization: token $(GITHUB_TOKEN)" https://api.github.com/repos/microsoft/winget-pkgs/forks -d '' 2>&1 > /dev/null
97 | @rm -rf ./dist/winget-pkgs
98 | @git clone --depth=1 https://x-access-token:$(GITHUB_TOKEN)@github.com/$(GITHUB_USER)/winget-pkgs.git ./dist/winget-pkgs
99 | @node ./setup/winget.js ./package.json ./dist/winget-pkgs/manifests
100 | @git -C ./dist/winget-pkgs add --all
101 | @git -C ./dist/winget-pkgs commit -m "Update $$(node -pe "require('./package.json').name") to $$(node -pe "require('./package.json').version")"
102 | @git -C ./dist/winget-pkgs push
103 | @curl -H "Authorization: token $(GITHUB_TOKEN)" https://api.github.com/repos/microsoft/winget-pkgs/pulls -d "{\"title\":\"Add $$(node -pe "require('./package.json').productName") $$(node -pe "require('./package.json').version")\",\"base\":\"master\",\"head\":\"$(GITHUB_USER):master\",\"body\":\"\"}" 2>&1 > /dev/null
104 | @rm -rf ./dist/winget-pkgs
105 | @curl -s -H "Authorization: token $(GITHUB_TOKEN)" -X "DELETE" https://api.github.com/repos/$(GITHUB_USER)/winget-pkgs 2>&1 > /dev/null
106 |
107 | version:
108 | @node ./setup/version.js ./package.json
109 | @git add ./package.json
110 | @git commit -m "Update to $$(node -pe "require('./package.json').version")"
111 | @git tag v$$(node -pe "require('./package.json').version")
112 | @git push --force
113 | @git push --tags
114 | @git tag -d v$$(node -pe "require('./package.json').version")
115 |
--------------------------------------------------------------------------------
/src/gzip.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 | /* global pako */
3 |
4 | var gzip = gzip || {};
5 |
6 | gzip.Archive = class {
7 |
8 | constructor(buffer) {
9 | this._entries = [];
10 | if (buffer.length < 18 || buffer[0] != 0x1f || buffer[1] != 0x8b) {
11 | throw new gzip.Error('Invalid GZIP archive.');
12 | }
13 | const reader = new gzip.Reader(buffer, 0, buffer.length);
14 | this._entries.push(new gzip.Entry(reader));
15 | }
16 |
17 | get entries() {
18 | return this._entries;
19 | }
20 | };
21 |
22 | gzip.Entry = class {
23 |
24 | constructor(reader) {
25 | if (!reader.match([ 0x1f, 0x8b ])) {
26 | throw new gzip.Error('Invalid GZIP signature.');
27 | }
28 | const compressionMethod = reader.byte();
29 | if (compressionMethod != 8) {
30 | throw new gzip.Error("Invalid compression method '" + compressionMethod.toString() + "'.");
31 | }
32 | const flags = reader.byte();
33 | reader.uint32(); // MTIME
34 | reader.byte();
35 | reader.byte(); // OS
36 | if ((flags & 4) != 0) {
37 | const xlen = reader.uint16();
38 | reader.skip(xlen);
39 | }
40 | if ((flags & 8) != 0) {
41 | this._name = reader.string();
42 | }
43 | if ((flags & 16) != 0) { // FLG.FCOMMENT
44 | reader.string();
45 | }
46 | if ((flags & 1) != 0) {
47 | reader.uint16(); // CRC16
48 | }
49 | const compressedData = reader.bytes();
50 | if (typeof process === 'object' && typeof process.versions == 'object' && typeof process.versions.node !== 'undefined') {
51 | this._data = require('zlib').inflateRawSync(compressedData);
52 | }
53 | else if (typeof pako !== 'undefined') {
54 | this._data = pako.inflateRaw(compressedData);
55 | }
56 | else {
57 | this._data = new (require('./zip').Inflater)().inflateRaw(compressedData);
58 | }
59 | reader.position = -8;
60 | reader.uint32(); // CRC32
61 | const size = reader.uint32();
62 | if (size != this._data.length) {
63 | throw new gzip.Error('Invalid size.');
64 | }
65 | }
66 |
67 | get name() {
68 | return this._name;
69 | }
70 |
71 | get data() {
72 | return this._data;
73 | }
74 |
75 | };
76 |
77 | gzip.Reader = class {
78 |
79 | constructor(buffer, start, end) {
80 | this._buffer = buffer;
81 | this._position = start;
82 | this._end = end;
83 | }
84 |
85 | match(signature) {
86 | if (this._position + signature.length > this._end) {
87 | return false;
88 | }
89 | for (let i = 0; i < signature.length; i++) {
90 | if (this._buffer[this._position + i] != signature[i]) {
91 | return false;
92 | }
93 | }
94 | this._position += signature.length;
95 | return true;
96 | }
97 | get position() {
98 | return this._position;
99 | }
100 |
101 | set position(value) {
102 | this._position = value >= 0 ? value : this._end + value;
103 | }
104 |
105 | skip(size) {
106 | if (this._position + size > this._end) {
107 | throw new gzip.Error('Data not available.');
108 | }
109 | this._position += size;
110 | }
111 |
112 | bytes(size) {
113 | size = size === undefined ? this._end - this._position : size;
114 | if (this._position + size > this._end) {
115 | throw new gzip.Error('Data not available.');
116 | }
117 | const data = this._buffer.subarray(this._position, this._position + size);
118 | this._position += size;
119 | return data;
120 | }
121 |
122 | byte() {
123 | if (this._position + 1 > this._end) {
124 | throw new gzip.Error('Data not available.');
125 | }
126 | const value = this._buffer[this._position];
127 | this._position++;
128 | return value;
129 | }
130 |
131 | uint16() {
132 | if (this._position + 2 > this._end) {
133 | throw new gzip.Error('Data not available.');
134 | }
135 | const value = this._buffer[this._position] | (this._buffer[this._position + 1] << 8);
136 | this._position += 2;
137 | return value;
138 | }
139 |
140 | uint32() {
141 |         return (this.uint16() | (this.uint16() << 16)) >>> 0;
142 | }
143 |
144 | string() {
145 | let result = '';
146 | const end = this._buffer.indexOf(0x00, this._position);
147 | if (end < 0) {
148 | throw new gzip.Error('End of string not found.');
149 | }
150 | while (this._position < end) {
151 | result += String.fromCharCode(this._buffer[this._position++]);
152 | }
153 | this._position++;
154 | return result;
155 | }
156 |
157 | };
158 |
159 | gzip.Error = class extends Error {
160 | constructor(message) {
161 | super(message);
162 |         this.name = 'GZIP Error';
163 | }
164 | };
165 |
166 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
167 | module.exports.Archive = gzip.Archive;
168 | }
--------------------------------------------------------------------------------
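A short usage sketch for the reader above (Node.js; the input path is hypothetical). gzip.Archive expects the raw bytes of a single-member GZIP stream, inflates the payload through zlib, pako, or the bundled ./zip inflater, and checks the ISIZE trailer against the inflated length:

    const fs = require('fs');
    const gzip = require('./src/gzip');             // exports gzip.Archive (see above)
    const buffer = fs.readFileSync('model.pb.gz');  // hypothetical input file
    const archive = new gzip.Archive(buffer);
    const entry = archive.entries[0];               // a single-member stream yields one entry
    console.log(entry.name);                        // set only when FLG.FNAME was present
    console.log(entry.data.length);                 // inflated payload size
--------------------------------------------------------------------------------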
/src/bson.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 | /* eslint "indent": [ "error", 4, { "SwitchCase": 1 } ] */
3 |
4 | // Experimental BSON JavaScript reader
5 |
6 | var bson = {};
7 | var long = long || { Long: require('long') };
8 |
9 | // http://bsonspec.org/spec.html
10 | bson.Reader = class {
11 |
12 | constructor(buffer) {
13 | this._asciiDecoder = new TextDecoder('ascii');
14 | this._utf8Decoder = new TextDecoder('utf-8');
15 | this._buffer = buffer;
16 | this._position = 0;
17 | this._view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
18 | }
19 |
20 | read() {
21 | return this.document();
22 | }
23 |
24 | document(isArray) {
25 | const start = this._position;
26 | const size = this.int32();
27 | if (size < 5 || start + size > this._buffer.length || this._buffer[start + size - 1] != 0x00) {
28 |             throw new bson.Error('Invalid BSON size.');
29 | }
30 | let element = isArray ? [] : {};
31 | let index = 0;
32 | for (;;) {
33 | const type = this.byte();
34 | if (type == 0x00) {
35 | break;
36 | }
37 | const key = this.cstring();
38 | let value = null;
39 | switch (type) {
40 | case 0x01:
41 | value = this.double();
42 | break;
43 | case 0x02:
44 | value = this.string();
45 | break;
46 | case 0x03:
47 | value = this.document(false);
48 | break;
49 | case 0x04:
50 | value = this.document(true);
51 | break;
52 | case 0x05:
53 | value = this.binary();
54 | break;
55 | case 0x08:
56 | value = this.boolean();
57 | break;
58 | case 0x0A:
59 | value = null;
60 | break;
61 | case 0x10:
62 | value = this.int32();
63 | break;
64 | case 0x11:
65 | value = this.uint64();
66 | break;
67 | case 0x12:
68 | value = this.int64();
69 | break;
70 | default:
71 | throw new bson.Error("Unknown value type '" + type + "'.");
72 | }
73 | if (isArray) {
74 | if (index !== parseInt(key, 10)) {
75 | throw new bson.Error("Invalid array index '" + key + "'.");
76 | }
77 | element.push(value);
78 | index++;
79 | }
80 | else {
81 | element[key] = value;
82 | }
83 | }
84 | return element;
85 | }
86 |
87 | cstring() {
88 | const end = this._buffer.indexOf(0x00, this._position);
89 | const value = this._asciiDecoder.decode(this._buffer.subarray(this._position, end));
90 | this._position = end + 1;
91 | return value;
92 | }
93 |
94 | string() {
95 | const end = this.int32() + this._position - 1;
96 | const value = this._utf8Decoder.decode(this._buffer.subarray(this._position, end));
97 | this._position = end;
98 |         if (this.byte() !== 0x00) {
99 | throw new bson.Error('String missing terminal 0.');
100 | }
101 | return value;
102 | }
103 |
104 | binary() {
105 | const size = this.int32();
106 | const subtype = this.byte();
107 | const data = this._buffer.subarray(this._position, this._position + size);
108 | this._position += size;
109 | switch (subtype) {
110 | case 0x00:
111 | return data;
112 | default:
113 | throw new bson.Error("Unknown binary subtype '" + subtype + "'.");
114 | }
115 | }
116 |
117 | boolean() {
118 | const value = this.byte();
119 | switch (value) {
120 | case 0x00: return false;
121 | case 0x01: return true;
122 | default: throw new bson.Error("Invalid boolean value '" + value + "'.");
123 | }
124 | }
125 |
126 | byte() {
127 | return this._buffer[this._position++];
128 | }
129 |
130 | int32() {
131 | const value = this._view.getInt32(this._position, true);
132 | this._position += 4;
133 | return value;
134 | }
135 |
136 | int64() {
137 | const low = this._view.getUint32(this._position, true);
138 | const hi = this._view.getUint32(this._position + 4, true);
139 | this._position += 8;
140 | return new long.Long(low, hi, false).toNumber();
141 | }
142 |
143 | uint64() {
144 | const low = this._view.getUint32(this._position, true);
145 | const hi = this._view.getUint32(this._position + 4, true);
146 | this._position += 8;
147 | return new long.Long(low, hi, true).toNumber();
148 | }
149 | };
150 |
151 | bson.Error = class extends Error {
152 |
153 | constructor(message) {
154 | super(message);
155 | this.name = 'BSON Error';
156 | }
157 | };
158 |
159 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
160 | module.exports.Reader = bson.Reader;
161 | }
--------------------------------------------------------------------------------
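The reader above maps a top-level BSON document onto plain JavaScript objects and arrays; it understands doubles, strings, embedded documents, arrays, binary subtype 0, booleans, null, int32, int64 and uint64, and rejects anything else. A minimal usage sketch (Node.js; the input path is hypothetical):

    const fs = require('fs');
    const bson = require('./src/bson');            // exports bson.Reader (see above)
    const buffer = fs.readFileSync('model.bson');  // hypothetical BSON file
    const document = new bson.Reader(buffer).read();
    console.log(JSON.stringify(document, null, 2));
--------------------------------------------------------------------------------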
/tools/onnx:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | identifier=onnx
7 |
8 | case "${OSTYPE}" in
9 | msys*) python="winpty python";;
10 | *) python="python";;
11 | esac
12 |
13 | bold() {
14 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
15 | }
16 |
17 | venv() {
18 | env_dir=./third_party/env/onnx
19 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
20 | case "${OSTYPE}" in
21 | msys*) source ${env_dir}/Scripts/activate;;
22 | *) source ${env_dir}/bin/activate;;
23 | esac
24 | ${python} -m pip install --quiet --upgrade pip
25 | }
26 |
27 | git_sync() {
28 | [ -d "./third_party/src/${1}" ] || git clone --quiet --recursive ${2} "./third_party/src/${1}"
29 | pushd "./third_party/src/${1}" > /dev/null
30 | git pull --quiet --prune
31 | git submodule sync --quiet
32 | git submodule update --quiet --init --recursive
33 | popd > /dev/null
34 | }
35 |
36 | clean() {
37 | bold "onnx clean"
38 | rm -rf "./third_party/env/onnx"
39 | rm -rf "./third_party/src/onnx"
40 | rm -rf "./third_party/src/onnxmltools"
41 | }
42 |
43 | sync() {
44 | bold "onnx sync"
45 | git_sync onnx https://github.com/onnx/onnx.git
46 | git_sync onnxmltools https://github.com/onnx/onnxmltools.git
47 | }
48 |
49 | install() {
50 | bold "onnx install"
51 | case "${OSTYPE}" in
52 | linux*)
53 | [ -x "$(command -v cmake)" ] || sudo apt install -y cmake
54 | [ -x "$(command -v protoc)" ] || sudo apt install -y protobuf-compiler libprotoc-dev
55 | protobuf=protobuf
56 | ;;
57 | darwin*)
58 | brew list cmake > /dev/null 2>&1 || brew install cmake > /dev/null
59 | brew list protobuf > /dev/null 2>&1 || brew install protobuf > /dev/null
60 | protobuf=protobuf
61 | ;;
62 | msys*)
63 |             [ ! -z "$(choco list --local-only --exact --limit-output visualstudio2017-workload-vctools)" ] || choco install --yes visualstudio2017-workload-vctools > /dev/null
64 | protoc_version=3.9.x
65 | protoc_dir="$(pwd)/third_party/bin/protobuf/v${protoc_version}"
66 | programfiles_x86_dir=$(env | grep "^ProgramFiles(x86)=" | cut -d '=' -f 2)
67 | cmake_dir="${programfiles_x86_dir}\Microsoft Visual Studio\2017\BuildTools\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin"
68 | msbuild_dir="${programfiles_x86_dir}\Microsoft Visual Studio\2017\BuildTools\MSBuild\15.0\Bin"
69 | if [ ! -f "${protoc_dir}/bin/protoc.exe" ]; then
70 | rm -rf ${protoc_dir}
71 | git clone --quiet --branch ${protoc_version} https://github.com/protocolbuffers/protobuf.git ${protoc_dir}/src
72 | pushd "${protoc_dir}/src/cmake" > /dev/null
73 | "${cmake_dir}\cmake.exe" -G "Visual Studio 15 2017 Win64" -Dprotobuf_MSVC_STATIC_RUNTIME=OFF -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_BUILD_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX="..\.." > /dev/null
74 | "${msbuild_dir}\MSBuild.exe" protobuf.sln //m //p:Configuration=Release > /dev/null
75 | "${msbuild_dir}\MSBuild.exe" INSTALL.vcxproj //p:Configuration=Release > /dev/null
76 | popd > /dev/null
77 | fi
78 | export PATH="${protoc_dir}\bin":"$(cygpath -u "${cmake_dir}")":${PATH}
79 | export USE_MSVC_STATIC_RUNTIME=0
80 | protobuf="protobuf==3.9.2"
81 | ;;
82 | esac
83 | venv
84 | ${python} -m pip install --quiet --upgrade ${protobuf}
85 | export ONNX_ML=1
86 | export ONNX_NAMESPACE=onnx
87 | ${python} -m pip install --quiet "./third_party/src/onnx"
88 | deactivate
89 | }
90 |
91 | schema() {
92 | bold "onnx schema"
93 | [[ $(grep -U $'\x0D' ./src/onnx-proto.js) ]] && crlf=1
94 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case --decode-text -r onnx -o ./src/onnx-proto.js ./third_party/src/onnx/onnx/onnx-ml.proto ./third_party/src/onnx/onnx/onnx-operators-ml.proto
95 | node ./tools/update_pbjs.js array ./src/onnx-proto.js float_data float 1
96 | node ./tools/update_pbjs.js array ./src/onnx-proto.js double_data double 1
97 | if [[ -n ${crlf} ]]; then
98 | unix2dos --quiet --newfile ./src/onnx-proto.js ./src/onnx-proto.js
99 | fi
100 | }
101 |
102 | metadata() {
103 | bold "onnx metadata"
104 | [[ $(grep -U $'\x0D' ./src/onnx-metadata.json) ]] && crlf=1
105 | venv
106 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
107 | ${python} ./tools/onnx-script.py metadata
108 | deactivate
109 | if [[ -n ${crlf} ]]; then
110 | unix2dos --quiet --newfile ./src/onnx-metadata.json ./src/onnx-metadata.json
111 | fi
112 | }
113 |
114 | convert() {
115 | bold "onnx convert"
116 | venv
117 | ${python} -m pip install --quiet ./third_party/src/onnxmltools
118 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
119 | ${python} ./tools/onnx-script.py convert ${1}
120 | deactivate
121 | }
122 |
123 | infer() {
124 | bold "onnx infer"
125 | venv
126 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
127 | ${python} ./tools/onnx-script.py infer ${1}
128 | deactivate
129 | }
130 |
131 | optimize() {
132 | bold "onnx optimize"
133 | venv
134 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
135 | ${python} ./tools/onnx-script.py optimize ${1}
136 | deactivate
137 | }
138 |
139 | while [ "$#" != 0 ]; do
140 | command="$1" && shift
141 | case "${command}" in
142 | "clean") clean;;
143 | "sync") sync;;
144 | "install") install;;
145 | "schema") schema;;
146 | "metadata") metadata;;
147 | "convert") convert ${1} && shift;;
148 | "infer") infer ${1} && shift;;
149 | "optimize") optimize ${1} && shift;;
150 | esac
151 | done
152 |
--------------------------------------------------------------------------------
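The while loop at the bottom dispatches positional arguments in order, so a full refresh of the ONNX support files can be chained in a single invocation from the repository root; convert, infer and optimize each consume the following argument. The other tools/* scripts, such as tools/tf below, follow the same dispatch pattern:

    ./tools/onnx clean sync install schema metadata
--------------------------------------------------------------------------------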
/src/view-grapher.css:
--------------------------------------------------------------------------------
1 |
2 | .node path { stroke: #333; fill: none; stroke-width: 1px; }
3 | .node line { stroke: #333; fill: none; stroke-width: 1px; }
4 |
5 | .node-item path { stroke-width: 0; stroke: #000; fill: #fff; }
6 | .node-item text { font-family: -apple-system, BlinkMacSystemFont, "Segoe WPC", "Segoe UI", "Ubuntu", "Droid Sans", sans-serif, "PingFang SC"; font-size: 11px; text-rendering: geometricPrecision; }
7 |
8 | .node-item-function path { fill: #fff; }
9 | .node-item-function text { fill: #000; }
10 | .node-item-function:hover { cursor: pointer; }
11 | .node-item-function:hover path { fill: #eee; }
12 |
13 | .node-item-type path { fill: #000; }
14 | .node-item-type text { fill: #fff; }
15 | .node-item-type:hover { cursor: pointer; }
16 | .node-item-type:hover path { fill: #fff; }
17 | .node-item-type:hover text { fill: #000; }
18 |
19 | .node-item-type-constant path { fill: #eee; }
20 | .node-item-type-constant text { fill: #000; }
21 | .node-item-type-constant:hover path { fill: #fff; }
22 |
23 | .node-item-type-control path { fill: #eee; }
24 | .node-item-type-control text { fill: #000; }
25 | .node-item-type-control:hover path { fill: #fff; }
26 |
27 | .node-item-type-layer path { fill: rgb(51, 85, 136); }
28 | .node-item-type-wrapper path { fill: rgb(238, 238, 238); }
29 | .node-item-type-wrapper text { fill: rgb(0, 0, 0); }
30 | .node-item-type-activation path { fill: rgb(75, 27, 22); }
31 | .node-item-type-pool path { fill: rgb(51, 85, 51); }
32 | .node-item-type-normalization path { fill: rgb(51, 85, 68); }
33 | .node-item-type-dropout path { fill: rgb(69, 71, 112); }
34 | .node-item-type-shape path { fill: rgb(108, 79, 71); }
35 | .node-item-type-tensor path { fill: rgb(89, 66, 59); }
36 | .node-item-type-transform path { fill: rgb(51, 85, 68); }
37 | .node-item-type-data path { fill: rgb(85, 85, 85); }
38 | .node-item-type-custom path { fill: rgb(128, 128, 128); }
39 |
40 | .node-item-input path { fill: #fff; }
41 | .node-item-input:hover { cursor: pointer; }
42 | .node-item-input:hover path { fill: #fff; }
43 |
44 | .node-item-constant path { fill: #eee; }
45 | .node-item-constant:hover { cursor: pointer; }
46 | .node-item-constant:hover path { fill: #fff; }
47 |
48 | .node-item-undefined path { fill: #f00; }
49 | .node-item-undefined:hover { cursor: pointer; }
50 | .node-item-undefined:hover path { fill: #fff; }
51 |
52 | .node-attribute:hover { cursor: pointer; }
53 | .node-attribute text { font-family: -apple-system, BlinkMacSystemFont, "Segoe WPC", "Segoe UI", "Ubuntu", "Droid Sans", sans-serif, "PingFang SC"; font-size: 9px; font-weight: normal; text-rendering: geometricPrecision; }
54 | .node-attribute path { fill: #fff; stroke-width: 0; stroke: #000; }
55 | .node-attribute:hover path { fill: #f6f6f6; }
56 |
57 | .graph-item-input path { fill: #eee; }
58 | .graph-item-input:hover { cursor: pointer; }
59 | .graph-item-input:hover path { fill: #fff; }
60 |
61 | .graph-item-output path { fill: #eee; }
62 | .graph-item-output:hover { cursor: pointer; }
63 | .graph-item-output:hover path { fill: #fff; }
64 |
65 | .edge-label text { font-family: -apple-system, BlinkMacSystemFont, "Segoe WPC", "Segoe UI", "Ubuntu", "Droid Sans", sans-serif, "PingFang SC"; font-size: 10px; }
66 | .edge-path { stroke: #000; stroke-width: 1px; fill: none; }
67 | #arrowhead-vee { fill: #000; }
68 | .edge-path-control-dependency { stroke-dasharray: 3, 2; }
69 |
70 | .cluster rect { stroke: #000; fill: #000; fill-opacity: 0.02; stroke-opacity: 0.06; stroke-width: 1px; }
71 |
72 | .select .node.border { stroke: #333; stroke-width: 2px; stroke-dasharray: 6px 3px; stroke-dashoffset: 0; animation: pulse 4s infinite linear; }
73 | .select.edge-path { stroke: #333; stroke-width: 2px; stroke-dasharray: 6px 3px; stroke-dashoffset: 0; animation: pulse 4s infinite linear; }
74 |
75 | @keyframes pulse { from { stroke-dashoffset: 100px; } to { stroke-dashoffset: 0; } }
76 |
77 | @media (prefers-color-scheme: dark) {
78 |
79 | .edge-label text { fill: #b2b2b2; }
80 | .edge-path { stroke: #888888; }
81 | #arrowhead-vee { fill: #888888; }
82 |
83 | .node path { stroke: #1d1d1d; }
84 | .node line { stroke: #1d1d1d; }
85 |
86 | .select .node.border { stroke: #dfdfdf; }
87 | .select.edge-path { stroke: #dfdfdf; }
88 |
89 | .node-item-function path { fill: #404040; }
90 |     .node-item-function text { fill: #dfdfdf; }
91 |     .node-item-function:hover { cursor: pointer; }
92 | .node-item-function:hover path { fill: #666666; }
93 |
94 | .node-item-type path { fill: #303030; }
95 | .node-item-type text { fill: #dfdfdf; }
96 |     .node-item-type:hover { cursor: pointer; }
97 | .node-item-type:hover path { fill: #808080; }
98 | .node-item-type:hover text { fill: #dfdfdf; }
99 |
100 | .node-item path { stroke: #fff; }
101 | .node-item text { fill: #dfdfdf; }
102 |
103 | .node-attribute text { fill: #b2b2b2; }
104 | .node-attribute path { fill: #2d2d2d; }
105 | .node-attribute:hover path { fill: #666666; }
106 |
107 | .graph-item-input path { fill: #404040; }
108 |     .graph-item-input:hover { cursor: pointer; }
109 | .graph-item-input:hover path { fill: #666666; }
110 |
111 | .graph-item-output path { fill: #404040; }
112 |     .graph-item-output:hover { cursor: pointer; }
113 | .graph-item-output:hover path { fill: #666666; }
114 |
115 | .node-item-input path { fill: #404040; }
116 | .node-item-input:hover path { fill: #666666; }
117 | .node-item-constant path { fill: #4b4b4b; }
118 | .node-item-constant:hover path { fill: #666666; }
119 |
120 | .node-item-type-layer path { fill: rgba(51, 85, 136, 0.7); }
121 |     .node-item-type-activation path { fill: rgba(75, 27, 22, 0.7); }
122 |     .node-item-type-pool path { fill: rgba(51, 85, 51, 0.7); }
125 | .node-item-type-normalization path { fill: rgba(51, 85, 68, 0.7); }
126 | .node-item-type-dropout path { fill: rgba(69, 71, 112, 0.7); }
127 | .node-item-type-shape path { fill: rgba(108, 79, 71, 0.7); }
128 | .node-item-type-tensor path { fill: rgba(89, 66, 59, 0.7); }
129 | .node-item-type-transform path { fill: rgba(51, 85, 68, 0.7); }
130 | .node-item-type-data path { fill: rgba(85, 85, 85, 0.7); }
131 |     .node-item-type-custom path { fill: rgba(64, 64, 64, 0.7); }
132 | }
133 |
--------------------------------------------------------------------------------
/setup/icon.svg:
--------------------------------------------------------------------------------
1 |
114 |
--------------------------------------------------------------------------------
/tools/tf:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | pushd $(cd $(dirname ${0})/..; pwd) > /dev/null
5 |
6 | case "${OSTYPE}" in
7 | msys*) python="winpty python";;
8 | *) python="python";;
9 | esac
10 |
11 | bold() {
12 | echo "$(tty -s && tput bold)$1$(tty -s && tput sgr0)"
13 | }
14 |
15 | venv() {
16 | env_dir=./third_party/env/tensorflow
17 | [ -d "${env_dir}" ] || ${python} -m venv ${env_dir}
18 | case "${OSTYPE}" in
19 | msys*) source ${env_dir}/Scripts/activate;;
20 | *) source ${env_dir}/bin/activate;;
21 | esac
22 | ${python} -m pip install --quiet --upgrade pip
23 | }
24 |
25 | clean() {
26 | bold "tf clean"
27 | rm -rf "./third_party/env/tensorflow"
28 | rm -rf "./third_party/src/tensorflow"
29 | }
30 |
31 | sync() {
32 | bold "tf sync"
33 | [ -d "./third_party/src/tensorflow" ] || git clone --quiet --recursive https://github.com/tensorflow/tensorflow.git "./third_party/src/tensorflow"
34 | pushd "./third_party/src/tensorflow" > /dev/null
35 | git pull --quiet --prune
36 | git submodule sync --quiet
37 | git submodule update --quiet --init --recursive
38 | popd > /dev/null
39 | }
40 |
41 | install() {
42 | bold "tf install"
43 | venv
44 | ${python} -m pip install --quiet --upgrade protobuf
45 | deactivate
46 | }
47 |
48 | schema() {
49 | bold "tf schema"
50 | [[ $(grep -U $'\x0D' ./src/tf-proto.js) ]] && crlf=1
51 | npx pbjs -t static-module -w closure --no-encode --no-delimited --no-comments --no-convert --no-verify --no-create --keep-case --decode-text -r tf -o ./src/tf-proto.js \
52 | ./third_party/src/tensorflow/tensorflow/core/protobuf/saved_model.proto \
53 | ./third_party/src/tensorflow/tensorflow/core/protobuf/meta_graph.proto \
54 | ./third_party/src/tensorflow/tensorflow/core/protobuf/saver.proto \
55 | ./third_party/src/tensorflow/tensorflow/core/framework/graph.proto \
56 | ./third_party/src/tensorflow/tensorflow/core/framework/op_def.proto \
57 | ./third_party/src/tensorflow/tensorflow/core/framework/tensor_shape.proto \
58 | ./third_party/src/tensorflow/tensorflow/core/framework/types.proto \
59 | ./third_party/src/tensorflow/tensorflow/core/framework/node_def.proto \
60 | ./third_party/src/tensorflow/tensorflow/core/framework/versions.proto \
61 | ./third_party/src/tensorflow/tensorflow/core/framework/function.proto \
62 | ./third_party/src/tensorflow/tensorflow/core/framework/attr_value.proto \
63 | ./third_party/src/tensorflow/tensorflow/core/framework/tensor.proto \
64 | ./third_party/src/tensorflow/tensorflow/core/framework/variable.proto \
65 | ./third_party/src/tensorflow/tensorflow/core/framework/resource_handle.proto \
66 | ./third_party/src/tensorflow/tensorflow/core/protobuf/saved_object_graph.proto \
67 | ./third_party/src/tensorflow/tensorflow/core/protobuf/trackable_object_graph.proto \
68 | ./third_party/src/tensorflow/tensorflow/core/protobuf/struct.proto \
69 | ./third_party/src/tensorflow/tensorflow/core/protobuf/tensor_bundle.proto \
70 | ./third_party/src/tensorflow/tensorflow/core/framework/tensor_slice.proto \
71 | ./third_party/src/tensorflow/tensorflow/core/util/saved_tensor_slice.proto
72 | if [[ -n ${crlf} ]]; then
73 | unix2dos --quiet --newfile ./src/tf-proto.js ./src/tf-proto.js
74 | fi
75 | }
76 |
77 | metadata() {
78 | bold "tf metadata"
79 | [[ $(grep -U $'\x0D' ./src/tf-metadata.json) ]] && crlf=1
80 | venv
81 | case "${OSTYPE}" in
82 | linux*)
83 | [ -x "$(command -v protoc)" ] || sudo apt install -y protobuf-compiler libprotoc-dev
84 | ;;
85 | darwin*)
86 | brew list protobuf > /dev/null 2>&1 || brew install protobuf > /dev/null
87 | ;;
88 | msys*)
89 | protoc_version=$(curl -s https://api.github.com/repos/protocolbuffers/protobuf/releases/latest | grep tag_name | cut -f 2 -d : | cut -f 2 -d '"' | cut -f 2 -d v)
90 | protoc_dir=./third_party/bin/protobuf/v${protoc_version}
91 | if [ ! -f "${protoc_dir}/bin/protoc.exe" ]; then
92 | mkdir -p "${protoc_dir}"
93 | pushd "${protoc_dir}" > /dev/null
94 | curl -sL -O https://github.com/protocolbuffers/protobuf/releases/download/v${protoc_version}/protoc-${protoc_version}-win32.zip
95 | unzip protoc-${protoc_version}-win32.zip > /dev/null
96 | rm protoc-${protoc_version}-win32.zip
97 | popd > /dev/null
98 | fi
99 | export PATH="$(cygpath -a -u "${protoc_dir}/bin")":${PATH}
100 | ;;
101 | esac
102 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/attr_value.proto --python_out=./tools
103 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/tensor.proto --python_out=./tools
104 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/types.proto --python_out=./tools
105 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/tensor_shape.proto --python_out=./tools
106 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/resource_handle.proto --python_out=./tools
107 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/api_def.proto --python_out=./tools
108 | protoc --proto_path ./third_party/src/tensorflow ./third_party/src/tensorflow/tensorflow/core/framework/op_def.proto --python_out=./tools
109 | touch ./tools/tensorflow/__init__.py
110 | touch ./tools/tensorflow/core/__init__.py
111 | touch ./tools/tensorflow/core/framework/__init__.py
112 | export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
113 | ${python} ./tools/tf-script.py metadata
114 | rm -rf ./tools/tensorflow
115 | deactivate
116 | if [[ -n ${crlf} ]]; then
117 | unix2dos --quiet --newfile ./src/tf-metadata.json ./src/tf-metadata.json
118 | fi
119 | }
120 |
121 | while [ "$#" != 0 ]; do
122 | command="$1" && shift
123 | case "${command}" in
124 | "clean") clean;;
125 | "sync") sync;;
126 | "install") install;;
127 | "schema") schema;;
128 | "metadata") metadata;;
129 | esac
130 | done
131 |
--------------------------------------------------------------------------------
/tools/caffe2-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 | from __future__ import print_function
4 |
5 | import io
6 | import json
7 | import logging
8 | import pydoc
9 | import os
10 | import re
11 | import sys
12 |
13 | def get_support_level(dir):
14 | dir = dir.replace('\\', '/')
15 | if 'caffe2/caffe2/operators' in dir:
16 | return 'core'
17 | if 'contrib' in dir.split('/'):
18 | return 'contribution'
19 | if 'experiments' in dir.split('/'):
20 | return 'experimental'
21 | return 'default'
22 |
23 | def update_argument_type(type):
24 | if type == 'int' or type == 'int64_t':
25 | return 'int64'
26 | if type == 'int32_t':
27 | return 'int32'
28 | elif type == '[int]' or type == 'int[]':
29 | return 'int64[]'
30 | elif type == 'float':
31 | return 'float32'
32 | elif type == 'string':
33 | return 'string'
34 | elif type == 'List(string)':
35 | return 'string[]'
36 | elif type == 'bool':
37 | return 'boolean'
38 | raise Exception('Unknown argument type ' + str(type))
39 |
40 | def update_argument_default(value, type):
41 | if type == 'int64':
42 | return int(value)
43 | elif type == 'float32':
44 | return float(value.rstrip('~'))
45 | elif type == 'boolean':
46 | if value == 'True':
47 | return True
48 | if value == 'False':
49 | return False
50 | elif type == 'string':
51 | return value.strip('\"')
52 | raise Exception('Unknown argument type ' + str(type))
53 |
54 | def update_argument(schema, arg):
55 | if not 'attributes' in schema:
56 | schema['attributes'] = []
57 | attribute = None
58 | for current_attribute in schema['attributes']:
59 | if 'name' in current_attribute and current_attribute['name'] == arg.name:
60 | attribute = current_attribute
61 | break
62 | if not attribute:
63 | attribute = {}
64 | attribute['name'] = arg.name
65 | schema['attributes'].append(attribute)
66 | description = arg.description.strip()
67 | if description.startswith('*('):
68 | index = description.find(')*')
69 | properties = []
70 | if index != -1:
71 | properties = description[2:index].split(';')
72 | description = description[index+2:].lstrip()
73 | else:
74 | index = description.index(')')
75 | properties = description[2:index].split(';')
76 | description = description[index+1:].lstrip()
77 | if len(properties) == 1 and properties[0].find(',') != -1:
78 | properties = properties[0].split(',')
79 | for property in properties:
80 | parts = property.split(':')
81 | name = parts[0].strip()
82 | if name == 'type':
83 | type = parts[1].strip()
84 | if type == 'primitive' or type == 'int | Tuple(int)' or type == '[]' or type == 'TensorProto_DataType' or type == 'Tuple(int)':
85 | continue
86 | attribute['type'] = update_argument_type(type)
87 | elif name == 'default':
88 | if 'type' in attribute:
89 | type = attribute['type']
90 | default = parts[1].strip()
91 | if default == '2, possible values':
92 | default = '2'
93 | if type == 'float32' and default == '\'NCHW\'':
94 | continue
95 | if type == 'int64[]':
96 | continue
97 | attribute['default'] = update_argument_default(default, type)
98 | elif name == 'optional':
99 | attribute['option'] = 'optional'
100 | elif name == 'must be > 1.0' or name == 'default=\'NCHW\'' or name == 'type depends on dtype' or name == 'Required=True':
101 | continue
102 | elif name == 'List(string)':
103 | attribute['type'] = 'string[]'
104 | else:
105 | raise Exception('Unknown property ' + str(parts[0].strip()))
106 | attribute['description'] = description
107 | if not arg.required:
108 | attribute['option'] = 'optional'
109 | return
110 |
111 | def update_input(schema, input_desc):
112 | input_name = input_desc[0]
113 | description = input_desc[1]
114 | if not 'inputs' in schema:
115 | schema['inputs'] = []
116 | input_arg = None
117 | for current_input in schema['inputs']:
118 | if 'name' in current_input and current_input['name'] == input_name:
119 | input_arg = current_input
120 | break
121 | if not input_arg:
122 | input_arg = {}
123 | input_arg['name'] = input_name
124 | schema['inputs'].append(input_arg)
125 | input_arg['description'] = description
126 | if len(input_desc) > 2:
127 | return
128 |
129 | def update_output(operator_name, schema, output_desc):
130 | output_name = output_desc[0]
131 | description = output_desc[1]
132 | if not 'outputs' in schema:
133 | schema['outputs'] = []
134 | output_arg = None
135 | for current_output in schema['outputs']:
136 | if 'name' in current_output and current_output['name'] == output_name:
137 | output_arg = current_output
138 | break
139 | if not output_arg:
140 | output_arg = {}
141 | output_arg['name'] = output_name
142 | schema['outputs'].append(output_arg)
143 | output_arg['description'] = description
144 | if len(output_desc) > 2:
145 | return
146 |
147 | class Caffe2Filter(logging.Filter):
148 | def filter(self, record):
149 |         return not record.getMessage().startswith('This caffe2 python run does not have GPU support')
150 |
151 | def metadata():
152 |
153 | logging.getLogger('').addFilter(Caffe2Filter())
154 |
155 | import caffe2.python.core
156 |
157 | json_file = os.path.join(os.path.dirname(__file__), '../src/caffe2-metadata.json')
158 | json_data = open(json_file).read()
159 | json_root = json.loads(json_data)
160 |
161 | schema_map = {}
162 |
163 | for entry in json_root:
164 | operator_name = entry['name']
165 | schema = entry['schema']
166 | schema_map[operator_name] = schema
167 |
168 | for operator_name in caffe2.python.core._GetRegisteredOperators():
169 | op_schema = caffe2.python.workspace.C.OpSchema.get(operator_name)
170 | if op_schema:
171 | if operator_name == 'Crash':
172 | continue
173 | if operator_name in schema_map:
174 | schema = schema_map[operator_name]
175 | else:
176 | schema = {}
177 | entry = { 'name': operator_name, 'schema': schema }
178 | schema_map[operator_name] = entry
179 | json_root.append(entry)
180 | schema['description'] = op_schema.doc
181 | for arg in op_schema.args:
182 | update_argument(schema, arg)
183 | for input_desc in op_schema.input_desc:
184 | update_input(schema, input_desc)
185 | for output_desc in op_schema.output_desc:
186 | update_output(operator_name, schema, output_desc)
187 | schema['support_level'] = get_support_level(os.path.dirname(op_schema.file))
188 |
189 | with io.open(json_file, 'w', newline='') as fout:
190 | json_data = json.dumps(json_root, sort_keys=True, indent=2)
191 | for line in json_data.splitlines():
192 | line = line.rstrip()
193 | if sys.version_info[0] < 3:
194 | line = unicode(line)
195 | fout.write(line)
196 | fout.write('\n')
197 |
198 | if __name__ == '__main__':
199 | command_table = { 'metadata': metadata }
200 |     command = sys.argv[1]
201 | command_table[command]()
202 |
--------------------------------------------------------------------------------
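update_argument in the script above parses Caffe2 argument descriptions that lead with a *(...)* property list. A hypothetical arg (real ones come from caffe2's OpSchema) and the attribute entry it produces, as a quick illustration with the functions above in scope:

    import types
    # hypothetical stand-in for a caffe2 OpSchema argument
    arg = types.SimpleNamespace(name='axis', required=False,
                                description='*(type:int; default:0)* axis to reduce over')
    schema = {}
    update_argument(schema, arg)
    # schema == { 'attributes': [ { 'name': 'axis', 'type': 'int64', 'default': 0,
    #                               'description': 'axis to reduce over', 'option': 'optional' } ] }
--------------------------------------------------------------------------------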
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import distutils
4 | import io
5 | import json
6 | import os
7 | import re
8 | import setuptools
9 | import setuptools.command.build_py
10 | import distutils.command.build
11 |
12 | node_dependencies = [
13 | ( 'netron', [
14 | 'node_modules/d3/dist/d3.min.js',
15 | 'node_modules/dagre/dist/dagre.min.js',
16 | 'node_modules/marked/marked.min.js',
17 | 'node_modules/pako/dist/pako.min.js',
18 | 'node_modules/long/dist/long.js',
19 | 'node_modules/protobufjs/dist/protobuf.min.js',
20 | 'node_modules/protobufjs/ext/prototxt/prototxt.js',
21 | 'node_modules/flatbuffers/js/flatbuffers.js' ] )
22 | ]
23 |
24 | class build(distutils.command.build.build):
25 | user_options = distutils.command.build.build.user_options + [ ('version', None, 'version' ) ]
26 | def initialize_options(self):
27 | distutils.command.build.build.initialize_options(self)
28 | self.version = None
29 | def finalize_options(self):
30 | distutils.command.build.build.finalize_options(self)
31 | def run(self):
32 | build_py.version = bool(self.version)
33 | return distutils.command.build.build.run(self)
34 |
35 | class build_py(setuptools.command.build_py.build_py):
36 | user_options = setuptools.command.build_py.build_py.user_options + [ ('version', None, 'version' ) ]
37 | def initialize_options(self):
38 | setuptools.command.build_py.build_py.initialize_options(self)
39 | self.version = None
40 | def finalize_options(self):
41 | setuptools.command.build_py.build_py.finalize_options(self)
42 | def run(self):
43 | setuptools.command.build_py.build_py.run(self)
44 | for target, files in node_dependencies:
45 | target = os.path.join(self.build_lib, target)
46 | if not os.path.exists(target):
47 | os.makedirs(target)
48 | for file in files:
49 | self.copy_file(file, target)
50 | if build_py.version:
51 | for package, src_dir, build_dir, filenames in self.data_files:
52 | for filename in filenames:
53 | if filename == 'index.html':
54 | filepath = os.path.join(build_dir, filename)
55 | with open(filepath, 'r') as file :
56 | content = file.read()
57 |                         content = re.sub(r'(<meta name="version" content=")[^"]*(">)', r'\g<1>' + package_version() + r'\g<2>', content)
58 | with open(filepath, 'w') as file:
59 | file.write(content)
60 | def build_module(self, module, module_file, package):
61 | setuptools.command.build_py.build_py.build_module(self, module, module_file, package)
62 | if build_py.version and module == '__version__':
63 | outfile = self.get_module_outfile(self.build_lib, package.split('.'), module)
64 | with open(outfile, 'w+') as file:
65 | file.write("__version__ = '" + package_version() + "'\n")
66 |
67 | def package_version():
68 | folder = os.path.realpath(os.path.dirname(__file__))
69 | with open(os.path.join(folder, 'package.json')) as package_file:
70 | package_manifest = json.load(package_file)
71 | return package_manifest['version']
72 |
73 | setuptools.setup(
74 | name="netron",
75 | version=package_version(),
76 | description="Viewer for neural network, deep learning and machine learning models",
77 | long_description='Netron is a viewer for neural network, deep learning and machine learning models.\n\n' +
78 | 'Netron supports **ONNX** (`.onnx`, `.pb`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), ncnn (`.param`) and **TensorFlow Lite** (`.tflite`). Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **ArmNN** (`.armnn`), **Barracuda** (`.nn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **PaddlePaddle** (`__model__`), **MediaPipe** (`.pbtxt`), **ML.NET** (`.zip`), MNN (`.mnn`), **OpenVINO** (`.xml`), **scikit-learn** (`.pkl`), **Tengine** (`.tmfile`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).',
79 | keywords=[
80 | 'onnx', 'keras', 'tensorflow', 'tflite', 'coreml', 'mxnet', 'caffe', 'caffe2', 'torchscript', 'pytorch', 'ncnn', 'mnn', 'openvino', 'darknet', 'paddlepaddle', 'chainer',
81 | 'artificial intelligence', 'machine learning', 'deep learning', 'neural network',
82 | 'visualizer', 'viewer'
83 | ],
84 | license="MIT",
85 | cmdclass={
86 | 'build': build,
87 | 'build_py': build_py
88 | },
89 | package_dir={
90 | 'netron': 'src'
91 | },
92 | packages=[
93 | 'netron'
94 | ],
95 | package_data={
96 | 'netron': [
97 | 'favicon.ico', 'icon.png',
98 | 'base.js',
99 | 'numpy.js', 'pickle.js', 'hdf5.js', 'bson.js',
100 | 'zip.js', 'tar.js', 'gzip.js',
101 | 'armnn.js', 'armnn-metadata.json', 'armnn-schema.js',
102 | 'bigdl.js', 'bigdl-metadata.json', 'bigdl-proto.js',
103 | 'barracuda.js',
104 | 'caffe.js', 'caffe-metadata.json', 'caffe-proto.js',
105 | 'caffe2.js', 'caffe2-metadata.json', 'caffe2-proto.js',
106 | 'chainer.js',
107 | 'cntk.js', 'cntk-metadata.json', 'cntk-proto.js',
108 | 'coreml.js', 'coreml-metadata.json', 'coreml-proto.js',
109 | 'darknet.js', 'darknet-metadata.json',
110 | 'dl4j.js', 'dl4j-metadata.json',
111 | 'flux.js', 'flux-metadata.json',
112 | 'keras.js', 'keras-metadata.json',
113 | 'mediapipe.js',
114 | 'mlnet.js', 'mlnet-metadata.json',
115 | 'mnn.js', 'mnn-metadata.json', 'mnn-schema.js',
116 | 'mxnet.js', 'mxnet-metadata.json',
117 | 'ncnn.js', 'ncnn-metadata.json',
118 | 'onnx.js', 'onnx-metadata.json', 'onnx-proto.js',
119 | 'openvino.js', 'openvino-metadata.json', 'openvino-parser.js',
120 | 'paddle.js', 'paddle-metadata.json', 'paddle-proto.js',
121 | 'pytorch.js', 'pytorch-metadata.json', 'python.js',
122 | 'sklearn.js', 'sklearn-metadata.json',
123 | 'tengine.js', 'tengine-metadata.json',
124 | 'uff.js', 'uff-metadata.json', 'uff-proto.js',
125 | 'tf.js', 'tf-metadata.json', 'tf-proto.js',
126 | 'tflite.js', 'tflite-metadata.json', 'tflite-schema.js',
127 | 'torch.js', 'torch-metadata.json',
128 | 'index.html', 'index.js',
129 | 'view-grapher.css', 'view-grapher.js',
130 | 'view-sidebar.css', 'view-sidebar.js',
131 | 'view.js',
132 | 'server.py'
133 | ]
134 | },
135 | install_requires=[],
136 | author='Lutz Roeder',
137 | author_email='lutzroeder@users.noreply.github.com',
138 | url='https://github.com/lutzroeder/netron',
139 | entry_points={
140 | 'console_scripts': [ 'netron = netron:main' ]
141 | },
142 | classifiers=[
143 | 'Intended Audience :: Developers',
144 | 'Intended Audience :: Education',
145 | 'Intended Audience :: Science/Research',
146 | 'Programming Language :: Python :: 2',
147 | 'Programming Language :: Python :: 2.7',
148 | 'Programming Language :: Python :: 3',
149 | 'Programming Language :: Python :: 3.6',
150 | 'Topic :: Software Development',
151 | 'Topic :: Software Development :: Libraries',
152 | 'Topic :: Software Development :: Libraries :: Python Modules',
153 | 'Topic :: Scientific/Engineering',
154 | 'Topic :: Scientific/Engineering :: Mathematics',
155 | 'Topic :: Scientific/Engineering :: Artificial Intelligence',
156 | 'Topic :: Scientific/Engineering :: Visualization'
157 | ]
158 | )
--------------------------------------------------------------------------------
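The custom build --version path above stamps package.json's version into both __version__.py and index.html. A small round-trip check of the meta-tag substitution (the tag text is illustrative and assumes index.html carries its version in a meta tag of this shape):

    import re
    content = '<meta name="version" content="0.0.0">'
    content = re.sub(r'(<meta name="version" content=")[^"]*(">)',
                     r'\g<1>' + '4.5.6' + r'\g<2>', content)
    assert content == '<meta name="version" content="4.5.6">'
--------------------------------------------------------------------------------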
/src/view-sidebar.css:
--------------------------------------------------------------------------------
1 |
2 | .sidebar { font-family: -apple-system, BlinkMacSystemFont, "Segoe WPC", "Segoe UI", "Ubuntu", "Droid Sans", sans-serif; font-size: 12px; height: 100%; width: 0; position: fixed; transition: 0.2s; z-index: 1; top: 0; right: 0; background-color: #ececec; color: #242424; overflow: hidden; border-left: 1px solid #ccc; }
3 | .sidebar-title { font-weight: bold; font-size: 12px; letter-spacing: 0.5px; height: 20px; margin: 0; padding: 20px; user-select: none; -webkit-user-select: none; -moz-user-select: none; }
4 | .sidebar-closebutton { padding: 8px 8px 8px 32px; text-decoration: none; font-size: 25px; color: #777777; opacity: 1.0; display: block; transition: 0.2s; position: absolute; top: 0; right: 15px; margin-left: 50px; user-select: none; -webkit-user-select: none; -moz-user-select: none; }
5 | .sidebar-closebutton:hover { color: #242424; }
6 | .sidebar-content { padding-left: 20px; padding-right: 20px; overflow-y: auto; height: calc(100vh - 60px); }
7 |
8 | .sidebar-view-title { font-weight: bold; font-size: 11px; line-height: 1.25; border-bottom: 1px solid #ececec; padding-bottom: 0.3em; margin-top: 0; margin-bottom: 16px; color: #333; user-select: none; -webkit-user-select: none; -moz-user-select: none; cursor: default; }
9 | .sidebar-view-title-button { display: inline-block; color: #888; text-align: center; vertical-align: middle; font-weight: bold; width: 12px; height: 12px; font-size: 10px; line-height: 12px; border-radius: 50%; transform: translateY(-1px); padding: 1px; background: transparent; border: 1px solid #aaa; text-decoration: none; cursor: pointer; user-select: none; -webkit-user-select: none; -moz-user-select: none; }
10 | .sidebar-view-title-button:hover { color: #333; border: 1px solid #333; }
11 | .sidebar-view-header { font-weight: bold; font-size: 11px; text-transform: uppercase; line-height: 1.25; margin-top: 16px; margin-bottom: 16px; border-bottom: 1px solid #ececec; display: block; user-select: none; -webkit-user-select: none; -moz-user-select: none; cursor: default; }
12 | .sidebar-view-item { margin-bottom: 0px; display: block; }
13 | .sidebar-view-item-name { float: left; font-size: 11px; min-width: 95px; max-width: 95px; padding-right: 5px; padding-top: 7px; display: block; }
14 | .sidebar-view-item-name input { font-family: inherit; font-size: inherit; color: inherit; background-color: inherit; width: 100%; text-align: right; margin: 0; padding: 0; border: 0; outline: none; text-overflow: ellipsis; }
15 | .sidebar-view-item-value-list { margin: 0; margin-left: 105px; overflow: hidden; display: block; padding: 0; }
16 | .sidebar-view-item-value { font-size: 11px; background-color: #fcfcfc; border-radius: 2px; border: 1px solid #fcfcfc; margin-top: 3px; margin-bottom: 3px; overflow: auto; }
17 | .sidebar-view-item-value-dark { background-color: #f8f8f8; border: 1px solid #f8f8f8; }
18 | .sidebar-view-item-value b { font-weight: bold; }
19 | .sidebar-view-item-value code { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; overflow: auto; white-space: pre-wrap; word-wrap: break-word; }
20 | .sidebar-view-item-value pre { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; margin: 0; overflow: auto; white-space: pre; word-wrap: normal; display: block; }
21 | .sidebar-view-item-value-line { padding: 4px 6px 4px 6px; }
22 | .sidebar-view-item-value-line-border { padding: 4px 6px 4px 6px; border-top: 1px solid rgba(27, 31, 35, 0.05); }
23 | .sidebar-view-item-value-line-content { white-space: pre; word-wrap: normal; overflow: auto; display: block; }
24 | .sidebar-view-item-value-expander { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; float: right; color: #aaa; cursor: pointer; user-select: none; -webkit-user-select: none; -moz-user-select: none; padding: 4px 6px 4px 6px; }
25 | .sidebar-view-item-value-expander:hover { color: #000; }
26 | .sidebar-view-item-select {
27 | font-family: inherit; font-size: 12px;
28 | background-color: #fcfcfc; border: #fcfcfc; color: #333;
29 | border-radius: 2px; width: 100%; height: 23px; padding: 3px 12px 3px 7px;
30 | margin-top: 3px; margin-bottom: 3px; box-sizing: border-box; outline: none;
31 | -moz-box-sizing: border-box; -webkit-appearance: none; -moz-appearance: none;
32 | background-image: linear-gradient(45deg, transparent 50%, #333 50%), linear-gradient(135deg, #333 50%, transparent 50%);
33 | background-position: calc(100% - 12px) calc(10px), calc(100% - 7px) calc(10px);
34 | background-size: 5px 5px, 5px 5px;
35 | background-repeat: no-repeat;
36 | }
37 |
38 | .sidebar-view-find input[type=text] { font-family: inherit; font-size: 13px; padding: 4px 6px 4px 6px; background: #fff; border-radius: 4px; border: 1px solid #ccc; outline: 0; }
39 | .sidebar-view-find ol { list-style-type: none; overflow-y: auto; margin: 8px 0 20px 0; padding: 0; }
40 | .sidebar-view-find li { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; font-size: 12px; margin: 0; padding: 5px 8px 5px 8px; outline: 0; white-space: nowrap; user-select: none; -webkit-user-select: none; -moz-user-select: none; }
41 | .sidebar-view-find li:not(:first-child) { border-top: 1px solid #f0f0f0; }
42 | .sidebar-view-find li:hover { background: #eee; }
43 |
44 | .sidebar-view-documentation { font-size: 13px; line-height: 1.5; margin: 0; }
45 | .sidebar-view-documentation h1 { font-weight: bold; font-size: 13px; line-height: 1.25; border-bottom: 1px solid #e8e8e8; padding-bottom: 0.3em; margin-top: 0; margin-bottom: 16px; }
46 | .sidebar-view-documentation h2 { font-weight: bold; font-size: 11px; line-height: 1.25; margin-top: 20px; margin-bottom: 16px; text-transform: uppercase; border: 0; }
47 | .sidebar-view-documentation h3 { font-weight: bold; font-size: 11px; line-height: 1.25; }
48 | .sidebar-view-documentation p { margin-top: 2px; margin-bottom: 2px; margin-left: 0px; }
49 | .sidebar-view-documentation a { color: #237; }
50 | .sidebar-view-documentation code { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; font-size: 12px; background-color: rgba(27, 31, 35, 0.05); padding: 0.2em 0.4em; margin: 0; border-radius: 3px; }
51 | .sidebar-view-documentation pre { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; font-size: 12px; padding: 16px; overflow: auto; line-height: 1.45; background-color: rgba(27, 31, 35, 0.05); border-radius: 3px; }
52 | .sidebar-view-documentation pre code { font-size: 13px; padding: 16px; line-height: 1.45; background-color: transparent; padding: 0; border-radius: 0; }
53 | .sidebar-view-documentation tt { font-family: 'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace; font-weight: bold; font-size: 90%; background-color: rgba(27, 31, 35, 0.05); border-radius: 3px; padding: 0.2em 0.4em; margin: 0; }
54 | .sidebar-view-documentation dl dt { font-size: 13px; font-weight: bold; padding: 0; margin-top: 16px; margin-left: 0px; }
55 | .sidebar-view-documentation dd { padding: 0 16px; margin-left: 0; margin-bottom: 16px; }
56 | .sidebar-view-documentation ul { margin-top: 6px; margin-bottom: 6px; padding-left: 20px; }
57 | .sidebar-view-documentation blockquote { margin-left: 15px; margin-right: 15px; }
58 |
59 | @media (prefers-color-scheme: dark) {
60 | .sidebar html { color: #dfdfdf; }
61 | .sidebar { background-color: #2d2d2d; color: #dfdfdf; border-left: 1px solid #000; }
62 | .sidebar-closebutton { padding: 8px 8px 8px 32px; text-decoration: none; font-size: 25px; color: #777777; opacity: 1.0; display: block; transition: 0.2s; position: absolute; top: 0; right: 15px; margin-left: 50px; user-select: none; -webkit-user-select: none; -moz-user-select: none; }
63 | .sidebar-closebutton:hover { color: #ffffff; }
64 | .sidebar-view-item-value { background-color: #383838; border-color: #383838; }
65 | .sidebar-view-item-value-dark { background-color: #3e3e3e; border-color: #3e3e3e; }
66 | .sidebar-view-item-value-line-border { border-color: rgba(0, 0, 0, 0.09); }
67 | .sidebar-view-item-select { background-color: #383838; border: #383838; color: #dfdfdf; background-image: linear-gradient(45deg, transparent 50%, #aaa 50%), linear-gradient(135deg, #aaa 50%, transparent 50%); }
68 |
69 | .sidebar-view-title { border-bottom-color: #2d2d2d; color: #dfdfdf; }
70 | .sidebar-view-title-button { color: #888888; border-color: #888888; }
71 | .sidebar-view-title-button:hover { color: #dfdfdf; border-color: #dfdfdf; }
72 | .sidebar-view-header { border-bottom-color: #2d2d2d; color: #dfdfdf; }
73 |
74 | .sidebar-view-documentation h1 { border-bottom: 1px solid #424242; color: #dfdfdf; }
75 | .sidebar-view-documentation h2 { color: #dfdfdf; }
76 | .sidebar-view-documentation p { color: #aaaaaa; }
77 | .sidebar-view-documentation a { color: #6688aa; }
78 | .sidebar-view-documentation tt { background-color:#1e1e1e; }
79 | .sidebar-view-documentation code { background-color: #1e1e1e; }
80 | .sidebar-view-documentation pre { background-color: #1e1e1e; }
81 |
82 | .sidebar-view-find input[type=text] { background: #383838; color: #dfdfdf; border-color: #424242; }
83 | .sidebar-view-find li:not(:first-child) { border-top: 1px solid #2a2a2a; }
84 | .sidebar-view-find li:hover { background: #383838; }
85 |
86 | }
87 |
--------------------------------------------------------------------------------
/src/server.py:
--------------------------------------------------------------------------------
1 |
2 | import codecs
3 | import errno
4 | import os
5 | import platform
6 | import re
7 | import sys
8 | import threading
9 | import webbrowser
10 | import time
11 | import urllib.parse
12 |
13 | from .__version__ import __version__
14 |
15 | if sys.version_info[0] > 2:
16 | from urllib.parse import urlparse
17 | from http.server import HTTPServer
18 | from http.server import BaseHTTPRequestHandler
19 | from socketserver import ThreadingMixIn
20 | else:
21 | from urlparse import urlparse
22 | from BaseHTTPServer import HTTPServer
23 | from BaseHTTPServer import BaseHTTPRequestHandler
24 | from SocketServer import ThreadingMixIn
25 |
26 | class HTTPRequestHandler(BaseHTTPRequestHandler):
27 | def handler(self):
28 | if not hasattr(self, 'mime_types_map'):
29 | self.mime_types_map = {
30 | '.html': 'text/html',
31 | '.js': 'text/javascript',
32 | '.css': 'text/css',
33 | '.png': 'image/png',
34 | '.gif': 'image/gif',
35 | '.jpg': 'image/jpeg',
36 | '.ico': 'image/x-icon',
37 | '.json': 'application/json',
38 | '.pb': 'application/octet-stream',
39 | '.ttf': 'font/truetype',
40 | '.otf': 'font/opentype',
41 | '.eot': 'application/vnd.ms-fontobject',
42 | '.woff': 'font/woff',
43 | '.woff2': 'application/font-woff2',
44 | '.svg': 'image/svg+xml'
45 | }
46 | pathname = urlparse(self.path).path
47 | folder = os.path.dirname(os.path.realpath(__file__))
48 | location = folder + pathname
49 | status_code = 0
50 | headers = {}
51 | buffer = None
52 | data = '/data/'
53 | if status_code == 0:
54 | if pathname == '/':
55 | meta = []
56 |                     meta.append('<meta name="type" content="Python">')
57 |                     meta.append('<meta name="version" content="' + __version__ + '">')
58 |                     if self.file:
59 |                         meta.append('<meta name="file" content="/data/' + self.file + '">')
60 |                     with codecs.open(location + 'index.html', mode="r", encoding="utf-8") as open_file:
61 |                         buffer = open_file.read()
62 |                     buffer = re.sub(r'<meta name="type" content="Python">', '\n'.join(meta), buffer)
63 | buffer = buffer.encode('utf-8')
64 | headers['Content-Type'] = 'text/html'
65 | headers['Content-Length'] = len(buffer)
66 | status_code = 200
67 | elif pathname.startswith(data):
68 | file = pathname[len(data):]
69 | if file == self.file and self.data:
70 | buffer = self.data
71 | else:
72 | file = self.folder + '/' + urllib.parse.unquote(file)
73 | status_code = 404
74 | if os.path.exists(file):
75 | with open(file, 'rb') as binary:
76 | buffer = binary.read()
77 | if buffer:
78 | headers['Content-Type'] = 'application/octet-stream'
79 | headers['Content-Length'] = len(buffer)
80 | status_code = 200
81 | else:
82 | if os.path.exists(location) and not os.path.isdir(location):
83 | extension = os.path.splitext(location)[1]
84 |                         content_type = self.mime_types_map.get(extension)
85 | if content_type:
86 | with open(location, 'rb') as binary:
87 | buffer = binary.read()
88 | headers['Content-Type'] = content_type
89 | headers['Content-Length'] = len(buffer)
90 | status_code = 200
91 | else:
92 | status_code = 404
93 | if self.log:
94 | sys.stdout.write(str(status_code) + ' ' + self.command + ' ' + self.path + '\n')
95 | sys.stdout.flush()
96 | self.send_response(status_code)
97 | for key in headers:
98 | self.send_header(key, headers[key])
99 | self.end_headers()
100 | if self.command != 'HEAD':
101 | if status_code == 404 and buffer is None:
102 |                     self.wfile.write(str(status_code).encode('utf-8'))
103 |                 elif (status_code == 200 or status_code == 404) and buffer is not None:
104 | self.wfile.write(buffer)
105 | return
106 | def do_GET(self):
107 | self.handler()
108 | def do_HEAD(self):
109 | self.handler()
110 | def log_message(self, format, *args):
111 | return
112 |
113 | class ThreadedHTTPServer(ThreadingMixIn, HTTPServer): pass
114 |
115 | class HTTPServerThread(threading.Thread):
116 | def __init__(self, data, file, log, port, host, url):
117 | threading.Thread.__init__(self)
118 | self.port = port
119 | self.host = host
120 | self.file = file
121 | self.url = url
122 | self.server = ThreadedHTTPServer((host, port), HTTPRequestHandler)
123 | self.server.timeout = 0.25
124 | if file:
125 | self.server.RequestHandlerClass.folder = os.path.dirname(file) if os.path.dirname(file) else '.'
126 | self.server.RequestHandlerClass.file = os.path.basename(file)
127 | else:
128 | self.server.RequestHandlerClass.folder = ''
129 | self.server.RequestHandlerClass.file = ''
130 | self.server.RequestHandlerClass.data = data
131 | self.server.RequestHandlerClass.log = log
132 | self.terminate_event = threading.Event()
133 | self.terminate_event.set()
134 | self.stop_event = threading.Event()
135 |
136 | def run(self):
137 | self.stop_event.clear()
138 | self.terminate_event.clear()
139 | try:
140 | while not self.stop_event.is_set():
141 | self.server.handle_request()
142 | except Exception:
143 | pass
144 | self.terminate_event.set()
145 | self.stop_event.clear()
146 |
147 | def stop(self):
148 | if self.alive():
149 | sys.stdout.write("\nStopping " + self.url + "\n")
150 | self.stop_event.set()
151 | self.server.server_close()
152 |             self.terminate_event.wait(1)
153 |
154 | def alive(self):
155 | return not self.terminate_event.is_set()
156 |
157 | thread_list = []
158 |
159 | def stop(port=8080, host='localhost'):
160 | '''Stop serving model at host:port.
161 |
162 | Args:
163 | port (int, optional): port to stop. Default: 8080
164 |         host (string, optional): host to stop. Default: 'localhost'
165 | '''
166 | global thread_list
167 | for thread in thread_list:
168 | if port == thread.port and host == thread.host:
169 | thread.stop()
170 | thread_list = [ thread for thread in thread_list if thread.alive() ]
171 |
172 | def wait():
173 | '''Wait for console exit and stop all model servers.'''
174 | global thread_list
175 | try:
176 | while len(thread_list) > 0:
177 | thread_list = [ thread for thread in thread_list if thread.alive() ]
178 |             time.sleep(1)
179 | except (KeyboardInterrupt, SystemExit):
180 | for thread in thread_list:
181 | thread.stop()
182 | thread_list = [ thread for thread in thread_list if thread.alive() ]
183 |
184 | def serve(file, data, log=False, browse=False, port=8080, host='localhost'):
185 | '''Start serving model from file or data buffer at host:port and open in web browser.
186 |
187 | Args:
188 | file (string): Model file to serve. Required to detect format.
189 | data (bytes): Model data to serve. None will load data from file.
190 | log (bool, optional): Log details to console. Default: False
191 |         browse (bool, optional): Launch web browser. Default: False
192 |         port (int, optional): Port to serve. Default: 8080
193 |         host (string, optional): Host to serve. Default: 'localhost'
194 | '''
195 | global thread_list
196 |
197 | if not data and file and not os.path.exists(file):
198 | raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file)
199 |
200 | stop(port, host)
201 |
202 | url = 'http://' + host + ':' + str(port)
203 |
204 | thread_list = [ thread for thread in thread_list if thread.alive() ]
205 | thread = HTTPServerThread(data, file, log, port, host, url)
206 | thread.start()
207 | while not thread.alive():
208 |         time.sleep(0.01)
209 | thread_list.append(thread)
210 |
211 | if file:
212 | sys.stdout.write("Serving '" + file + "' at " + url + "\n")
213 | else:
214 | sys.stdout.write("Serving at " + url + "\n")
215 | sys.stdout.flush()
216 | if browse:
217 | webbrowser.open(url)
218 |
219 | def start(file=None, log=False, browse=True, port=8080, host='localhost'):
220 | '''Start serving model file at host:port and open in web browser
221 |
222 | Args:
223 | file (string): Model file to serve.
224 | log (bool, optional): Log details to console. Default: False
225 |         browse (bool, optional): Launch web browser. Default: True
226 |         port (int, optional): Port to serve. Default: 8080
227 |         host (string, optional): Host to serve. Default: 'localhost'
228 | '''
229 | serve(file, None, log=log, browse=browse, port=port, host=host)
230 |
--------------------------------------------------------------------------------
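serve, start, stop and wait form the module's public surface; assuming the package __init__.py re-exports them, as the netron Python package does, a typical session looks like:

    import netron
    netron.start('model.onnx')      # hypothetical model file; serves http://localhost:8080 and opens a browser
    netron.stop(8080, 'localhost')  # shut the server down again
    netron.wait()                   # or block until Ctrl+C stops all running servers
--------------------------------------------------------------------------------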
/src/numpy.js:
--------------------------------------------------------------------------------
1 | /* jshint esversion: 6 */
2 | /* eslint "indent": [ "error", 4, { "SwitchCase": 1 } ] */
3 |
4 | var numpy = numpy || {};
5 |
6 | numpy.Array = class {
7 |
8 | constructor(buffer) {
9 | if (buffer) {
10 | const reader = new numpy.Reader(buffer);
11 | const signature = [ 0x93, 0x4E, 0x55, 0x4D, 0x50, 0x59 ];
12 | if (!reader.bytes(6).every((v, i) => v == signature[i])) {
13 | throw new numpy.Error('Invalid signature.');
14 | }
15 | const major = reader.byte();
16 | const minor = reader.byte();
17 |             if (major !== 1 || minor !== 0) {
18 | throw new numpy.Error("Invalid version '" + [ major, minor ].join('.') + "'.");
19 | }
20 | const header = JSON.parse(reader.string().trim().replace(/'/g, '"').replace("False", "false").replace("(", "[").replace(/,*\),*/g, "]"));
21 | if (header.fortran_order) {
22 |                 throw new numpy.Error("Fortran order is not supported.");
23 | }
24 | if (!header.descr || header.descr.length < 2) {
25 | throw new numpy.Error("Missing property 'descr'.");
26 | }
27 | if (!header.shape) {
28 | throw new numpy.Error("Missing property 'shape'.");
29 | }
30 | this._shape = header.shape;
31 | this._byteOrder = header.descr[0];
32 | switch (this._byteOrder) {
33 | case '|': {
34 | this._dataType = header.descr.substring(1);
35 | this._data = reader.bytes(reader.size - reader.position);
36 | break;
37 | }
38 | case '>':
39 | case '<': {
40 | if (header.descr.length !== 3) {
41 | throw new numpy.Error("Unsupported data type '" + header.descr + "'.");
42 | }
43 | this._dataType = header.descr.substring(1);
44 |                     const size = parseInt(header.descr[2], 10) * this._shape.reduce((a, b) => a * b);
45 | this._data = reader.bytes(size);
46 | break;
47 | }
48 | default:
49 | throw new numpy.Error("Unsupported data type '" + header.descr + "'.");
50 | }
51 | }
52 | }
53 |
54 | get data() {
55 | return this._data;
56 | }
57 |
58 | set data(value) {
59 | this._data = value;
60 | }
61 |
62 | get dataType() {
63 | return this._dataType;
64 | }
65 |
66 | set dataType(value) {
67 | this._dataType = value;
68 | }
69 |
70 | get shape() {
71 | return this._shape;
72 | }
73 |
74 | set shape(value) {
75 | this._shape = value;
76 | }
77 |
78 | get byteOrder() {
79 | return this._byteOrder;
80 | }
81 |
82 | set byteOrder(value) {
83 | this._byteOrder = value;
84 | }
85 |
86 | toBuffer() {
87 |
88 | const writer = new numpy.Writer();
89 |
90 |         writer.bytes([ 0x93, 0x4E, 0x55, 0x4D, 0x50, 0x59 ]); // '\x93NUMPY'
91 | writer.byte(1); // major
92 | writer.byte(0); // minor
93 |
94 | const context = {
95 | itemSize: 1,
96 | position: 0,
97 | dataType: this._dataType,
98 | byteOrder: this._byteOrder || '<',
99 | shape: this._shape,
100 | descr: '',
101 | };
102 |
103 | if (context.byteOrder !== '<' && context.byteOrder !== '>') {
104 | throw new numpy.Error("Unknown byte order '" + this._byteOrder + "'.");
105 | }
106 | if (context.dataType.length !== 2 || (context.dataType[0] !== 'f' && context.dataType[0] !== 'i' && context.dataType[0] !== 'u')) {
107 | throw new numpy.Error("Unsupported data type '" + this._dataType + "'.");
108 | }
109 |
110 | context.itemSize = parseInt(context.dataType[1], 10);
111 |
112 | let shape = '';
113 | switch (this._shape.length) {
114 | case 0:
115 | throw new numpy.Error('Invalid shape.');
116 | case 1:
117 | shape = '(' + this._shape[0].toString() + ',)';
118 | break;
119 | default:
120 | shape = '(' + this._shape.map((dimension) => dimension.toString()).join(', ') + ')';
121 | break;
122 | }
123 |
124 | const properties = [
125 | "'descr': '" + context.byteOrder + context.dataType + "'",
126 | "'fortran_order': False",
127 | "'shape': " + shape
128 | ];
129 | let header = '{ ' + properties.join(', ') + ' }';
130 | header += ' '.repeat(16 - ((header.length + 2 + 8 + 1) & 0x0f)) + '\n';
131 | writer.string(header);
132 |
133 |         const size = context.itemSize * this._shape.reduce((a, b) => a * b, 1);
134 | context.data = new Uint8Array(size);
135 | context.dataView = new DataView(context.data.buffer, context.data.byteOffset, size);
136 | numpy.Array._encodeDimension(context, this._data, 0);
137 | writer.bytes(context.data);
138 |
139 | return writer.toBuffer();
140 | }
141 |
142 | static _encodeDimension(context, data, dimension) {
143 | const size = context.shape[dimension];
144 | const littleEndian = context.byteOrder === '<';
145 | if (dimension == context.shape.length - 1) {
146 | for (let i = 0; i < size; i++) {
147 | switch (context.dataType) {
148 | case 'f2':
149 |                         context.dataView.setFloat16(context.position, data[i], littleEndian); // note: DataView.setFloat16 is a recent addition and may be missing in older engines
150 | break;
151 | case 'f4':
152 | context.dataView.setFloat32(context.position, data[i], littleEndian);
153 | break;
154 | case 'f8':
155 | context.dataView.setFloat64(context.position, data[i], littleEndian);
156 | break;
157 | case 'i1':
158 | context.dataView.setInt8(context.position, data[i], littleEndian);
159 | break;
160 | case 'i2':
161 | context.dataView.setInt16(context.position, data[i], littleEndian);
162 | break;
163 | case 'i4':
164 | context.dataView.setInt32(context.position, data[i], littleEndian);
165 | break;
166 | case 'i8':
167 | context.data.set(data[i].toBytes(littleEndian), context.position);
168 | break;
169 | case 'u1':
170 | context.dataView.setUint8(context.position, data[i], littleEndian);
171 | break;
172 | case 'u2':
173 | context.dataView.setUint16(context.position, data[i], littleEndian);
174 | break;
175 | case 'u4':
176 | context.dataView.setUint32(context.position, data[i], littleEndian);
177 | break;
178 | case 'u8':
179 | context.data.set(data[i].toBytes(littleEndian), context.position);
180 | break;
181 | }
182 | context.position += context.itemSize;
183 | }
184 | }
185 | else {
186 | for (let j = 0; j < size; j++) {
187 | numpy.Array._encodeDimension(context, data[j], dimension + 1);
188 | }
189 | }
190 | }
191 | };
192 |
193 | numpy.Reader = class {
194 |
195 | constructor(buffer) {
196 | this._buffer = buffer;
197 | this._position = 0;
198 | }
199 |
200 | get position() {
201 | return this._position;
202 | }
203 |
204 | get size() {
205 | return this._buffer.length;
206 | }
207 |
208 | byte() {
209 | return this._buffer[this._position++];
210 | }
211 |
212 | bytes(size) {
213 | const value = this._buffer.slice(this._position, this._position + size);
214 | this._position += size;
215 | return value;
216 | }
217 |
218 | uint16() {
219 | return this.byte() | (this.byte() << 8);
220 | }
221 |
222 | string() {
223 | const size = this.uint16();
224 | let value = '';
225 | for (let i = 0; i < size; i++) {
226 | value += String.fromCharCode(this.byte());
227 | }
228 | return value;
229 | }
230 | };
231 |
232 | numpy.Writer = class {
233 |
234 | constructor() {
235 | this._length = 0;
236 | this._head = null;
237 | this._tail = null;
238 | }
239 |
240 | byte(value) {
241 | this.bytes([ value ]);
242 | }
243 |
244 | uint16(value) {
245 | this.bytes([ value & 0xff, (value >> 8) & 0xff ]);
246 | }
247 |
248 | bytes(values) {
249 | const array = new Uint8Array(values.length);
250 | for (let i = 0; i < values.length; i++) {
251 | array[i] = values[i];
252 | }
253 | this._write(array);
254 | }
255 |
256 | string(value) {
257 | this.uint16(value.length);
258 | const array = new Uint8Array(value.length);
259 | for (let i = 0; i < value.length; i++) {
260 | array[i] = value.charCodeAt(i);
261 | }
262 | this._write(array);
263 | }
264 |
265 | _write(array) {
266 | const node = { buffer: array, next: null };
267 | if (this._tail) {
268 | this._tail.next = node;
269 | }
270 | else {
271 | this._head = node;
272 | }
273 | this._tail = node;
274 | this._length += node.buffer.length;
275 | }
276 |
277 | toBuffer() {
278 | const array = new Uint8Array(this._length);
279 | let position = 0;
280 | let head = this._head;
281 | while (head != null) {
282 | array.set(head.buffer, position);
283 | position += head.buffer.length;
284 | head = head.next;
285 | }
286 | return array;
287 | }
288 | };
289 |
290 | numpy.Error = class extends Error {
291 |
292 | constructor(message) {
293 | super(message);
294 | this.name = 'NumPy Error';
295 | }
296 | };
297 |
298 | if (typeof module !== 'undefined' && typeof module.exports === 'object') {
299 | module.exports.Array = numpy.Array;
300 | }
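
For reference, a short Python sketch of the .npy v1.0 layout this class parses and emits, using NumPy itself as the reference writer (assumes the `numpy` package is installed):

```python
import io
import numpy as np

buf = io.BytesIO()
np.save(buf, np.arange(6, dtype='<f4').reshape(2, 3))
data = buf.getvalue()

assert data[0:6] == b'\x93NUMPY'                   # magic signature
assert data[6] == 1 and data[7] == 0               # format version 1.0
header_len = int.from_bytes(data[8:10], 'little')  # uint16 header length
header = data[10:10 + header_len].decode('latin1')
print(header)  # e.g. {'descr': '<f4', 'fortran_order': False, 'shape': (2, 3), }
assert (10 + header_len) % 16 == 0                 # header padded for alignment
```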
--------------------------------------------------------------------------------
/tools/onnx-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 |
4 | import onnx
5 |
6 | import json
7 | import io
8 | import os
9 | import re
10 | import sys
11 |
12 | from onnx import defs
13 | from onnx.defs import OpSchema
14 | from onnx.backend.test.case import collect_snippets
15 |
16 | snippets = collect_snippets()
17 |
18 | categories = {
19 | 'Constant': 'Constant',
20 |
21 | 'Conv': 'Layer',
22 | 'ConvTranspose': 'Layer',
23 | 'FC': 'Layer',
24 | 'RNN': 'Layer',
25 | 'LSTM': 'Layer',
26 | 'GRU': 'Layer',
27 | 'Gemm': 'Layer',
28 |
29 | 'Dropout': 'Dropout',
30 |
31 | 'Elu': 'Activation',
32 | 'HardSigmoid': 'Activation',
33 | 'LeakyRelu': 'Activation',
34 | 'PRelu': 'Activation',
35 | 'ThresholdedRelu': 'Activation',
36 | 'Relu': 'Activation',
37 | 'Selu': 'Activation',
38 | 'Sigmoid': 'Activation',
39 | 'Tanh': 'Activation',
40 | 'LogSoftmax': 'Activation',
41 | 'Softmax': 'Activation',
42 | 'Softplus': 'Activation',
43 | 'Softsign': 'Activation',
44 |
45 | 'BatchNormalization': 'Normalization',
46 | 'InstanceNormalization': 'Normalization',
47 | 'LpNormalization': 'Normalization',
48 | 'LRN': 'Normalization',
49 |
50 | 'Flatten': 'Shape',
51 | 'Reshape': 'Shape',
52 | 'Tile': 'Shape',
53 |
54 | 'Xor': 'Logic',
55 | 'Not': 'Logic',
56 | 'Or': 'Logic',
57 | 'Less': 'Logic',
58 | 'And': 'Logic',
59 | 'Greater': 'Logic',
60 | 'Equal': 'Logic',
61 |
62 | 'AveragePool': 'Pool',
63 | 'GlobalAveragePool': 'Pool',
64 | 'GlobalLpPool': 'Pool',
65 | 'GlobalMaxPool': 'Pool',
66 | 'LpPool': 'Pool',
67 | 'MaxPool': 'Pool',
68 | 'MaxRoiPool': 'Pool',
69 |
70 | 'Concat': 'Tensor',
71 | 'Slice': 'Tensor',
72 | 'Split': 'Tensor',
73 | 'Pad': 'Tensor',
74 |
75 | 'ImageScaler': 'Data',
76 | 'Crop': 'Data',
77 | 'Upsample': 'Data',
78 |
79 | 'Transpose': 'Transform',
80 | 'Gather': 'Transform',
81 | 'Unsqueeze': 'Transform',
82 | 'Squeeze': 'Transform',
83 | }
84 |
85 | attribute_type_table = {
86 | 'undefined': None,
87 | 'float': 'float32', 'int': 'int64', 'string': 'string', 'tensor': 'tensor', 'graph': 'graph',
88 | 'floats': 'float32[]', 'ints': 'int64[]', 'strings': 'string[]', 'tensors': 'tensor[]', 'graphs': 'graph[]',
89 | }
90 |
91 | def generate_json_attr_type(type):
92 | assert isinstance(type, OpSchema.AttrType)
93 | s = str(type)
94 | s = s[s.rfind('.')+1:].lower()
95 | if s in attribute_type_table:
96 | return attribute_type_table[s]
97 | return None
98 |
99 | def generate_json_attr_default_value(attr_value):
100 | if not str(attr_value):
101 | return None
102 | if attr_value.HasField('i'):
103 | return attr_value.i
104 | if attr_value.HasField('s'):
105 | return attr_value.s.decode('utf8')
106 | if attr_value.HasField('f'):
107 | return attr_value.f
108 | return None
109 |
110 | def generate_json_support_level_name(support_level):
111 | assert isinstance(support_level, OpSchema.SupportType)
112 | s = str(support_level)
113 | return s[s.rfind('.')+1:].lower()
114 |
115 | def generate_json_types(types):
116 | r = []
117 | for type in types:
118 | r.append(type)
119 | r = sorted(r)
120 | return r
121 |
122 | def format_range(value):
123 | if value == 2147483647:
124 | return '∞'
125 | return str(value)
126 |
127 | def format_description(description):
128 | def replace_line(match):
129 | link = match.group(1)
130 | url = match.group(2)
131 | if not url.startswith("http://") and not url.startswith("https://"):
132 | url = "https://github.com/onnx/onnx/blob/master/docs/" + url
133 |         return "[" + link + "](" + url + ")"
134 | description = re.sub("\\[(.+)\\]\\(([^ ]+?)( \"(.+)\")?\\)", replace_line, description)
135 | return description
136 |
137 | def generate_json(schemas, json_file):
138 | json_root = []
139 | for schema in schemas:
140 | json_schema = {}
141 | if schema.domain:
142 | json_schema['domain'] = schema.domain
143 | else:
144 | json_schema['domain'] = 'ai.onnx'
145 | json_schema['since_version'] = schema.since_version
146 | json_schema['support_level'] = generate_json_support_level_name(schema.support_level)
147 | if schema.doc:
148 | json_schema['description'] = format_description(schema.doc.lstrip())
149 | if schema.inputs:
150 | json_schema['inputs'] = []
151 | for input in schema.inputs:
152 | json_input = {}
153 | json_input['name'] = input.name
154 | json_input['description'] = format_description(input.description)
155 | json_input['type'] = input.typeStr
156 | if input.option == OpSchema.FormalParameterOption.Optional:
157 | json_input['option'] = 'optional'
158 | elif input.option == OpSchema.FormalParameterOption.Variadic:
159 | json_input['option'] = 'variadic'
160 | json_schema['inputs'].append(json_input)
161 | json_schema['min_input'] = schema.min_input
162 | json_schema['max_input'] = schema.max_input
163 | if schema.outputs:
164 | json_schema['outputs'] = []
165 | for output in schema.outputs:
166 | json_output = {}
167 | json_output['name'] = output.name
168 | json_output['description'] = format_description(output.description)
169 | json_output['type'] = output.typeStr
170 | if output.option == OpSchema.FormalParameterOption.Optional:
171 | json_output['option'] = 'optional'
172 | elif output.option == OpSchema.FormalParameterOption.Variadic:
173 | json_output['option'] = 'variadic'
174 | json_schema['outputs'].append(json_output)
175 | json_schema['min_output'] = schema.min_output
176 | json_schema['max_output'] = schema.max_output
177 | if schema.min_input != schema.max_input:
178 |             json_schema['inputs_range'] = format_range(schema.min_input) + ' - ' + format_range(schema.max_input)
179 |         if schema.min_output != schema.max_output:
180 |             json_schema['outputs_range'] = format_range(schema.min_output) + ' - ' + format_range(schema.max_output)
181 | if schema.attributes:
182 | json_schema['attributes'] = []
183 | for _, attribute in sorted(schema.attributes.items()):
184 | json_attribute = {}
185 | json_attribute['name'] = attribute.name
186 | json_attribute['description'] = format_description(attribute.description)
187 | attribute_type = generate_json_attr_type(attribute.type)
188 | if attribute_type:
189 | json_attribute['type'] = attribute_type
190 | elif 'type' in json_attribute:
191 | del json_attribute['type']
192 | json_attribute['required'] = attribute.required
193 | default_value = generate_json_attr_default_value(attribute.default_value)
194 | if default_value:
195 | json_attribute['default'] = default_value
196 | json_schema['attributes'].append(json_attribute)
197 | if schema.type_constraints:
198 | json_schema['type_constraints'] = []
199 | for type_constraint in schema.type_constraints:
200 | json_schema['type_constraints'].append({
201 | 'description': type_constraint.description,
202 | 'type_param_str': type_constraint.type_param_str,
203 | 'allowed_type_strs': type_constraint.allowed_type_strs
204 | })
205 | if schema.name in snippets:
206 | json_schema['examples'] = []
207 | for summary, code in sorted(snippets[schema.name]):
208 | json_schema['examples'].append({
209 | 'summary': summary,
210 | 'code': code
211 | })
212 | if schema.name in categories:
213 | json_schema['category'] = categories[schema.name]
214 | json_root.append({
215 | 'name': schema.name,
216 | 'schema': json_schema
217 | })
218 | with io.open(json_file, 'w', newline='') as fout:
219 | json_root = json.dumps(json_root, sort_keys=True, indent=2)
220 | for line in json_root.splitlines():
221 | line = line.rstrip()
222 | if sys.version_info[0] < 3:
223 | line = unicode(line)
224 | fout.write(line)
225 | fout.write('\n')
226 |
227 | def metadata():
228 | schemas = defs.get_all_schemas_with_history()
229 | schemas = sorted(schemas, key=lambda schema: schema.name)
230 | json_file = os.path.join(os.path.dirname(__file__), '../src/onnx-metadata.json')
231 | generate_json(schemas, json_file)
232 |
233 | def convert():
234 | def pip_import(package):
235 | import importlib
236 | try:
237 | importlib.import_module(package)
238 |         except ImportError:
239 | import subprocess
240 | subprocess.call([ 'pip', 'install', '--quiet', package ])
241 | finally:
242 | globals()[package] = importlib.import_module(package)
243 | file = sys.argv[2]
244 | base, extension = os.path.splitext(file)
245 | if extension == '.mlmodel':
246 | pip_import('coremltools')
247 | import onnxmltools
248 | coreml_model = coremltools.utils.load_spec(file)
249 | onnx_model = onnxmltools.convert.convert_coreml(coreml_model)
250 | onnxmltools.utils.save_model(onnx_model, base + '.onnx')
251 | elif extension == '.h5':
252 | pip_import('tensorflow')
253 | pip_import('keras')
254 | import onnxmltools
255 | keras_model = keras.models.load_model(file)
256 | onnx_model = onnxmltools.convert.convert_keras(keras_model)
257 | onnxmltools.utils.save_model(onnx_model, base + '.onnx')
258 | elif extension == '.pkl':
259 | pip_import('sklearn')
260 | import onnxmltools
261 | sklearn_model = sklearn.externals.joblib.load(file)
262 | onnx_model = onnxmltools.convert.convert_sklearn(sklearn_model)
263 | onnxmltools.utils.save_model(onnx_model, base + '.onnx')
264 | base, extension = os.path.splitext(file)
265 | if extension == '.onnx':
266 | import onnx
267 | from google.protobuf import text_format
268 | onnx_model = onnx.load(file)
269 | text = text_format.MessageToString(onnx_model)
270 | with open(base + '.pbtxt', 'w') as text_file:
271 | text_file.write(text)
272 |
273 | def optimize():
274 | import onnx
275 | from onnx import optimizer
276 | file = sys.argv[2]
277 |     base = os.path.splitext(file)[0]
278 | onnx_model = onnx.load(file)
279 | passes = optimizer.get_available_passes()
280 | optimized_model = optimizer.optimize(onnx_model, passes)
281 | onnx.save(optimized_model, base + '.optimized.onnx')
282 |
283 | def infer():
284 | import onnx
285 | import onnx.shape_inference
286 | from onnx import shape_inference
287 | file = sys.argv[2]
288 | base = os.path.splitext(file)[0]
289 | onnx_model = onnx.load(base + '.onnx')
290 | onnx_model = onnx.shape_inference.infer_shapes(onnx_model)
291 | onnx.save(onnx_model, base + '.shape.onnx')
292 |
293 | if __name__ == '__main__':
294 | command_table = { 'metadata': metadata, 'convert': convert, 'optimize': optimize, 'infer': infer }
295 | command = sys.argv[1]
296 | command_table[command]()
297 |
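
The script is dispatched through the command table above, e.g. `python tools/onnx-script.py infer model.onnx`. As a standalone sketch of what the `infer` command does, using the same public onnx API (the file name is illustrative):

```python
import onnx
import onnx.shape_inference

# Load a model, annotate it with inferred tensor shapes, and save the
# result alongside the original, mirroring infer() above.
model = onnx.load('model.onnx')
model = onnx.shape_inference.infer_shapes(model)
onnx.save(model, 'model.shape.onnx')
```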
--------------------------------------------------------------------------------
/tools/sklearn-script.py:
--------------------------------------------------------------------------------
1 |
2 | from __future__ import unicode_literals
3 | from __future__ import print_function
4 |
5 | import io
6 | import json
7 | import os
8 | import pydoc
9 | import re
10 | import sys
11 |
12 | json_file = os.path.join(os.path.dirname(__file__), '../src/sklearn-metadata.json')
13 | json_data = open(json_file).read()
14 | json_root = json.loads(json_data)
15 |
16 | def split_docstring(docstring):
17 | headers = {}
18 | current_header = ''
19 | current_lines = []
20 | lines = docstring.split('\n')
21 | index = 0
22 | while index < len(lines):
23 | if index + 1 < len(lines) and len(lines[index + 1].strip(' ')) > 0 and len(lines[index + 1].strip(' ').strip('-')) == 0:
24 | headers[current_header] = current_lines
25 | current_header = lines[index].strip(' ')
26 | current_lines = []
27 | index = index + 1
28 | else:
29 | current_lines.append(lines[index])
30 | index = index + 1
31 | headers[current_header] = current_lines
32 | return headers
33 |
34 | def update_description(schema, lines):
35 | if len(''.join(lines).strip(' ')) > 0:
36 | for i in range(0, len(lines)):
37 | lines[i] = lines[i].lstrip(' ')
38 | schema['description'] = '\n'.join(lines)
39 |
40 | def update_attribute(schema, name, description, attribute_type, option, default_value):
41 | attribute = None
42 | if not 'attributes' in schema:
43 | schema['attributes'] = []
44 | for current_attribute in schema['attributes']:
45 | if 'name' in current_attribute and current_attribute['name'] == name:
46 | attribute = current_attribute
47 | break
48 | if not attribute:
49 | attribute = {}
50 | attribute['name'] = name
51 | schema['attributes'].append(attribute)
52 | attribute['description'] = description
53 | if attribute_type:
54 | attribute['type'] = attribute_type
55 | if option:
56 | attribute['option'] = option
57 | if default_value:
58 | if attribute_type == 'float32':
59 | if default_value == 'None':
60 | attribute['default'] = None
61 | elif default_value != "'auto'":
62 | attribute['default'] = float(default_value)
63 | else:
64 | attribute['default'] = default_value.strip("'").strip('"')
65 | elif attribute_type == 'int32':
66 | if default_value == 'None':
67 | attribute['default'] = None
68 | elif default_value == "'auto'" or default_value == '"auto"':
69 | attribute['default'] = default_value.strip("'").strip('"')
70 | else:
71 | attribute['default'] = int(default_value)
72 | elif attribute_type == 'string':
73 | attribute['default'] = default_value.strip("'").strip('"')
74 | elif attribute_type == 'boolean':
75 | if default_value == 'True':
76 | attribute['default'] = True
77 | elif default_value == 'False':
78 | attribute['default'] = False
79 | elif default_value == "'auto'":
80 | attribute['default'] = default_value.strip("'").strip('"')
81 | else:
82 | raise Exception("Unknown boolean default value '" + str(default_value) + "'.")
83 | else:
84 | if attribute_type:
85 | raise Exception("Unknown default type '" + attribute_type + "'.")
86 | else:
87 | if default_value == 'None':
88 | attribute['default'] = None
89 | else:
90 | attribute['default'] = default_value.strip("'")
91 |
92 | def update_attributes(schema, lines):
93 | index = 0
94 | while index < len(lines):
95 | line = lines[index]
96 | if line.endswith('.'):
97 | line = line[0:-1]
98 | colon = line.find(':')
99 | if colon == -1:
100 | raise Exception("Expected ':' in parameter.")
101 | name = line[0:colon].strip(' ')
102 | line = line[colon + 1:].strip(' ')
103 | attribute_type = None
104 | type_map = { 'float': 'float32', 'boolean': 'boolean', 'bool': 'boolean', 'string': 'string', 'int': 'int32', 'integer': 'int32' }
105 | skip_map = {
106 | "'sigmoid' or 'isotonic'",
107 | 'instance BaseEstimator',
108 | 'callable or None (default)',
109 | 'str or callable',
110 | "string {'english'}, list, or None (default)",
111 | 'tuple (min_n, max_n)',
112 | "string, {'word', 'char', 'char_wb'} or callable",
113 | "{'word', 'char'} or callable",
114 | "string, {'word', 'char'} or callable",
115 | 'int, float, None or string',
116 | "int, float, None or str",
117 | "int or None, optional (default=None)",
118 | "'l1', 'l2' or None, optional",
119 | "{'strict', 'ignore', 'replace'} (default='strict')",
120 | "{'ascii', 'unicode', None} (default=None)",
121 | "string {'english'}, list, or None (default=None)",
122 | "tuple (min_n, max_n) (default=(1, 1))",
123 | "float in range [0.0, 1.0] or int (default=1.0)",
124 | "float in range [0.0, 1.0] or int (default=1)",
125 | "'l1', 'l2' or None, optional (default='l2')",
126 | "{'scale', 'auto'} or float, optional (default='scale')",
127 | "str {'auto', 'full', 'arpack', 'randomized'}",
128 | "str {'filename', 'file', 'content'}",
129 | "str, {'word', 'char', 'char_wb'} or callable",
130 | "str {'english'}, list, or None (default=None)",
131 | "{'scale', 'auto'} or float, optional (default='scale')",
132 | "{'word', 'char', 'char_wb'} or callable, default='word'",
133 | "{'scale', 'auto'} or float, default='scale'",
134 | "{'uniform', 'distance'} or callable, default='uniform'",
135 | "int, RandomState instance or None (default)",
136 | "list of (string, transformer) tuples",
137 | "list of tuples",
138 | "{'drop', 'passthrough'} or estimator, default='drop'",
139 | "'auto' or a list of array-like, default='auto'",
140 | "{'first', 'if_binary'} or a array-like of shape (n_features,), default=None",
141 | "callable",
142 | "int or \"all\", optional, default=10",
143 | "number, string, np.nan (default) or None",
144 | "estimator object",
145 | "dict or list of dictionaries",
146 | "int, or str, default=n_jobs",
147 | "'raise' or numeric, default=np.nan",
148 | "'auto' or float, default=None"
149 | }
150 | if line == 'str':
151 | line = 'string'
152 | if line in skip_map:
153 | line = ''
154 | elif line.startswith('{'):
155 | if line.endswith('}'):
156 | line = ''
157 | else:
158 | end = line.find('},')
159 | if end == -1:
160 | raise Exception("Expected '}' in parameter.")
161 | # attribute_type = line[0:end + 1]
162 | line = line[end + 2:].strip(' ')
163 | elif line.startswith("'"):
164 | while line.startswith("'"):
165 | end = line.find("',")
166 | if end == -1:
167 | raise Exception("Expected \' in parameter.")
168 | line = line[end + 2:].strip(' ')
169 | elif line in type_map:
170 | attribute_type = line
171 | line = ''
172 | elif line.startswith('int, RandomState instance or None,'):
173 | line = line[len('int, RandomState instance or None,'):]
174 | elif line.find('|') != -1:
175 | line = ''
176 | else:
177 | space = line.find(' {')
178 | if space != -1 and line[0:space] in type_map and line[space:].find('}') != -1:
179 | attribute_type = line[0:space]
180 | end = line[space:].find('}')
181 | line = line[space+end+1:]
182 | else:
183 | comma = line.find(',')
184 | if comma == -1:
185 | comma = line.find(' (')
186 | if comma == -1:
187 | raise Exception("Expected ',' in parameter.")
188 | attribute_type = line[0:comma]
189 | line = line[comma + 1:].strip(' ')
190 | if attribute_type in type_map:
191 | attribute_type = type_map[attribute_type]
192 | else:
193 | attribute_type = None
194 | # elif type == "{dict, 'balanced'}":
195 | # v = 'map'
196 | # else:
197 | # raise Exception("Unknown attribute type '" + attribute_type + "'.")
198 | option = None
199 | default = None
200 | while len(line.strip(' ')) > 0:
201 | line = line.strip(' ')
202 | if line.startswith('optional ') or line.startswith('optional,'):
203 | option = 'optional'
204 | line = line[9:]
205 | elif line.startswith('optional'):
206 | option = 'optional'
207 | line = ''
208 | elif line.startswith('('):
209 |                 close = line.find(')')
210 |                 if close == -1:
211 |                     raise Exception("Expected ')' in parameter.")
212 | line = line[1:close]
213 | elif line.endswith(' by default'):
214 | default = line[0:-11]
215 | line = ''
216 | elif line.startswith('default =') or line.startswith('default :'):
217 | default = line[9:].strip(' ')
218 | line = ''
219 | elif line.startswith('default ') or line.startswith('default=') or line.startswith('default:'):
220 | default = line[8:].strip(' ')
221 | line = ''
222 | else:
223 |                 comma = line.find(',')
224 | if comma == -1:
225 | raise Exception("Expected ',' in parameter.")
226 | line = line[comma+1:]
227 | index = index + 1
228 | attribute_lines = []
229 | while index < len(lines) and (len(lines[index].strip(' ')) == 0 or lines[index].startswith(' ')):
230 | attribute_lines.append(lines[index].lstrip(' '))
231 | index = index + 1
232 | description = '\n'.join(attribute_lines)
233 | update_attribute(schema, name, description, attribute_type, option, default)
234 |
235 | for entry in json_root:
236 | name = entry['name']
237 |     entry['schema'] = entry['schema'] if 'schema' in entry else {}
238 | schema = entry['schema']
239 | skip_modules = [
240 | 'lightgbm.',
241 | 'sklearn.svm.classes',
242 | 'sklearn.ensemble.forest.',
243 | 'sklearn.ensemble.weight_boosting.',
244 | 'sklearn.neural_network.multilayer_perceptron.',
245 | 'sklearn.tree.tree.'
246 | ]
247 | if not any(name.startswith(module) for module in skip_modules):
248 | class_definition = pydoc.locate(name)
249 | if not class_definition:
250 | raise Exception('\'' + name + '\' not found.')
251 | docstring = class_definition.__doc__
252 | if not docstring:
253 | raise Exception('\'' + name + '\' missing __doc__.')
254 | headers = split_docstring(docstring)
255 | if '' in headers:
256 | update_description(schema, headers[''])
257 | if 'Parameters' in headers:
258 | update_attributes(schema, headers['Parameters'])
259 |
260 | with io.open(json_file, 'w', newline='') as fout:
261 | json_data = json.dumps(json_root, sort_keys=True, indent=2)
262 | for line in json_data.splitlines():
263 | fout.write(line.rstrip())
264 | fout.write('\n')
265 |
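
As a quick sanity check of the numpydoc convention this script relies on, a minimal sketch that feeds a made-up docstring through `split_docstring` from above (run in the same module):

```python
sample = """Short description of the estimator.

Parameters
----------
alpha : float, optional (default=1.0)
    Regularization strength.

Attributes
----------
coef_ : array of shape (n_features,)
"""

# Section headers are lines underlined with dashes on the following line;
# text before the first header is stored under the '' key.
headers = split_docstring(sample)
assert '' in headers
assert 'Parameters' in headers and 'Attributes' in headers
```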
--------------------------------------------------------------------------------