├── .gitignore
├── CONTRIBUTING.md
├── LICENSE.txt
├── Makefile
├── README.md
├── docs
│ ├── .nojekyll
│ ├── README.md
│ ├── _navbar.md
│ ├── _sidebar.md
│ ├── about.md
│ ├── assets
│ │ ├── arch-sm.jpg
│ │ ├── docsify.min.js
│ │ ├── ga.min.js
│ │ ├── graphpipe.css
│ │ ├── graphpipe.js
│ │ ├── logo.png
│ │ ├── prism-bash.min.js
│ │ ├── prism-c.min.js
│ │ ├── prism-cpp.min.js
│ │ ├── prism-go.min.js
│ │ ├── prism-java.min.js
│ │ ├── prism-latex.min.js
│ │ ├── prism-python.min.js
│ │ ├── search.min.js
│ │ ├── toaster-js.css
│ │ ├── toaster-js.js
│ │ ├── vue-material.min.css
│ │ ├── vue-material@beta
│ │ ├── vue.css
│ │ └── vue.min.js
│ ├── favicon.ico
│ ├── guide
│ │ ├── _sidebar.md
│ │ ├── clients
│ │ │ ├── _media
│ │ │ │ └── client_interaction.png
│ │ │ ├── installation.md
│ │ │ ├── overview.md
│ │ │ └── usage.md
│ │ ├── examples
│ │ │ └── overview.md
│ │ ├── servers
│ │ │ ├── _media
│ │ │ │ └── server_flow.png
│ │ │ ├── converting.md
│ │ │ ├── graphpipe-onnx.md
│ │ │ ├── graphpipe-tf.md
│ │ │ ├── installation.md
│ │ │ ├── overview.md
│ │ │ └── serving.md
│ │ └── user-guide
│ │ │ ├── _examples
│ │ │ │ ├── _metadata_1.py
│ │ │ │ └── _squeezenet_req.python
│ │ │ ├── _media
│ │ │ │ ├── gparch.001.jpg
│ │ │ │ ├── gparch.002.jpg
│ │ │ │ ├── logo.png
│ │ │ │ ├── mug.png
│ │ │ │ ├── mug227.png
│ │ │ │ ├── perf.001.jpg
│ │ │ │ └── perf.002.jpg
│ │ │ ├── get_involved.md
│ │ │ ├── overview.md
│ │ │ ├── performance.md
│ │ │ ├── quickstart.md
│ │ │ └── spec.md
│ ├── index.html
│ └── models
│ │ ├── squeezenet.h5
│ │ ├── squeezenet.init_net.pb
│ │ ├── squeezenet.onnx
│ │ ├── squeezenet.pb
│ │ ├── squeezenet.predict_net.pb
│ │ ├── squeezenet.value_inputs.json
│ │ └── squeezenet.value_inputs_caffe2.json
└── graphpipe.fbs
/.gitignore:
--------------------------------------------------------------------------------
1 | go
2 | python
3 | cpp
4 | *.sw*
5 | .idea
6 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Graphpipe #
2 |
3 | *Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.*
4 |
5 | Pull requests can be made under
6 | [The Oracle Contributor Agreement](https://www.oracle.com/technetwork/community/oca-486395.html) (OCA).
7 |
8 | For pull requests to be accepted, the bottom of
9 | your commit message must have the following line, using your name and
10 | e-mail address as they appear in the OCA Signatories list.
11 |
12 | ```
13 | Signed-off-by: Your Name <you@example.org>
14 | ```
15 |
16 | This can be automatically added to pull requests by committing with:
17 |
18 | ```
19 | git commit --signoff
20 | ```
21 |
22 | Only pull requests from committers that can be verified as having
23 | signed the OCA can be accepted.
24 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
2 |
3 | This software is licensed to you under the Universal Permissive License (UPL). See below for license terms.
4 | ____________________________
5 | The Universal Permissive License (UPL), Version 1.0
6 | Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
7 |
8 | Subject to the condition set forth below, permission is hereby granted to any person obtaining a copy of this software, associated documentation and/or data (collectively the "Software"), free of charge and under any and all copyright rights in the Software, and any and all patent rights owned or freely licensable by each licensor hereunder covering either (i) the unmodified Software as contributed to or provided by such licensor, or (ii) the Larger Works (as defined below), to deal in both
9 |
10 | (a) the Software, and
11 | (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is included with the Software (each a "Larger Work" to which the Software is contributed by such licensors),
12 |
13 | without restriction, including without limitation the rights to copy, create derivative works of, display, perform, and distribute the Software and make, use, sell, offer for sale, import, export, have made, and have sold the Software and the Larger Work(s), and to sublicense the foregoing rights on either these or other terms.
14 |
15 | This license is subject to the following condition:
16 |
17 | The above copyright notice and either this complete permission notice or at a minimum a reference to the UPL must be included in all copies or substantial portions of the Software.
18 |
19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
2 | #
3 | # Licensed under the Universal Permissive License v 1.0 as shown at
4 | # http://oss.oracle.com/licenses/upl.
5 |
6 | FLATC=flatc
7 | ROOT=.
8 |
9 | ifeq ('$(USE_DOCKER)', '1')
10 | ROOT=/src
11 | FLATC=docker run --rm -it \
12 | --user $$(id -u):$$(id -g) \
13 | -v $(PWD):/src neomantra/flatbuffers:v1.9.0 flatc
14 | endif
15 |
16 | all: cpp/graphpipe_generated.h go/graphpipe python/graphpipe
17 |
18 | .PHONY: go python cpp
19 |
20 | go: go/graphpipe
21 |
22 | cpp: cpp/graphpipe_generated.h
23 |
24 | python: python/graphpipe
25 |
26 | cpp/graphpipe_generated.h:
27 | $(FLATC) --cpp -o $(ROOT)/cpp $(ROOT)/graphpipe.fbs
28 |
29 | go/graphpipe:
30 | $(FLATC) --go -o $(ROOT)/go $(ROOT)/graphpipe.fbs
31 |
32 | python/graphpipe:
33 | $(FLATC) --python -o $(ROOT)/python $(ROOT)/graphpipe.fbs
34 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
6 |
7 | # GraphPipe
8 |
9 | > Machine Learning Model Deployment Made Simple
10 |
11 | # What is it?
12 |
13 | GraphPipe is a protocol and collection of software designed to simplify machine
14 | learning model deployment and decouple it from framework-specific model
15 | implementations.
16 |
17 | The existing solutions for model serving are inconsistent and/or inefficient.
18 | There is no consistent protocol for communicating with these model servers so
19 | it is often necessary to build custom clients for each workload. GraphPipe
20 | solves these problems by standardizing on an efficient communication protocol
21 | and providing simple model servers for the major ML frameworks.
22 |
23 | We hope that open sourcing GraphPipe makes the model serving landscape a
24 | friendlier place. See more about why we built it
25 | [here](https://oracle.github.io/graphpipe/#/guide/user-guide/overview).
26 |
27 | Or browse the rest of the [documentation](https://oracle.github.io/graphpipe).
28 |
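To make this concrete, here is a minimal sketch of what calling a GraphPipe model server looks like from Python, assuming the `remote.execute` helper from graphpipe-py and a server already listening on localhost port 9000 (the input shape below is only a placeholder; real shapes depend on the model being served):

```python
import numpy as np
from graphpipe import remote

# Placeholder input tensor standing in for a real batch of data.
batch = np.random.rand(1, 3, 227, 227).astype(np.float32)

# Send the tensor to a GraphPipe server and receive the model's output.
predictions = remote.execute("http://127.0.0.1:9000", batch)
print(predictions.shape)
```
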
29 | # Features
30 |
31 | * A minimalist machine learning transport specification based on [flatbuffers]
32 | * Simple, efficient reference model servers for [Tensorflow], [Caffe2], and [ONNX].
33 | * Efficient client implementations in Go, Python, and Java.
34 |
35 | [flatbuffers]: https://google.github.io/flatbuffers/
36 | [Tensorflow]: https://www.tensorflow.org
37 | [Caffe2]: https://caffe2.ai
38 | [ONNX]: https://onnx.ai
39 |
40 | # What is in this repo?
41 | This repo contains documentation as well as the flatbuffer definition files
42 | that are used by the other language-specific repos. If you are looking for
43 | GraphPipe clients, servers, and example code, check out our other GraphPipe
44 | repos:
45 |
46 | - https://github.com/oracle/graphpipe-go - the GraphPipe go client library
47 | - https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-tf -
48 | a Go implementation of a GraphPipe TensorFlow model server
49 | - https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-onnx -
50 | a Go implementation of a GraphPipe ONNX/Caffe2 model server
51 | - https://github.com/oracle/graphpipe-py - the GraphPipe client library for
52 | Python
53 | - https://github.com/oracle/graphpipe-tf-py - a Python implementation of
54 | a remote operation client for TensorFlow, as well as some example server
55 | implementations
56 |
57 | ## Building flatbuffer definitions
58 |
59 | If you have flatc installed, you can simply run `make all`. If you don't want
60 | to install it, you can `export USE_DOCKER=1` and then run `make all`. (Remember,
61 | make needs the variable exported, not just set on the command line where you run make.)
62 |
63 | This will produce the Go, C++, and Python libraries, which can then be copied
64 | into their corresponding projects: graphpipe-go, graphpipe-tf-py, and
65 | graphpipe-py, respectively.
66 |
67 | ## Contributing
68 |
69 | All of the GraphPipe projects are open source. To find out how to contribute,
70 | see [CONTRIBUTING.md](CONTRIBUTING.md).
71 |
72 | You can also chat us up on our [Slack Channel](https://join.slack.com/t/graphpipe/shared_invite/enQtNDE4MTUyODk2NzQzLTUwODlkZDRiYTI4NmE1OTA5NzRmNjk5MGZiY2M0ZDRiYzNiMTQ0ZmIxODYzZjY2NzRmNzM4NTI0OGVlZGYzZTA).
73 |
--------------------------------------------------------------------------------
/docs/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/.nojekyll
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 |
6 |
7 | __Machine Learning Model Deployment Made Simple__
8 |
9 |
14 |
15 | # What is it?
16 |
17 | GraphPipe is a protocol and collection of software designed to simplify machine
18 | learning model deployment and decouple it from framework-specific model
19 | implementations.
20 |
21 | # Why did we make it?
22 |
23 | We found existing solutions for model serving to be inconsistent and/or inefficient.
24 | Without a consistent protocol for communicating with different model servers,
25 | it is often necessary to build custom clients for each workload. GraphPipe
26 | solves these problems by standardizing on an efficient communication protocol
27 | and providing simple model servers for the major ML frameworks.
28 |
29 | We hope that open sourcing GraphPipe makes the model serving landscape a
30 | friendlier place. See more about why we built it
31 | [here](guide/user-guide/overview).
32 |
33 | # Features
34 |
35 | * A minimalist machine learning transport specification based on [flatbuffers]
36 | * Simple, efficient reference model servers for [Tensorflow], [Caffe2], and [ONNX].
37 | * Efficient client implementations in Go, Python, and Java.
38 |
39 | [flatbuffers]: https://google.github.io/flatbuffers/
40 | [Tensorflow]: https://www.tensorflow.org
41 | [Caffe2]: https://caffe2.ai
42 | [ONNX]: https://onnx.ai
43 |
44 |
45 | ### Quick Navigation
46 |
47 | - [Project Overview](guide/user-guide/overview.md)
48 | - [Getting Started](guide/user-guide/quickstart.md)
49 | - [GraphPipe Specification](guide/user-guide/spec.md)
50 | - [Performance Measurements](guide/user-guide/performance.md)
51 | - [Running a GraphPipe Server](guide/servers/overview.md)
52 | - [Using GraphPipe Clients](guide/clients/overview.md)
53 |
--------------------------------------------------------------------------------
/docs/_navbar.md:
--------------------------------------------------------------------------------
1 | - [Home](/)
2 |
3 | - Documentation ▾
4 | - [Overview](/guide/user-guide/overview)
5 | - [Quick Start](/guide/user-guide/quickstart)
6 | - [Spec](/guide/user-guide/spec)
7 | - [Performance](/guide/user-guide/performance)
8 | - [Serving Models](/guide/servers/overview)
9 | - [Using GraphPipe Clients](/guide/clients/overview)
10 |
--------------------------------------------------------------------------------
/docs/_sidebar.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/_sidebar.md
--------------------------------------------------------------------------------
/docs/about.md:
--------------------------------------------------------------------------------
1 | about us!
2 |
--------------------------------------------------------------------------------
/docs/assets/arch-sm.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/assets/arch-sm.jpg
--------------------------------------------------------------------------------
/docs/assets/docsify.min.js:
--------------------------------------------------------------------------------
1 | !function(){function e(e){var t=Object.create(null);return function(n){var r=i(n)?n:JSON.stringify(n);return t[r]||(t[r]=e(n))}}var t=e(function(e){return e.replace(/([A-Z])/g,function(e){return"-"+e.toLowerCase()})}),n=Object.prototype.hasOwnProperty,r=Object.assign||function(e){for(var t=arguments,r=1;r=i.length)r(n);else if("function"==typeof t)if(2===t.length)t(n,function(t){n=t,o(e+1)});else{var a=t(n);n=void 0===a?n:a,o(e+1)}else o(e+1)};o(0)}var l=!0,c=l&&document.body.clientWidth<=600,u=l&&window.history&&window.history.pushState&&window.history.replaceState&&!navigator.userAgent.match(/((iPod|iPhone|iPad).+\bOS\s+[1-4]\D|WebApps\/.+CFNetwork)/),h={};function p(e,t){if(void 0===t&&(t=!1),"string"==typeof e){if(void 0!==window.Vue)return m(e);e=t?m(e):h[e]||(h[e]=m(e))}return e}var d=l&&document,g=l&&d.body,f=l&&d.head;function m(e,t){return t?e.querySelector(t):d.querySelector(e)}function v(e,t){return[].slice.call(t?e.querySelectorAll(t):d.querySelectorAll(e))}function b(e,t){return e=d.createElement(e),t&&(e.innerHTML=t),e}function y(e,t){return e.appendChild(t)}function k(e,t){return e.insertBefore(t,e.children[0])}function w(e,t,n){o(t)?window.addEventListener(e,t):e.addEventListener(t,n)}function x(e,t,n){o(t)?window.removeEventListener(e,t):e.removeEventListener(t,n)}function _(e,t,n){e&&e.classList[n?t:"toggle"](n||t)}var S=Object.freeze({getNode:p,$:d,body:g,head:f,find:m,findAll:v,create:b,appendTo:y,before:k,on:w,off:x,toggleClass:_,style:function(e){y(f,b("style",e))}});function C(e,t){return void 0===t&&(t=""),e&&e.length?(e.forEach(function(e){t+=''+e.title+" ",e.children&&(t+=' ")}),t):""}function L(e,t){return''+t.slice(5).trim()+"
"}var E,A;function $(e){var t,n=e.loaded,r=e.total,i=e.step;!E&&function(){var e=b("div");e.classList.add("progress"),y(g,e),E=e}(),t=i?(t=parseInt(E.style.width||0,10)+i)>80?80:t:Math.floor(n/r*100),E.style.opacity=1,E.style.width=t>=95?"100%":t+"%",t>=95&&(clearTimeout(A),A=setTimeout(function(e){E.style.opacity=0,E.style.width="0%"},200))}var T={};function P(e,t,r){void 0===t&&(t=!1),void 0===r&&(r={});var i=new XMLHttpRequest,o=function(){i.addEventListener.apply(i,arguments)},s=T[e];if(s)return{then:function(e){return e(s.content,s.opt)},abort:a};i.open("GET",e);for(var l in r)n.call(r,l)&&i.setRequestHeader(l,r[l]);return i.send(),{then:function(n,r){if(void 0===r&&(r=a),t){var s=setInterval(function(e){return $({step:Math.floor(5*Math.random()+1)})},500);o("progress",$),o("loadend",function(e){$(e),clearInterval(s)})}o("error",r),o("load",function(t){var a=t.target;if(a.status>=400)r(a);else{var o=T[e]={content:a.response,opt:{updatedAt:i.getResponseHeader("last-modified")}};n(o.content,o.opt)}})},abort:function(e){return 4!==i.readyState&&i.abort()}}}function F(e,t){e.innerHTML=e.innerHTML.replace(/var\(\s*--theme-color.*?\)/g,t)}var O=/([^{]*?)\w(?=\})/g,M={YYYY:"getFullYear",YY:"getYear",MM:function(e){return e.getMonth()+1},DD:"getDate",HH:"getHours",mm:"getMinutes",ss:"getSeconds"};var N="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{};function j(e,t){return e(t={exports:{}},t.exports),t.exports}var R=j(function(e,t){(function(){var t={newline:/^\n+/,code:/^( {4}[^\n]+\n*)+/,fences:p,hr:/^( *[-*_]){3,} *(?:\n+|$)/,heading:/^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/,nptable:p,lheading:/^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/,blockquote:/^( *>[^\n]+(\n(?!def)[^\n]+)*\n*)+/,list:/^( *)(bull) [\s\S]+?(?:hr|def|\n{2,}(?! )(?!\1bull )\n*|\s*$)/,html:/^ *(?:comment *(?:\n|\s*$)|closed *(?:\n{2,}|\s*$)|closing *(?:\n{2,}|\s*$))/,def:/^ *\[([^\]]+)\]: *([^\s>]+)>?(?: +["(]([^\n]+)[")])? *(?:\n+|$)/,table:p,paragraph:/^((?:[^\n]+\n?(?!hr|heading|lheading|blockquote|tag|def))+)\n*/,text:/^[^\n]+/};t.bullet=/(?:[*+-]|\d+\.)/,t.item=/^( *)(bull) [^\n]*(?:\n(?!\1bull )[^\n]*)*/,t.item=l(t.item,"gm")(/bull/g,t.bullet)(),t.list=l(t.list)(/bull/g,t.bullet)("hr","\\n+(?=\\1?(?:[-*_] *){3,}(?:\\n+|$))")("def","\\n+(?="+t.def.source+")")(),t.blockquote=l(t.blockquote)("def",t.def)(),t._tag="(?!(?:a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img)\\b)\\w+(?!:/|[^\\w\\s@]*@)\\b",t.html=l(t.html)("comment",//)("closed",/<(tag)[\s\S]+?<\/\1>/)("closing",/])*?>/)(/tag/g,t._tag)(),t.paragraph=l(t.paragraph)("hr",t.hr)("heading",t.heading)("lheading",t.lheading)("blockquote",t.blockquote)("tag","<"+t._tag)("def",t.def)(),t.normal=d({},t),t.gfm=d({},t.normal,{fences:/^ *(`{3,}|~{3,})[ \.]*(\S+)? *\n([\s\S]*?)\s*\1 *(?:\n+|$)/,paragraph:/^/,heading:/^ *(#{1,6}) +([^\n]+?) 
*#* *(?:\n+|$)/}),t.gfm.paragraph=l(t.paragraph)("(?!","(?!"+t.gfm.fences.source.replace("\\1","\\2")+"|"+t.list.source.replace("\\1","\\3")+"|")(),t.tables=d({},t.gfm,{nptable:/^ *(\S.*\|.*)\n *([-:]+ *\|[-| :]*)\n((?:.*\|.*(?:\n|$))*)\n*/,table:/^ *\|(.+)\n *\|( *[-:]+[-| :]*)\n((?: *\|.*(?:\n|$))*)\n*/});function n(e){this.tokens=[],this.tokens.links={},this.options=e||g.defaults,this.rules=t.normal,this.options.gfm&&(this.options.tables?this.rules=t.tables:this.rules=t.gfm)}n.rules=t,n.lex=function(e,t){return new n(t).lex(e)},n.prototype.lex=function(e){return e=e.replace(/\r\n|\r/g,"\n").replace(/\t/g," ").replace(/\u00a0/g," ").replace(/\u2424/g,"\n"),this.token(e,!0)},n.prototype.token=function(e,n,r){var i,a,o,s,l,c,u,h,p;for(e=e.replace(/^ +$/gm,"");e;)if((o=this.rules.newline.exec(e))&&(e=e.substring(o[0].length),o[0].length>1&&this.tokens.push({type:"space"})),o=this.rules.code.exec(e))e=e.substring(o[0].length),o=o[0].replace(/^ {4}/gm,""),this.tokens.push({type:"code",text:this.options.pedantic?o:o.replace(/\n+$/,"")});else if(o=this.rules.fences.exec(e))e=e.substring(o[0].length),this.tokens.push({type:"code",lang:o[2],text:o[3]||""});else if(o=this.rules.heading.exec(e))e=e.substring(o[0].length),this.tokens.push({type:"heading",depth:o[1].length,text:o[2]});else if(n&&(o=this.rules.nptable.exec(e))){for(e=e.substring(o[0].length),c={type:"table",header:o[1].replace(/^ *| *\| *$/g,"").split(/ *\| */),align:o[2].replace(/^ *|\| *$/g,"").split(/ *\| */),cells:o[3].replace(/\n$/,"").split("\n")},h=0;h ?/gm,""),this.token(o,n,!0),this.tokens.push({type:"blockquote_end"});else if(o=this.rules.list.exec(e)){for(e=e.substring(o[0].length),s=o[2],this.tokens.push({type:"list_start",ordered:s.length>1}),i=!1,p=(o=o[0].match(this.rules.item)).length,h=0;h1&&l.length>1||(e=o.slice(h+1).join("\n")+e,h=p-1)),a=i||/\n\n(?!\s*$)/.test(c),h!==p-1&&(i="\n"===c.charAt(c.length-1),a||(a=i)),this.tokens.push({type:a?"loose_item_start":"list_item_start"}),this.token(c,!1,r),this.tokens.push({type:"list_item_end"});this.tokens.push({type:"list_end"})}else if(o=this.rules.html.exec(e))e=e.substring(o[0].length),this.tokens.push({type:this.options.sanitize?"paragraph":"html",pre:!this.options.sanitizer&&("pre"===o[1]||"script"===o[1]||"style"===o[1]),text:o[0]});else if(!r&&n&&(o=this.rules.def.exec(e)))e=e.substring(o[0].length),this.tokens.links[o[1].toLowerCase()]={href:o[2],title:o[3]};else if(n&&(o=this.rules.table.exec(e))){for(e=e.substring(o[0].length),c={type:"table",header:o[1].replace(/^ *| *\| *$/g,"").split(/ *\| */),align:o[2].replace(/^ *|\| *$/g,"").split(/ *\| */),cells:o[3].replace(/(?: *\| *)?\n$/,"").split("\n")},h=0;h])/,autolink:/^<([^ <>]+(@|:\/)[^ <>]+)>/,url:p,tag:/^|^<\/?\w+(?:"[^"]*"|'[^']*'|[^<'">])*?>/,link:/^!?\[(inside)\]\(href\)/,reflink:/^!?\[(inside)\]\s*\[([^\]]*)\]/,nolink:/^!?\[((?:\[[^\]]*\]|[^\[\]])*)\]/,strong:/^__([\s\S]+?)__(?!_)|^\*\*([\s\S]+?)\*\*(?!\*)/,em:/^\b_((?:[^_]|__)+?)_\b|^\*((?:\*\*|[\s\S])+?)\*(?!\*)/,code:/^(`+)([\s\S]*?[^`])\1(?!`)/,br:/^ 
{2,}\n(?!\s*$)/,del:p,text:/^[\s\S]+?(?=[\\?(?:\s+['"]([\s\S]*?)['"])?\s*/,r.link=l(r.link)("inside",r._inside)("href",r._href)(),r.reflink=l(r.reflink)("inside",r._inside)(),r.normal=d({},r),r.pedantic=d({},r.normal,{strong:/^__(?=\S)([\s\S]*?\S)__(?!_)|^\*\*(?=\S)([\s\S]*?\S)\*\*(?!\*)/,em:/^_(?=\S)([\s\S]*?\S)_(?!_)|^\*(?=\S)([\s\S]*?\S)\*(?!\*)/}),r.gfm=d({},r.normal,{escape:l(r.escape)("])","~|])")(),url:/^(https?:\/\/[^\s<]+[^<.,:;"')\]\s])/,del:/^~~(?=\S)([\s\S]*?\S)~~/,text:l(r.text)("]|","~]|")("|","|https?://|")()}),r.breaks=d({},r.gfm,{br:l(r.br)("{2,}","*")(),text:l(r.gfm.text)("{2,}","*")()});function i(e,t){if(this.options=t||g.defaults,this.links=e,this.rules=r.normal,this.renderer=this.options.renderer||new a,this.renderer.options=this.options,!this.links)throw new Error("Tokens array requires a `links` property.");this.options.gfm?this.options.breaks?this.rules=r.breaks:this.rules=r.gfm:this.options.pedantic&&(this.rules=r.pedantic)}i.rules=r,i.output=function(e,t,n){return new i(t,n).output(e)},i.prototype.output=function(e){for(var t,n,r,i,a="";e;)if(i=this.rules.escape.exec(e))e=e.substring(i[0].length),a+=i[1];else if(i=this.rules.autolink.exec(e))e=e.substring(i[0].length),"@"===i[2]?(n=s(":"===i[1].charAt(6)?this.mangle(i[1].substring(7)):this.mangle(i[1])),r=this.mangle("mailto:")+n):r=n=s(i[1]),a+=this.renderer.link(r,null,n);else if(this.inLink||!(i=this.rules.url.exec(e))){if(i=this.rules.tag.exec(e))!this.inLink&&/^/i.test(i[0])&&(this.inLink=!1),e=e.substring(i[0].length),a+=this.options.sanitize?this.options.sanitizer?this.options.sanitizer(i[0]):s(i[0]):i[0];else if(i=this.rules.link.exec(e))e=e.substring(i[0].length),this.inLink=!0,a+=this.outputLink(i,{href:i[2],title:i[3]}),this.inLink=!1;else if((i=this.rules.reflink.exec(e))||(i=this.rules.nolink.exec(e))){if(e=e.substring(i[0].length),t=(i[2]||i[1]).replace(/\s+/g," "),!(t=this.links[t.toLowerCase()])||!t.href){a+=i[0].charAt(0),e=i[0].substring(1)+e;continue}this.inLink=!0,a+=this.outputLink(i,t),this.inLink=!1}else if(i=this.rules.strong.exec(e))e=e.substring(i[0].length),a+=this.renderer.strong(this.output(i[2]||i[1]));else if(i=this.rules.em.exec(e))e=e.substring(i[0].length),a+=this.renderer.em(this.output(i[2]||i[1]));else if(i=this.rules.code.exec(e))e=e.substring(i[0].length),a+=this.renderer.codespan(s(i[2].trim(),!0));else if(i=this.rules.br.exec(e))e=e.substring(i[0].length),a+=this.renderer.br();else if(i=this.rules.del.exec(e))e=e.substring(i[0].length),a+=this.renderer.del(this.output(i[1]));else if(i=this.rules.text.exec(e))e=e.substring(i[0].length),a+=this.renderer.text(s(this.smartypants(i[0])));else if(e)throw new Error("Infinite loop on byte: "+e.charCodeAt(0))}else e=e.substring(i[0].length),r=n=s(i[1]),a+=this.renderer.link(r,null,n);return a},i.prototype.outputLink=function(e,t){var n=s(t.href),r=t.title?s(t.title):null;return"!"!==e[0].charAt(0)?this.renderer.link(n,r,this.output(e[1])):this.renderer.image(n,r,s(e[1]))},i.prototype.smartypants=function(e){return this.options.smartypants?e.replace(/---/g,"—").replace(/--/g,"–").replace(/(^|[-\u2014/(\[{"\s])'/g,"$1‘").replace(/'/g,"’").replace(/(^|[-\u2014/(\[{\u2018\s])"/g,"$1“").replace(/"/g,"”").replace(/\.{3}/g,"…"):e},i.prototype.mangle=function(e){if(!this.options.mangle)return e;for(var t,n="",r=e.length,i=0;i.5&&(t="x"+t.toString(16)),n+=""+t+";";return n};function a(e){this.options=e||{}}a.prototype.code=function(e,t,n){if(this.options.highlight){var r=this.options.highlight(e,t);null!=r&&r!==e&&(n=!0,e=r)}return 
t?''+(n?e:s(e,!0))+"\n
\n":""+(n?e:s(e,!0))+"\n
"},a.prototype.blockquote=function(e){return"\n"+e+" \n"},a.prototype.html=function(e){return e},a.prototype.heading=function(e,t,n){return"\n"},a.prototype.hr=function(){return this.options.xhtml?" \n":" \n"},a.prototype.list=function(e,t){var n=t?"ol":"ul";return"<"+n+">\n"+e+""+n+">\n"},a.prototype.listitem=function(e){return""+e+" \n"},a.prototype.paragraph=function(e){return""+e+"
\n"},a.prototype.table=function(e,t){return"\n"},a.prototype.tablerow=function(e){return"\n"+e+" \n"},a.prototype.tablecell=function(e,t){var n=t.header?"th":"td";return(t.align?"<"+n+' style="text-align:'+t.align+'">':"<"+n+">")+e+""+n+">\n"},a.prototype.strong=function(e){return""+e+" "},a.prototype.em=function(e){return""+e+" "},a.prototype.codespan=function(e){return""+e+"
"},a.prototype.br=function(){return this.options.xhtml?" ":" "},a.prototype.del=function(e){return""+e+""},a.prototype.link=function(e,t,n){if(this.options.sanitize){try{var r=decodeURIComponent((i=e,i.replace(/&(#(?:\d+)|(?:#x[0-9A-Fa-f]+)|(?:\w+));?/gi,function(e,t){return"colon"===(t=t.toLowerCase())?":":"#"===t.charAt(0)?"x"===t.charAt(1)?String.fromCharCode(parseInt(t.substring(2),16)):String.fromCharCode(+t.substring(1)):""}))).replace(/[^\w:]/g,"").toLowerCase()}catch(e){return n}if(0===r.indexOf("javascript:")||0===r.indexOf("vbscript:")||0===r.indexOf("data:"))return n}var i;this.options.baseUrl&&!h.test(e)&&(e=c(this.options.baseUrl,e));var a='"+n+" "},a.prototype.image=function(e,t,n){this.options.baseUrl&&!h.test(e)&&(e=c(this.options.baseUrl,e));var r=' ":">"},a.prototype.text=function(e){return e};function o(e){this.tokens=[],this.token=null,this.options=e||g.defaults,this.options.renderer=this.options.renderer||new a,this.renderer=this.options.renderer,this.renderer.options=this.options}o.parse=function(e,t,n){return new o(t,n).parse(e)},o.prototype.parse=function(e){this.inline=new i(e.links,this.options,this.renderer),this.tokens=e.reverse();for(var t="";this.next();)t+=this.tok();return t},o.prototype.next=function(){return this.token=this.tokens.pop()},o.prototype.peek=function(){return this.tokens[this.tokens.length-1]||0},o.prototype.parseText=function(){for(var e=this.token.text;"text"===this.peek().type;)e+="\n"+this.next().text;return this.inline.output(e)},o.prototype.tok=function(){switch(this.token.type){case"space":return"";case"hr":return this.renderer.hr();case"heading":return this.renderer.heading(this.inline.output(this.token.text),this.token.depth,this.token.text);case"code":return this.renderer.code(this.token.text,this.token.lang,this.token.escaped);case"table":var e,t,n,r,i="",a="";for(n="",e=0;e /g,">").replace(/"/g,""").replace(/'/g,"'")}function l(e,t){return e=e.source,t=t||"",function n(r,i){return r?(i=(i=i.source||i).replace(/(^|[^\[])\^/g,"$1"),e=e.replace(r,i),n):new RegExp(e,t)}}function c(e,t){return u[" "+e]||(/^[^:]+:\/*[^/]*$/.test(e)?u[" "+e]=e+"/":u[" "+e]=e.replace(/[^/]*$/,"")),e=u[" "+e],"//"===t.slice(0,2)?e.replace(/:[\s\S]*/,":")+t:"/"===t.charAt(0)?e.replace(/(:\/*[^/]*)[\s\S]*/,"$1")+t:e+t}var u={},h=/^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;function p(){}p.exec=p;function d(e){for(var t,n,r=arguments,i=1;iAn error occurred:
"+s(e.message+"",!0)+" ";throw e}}g.options=g.setOptions=function(e){return d(g.defaults,e),g},g.defaults={gfm:!0,tables:!0,breaks:!1,pedantic:!1,sanitize:!1,sanitizer:null,mangle:!0,smartLists:!1,silent:!1,highlight:null,langPrefix:"lang-",smartypants:!1,headerPrefix:"",renderer:new a,xhtml:!1,baseUrl:null},g.Parser=o,g.parser=o.parse,g.Renderer=a,g.Lexer=n,g.lexer=n.lex,g.InlineLexer=i,g.inlineLexer=i.output,g.parse=g,e.exports=g}).call(function(){return this||("undefined"!=typeof window?window:N)}())}),q=j(function(e){var t="undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{},n=function(){var e=/\blang(?:uage)?-(\w+)\b/i,n=0,r=t.Prism={manual:t.Prism&&t.Prism.manual,disableWorkerMessageHandler:t.Prism&&t.Prism.disableWorkerMessageHandler,util:{encode:function(e){return e instanceof i?new i(e.type,r.util.encode(e.content),e.alias):"Array"===r.util.type(e)?e.map(r.util.encode):e.replace(/&/g,"&").replace(/e.length)return;if(!(w instanceof l)){p.lastIndex=0;var x=1;if(!(A=p.exec(w))&&f&&y!=t.length-1){if(p.lastIndex=k,!(A=p.exec(e)))break;for(var _=A.index+(g?A[1].length:0),S=A.index+A[0].length,C=y,L=k,E=t.length;C=(L+=t[C].length)&&(++y,k=L);if(t[y]instanceof l||t[C-1].greedy)continue;x=C-y,w=e.slice(k,L),A.index-=k}if(A){g&&(m=A[1].length);S=(_=A.index+m)+(A=A[0].slice(m)).length;var A,$=w.slice(0,_),T=w.slice(S),P=[y,x];$&&(++y,k+=$.length,P.push($));var F=new l(c,d?r.tokenize(A,d):A,v,A,f);if(P.push(F),T&&P.push(T),Array.prototype.splice.apply(t,P),1!=x&&r.matchGrammar(e,t,n,y,k,!0,c),o)break}else if(o)break}}}}},tokenize:function(e,t,n){var i=[e],a=t.rest;if(a){for(var o in a)t[o]=a[o];delete t.rest}return r.matchGrammar(e,i,t,0,0,!1),i},hooks:{all:{},add:function(e,t){var n=r.hooks.all;n[e]=n[e]||[],n[e].push(t)},run:function(e,t){var n=r.hooks.all[e];if(n&&n.length)for(var i,a=0;i=n[a++];)i(t)}}},i=r.Token=function(e,t,n,r,i){this.type=e,this.content=t,this.alias=n,this.length=0|(r||"").length,this.greedy=!!i};if(i.stringify=function(e,t,n){if("string"==typeof e)return e;if("Array"===r.util.type(e))return e.map(function(n){return i.stringify(n,t,e)}).join("");var a={type:e.type,content:i.stringify(e.content,t,n),tag:"span",classes:["token",e.type],attributes:{},language:t,parent:n};if(e.alias){var o="Array"===r.util.type(e.alias)?e.alias:[e.alias];Array.prototype.push.apply(a.classes,o)}r.hooks.run("wrap",a);var s=Object.keys(a.attributes).map(function(e){return e+'="'+(a.attributes[e]||"").replace(/"/g,""")+'"'}).join(" ");return"<"+a.tag+' class="'+a.classes.join(" ")+'"'+(s?" 
"+s:"")+">"+a.content+""+a.tag+">"},!t.document)return t.addEventListener?(r.disableWorkerMessageHandler||t.addEventListener("message",function(e){var n=JSON.parse(e.data),i=n.language,a=n.code,o=n.immediateClose;t.postMessage(r.highlight(a,r.languages[i],i)),o&&t.close()},!1),t.Prism):t.Prism;var a=document.currentScript||[].slice.call(document.getElementsByTagName("script")).pop();return a&&(r.filename=a.src,r.manual||a.hasAttribute("data-manual")||("loading"!==document.readyState?window.requestAnimationFrame?window.requestAnimationFrame(r.highlightAll):window.setTimeout(r.highlightAll,16):document.addEventListener("DOMContentLoaded",r.highlightAll))),t.Prism}();e.exports&&(e.exports=n),void 0!==N&&(N.Prism=n),n.languages.markup={comment://,prolog:/<\?[\s\S]+?\?>/,doctype://i,cdata://i,tag:{pattern:/<\/?(?!\d)[^\s>\/=$<]+(?:\s+[^\s>\/=]+(?:=(?:("|')(?:\\[\s\S]|(?!\1)[^\\])*\1|[^\s'">=]+))?)*\s*\/?>/i,inside:{tag:{pattern:/^<\/?[^\s>\/]+/i,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"attr-value":{pattern:/=(?:("|')(?:\\[\s\S]|(?!\1)[^\\])*\1|[^\s'">=]+)/i,inside:{punctuation:[/^=/,{pattern:/(^|[^\\])["']/,lookbehind:!0}]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:/?[\da-z]{1,8};/i},n.languages.markup.tag.inside["attr-value"].inside.entity=n.languages.markup.entity,n.hooks.add("wrap",function(e){"entity"===e.type&&(e.attributes.title=e.content.replace(/&/,"&"))}),n.languages.xml=n.languages.markup,n.languages.html=n.languages.markup,n.languages.mathml=n.languages.markup,n.languages.svg=n.languages.markup,n.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-]+?.*?(?:;|(?=\s*\{))/i,inside:{rule:/@[\w-]+/}},url:/url\((?:(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1|.*?)\)/i,selector:/[^{}\s][^{};]*?(?=\s*\{)/,string:{pattern:/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},property:/[-_a-z\xA0-\uFFFF][-\w\xA0-\uFFFF]*(?=\s*:)/i,important:/\B!important\b/i,function:/[-a-z0-9]+(?=\()/i,punctuation:/[(){};:]/},n.languages.css.atrule.inside.rest=n.util.clone(n.languages.css),n.languages.markup&&(n.languages.insertBefore("markup","tag",{style:{pattern:/(")).firstElementChild),function(e){if(!(window.CSS&&window.CSS.supports&&window.CSS.supports("(--v:red)"))){var t=v("style:not(.inserted),link");[].forEach.call(t,function(t){if("STYLE"===t.nodeName)F(t,e);else if("LINK"===t.nodeName){var n=t.getAttribute("href");if(!/\.css$/.test(n))return;P(n).then(function(t){var n=b("style",t);f.appendChild(n),F(n,e)})}})}}(t.themeColor));var u;e._updateRender(),_(g,"ready")}var Se={};var Ce=function(e){this.config=e};Ce.prototype.getBasePath=function(){return this.config.basePath},Ce.prototype.getFile=function(e,t){void 0===e&&(e=this.getCurrentPath());var n=this.config,r=this.getBasePath(),i="string"==typeof n.ext?n.ext:".md";e=n.alias?function e(t,n,r){var i=Object.keys(n).filter(function(e){return(Se[e]||(Se[e]=new RegExp("^"+e+"$"))).test(t)&&t!==r})[0];return i?e(t.replace(Se[i],n[i]),n,t):t}(e,n.alias):e,a=e,o=i;var a,o;return e=(e=new RegExp("\\.("+o.replace(/^\./,"")+"|html)$","g").test(a)?a:/\/$/g.test(a)?a+"README"+o:""+a+o)==="/README"+i?n.homepage||e:e,e=Q(e)?e:J(r,e),t&&(e=e.replace(new RegExp("^"+r),"")),e},Ce.prototype.onchange=function(e){void 0===e&&(e=a),e()},Ce.prototype.getCurrentPath=function(){},Ce.prototype.normalize=function(){},Ce.prototype.parse=function(){},Ce.prototype.toURL=function(e,t,n){var 
i=n&&"#"===e[0],a=this.parse(K(e));if(a.query=r({},a.query,t),e=(e=a.path+X(a.query)).replace(/\.md(\?)|\.md$/,"$1"),i){var o=n.indexOf("?");e=(o>0?n.substr(0,o):n)+e}return Z("/"+e)};function Le(e){var t=location.href.indexOf("#");location.replace(location.href.slice(0,t>=0?t:0)+"#"+e)}var Ee=function(e){function t(t){e.call(this,t),this.mode="hash"}return e&&(t.__proto__=e),t.prototype=Object.create(e&&e.prototype),t.prototype.constructor=t,t.prototype.getBasePath=function(){var e=window.location.pathname||"",t=this.config.basePath;return/^(\/|https?:)/g.test(t)?t:Z(e+"/"+t)},t.prototype.getCurrentPath=function(){var e=location.href,t=e.indexOf("#");return-1===t?"":e.slice(t+1)},t.prototype.onchange=function(e){void 0===e&&(e=a),w("hashchange",e)},t.prototype.normalize=function(){var e=this.getCurrentPath();if("/"===(e=K(e)).charAt(0))return Le(e);Le("/"+e)},t.prototype.parse=function(e){void 0===e&&(e=location.href);var t="",n=e.indexOf("#");n>=0&&(e=e.slice(n+1));var r=e.indexOf("?");return r>=0&&(t=e.slice(r+1),e=e.slice(0,r)),{path:e,file:this.getFile(e,!0),query:G(t)}},t.prototype.toURL=function(t,n,r){return"#"+e.prototype.toURL.call(this,t,n,r)},t}(Ce),Ae=function(e){function t(t){e.call(this,t),this.mode="history"}return e&&(t.__proto__=e),t.prototype=Object.create(e&&e.prototype),t.prototype.constructor=t,t.prototype.getCurrentPath=function(){var e=this.getBasePath(),t=window.location.pathname;return e&&0===t.indexOf(e)&&(t=t.slice(e.length)),(t||"/")+window.location.search+window.location.hash},t.prototype.onchange=function(e){void 0===e&&(e=a),w("click",function(t){var n="A"===t.target.tagName?t.target:t.target.parentNode;if("A"===n.tagName&&!/_blank/.test(n.target)){t.preventDefault();var r=n.href;window.history.pushState({key:r},"",r),e()}}),w("popstate",e)},t.prototype.parse=function(e){void 0===e&&(e=location.href);var t="",n=e.indexOf("?");n>=0&&(t=e.slice(n+1),e=e.slice(0,n));var r=J(location.origin),i=e.indexOf(r);return i>-1&&(e=e.slice(i+r.length)),{path:e,file:this.getFile(e),query:G(t)}},t}(Ce);var $e={};function Te(e){e.router.normalize(),e.route=e.router.parse(),g.setAttribute("data-page",e.route.file)}function Pe(e){!function(e){var t=function(e){return g.classList.toggle("close")};w(e=p(e),"click",function(e){e.stopPropagation(),t()}),c&&w(g,"click",function(e){return g.classList.contains("close")&&t()})}("button.sidebar-toggle",e.router),t=".sidebar",e.router,w(t=p(t),"click",function(e){var t=e.target;"A"===t.nodeName&&t.nextSibling&&t.nextSibling.classList.contains("app-sub-sidebar")&&_(t.parentNode,"collapse")});var t;e.config.coverpage?!c&&w("scroll",ae):g.classList.add("sticky")}function Fe(e,t,n,r,i,a){e=a?e:e.replace(/\/$/,""),(e=V(e))&&P(i.router.getFile(e+n)+t,!1,i.config.requestHeaders).then(r,function(a){return Fe(e,t,n,r,i)})}var Oe=Object.freeze({cached:e,hyphenate:t,hasOwn:n,merge:r,isPrimitive:i,noop:a,isFn:o,inBrowser:l,isMobile:c,supportsPushState:u,parseQuery:G,stringifyQuery:X,isAbsolutePath:Q,getParentPath:V,cleanPath:Z,getPath:J,replaceSlug:K});function Me(){this._init()}var Ne=Me.prototype;Ne._init=function(){this.config=function(){var 
e=r({el:"#app",repo:"",maxLevel:6,subMaxLevel:0,loadSidebar:null,loadNavbar:null,homepage:"README.md",coverpage:"",basePath:"",auto2top:!1,name:"",themeColor:"",nameLink:window.location.pathname,autoHeader:!1,executeScript:null,noEmoji:!1,ga:"",ext:".md",mergeNavbar:!1,formatUpdated:"",externalLinkTarget:"_blank",routerMode:"hash",noCompileLinks:[]},window.$docsify),a=document.currentScript||[].slice.call(document.getElementsByTagName("script")).filter(function(e){return/docsify\./.test(e.src)})[0];if(a){for(var o in e)if(n.call(e,o)){var s=a.getAttribute("data-"+t(o));i(s)&&(e[o]=""===s||s)}!0===e.loadSidebar&&(e.loadSidebar="_sidebar"+e.ext),!0===e.loadNavbar&&(e.loadNavbar="_navbar"+e.ext),!0===e.coverpage&&(e.coverpage="_coverpage"+e.ext),!0===e.repo&&(e.repo=""),!0===e.name&&(e.name="")}return window.$docsify=e,e}(),(e=this)._hooks={},e._lifecycle={},["init","mounted","beforeEach","afterEach","doneEach","ready"].forEach(function(t){var n=e._hooks[t]=[];e._lifecycle[t]=function(e){return n.push(e)}});var e;[].concat((a=this).config.plugins).forEach(function(e){return o(e)&&e(a._lifecycle,a)});var a;s(this,"init"),function(e){var t,n=e.config;t="history"===(n.routerMode||"hash")&&u?new Ae(n):new Ee(n),e.router=t,Te(e),$e=e.route,t.onchange(function(t){Te(e),e._updateRender(),$e.path!==e.route.path?(e.$fetch(),$e=e.route):e.$resetEvents()})}(this),_e(this),Pe(this),function(e){var t=e.config.loadSidebar;if(e.rendered){var n=oe(e.router,".sidebar-nav",!0,!0);t&&n&&(n.parentNode.innerHTML+=window.__SUB_SIDEBAR__),e._bindEventOnRendered(n),e.$resetEvents(),s(e,"doneEach"),s(e,"ready")}else e.$fetch(function(t){return s(e,"ready")})}(this),s(this,"mounted")};Ne.route={};(je=Ne)._renderTo=function(e,t,n){var r=p(e);r&&(r[n?"outerHTML":"innerHTML"]=t)},je._renderSidebar=function(e){var t=this.config,n=t.maxLevel,r=t.subMaxLevel,i=t.loadSidebar;this._renderTo(".sidebar-nav",this.compiler.sidebar(e,n));var a=oe(this.router,".sidebar-nav",!0,!0);i&&a?a.parentNode.innerHTML+=this.compiler.subSidebar(r)||"":this.compiler.subSidebar(),this._bindEventOnRendered(a)},je._bindEventOnRendered=function(e){var t=this.config,n=t.autoHeader,r=t.auto2top;if(function(e){var t=m(".cover.show");de=t?t.offsetHeight:0;for(var n=p(".sidebar"),r=v(n,"li"),i=0,a=r.length;i([^<]*?)
$');if(i){if("color"===i[2])n.style.background=i[1]+(i[3]||"");else{var a=i[1];_(n,"add","has-mask"),Q(i[1])||(a=J(this.router.getBasePath(),i[1])),n.style.backgroundImage="url("+a+")",n.style.backgroundSize="cover",n.style.backgroundPosition="center center"}r=r.replace(i[0],"")}this._renderTo(".cover-main",r),ae()}else _(n,"remove","show")},je._updateRender=function(){!function(e){var t=p(".app-name-link"),n=e.config.nameLink,r=e.route.path;if(t)if(i(e.config.nameLink))t.setAttribute("href",n);else if("object"==typeof n){var a=Object.keys(n).filter(function(e){return r.indexOf(e)>-1})[0];t.setAttribute("href",n[a])}}(this)};var je;!function(e){var t,n=function(e,n,r){return t&&t.abort&&t.abort(),t=P(e,!0,r)};e._loadSideAndNav=function(e,t,n,r){var i=this;return function(){if(!n)return r();Fe(e,t,n,function(e){i._renderSidebar(e),r()},i,!0)}},e._fetch=function(e){var t=this;void 0===e&&(e=a);var r=this.route,i=r.path,o=X(r.query,["id"]),s=this.config,l=s.loadNavbar,c=s.requestHeaders,u=s.loadSidebar,h=this.router.getFile(i),p=n(h+o,0,c);this.isHTML=/\.html$/g.test(h),p.then(function(n,r){return t._renderMain(n,r,t._loadSideAndNav(i,o,u,e))},function(n){t._fetchFallbackPage(h,o,e)||t._fetch404(h,o,e)}),l&&Fe(i,o,l,function(e){return t._renderNav(e)},this,!0)},e._fetchCover=function(){var e=this,t=this.config,n=t.coverpage,r=t.requestHeaders,i=this.route.query,a=V(this.route.path);if(n){var o=null,s=this.route.path;if("string"==typeof n)"/"===s&&(o=n);else if(Array.isArray(n))o=n.indexOf(s)>-1&&"_coverpage";else{var l=n[s];o=!0===l?"_coverpage":l}var c=Boolean(o)&&this.config.onlyCover;return o?(o=this.router.getFile(a+o),this.coverIsHTML=/\.html$/g.test(o),P(o+X(i,["id"]),!1,r).then(function(t){return e._renderCover(t,c)})):this._renderCover(null,c),c}},e.$fetch=function(e){var t=this;void 0===e&&(e=a);var n=function(){s(t,"doneEach"),e()};this._fetchCover()?n():this._fetch(function(){t.$resetEvents(),n()})},e._fetchFallbackPage=function(e,t,r){var i=this;void 0===r&&(r=a);var o=this.config,s=o.requestHeaders,l=o.fallbackLanguages,c=o.loadSidebar;if(!l)return!1;var u=e.split("/")[1];if(-1===l.indexOf(u))return!1;var h=e.replace(new RegExp("^/"+u),"");return n(h+t,0,s).then(function(n,a){return i._renderMain(n,a,i._loadSideAndNav(e,t,c,r))},function(){return i._fetch404(e,t,r)}),!0},e._fetch404=function(e,t,r){var i=this;void 0===r&&(r=a);var o=this.config,s=o.loadSidebar,l=o.requestHeaders,c=o.notFoundPage,u=this._loadSideAndNav(e,t,s,r);if(c){var h=function(e,t){var n,r,i=t.notFoundPage,a="_404"+(t.ext||".md");switch(typeof i){case"boolean":r=a;break;case"string":r=i;break;case"object":r=(n=Object.keys(i).sort(function(e,t){return t.length-e.length}).find(function(t){return e.match(new RegExp("^"+t))}))&&i[n]||a}return r}(e,this.config);return n(this.router.getFile(h),0,l).then(function(e,t){return i._renderMain(e,t,u)},function(){return i._renderMain(null,{},u)}),!0}return this._renderMain(null,{},u),!1}}(Ne),Ne.$resetEvents=function(){me(this.route.path,this.route.query.id),this.config.loadNavbar&&oe(this.router,"nav")};window.Docsify={util:Oe,dom:S,get:P,slugify:U},window.DocsifyCompiler=re,window.marked=R,window.Prism=q,Me.version="4.7.0",function(e){var t=document.readyState;if("complete"===t||"interactive"===t)return setTimeout(e,0);document.addEventListener("DOMContentLoaded",e)}(function(e){return new Me})}();
2 |
--------------------------------------------------------------------------------
/docs/assets/ga.min.js:
--------------------------------------------------------------------------------
1 | !function(){function n(n){!function(){var n=document.createElement("script");n.async=!0,n.src="https://www.google-analytics.com/analytics.js",document.body.appendChild(n)}(),window.ga=window.ga||function(){(window.ga.q=window.ga.q||[]).push(arguments)},window.ga.l=Number(new Date),window.ga("create",n,"auto")}function o(){window.ga||n($docsify.ga),window.ga("set","page",location.hash),window.ga("send","pageview")}$docsify.plugins=[].concat(function(n){$docsify.ga?n.beforeEach(o):console.error("[Docsify] ga is required.")},$docsify.plugins)}();
2 |
--------------------------------------------------------------------------------
/docs/assets/graphpipe.css:
--------------------------------------------------------------------------------
1 | /*---[ Common ]-----------------------*/
2 | .indent {
3 | margin-top: -0.8em;
4 | margin-left: 1em;
5 | padding-left: 1em;
6 | border-left: 1px solid #ededed;
7 | }
8 | /*====================================*/
9 |
10 | /*---[ Header & Footer ]--------------*/
11 | #footer {
12 | color: #B2B3BA;
13 | line-height: 1.7em;
14 | margin-top: 2em;
15 | }
16 |
17 | #footer > hr {
18 | margin-bottom: 0.5em;
19 | }
20 |
21 | #footer a {
22 | color: #B2B3BA;
23 | font-weight: 400;
24 | }
25 |
26 | #footer .docsify-text {
27 | font-weight: 600;
28 | }
29 |
30 | #footer .heart {
31 | color: #F99F9F;
32 | }
33 |
34 | #main {
35 | padding-top: 50px !important;
36 | }
37 |
38 | .markdown-section h1:not(:first-of-type) {
39 | margin-top: 1em;
40 | }
41 |
42 | a:not(.md-button):hover {
43 | text-decoration-color: #F7D096;
44 | }
45 | /*====================================*/
46 |
47 |
48 | /*---[ Navbar ]----------------------*/
49 | .app-nav {
50 | margin-right: 100px;
51 | }
52 |
53 | .app-nav .arrow {
54 | color: #CFD3D9;
55 | margin-left: 3px;
56 | }
57 |
58 | .app-nav > ul {
59 | padding-left: 0px !important;
60 | margin-right: 50px;
61 | }
62 |
63 | .app-nav > ul > li > a {
64 | color: inherit;
65 | text-decoration: none;
66 | transition: color .3s;
67 | }
68 |
69 | .app-nav > ul > li > p > a {
70 | font-size: 15px;
71 | }
72 |
73 | .app-nav a:hover {
74 | text-decoration: none !important;
75 | }
76 | /*====================================*/
77 |
78 |
79 | /*---[ Sidebar ]----------------------*/
80 | /* |---[ Reorder sidebar ]-----------*/
81 | .search {
82 | position: absolute;
83 | width: 100%;
84 | top: 70px;
85 | border-top: 1px solid #EEEEEE;
86 | z-index: 10;
87 | background-color: #FFFFFF;
88 | }
89 |
90 | .sidebar {
91 | overflow: hidden;
92 | }
93 |
94 | .sidebar > h1 {
95 | position: absolute;
96 | top: 20px;
97 | left: 0;
98 | width: 100%;
99 | }
100 |
101 | .sidebar-nav {
102 | position: absolute;
103 | width: 100%;
104 | top: 130px;
105 | /* Make only the nav scrollable */
106 | overflow-y: auto;
107 | height: calc(100% - 168px); /* top (130px) + toggle (38px) */
108 | box-sizing: content-box;
109 | padding-right: 17px;
110 | }
111 | /* |=================================*/
112 |
113 | .sidebar > h1 > a.app-name-link {
114 | color: #2B7FB9;
115 | }
116 |
117 | .app-sub-sidebar li:before {
118 | display: none;
119 | }
120 |
121 | body.no-sidebar .sidebar,
122 | body.no-sidebar .sidebar-toggle {
123 | display: none !important;
124 | }
125 |
126 | body.no-sidebar .content {
127 | left: 0px !important;
128 | }
129 |
130 | .github-corner {
131 | display: block !important;
132 | border-bottom: 0 !important;
133 | position: fixed !important;
134 | right: 0 !important;
135 | text-decoration: none !important;
136 | top: 0 !important;
137 | z-index: 1 !important;
138 | }
139 |
140 | .github-corner svg {
141 | color: #FFFFFF !important;
142 | fill: #00748f !important;
143 | }
144 | /*====================================*/
145 |
146 |
147 | /*---[ Tabs ]-------------------------*/
148 | .md-tabs .md-tabs-content {
149 | margin-bottom: -1em;
150 | }
151 |
152 | .md-button.md-focused:before,
153 | .md-button:active:before,
154 | .md-button:hover:before {
155 | background-color: #00748f;
156 | opacity: .12;
157 | }
158 |
159 | .md-tabs.md-theme-default .md-tabs-navigation .md-button.md-active {
160 | color: #00748f;
161 | }
162 |
163 | .md-tabs-indicator {
164 | background-color: #00748f;
165 | }
166 |
167 | .md-ripple .md-ripple-wave {
168 | background-color: #00748f;
169 | }
170 |
171 | /* Code Blocks */
172 | .md-tabs pre:not(:first-of-type) {
173 | margin-top: 1em;
174 | }
175 |
176 | .md-tabs pre {
177 | margin-top: 0;
178 | margin-bottom: 0;
179 | }
180 |
181 | .markdown-section pre {
182 | margin: 0 !important;
183 | padding: 1.5em !important;
184 | }
185 |
186 | .markdown-section pre:not(:first-child) {
187 | margin-top: 1em !important;
188 | }
189 |
190 | pre:after {
191 | font-size: 0.8em !important;
192 | font-family: Source Sans Pro, Helvetica Neue, Arial, sans-serif;
193 | font-weight: 400;
194 | }
195 |
196 | .markdown-section code {
197 | margin: 0 !important;
198 | padding: 0 !important;
199 | background-color: transparent !important;
200 | }
201 |
202 | blockquote pre {
203 | border: 1px solid #F0B794 !important;
204 | }
205 | /*====================================*/
206 |
207 |
208 | /*---[ Markdown ]---------------------*/
209 | p.warn, p.tip, blockquote {
210 | position: relative !important;
211 | }
212 |
213 | p.warn:before,
214 | p.tip:before,
215 | blockquote:before {
216 | border-radius: 100% !important;
217 | font-family: Dosis,Source Sans Pro,Helvetica Neue,Arial,sans-serif !important;
218 | font-size: 14px !important;
219 | font-weight: 700 !important;
220 | left: -12px !important;
221 | line-height: 20px !important;
222 | position: absolute !important;
223 | height: 20px !important;
224 | width: 20px !important;
225 | text-align: center !important;
226 | top: 17px !important;
227 | }
228 |
229 | .markdown-section p.warn {
230 | background-color: transparent;
231 | border: 1px solid #E9692F;
232 | padding: 1.5rem 1rem;
233 | }
234 |
235 | .markdown-section p.tip {
236 | background-color: #F8F8F8;
237 | border-left: 3px solid #FF6666;
238 | padding: 1.5rem 1rem !important;
239 | }
240 |
241 | .markdown-section p.tip:before {
242 | top: 27px !important;
243 | }
244 |
245 | .markdown-section blockquote {
246 | padding: 1rem !important;
247 | background-color: #F8F8F8;
248 | }
249 |
250 | .markdown-section blockquote > p {
251 | margin: 0 !important;
252 | font-weight: normal !important;
253 | color: #34495E !important;
254 | }
255 |
256 | .markdown-section p > code {
257 | background-color: transparent !important;
258 | font-size: 0.85em !important;
259 | }
260 |
261 | .markdown-section p.warn > code {
262 | color: #F54D4D !important;
263 | }
264 | /*====================================*/
265 |
266 |
267 | /*---[ Attributes ]-------------------*/
268 | code .token.annotation {
269 | color: #00748f;
270 | }
271 |
272 | .transform-arrow {
273 | display: block;
274 | width: 1em;
275 | margin: -0.3em auto;
276 | color: #00748f;
277 | }
278 | /*====================================*/
279 |
280 |
281 | /*---[ API ]--------------------------*/
282 | .api-container h2 {
283 | margin-top: 0.5em;
284 | }
285 |
286 | .api-container h2 a {
287 | text-decoration: none;
288 | }
289 |
290 | .api-container h2 a span {
291 | font-size: 1rem;
292 | font-family: Roboto Mono,Monaco,courier,monospace;
293 | font-weight: lighter;
294 | margin-left: 1.2em;
295 | }
296 |
297 | .api-container h2 a span:before {
298 | content: "#";
299 | color: #00748f;
300 | position: absolute;
301 | left: 1em;
302 | margin-top: 0.25em;
303 | font-size: 1em;
304 | }
305 |
306 | .api-container hr {
307 | margin: 5px 0;
308 | }
309 |
310 | .api-container ul {
311 | list-style-type: none;
312 | }
313 |
314 | .api-container li p:first-child {
315 | margin-bottom: 0.3em;
316 | }
317 |
318 | .api-container li p:nth-child(2) {
319 | margin-top: 0.3em;
320 | }
321 | /*====================================*/
322 |
323 |
324 | /*---[ Team ]-------------------------*/
325 | .member {
326 | display: flex;
327 | margin-top: 2em;
328 | }
329 |
330 | .member .avatar {
331 | flex: 0 0 80px;
332 | }
333 |
334 | .member .avatar .md-avatar {
335 | width: 80px;
336 | height: 80px;
337 | }
338 |
339 | .member .profile {
340 | padding-left: 26px;
341 | flex: 1;
342 | }
343 |
344 | .member .profile h3 {
345 | margin-top: 0;
346 | }
347 |
348 | .member .profile dl {
349 | margin-top: 0.6em;
350 | }
351 |
352 | .member .profile dt {
353 | font-size: 0.84em;
354 | }
355 |
356 | .member .profile dt,
357 | .member .profile dd {
358 | display: inline;
359 | padding: 0;
360 | margin: 0;
361 | line-height: 1.3;
362 | font-weight: 600;
363 | }
364 |
365 | .member .profile dt::after {
366 | content: "";
367 | margin-right: 7px;
368 | }
369 |
370 | .member .profile dd::after {
371 | display: block;
372 | content: " ";
373 | }
374 |
375 | .member .profile dt i {
376 | width: 14px;
377 | text-align: center;
378 | }
379 |
380 | .member .profile dt i.fa-map-marker {
381 | font-size: 1.15em;
382 | }
383 |
384 | .member footer {
385 | margin-top: 0.5em;
386 | }
387 |
388 | .member footer a:not(:first-child) {
389 | margin-left: 5px;
390 | }
391 |
392 | .fa-github {
393 | color: #2C3E50;
394 | }
395 |
396 | .fa-twitter {
397 | color: #1DA1F3;
398 | }
399 |
400 | .fa-google {
401 | color: #4285f4;
402 | }
403 |
404 | .team-header-gap {
405 | height: 1px;
406 | }
407 | /*====================================*/
408 |
409 |
410 | /*---[ Gallery ]----------------------*/
411 | .gallery-entry {
412 | display: flex;
413 | margin-top: 3em;
414 | }
415 |
416 | .gallery-entry .image {
417 | flex: 0 0 100px;
418 | height: 100px;
419 | border-radius: 1000px;
420 | overflow: hidden;
421 | border: 5px solid white;
422 |
423 | box-shadow:
424 | 0px 1px 5px 0px rgba(0, 0, 0, 0.2),
425 | 0px 2px 2px 0px rgba(0, 0, 0, 0.14),
426 | 0px 3px 1px -2px rgba(0, 0, 0, 0.12);
427 | }
428 |
429 | .gallery-entry .image:hover {
430 | box-shadow:
431 | 0px 2px 4px -1px rgba(0, 0, 0, 0.2),
432 | 0px 4px 5px 0px rgba(0, 0, 0, 0.14),
433 | 0px 1px 10px 0px rgba(0, 0, 0, 0.12);
434 | }
435 |
436 | .gallery-entry .image img {
437 | width: 90px;
438 | height: 90px;
439 | }
440 |
441 | .gallery-entry .at {
442 | font-weight: 100;
443 | color: #7A8796;
444 | }
445 |
446 | .gallery-entry .description {
447 | padding-left: 26px;
448 | padding-top: 0.3em;
449 | flex: 1;
450 | }
451 |
452 | .gallery-entry .description h3 {
453 | margin-top: 0;
454 | }
455 | /*====================================*/
456 |
457 |
458 | /*---[ Publications ]-----------------*/
459 | .authors {
460 | margin-top: -0.9em;
461 | font-style: italic;
462 | }
463 |
464 | .authors a {
465 | font-weight: 400;
466 | }
467 | /*====================================*/
468 |
469 |
470 | /*---[ Print ]------------------------*/
471 | @media print {
472 | .sidebar {
473 | display: none;
474 | }
475 | .content {
476 | left: 0;
477 | }
478 | }
479 | /*====================================*/
480 |
481 | .app-nav ul li p {
482 | cursor: pointer;
483 | }
484 |
--------------------------------------------------------------------------------
/docs/assets/graphpipe.js:
--------------------------------------------------------------------------------
1 | var graphpipe = graphpipe || {};
2 |
3 | graphpipe.languageLabels = {
4 | cpp: 'C++',
5 | okl: 'OKL',
6 | };
7 |
8 | graphpipe.getLanguageLabel = (language) => (
9 | graphpipe.languageLabels[language] || language.toUpperCase()
10 | );
11 |
12 | //---[ Header & Footer ]----------------
13 | graphpipe.addFooter = (content) => (
14 | content
15 | + '\n\n'
16 | + '\n'
25 | );
26 | //======================================
27 |
28 | //---[ Indent ]-------------------------
29 | graphpipe.parseIndent = (content) => {
30 | const parts = marked.lexer(content);
31 | const mdContent = graphpipe.tokensToHTML(parts);
32 | return (
33 | '<div class="indent">\n'
34 | + mdContent
35 | + '</div>\n'
36 | );
37 | }
38 |
39 | graphpipe.addIndents = (content) => {
40 | const re = /\n::: indent\n([\s\S]*?)\n:::(\n|$)/g;
41 | const parts = [];
42 | var lastIndex = 0;
43 | while ((match = re.exec(content)) != null) {
44 | const [fullMatch, indentContent] = match;
45 |
46 | parts.push(content.substring(lastIndex, match.index));
47 | parts.push(graphpipe.parseIndent(indentContent));
48 |
49 | lastIndex = match.index + fullMatch.length;
50 | }
51 | parts.push(content.substring(lastIndex));
52 |
53 | return parts.join('\n');
54 | };
55 | //======================================
56 |
57 | //---[ Tabs ]---------------------------
58 | graphpipe.markdown = {
59 | space: () => (
60 | ''
61 | ),
62 | text: ({ text }) => (
63 | `<p>${text}</p>`
64 | ),
65 | paragraph: ({ text }) => (
66 | `<p>${text}</p>`
67 | ),
68 | list_start: () => (
69 | '<ul>'
70 | ),
71 | list_end: () => (
72 | '</ul>'
73 | ),
74 | list_item_start: () => (
75 | '<li>'
76 | ),
77 | list_item_end: () => (
78 | '</li>'
79 | ),
80 | html: ({ text }) => (
81 | text
82 | ),
83 | };
84 |
85 | graphpipe.markdown.code = ({ lang, text }) => {
86 | // Remove indentation
87 | const initIndent = text.match(/^\s*/)[0];
88 | if (initIndent.length) {
89 | const lines = text.split(/\r?\n/);
90 | const isIndented = lines.every((line) => (
91 | !line.length
92 | || line.startsWith(initIndent)
93 | ));
94 |
95 | if (isIndented) {
96 | text = lines.map((line) => (
97 | line.substring(initIndent.length)
98 | )).join('\n');
99 | }
100 | }
101 |
102 | // Generate highlighted HTML
103 | const styledCode = Prism.highlight(text,
104 | Prism.languages[lang],
105 | lang);
106 |
107 | // Wrap around pre + code
108 | return (
109 | (
110 | ``
111 | + ``
112 | + `${styledCode}\n`
113 | + '
'
114 | + ' '
115 | )
116 | .replace(/[*]/g, '*')
117 | .replace(/[_]/g, '_')
118 | );
119 | }
120 |
121 | graphpipe.tokenToMarkdown = (token) => {
122 | const { type } = token;
123 | if (type in graphpipe.markdown) {
124 | return graphpipe.markdown[token.type](token);
125 | }
126 | console.error(`Missing token format for: ${token.type}`, token);
127 | return '';
128 | };
129 |
130 | graphpipe.mergeTextTokens = (tokens) => {
131 | const newTokens = [];
132 | let texts = [];
133 | for (var i = 0; i < tokens.length; ++i) {
134 | const token = tokens[i];
135 | if (token.type === 'text') {
136 | texts.push(token.text);
137 | continue;
138 | }
139 | if (texts.length) {
140 | newTokens.push({
141 | type: 'text',
142 | text: texts.join(' '),
143 | });
144 | texts = [];
145 | }
146 | newTokens.push(token);
147 | }
148 | // Join the tail texts
149 | if (texts.length) {
150 | newTokens.push({
151 | type: 'text',
152 | text: texts.join(' '),
153 | });
154 | }
155 | return newTokens;
156 | };
157 |
158 | graphpipe.tokensToHTML = (tokens) => {
159 | tokens = graphpipe.mergeTextTokens(tokens);
160 | return (
161 | tokens
162 | .map(graphpipe.tokenToMarkdown)
163 | .join('\n')
164 | );
165 | };
166 |
167 | graphpipe.getTab = ({ tab, content }) => (
168 | ` \n`
169 | + graphpipe.tokensToHTML(content)
170 | + ' '
171 | );
172 |
173 | graphpipe.getTabs = (namespace, tabs) => {
174 | const content = tabs.map(graphpipe.getTab).join('\n');
175 |
176 | const tab = `vm.$data.tabNamespaces['${namespace}']`;
177 | const onClick = `(tab) => vm.onTabChange('${namespace}', tab)`;
178 |
179 | return (
180 | '\n'
181 | + ' \n'
182 | + ' \n'
187 | + `${content}\n`
188 | + ' \n'
189 | + '
\n'
190 | + ' \n'
191 | );
192 | };
193 |
194 | graphpipe.parseTabs = (namespace, content) => {
195 | const parts = marked.lexer(content);
196 | const newParts = [];
197 |
198 | // Skip begin/end of list
199 | for (var i = 1; i < (parts.length - 1); ++i) {
200 | var stackSize = 1;
201 |
202 | // Skip loose_item_start;
203 | ++i;
204 | const tab = parts[i++].text;
205 | const start = i++;
206 |
207 | while ((i < (parts.length - 1)) && (stackSize > 0)) {
208 | switch (parts[i].type) {
209 | case 'list_item_start':
210 | ++stackSize;
211 | break;
212 | case 'list_item_end':
213 | --stackSize;
214 | break;
215 | }
216 | ++i;
217 | }
218 |
219 | // Don't take the token after list_item_end
220 | --i;
221 |
222 | newParts.push({
223 | tab,
224 | content: parts.slice(start, i),
225 | });
226 | }
227 |
228 | if (!newParts.length) {
229 | return [];
230 | }
231 |
232 | if (!(namespace in vm.$data.tabNamespaces)) {
233 | Vue.set(vm.tabNamespaces, namespace, newParts[0].tab);
234 | }
235 |
236 | return graphpipe.getTabs(namespace, newParts);
237 | };
238 |
239 | graphpipe.addTabs = (content) => {
240 | const re = /\n::: tabs (.*)\n([\s\S]*?)\n:::(\n|$)/g;
241 | const parts = [];
242 | var lastIndex = 0, match;
243 | while ((match = re.exec(content)) != null) {
244 | const [fullMatch, namespace, tabContent] = match;
245 |
246 | parts.push(content.substring(lastIndex, match.index));
247 | parts.push(graphpipe.parseTabs(namespace, tabContent));
248 |
249 | lastIndex = match.index + fullMatch.length;
250 | }
251 | parts.push(content.substring(lastIndex));
252 |
253 | return parts.join('\n');
254 | };
255 | //======================================
256 |
257 | // Root-level markdowns don't have a sidebar
258 | graphpipe.hasSidebar = (file) => (
259 | !file.match(/^[^/]*\.md$/)
260 | )
261 |
262 | graphpipe.docsifyPlugin = (hook, vm) => {
263 | hook.init(() => {
264 | Prism.languages.okl = Prism.languages.extend('cpp', {
265 | annotation: {
266 | pattern: /@[a-zA-Z][a-zA-Z0-9_]*/,
267 | greedy: true,
268 | },
269 | });
270 | Prism.languages.bibtex = Prism.languages.extend('latex');
271 | });
272 |
273 | hook.beforeEach((content) => {
274 | // No \n means the last line turns into a header
275 | if (!content.endsWith('\n')) {
276 | content += '\n';
277 | }
278 | content = graphpipe.addIndents(content);
279 | content = graphpipe.addTabs(content);
280 | content = graphpipe.addFooter(content);
281 | return content;
282 | });
283 |
284 | hook.doneEach(() => {
285 | const dom = document.querySelector('body');
286 | const file = vm.route.file;
287 | // Add API styling
288 | if (!file.startsWith('api/')) {
289 | dom.classList.remove('api-container');
290 | } else {
291 | dom.classList.add('api-container');
292 | }
293 | // Close sidebar
294 | if (graphpipe.hasSidebar(file)) {
295 | dom.classList.remove('no-sidebar');
296 | } else {
297 | dom.classList.add('no-sidebar');
298 | }
299 | });
300 | };
301 |
--------------------------------------------------------------------------------
/docs/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/assets/logo.png
--------------------------------------------------------------------------------
/docs/assets/prism-bash.min.js:
--------------------------------------------------------------------------------
1 | !function(e){var t={variable:[{pattern:/\$?\(\([\s\S]+?\)\)/,inside:{variable:[{pattern:/(^\$\(\([\s\S]+)\)\)/,lookbehind:!0},/^\$\(\(/],number:/\b0x[\dA-Fa-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:[Ee]-?\d+)?/,operator:/--?|-=|\+\+?|\+=|!=?|~|\*\*?|\*=|\/=?|%=?|<<=?|>>=?|<=?|>=?|==?|&&?|&=|\^=?|\|\|?|\|=|\?|:/,punctuation:/\(\(?|\)\)?|,|;/}},{pattern:/\$\([^)]+\)|`[^`]+`/,greedy:!0,inside:{variable:/^\$\(|^`|\)$|`$/}},/\$(?:[\w#?*!@]+|\{[^}]+\})/i]};e.languages.bash={shebang:{pattern:/^#!\s*\/bin\/bash|^#!\s*\/bin\/sh/,alias:"important"},comment:{pattern:/(^|[^"{\\])#.*/,lookbehind:!0},string:[{pattern:/((?:^|[^<])<<\s*)["']?(\w+?)["']?\s*\r?\n(?:[\s\S])*?\r?\n\2/,lookbehind:!0,greedy:!0,inside:t},{pattern:/(["'])(?:\\[\s\S]|\$\([^)]+\)|`[^`]+`|(?!\1)[^\\])*\1/,greedy:!0,inside:t}],variable:t.variable,"function":{pattern:/(^|[\s;|&])(?:alias|apropos|apt-get|aptitude|aspell|awk|basename|bash|bc|bg|builtin|bzip2|cal|cat|cd|cfdisk|chgrp|chmod|chown|chroot|chkconfig|cksum|clear|cmp|comm|command|cp|cron|crontab|csplit|cut|date|dc|dd|ddrescue|df|diff|diff3|dig|dir|dircolors|dirname|dirs|dmesg|du|egrep|eject|enable|env|ethtool|eval|exec|expand|expect|export|expr|fdformat|fdisk|fg|fgrep|file|find|fmt|fold|format|free|fsck|ftp|fuser|gawk|getopts|git|grep|groupadd|groupdel|groupmod|groups|gzip|hash|head|help|hg|history|hostname|htop|iconv|id|ifconfig|ifdown|ifup|import|install|jobs|join|kill|killall|less|link|ln|locate|logname|logout|look|lpc|lpr|lprint|lprintd|lprintq|lprm|ls|lsof|make|man|mkdir|mkfifo|mkisofs|mknod|more|most|mount|mtools|mtr|mv|mmv|nano|netstat|nice|nl|nohup|notify-send|npm|nslookup|open|op|passwd|paste|pathchk|ping|pkill|popd|pr|printcap|printenv|printf|ps|pushd|pv|pwd|quota|quotacheck|quotactl|ram|rar|rcp|read|readarray|readonly|reboot|rename|renice|remsync|rev|rm|rmdir|rsync|screen|scp|sdiff|sed|seq|service|sftp|shift|shopt|shutdown|sleep|slocate|sort|source|split|ssh|stat|strace|su|sudo|sum|suspend|sync|tail|tar|tee|test|time|timeout|times|touch|top|traceroute|trap|tr|tsort|tty|type|ulimit|umask|umount|unalias|uname|unexpand|uniq|units|unrar|unshar|uptime|useradd|userdel|usermod|users|uuencode|uudecode|v|vdir|vi|vmstat|wait|watch|wc|wget|whereis|which|who|whoami|write|xargs|xdg-open|yes|zip)(?=$|[\s;|&])/,lookbehind:!0},keyword:{pattern:/(^|[\s;|&])(?:let|:|\.|if|then|else|elif|fi|for|break|continue|while|in|case|function|select|do|done|until|echo|exit|return|set|declare)(?=$|[\s;|&])/,lookbehind:!0},"boolean":{pattern:/(^|[\s;|&])(?:true|false)(?=$|[\s;|&])/,lookbehind:!0},operator:/&&?|\|\|?|==?|!=?|<<|>>|<=?|>=?|=~/,punctuation:/\$?\(\(?|\)\)?|\.\.|[{}[\];]/};var a=t.variable[1].inside;a.string=e.languages.bash.string,a["function"]=e.languages.bash["function"],a.keyword=e.languages.bash.keyword,a.boolean=e.languages.bash.boolean,a.operator=e.languages.bash.operator,a.punctuation=e.languages.bash.punctuation,e.languages.shell=e.languages.bash}(Prism);
--------------------------------------------------------------------------------
/docs/assets/prism-c.min.js:
--------------------------------------------------------------------------------
1 | Prism.languages.c=Prism.languages.extend("clike",{keyword:/\b(?:_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|asm|typeof|inline|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|union|unsigned|void|volatile|while)\b/,operator:/-[>-]?|\+\+?|!=?|<=?|>>?=?|==?|&&?|\|\|?|[~^%?*\/]/,number:/(?:\b0x[\da-f]+|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?)[ful]*/i}),Prism.languages.insertBefore("c","string",{macro:{pattern:/(^\s*)#\s*[a-z]+(?:[^\r\n\\]|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,alias:"property",inside:{string:{pattern:/(#\s*include\s*)(?:<.+?>|("|')(?:\\?.)+?\2)/,lookbehind:!0},directive:{pattern:/(#\s*)\b(?:define|defined|elif|else|endif|error|ifdef|ifndef|if|import|include|line|pragma|undef|using)\b/,lookbehind:!0,alias:"keyword"}}},constant:/\b(?:__FILE__|__LINE__|__DATE__|__TIME__|__TIMESTAMP__|__func__|EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|stdin|stdout|stderr)\b/}),delete Prism.languages.c["class-name"],delete Prism.languages.c["boolean"];
--------------------------------------------------------------------------------
/docs/assets/prism-cpp.min.js:
--------------------------------------------------------------------------------
1 | Prism.languages.cpp=Prism.languages.extend("c",{keyword:/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char16_t|char32_t|class|compl|const|constexpr|const_cast|continue|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|float|for|friend|goto|if|inline|int|int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|long|mutable|namespace|new|noexcept|nullptr|operator|private|protected|public|register|reinterpret_cast|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,"boolean":/\b(?:true|false)\b/,operator:/--?|\+\+?|!=?|<{1,2}=?|>{1,2}=?|->|:{1,2}|={1,2}|\^|~|%|&{1,2}|\|\|?|\?|\*|\/|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/}),Prism.languages.insertBefore("cpp","keyword",{"class-name":{pattern:/(class\s+)\w+/i,lookbehind:!0}}),Prism.languages.insertBefore("cpp","string",{"raw-string":{pattern:/R"([^()\\ ]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}});
--------------------------------------------------------------------------------
/docs/assets/prism-go.min.js:
--------------------------------------------------------------------------------
1 | Prism.languages.go=Prism.languages.extend("clike",{keyword:/\b(?:break|case|chan|const|continue|default|defer|else|fallthrough|for|func|go(?:to)?|if|import|interface|map|package|range|return|select|struct|switch|type|var)\b/,builtin:/\b(?:bool|byte|complex(?:64|128)|error|float(?:32|64)|rune|string|u?int(?:8|16|32|64)?|uintptr|append|cap|close|complex|copy|delete|imag|len|make|new|panic|print(?:ln)?|real|recover)\b/,"boolean":/\b(?:_|iota|nil|true|false)\b/,operator:/[*\/%^!=]=?|\+[=+]?|-[=-]?|\|[=|]?|&(?:=|&|\^=?)?|>(?:>=?|=)?|<(?:<=?|=|-)?|:=|\.\.\./,number:/(?:\b0x[a-f\d]+|(?:\b\d+\.?\d*|\B\.\d+)(?:e[-+]?\d+)?)i?/i,string:{pattern:/(["'`])(\\[\s\S]|(?!\1)[^\\])*\1/,greedy:!0}}),delete Prism.languages.go["class-name"];
--------------------------------------------------------------------------------
/docs/assets/prism-java.min.js:
--------------------------------------------------------------------------------
1 | Prism.languages.java=Prism.languages.extend("clike",{keyword:/\b(?:abstract|continue|for|new|switch|assert|default|goto|package|synchronized|boolean|do|if|private|this|break|double|implements|protected|throw|byte|else|import|public|throws|case|enum|instanceof|return|transient|catch|extends|int|short|try|char|final|interface|static|void|class|finally|long|strictfp|volatile|const|float|native|super|while)\b/,number:/\b0b[01]+\b|\b0x[\da-f]*\.?[\da-fp-]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?[df]?/i,operator:{pattern:/(^|[^.])(?:\+[+=]?|-[-=]?|!=?|<=?|>>?>?=?|==?|&[&=]?|\|[|=]?|\*=?|\/=?|%=?|\^=?|[?:~])/m,lookbehind:!0}}),Prism.languages.insertBefore("java","function",{annotation:{alias:"punctuation",pattern:/(^|[^.])@\w+/,lookbehind:!0}});
--------------------------------------------------------------------------------
/docs/assets/prism-latex.min.js:
--------------------------------------------------------------------------------
1 | !function(a){var e=/\\(?:[^a-z()[\]]|[a-z*]+)/i,n={"equation-command":{pattern:e,alias:"regex"}};a.languages.latex={comment:/%.*/m,cdata:{pattern:/(\\begin\{((?:verbatim|lstlisting)\*?)\})[\s\S]*?(?=\\end\{\2\})/,lookbehind:!0},equation:[{pattern:/\$(?:\\[\s\S]|[^\\$])*\$|\\\([\s\S]*?\\\)|\\\[[\s\S]*?\\\]/,inside:n,alias:"string"},{pattern:/(\\begin\{((?:equation|math|eqnarray|align|multline|gather)\*?)\})[\s\S]*?(?=\\end\{\2\})/,lookbehind:!0,inside:n,alias:"string"}],keyword:{pattern:/(\\(?:begin|end|ref|cite|label|usepackage|documentclass)(?:\[[^\]]+\])?\{)[^}]+(?=\})/,lookbehind:!0},url:{pattern:/(\\url\{)[^}]+(?=\})/,lookbehind:!0},headline:{pattern:/(\\(?:part|chapter|section|subsection|frametitle|subsubsection|paragraph|subparagraph|subsubparagraph|subsubsubparagraph)\*?(?:\[[^\]]+\])?\{)[^}]+(?=\}(?:\[[^\]]+\])?)/,lookbehind:!0,alias:"class-name"},"function":{pattern:e,alias:"selector"},punctuation:/[[\]{}&]/}}(Prism);
--------------------------------------------------------------------------------
/docs/assets/prism-python.min.js:
--------------------------------------------------------------------------------
1 | Prism.languages.python={comment:{pattern:/(^|[^\\])#.*/,lookbehind:!0},"triple-quoted-string":{pattern:/("""|''')[\s\S]+?\1/,greedy:!0,alias:"string"},string:{pattern:/("|')(?:\\.|(?!\1)[^\\\r\n])*\1/,greedy:!0},"function":{pattern:/((?:^|\s)def[ \t]+)[a-zA-Z_]\w*(?=\s*\()/g,lookbehind:!0},"class-name":{pattern:/(\bclass\s+)\w+/i,lookbehind:!0},keyword:/\b(?:as|assert|async|await|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|pass|print|raise|return|try|while|with|yield)\b/,builtin:/\b(?:__import__|abs|all|any|apply|ascii|basestring|bin|bool|buffer|bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|divmod|enumerate|eval|execfile|file|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|intern|isinstance|issubclass|iter|len|list|locals|long|map|max|memoryview|min|next|object|oct|open|ord|pow|property|range|raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|vars|xrange|zip)\b/,"boolean":/\b(?:True|False|None)\b/,number:/(?:\b(?=\d)|\B(?=\.))(?:0[bo])?(?:(?:\d|0x[\da-f])[\da-f]*\.?\d*|\.\d+)(?:e[+-]?\d+)?j?\b/i,operator:/[-+%=]=?|!=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]|\b(?:or|and|not)\b/,punctuation:/[{}[\];(),.:]/};
--------------------------------------------------------------------------------
/docs/assets/search.min.js:
--------------------------------------------------------------------------------
1 | !function(){var n,e={};function t(n){var e={"&":"&","<":"<",">":">",'"':""","'":"'","/":"/"};return String(n).replace(/[&<>"'/]/g,function(n){return e[n]})}function o(t,o){n=Docsify;var a="auto"===t.paths,i=localStorage.getItem("docsify.search.expires")l.length&&(d=l.length);var h="..."+t(l).substring(i,d).replace(e,''+n+" ")+"...";s+=h}}),r)){var h={title:t(c),content:s,url:d};o.push(h)}},s=0;s\n\n'+n.title+" \n"+n.content+"
\n \n"}),i.classList.add("show"),r.classList.add("show"),i.innerHTML=s||''+a+"
"}function r(n,e){var t=e.router.parse().query.s;Docsify.dom.style("\n.sidebar {\n padding-top: 0;\n}\n\n.search {\n margin-bottom: 20px;\n padding: 6px;\n border-bottom: 1px solid #eee;\n}\n\n.search .input-wrap {\n display: flex;\n align-items: center;\n}\n\n.search .results-panel {\n display: none;\n}\n\n.search .results-panel.show {\n display: block;\n}\n\n.search input {\n outline: none;\n border: none;\n width: 100%;\n padding: 0 7px;\n line-height: 36px;\n font-size: 14px;\n}\n\n.search input::-webkit-search-decoration,\n.search input::-webkit-search-cancel-button,\n.search input {\n -webkit-appearance: none;\n -moz-appearance: none;\n appearance: none;\n}\n.search .clear-button {\n width: 36px;\n text-align: right;\n display: none;\n}\n\n.search .clear-button.show {\n display: block;\n}\n\n.search .clear-button svg {\n transform: scale(.5);\n}\n\n.search h2 {\n font-size: 17px;\n margin: 10px 0;\n}\n\n.search a {\n text-decoration: none;\n color: inherit;\n}\n\n.search .matching-post {\n border-bottom: 1px solid #eee;\n}\n\n.search .matching-post:last-child {\n border-bottom: 0;\n}\n\n.search p {\n font-size: 14px;\n overflow: hidden;\n text-overflow: ellipsis;\n display: -webkit-box;\n -webkit-line-clamp: 2;\n -webkit-box-orient: vertical;\n}\n\n.search p.empty {\n text-align: center;\n}"),function(n,e){void 0===e&&(e="");var t='\n
\n ',o=Docsify.dom.create("div",t),a=Docsify.dom.find("aside");Docsify.dom.toggleClass(o,"search"),Docsify.dom.before(a,o)}(0,t),function(){var n,e=Docsify.dom.find("div.search"),t=Docsify.dom.find(e,"input"),o=Docsify.dom.find(e,".input-wrap");Docsify.dom.on(e,"click",function(n){return"A"!==n.target.tagName&&n.stopPropagation()}),Docsify.dom.on(t,"input",function(e){clearTimeout(n),n=setTimeout(function(n){return i(e.target.value.trim())},100)}),Docsify.dom.on(o,"click",function(n){"INPUT"!==n.target.tagName&&(t.value="",i())})}(),t&&setTimeout(function(n){return i(t)},500)}function s(n,e){!function(n,e){var t=Docsify.dom.getNode('.search input[type="search"]');if(t)if("string"==typeof n)t.placeholder=n;else{var o=Object.keys(n).filter(function(n){return e.indexOf(n)>-1})[0];t.placeholder=n[o]}}(n.placeholder,e.route.path),function(n,e){if("string"==typeof n)a=n;else{var t=Object.keys(n).filter(function(n){return e.indexOf(n)>-1})[0];a=n[t]}}(n.noData,e.route.path)}var c={placeholder:"Type to search",noData:"No Results!",paths:"auto",depth:2,maxAge:864e5};$docsify.plugins=[].concat(function(n,e){var t=Docsify.util,a=e.config.search||c;Array.isArray(a)?c.paths=a:"object"==typeof a&&(c.paths=Array.isArray(a.paths)?a.paths:"auto",c.maxAge=t.isPrimitive(a.maxAge)?a.maxAge:c.maxAge,c.placeholder=a.placeholder||c.placeholder,c.noData=a.noData||c.noData,c.depth=a.depth||c.depth);var i="auto"===c.paths;n.mounted(function(n){r(0,e),!i&&o(c,e)}),n.doneEach(function(n){s(c,e),i&&o(c,e)})},$docsify.plugins)}();
2 |
--------------------------------------------------------------------------------
/docs/assets/toaster-js.css:
--------------------------------------------------------------------------------
1 | .toast {
2 | position: fixed;
3 | right: 0;
4 | bottom: 0;
5 | z-index: 10000;
6 | max-width: 100%;
7 | opacity: 0;
8 | transform: translate(75%, 0);
9 | pointer-events: none;
10 | -webkit-transition: all 0.3s ease, transform 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
11 | -moz-transition: all 0.3s ease, transform 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
12 | -o-transition: all 0.3s ease, transform 0.3s cubic-bezier(0.645, 0.045, 0.355, 1);
13 | transition: all 0.3s ease, transform 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); }
14 | .toast.displayed {
15 | opacity: 1;
16 | transform: translate(0, 0); }
17 | .toast > .body {
18 | position: relative;
19 | font-size: initial;
20 | margin: 0 1em 1em 1em;
21 | padding: 1.0em;
22 | word-wrap: break-word;
23 | border-radius: 3px;
24 | background: rgba(255, 255, 255, 0.9);
25 | pointer-events: all;
26 | box-shadow: 0 1px 2px rgba(0, 0, 0, 0.5); }
27 | .toast > .body.info {
28 | background: rgba(255, 245, 195, 0.9); }
29 | .toast > .body.warning {
30 | background: rgba(255, 183, 99, 0.9); }
31 | .toast > .body.warning > .icon {
32 | color: white; }
33 | .toast > .body.error, .toast .body.error a {
34 | color: white;
35 | text-shadow: 0 0 1px black;
36 | background: rgba(255, 86, 86, 0.9); }
37 | .toast > .body.done {
38 | background: rgba(147, 255, 157, 0.9); }
39 |
40 | .toast .little-cross {
41 | position: absolute;
42 | right: 0;
43 | top: -5px;
44 | display: inline-block;
45 | float: right;
46 | font-size: 15px;
47 | cursor: pointer;
48 | color: white;
49 | text-shadow: none;
50 | margin-top: 8px;
51 | margin-right: 4px;
52 | }
53 |
54 |
--------------------------------------------------------------------------------
/docs/assets/toaster-js.js:
--------------------------------------------------------------------------------
1 | !function(e,t){if("object"==typeof exports&&"object"==typeof module)module.exports=t();else if("function"==typeof define&&define.amd)define([],t);else{var o=t();for(var n in o)("object"==typeof exports?exports:e)[n]=o[n]}}(this,function(){return function(e){function t(n){if(o[n])return o[n].exports;var i=o[n]={i:n,l:!1,exports:{}};return e[n].call(i.exports,i,i.exports,t),i.l=!0,i.exports}var o={};return t.m=e,t.c=o,t.i=function(e){return e},t.d=function(e,o,n){t.o(e,o)||Object.defineProperty(e,o,{configurable:!1,enumerable:!0,get:n})},t.n=function(e){var o=e&&e.__esModule?function(){return e.default}:function(){return e};return t.d(o,"a",o),o},t.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},t.p="",t(t.s=1)}([function(e,t,o){"use strict";function n(){this.toasts=[],this.timeouts=new Map}Object.defineProperty(t,"__esModule",{value:!0}),t.toaster=new n,n.prototype.push=function(e,t){var o=this;requestAnimationFrame(function(){var n=e.attach(0);o.toasts.forEach(function(e){e.seek(n)}),o.toasts.push(e),o.timeouts.set(e,setTimeout(function(){return o.remove(e)},t))})},n.prototype.remove=function(e){if(this.timeouts.has(e)){clearTimeout(this.timeouts.get(e)),this.timeouts.delete(e);var t=this.toasts.indexOf(e),o=this.toasts.splice(t,1)[0],n=e.element.offsetHeight;o.detach(),this.toasts.slice(0,t).forEach(function(e){return e.seek(-n)})}},n.prototype.removeAll=function(){for(;this.toasts.length>0;)this.remove(this.toasts[0])}},function(e,t,o){"use strict";function n(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};Object.assign(a,e)}function i(){return r.toaster.removeAll()}function s(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"No text!",t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:s.TYPE_INFO,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:s.TIME_LONG,n=document.createElement("div"),i=document.createElement("div");n.className="toast",i.className="body "+t,n.appendChild(i),e instanceof Element?i.appendChild(e):i.textContent=""+e,this.element=n,this.position=0,r.toaster.push(this,o)}Object.defineProperty(t,"__esModule",{value:!0}),t.configureToasts=n,t.deleteAllToasts=i,t.Toast=s;var r=o(0);s.TYPE_INFO="info",s.TYPE_MESSAGE="message",s.TYPE_WARNING="warning",s.TYPE_ERROR="error",s.TYPE_DONE="done",s.TIME_SHORT=2e3,s.TIME_NORMAL=4e3,s.TIME_LONG=8e3;var a={deleteDelay:300,topOrigin:0};s.prototype.attach=function(e){var t=this;return this.position=e,this.updateVisualPosition(),document.body.appendChild(this.element),requestAnimationFrame(function(){t.element.classList.add("displayed")}),this.element.offsetHeight},s.prototype.seek=function(e){this.position+=e,this.updateVisualPosition()},s.prototype.updateVisualPosition=function(){var e=this;requestAnimationFrame(function(){e.element.style.bottom=-a.topOrigin+e.position+"px"})},s.prototype.detach=function(){var e=this,t=this;this.element.parentNode&&(requestAnimationFrame(function(){e.element.classList.remove("displayed"),e.element.classList.add("deleted")}),setTimeout(function(){requestAnimationFrame(function(){t.element&&t.element.parentNode&&t.element.parentNode.removeChild(t.element)})},a.deleteDelay))},s.prototype.delete=function(){r.toaster.remove(this)}}])});
--------------------------------------------------------------------------------
/docs/assets/vue.css:
--------------------------------------------------------------------------------
1 | @import url("https://fonts.googleapis.com/css?family=Roboto+Mono|Source+Sans+Pro:300,400,600");*{-webkit-font-smoothing:antialiased;-webkit-overflow-scrolling:touch;-webkit-tap-highlight-color:rgba(0,0,0,0);-webkit-text-size-adjust:none;-webkit-touch-callout:none;box-sizing:border-box}body:not(.ready){overflow:hidden}body:not(.ready) .app-nav,body:not(.ready)>nav,body:not(.ready) [data-cloak]{display:none}div#app{font-size:30px;font-weight:lighter;margin:40vh auto;text-align:center}div#app:empty:before{content:"Loading..."}.emoji{height:1.2rem;vertical-align:middle}.progress{background-color:var(--theme-color,#42b983);height:2px;left:0;position:fixed;right:0;top:0;transition:width .2s,opacity .4s;width:0;z-index:5}.search .search-keyword,.search a:hover{color:var(--theme-color,#42b983)}.search .search-keyword{font-style:normal;font-weight:700}body,html{height:100%}body{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;color:#34495e;font-family:Source Sans Pro,Helvetica Neue,Arial,sans-serif;font-size:15px;letter-spacing:0;margin:0;overflow-x:hidden}img{max-width:100%}a[disabled]{cursor:not-allowed;opacity:.6}kbd{border:1px solid #ccc;border-radius:3px;display:inline-block;font-size:12px!important;line-height:12px;margin-bottom:3px;padding:3px 5px;vertical-align:middle}.task-list-item{list-style-type:none}li input[type=checkbox]{margin:0 .2em .25em -1.6em;vertical-align:middle}.app-nav{margin:25px 60px 0 0;position:absolute;right:0;text-align:right;z-index:2}.app-nav.no-badge{margin-right:25px}.app-nav p{margin:0}.app-nav>a{margin:0 1rem;padding:5px 0}.app-nav li,.app-nav ul{display:inline-block;list-style:none;margin:0}.app-nav a{color:inherit;font-size:16px;text-decoration:none;transition:color .3s}.app-nav a.active,.app-nav a:hover{color:var(--theme-color,#42b983)}.app-nav a.active{border-bottom:2px solid var(--theme-color,#42b983)}.app-nav li{display:inline-block;margin:0 1rem;padding:5px 0;position:relative}.app-nav li ul{background-color:#fff;border:1px solid #ddd;border-bottom-color:#ccc;border-radius:4px;box-sizing:border-box;display:none;max-height:calc(100vh - 61px);overflow-y:auto;padding:10px 0;position:absolute;right:-15px;text-align:left;top:100%;white-space:nowrap}.app-nav li ul li{display:block;font-size:14px;line-height:1rem;margin:0;margin:8px 14px;white-space:nowrap}.app-nav li ul a{display:block;font-size:inherit;margin:0;padding:0}.app-nav li ul a.active{border-bottom:0}.app-nav li:hover ul{display:block}.github-corner{border-bottom:0;position:fixed;right:0;text-decoration:none;top:0;z-index:1}.github-corner:hover .octo-arm{animation:a .56s ease-in-out}.github-corner svg{color:#fff;fill:var(--theme-color,#42b983);height:80px;width:80px}main{display:block;position:relative;width:100vw;height:100%;z-index:0}main.hidden{display:none}.anchor{display:inline-block;text-decoration:none;transition:all .3s}.anchor span{color:#34495e}.anchor:hover{text-decoration:underline}.sidebar{border-right:1px solid rgba(0,0,0,.07);overflow-y:auto;padding:40px 0 0;position:absolute;top:0;bottom:0;left:0;transition:transform .25s ease-out;width:300px;z-index:3}.sidebar>h1{margin:0 auto 1rem;font-size:1.5rem;font-weight:300;text-align:center}.sidebar>h1 a{color:inherit;text-decoration:none}.sidebar>h1 .app-nav{display:block;position:static}.sidebar .sidebar-nav{line-height:2em;padding-bottom:40px}.sidebar li.collapse .app-sub-sidebar{display:none}.sidebar ul{margin:0;padding:0}.sidebar li>p{font-weight:700;margin:0}.sidebar ul,.sidebar ul li{list-style:none}.sidebar ul 
li a{border-bottom:none;display:block}.sidebar ul li ul{padding-left:20px}.sidebar::-webkit-scrollbar{width:4px}.sidebar::-webkit-scrollbar-thumb{background:transparent;border-radius:4px}.sidebar:hover::-webkit-scrollbar-thumb{background:hsla(0,0%,53%,.4)}.sidebar:hover::-webkit-scrollbar-track{background:hsla(0,0%,53%,.1)}.sidebar-toggle{background-color:transparent;background-color:hsla(0,0%,100%,.8);border:0;outline:none;padding:10px;position:absolute;bottom:0;left:0;text-align:center;transition:opacity .3s;width:284px;z-index:4}.sidebar-toggle .sidebar-toggle-button:hover{opacity:.4}.sidebar-toggle span{background-color:var(--theme-color,#42b983);display:block;margin-bottom:4px;width:16px;height:2px}body.sticky .sidebar,body.sticky .sidebar-toggle{position:fixed}.content{padding-top:60px;position:absolute;top:0;right:0;bottom:0;left:300px;transition:left .25s ease}.markdown-section{margin:0 auto;max-width:800px;padding:30px 15px 40px;position:relative}.markdown-section>*{box-sizing:border-box;font-size:inherit}.markdown-section>:first-child{margin-top:0!important}.markdown-section hr{border:none;border-bottom:1px solid #eee;margin:2em 0}.markdown-section iframe{border:1px solid #eee}.markdown-section table{border-collapse:collapse;border-spacing:0;display:block;margin-bottom:1rem;overflow:auto;width:100%}.markdown-section th{font-weight:700}.markdown-section td,.markdown-section th{border:1px solid #ddd;padding:6px 13px}.markdown-section tr{border-top:1px solid #ccc}.markdown-section p.tip,.markdown-section tr:nth-child(2n){background-color:#f8f8f8}.markdown-section p.tip{border-bottom-right-radius:2px;border-left:4px solid #f66;border-top-right-radius:2px;margin:2em 0;padding:12px 24px 12px 30px;position:relative}.markdown-section p.tip:before{background-color:#f66;border-radius:100%;color:#fff;content:"!";font-family:Dosis,Source Sans Pro,Helvetica Neue,Arial,sans-serif;font-size:14px;font-weight:700;left:-12px;line-height:20px;position:absolute;height:20px;width:20px;text-align:center;top:14px}.markdown-section p.tip code{background-color:#efefef}.markdown-section p.tip em{color:#34495e}.markdown-section p.warn{background:rgba(66,185,131,.1);border-radius:2px;padding:1rem}body.close .sidebar{transform:translateX(-300px)}body.close .sidebar-toggle{width:auto}body.close .content{left:0}@media print{.app-nav,.github-corner,.sidebar,.sidebar-toggle{display:none}}@media screen and (max-width:768px){.github-corner,.sidebar,.sidebar-toggle{position:fixed}.app-nav{margin-top:16px}.app-nav li ul{top:30px}main{height:auto;overflow-x:hidden}.sidebar{left:-300px;transition:transform .25s ease-out}.content{left:0;max-width:100vw;position:static;padding-top:20px;transition:transform .25s ease}.app-nav,.github-corner{transition:transform .25s ease-out}.sidebar-toggle{background-color:transparent;width:auto;padding:30px 30px 10px 10px}body.close .sidebar{transform:translateX(300px)}body.close .sidebar-toggle{background-color:hsla(0,0%,100%,.8);transition:background-color 1s;width:284px;padding:10px}body.close .content{transform:translateX(300px)}body.close .app-nav,body.close .github-corner{display:none}.github-corner:hover .octo-arm{animation:none}.github-corner .octo-arm{animation:a .56s ease-in-out}}@keyframes 
a{0%,to{transform:rotate(0)}20%,60%{transform:rotate(-25deg)}40%,80%{transform:rotate(10deg)}}section.cover{-ms-flex-align:center;align-items:center;background-position:50%;background-repeat:no-repeat;background-size:cover;height:100vh;display:none}section.cover.show{display:-ms-flexbox;display:flex}section.cover.has-mask .mask{background-color:#fff;opacity:.8;position:absolute;top:0;height:100%;width:100%}section.cover .cover-main{-ms-flex:1;flex:1;margin:-20px 16px 0;text-align:center;z-index:1}section.cover a{color:inherit}section.cover a,section.cover a:hover{text-decoration:none}section.cover p{line-height:1.5rem;margin:1em 0}section.cover h1{color:inherit;font-size:2.5rem;font-weight:300;margin:.625rem 0 2.5rem;position:relative;text-align:center}section.cover h1 a{display:block}section.cover h1 small{bottom:-.4375rem;font-size:1rem;position:absolute}section.cover blockquote{font-size:1.5rem;text-align:center}section.cover ul{line-height:1.8;list-style-type:none;margin:1em auto;max-width:500px;padding:0}section.cover .cover-main>p:last-child a{border:1px solid var(--theme-color,#42b983);border-radius:2rem;box-sizing:border-box;color:var(--theme-color,#42b983);display:inline-block;font-size:1.05rem;letter-spacing:.1rem;margin:.5rem 1rem;padding:.75em 2rem;text-decoration:none;transition:all .15s ease}section.cover .cover-main>p:last-child a:last-child{background-color:var(--theme-color,#42b983);color:#fff}section.cover .cover-main>p:last-child a:last-child:hover{color:inherit;opacity:.8}section.cover .cover-main>p:last-child a:hover{color:inherit}section.cover blockquote>p>a{border-bottom:2px solid var(--theme-color,#42b983);transition:color .3s}section.cover blockquote>p>a:hover{color:var(--theme-color,#42b983)}.sidebar,body{background-color:#fff}.sidebar{color:#364149}.sidebar li{margin:6px 0 6px 15px}.sidebar ul li a{color:#505d6b;font-size:14px;font-weight:400;overflow:hidden;text-decoration:none;text-overflow:ellipsis;white-space:nowrap}.sidebar ul li a:hover{text-decoration:underline}.sidebar ul li ul{padding:0}.sidebar ul li.active>a{border-right:2px solid;color:var(--theme-color,#42b983);font-weight:600}.app-sub-sidebar li:before{content:"-";padding-right:4px;float:left}.markdown-section h1,.markdown-section h2,.markdown-section h3,.markdown-section h4,.markdown-section strong{color:#2c3e50;font-weight:600}.markdown-section a{color:var(--theme-color,#42b983);font-weight:600}.markdown-section h1{font-size:2rem;margin:0 0 1rem}.markdown-section h2{font-size:1.75rem;margin:45px 0 .8rem}.markdown-section h3{font-size:1.5rem;margin:40px 0 .6rem}.markdown-section h4{font-size:1.25rem}.markdown-section h5{font-size:1rem}.markdown-section h6{color:#777;font-size:1rem}.markdown-section figure,.markdown-section p{margin:1.2em 0}.markdown-section ol,.markdown-section p,.markdown-section ul{line-height:1.6rem;word-spacing:.05rem}.markdown-section ol,.markdown-section ul{padding-left:1.5rem}.markdown-section blockquote{border-left:4px solid var(--theme-color,#42b983);color:#858585;margin:2em 0;padding-left:20px}.markdown-section blockquote p{font-weight:600;margin-left:0}.markdown-section iframe{margin:1em 0}.markdown-section em{color:#7f8c8d}.markdown-section code{border-radius:2px;color:#e96900;font-size:.8rem;margin:0 2px;padding:3px 5px;white-space:pre-wrap}.markdown-section code,.markdown-section pre{background-color:#f8f8f8;font-family:Roboto Mono,Monaco,courier,monospace}.markdown-section 
pre{-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;line-height:1.5rem;margin:1.2em 0;overflow:auto;padding:0 1.4rem;position:relative;word-wrap:normal}.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#8e908c}.token.namespace{opacity:.7}.token.boolean,.token.number{color:#c76b29}.token.punctuation{color:#525252}.token.property{color:#c08b30}.token.tag{color:#2973b7}.token.string{color:var(--theme-color,#42b983)}.token.selector{color:#6679cc}.token.attr-name{color:#2973b7}.language-css .token.string,.style .token.string,.token.entity,.token.url{color:#22a2c9}.token.attr-value,.token.control,.token.directive,.token.unit{color:var(--theme-color,#42b983)}.token.keyword{color:#e96900}.token.atrule,.token.regex,.token.statement{color:#22a2c9}.token.placeholder,.token.variable{color:#3d8fd1}.token.deleted{text-decoration:line-through}.token.inserted{border-bottom:1px dotted #202746;text-decoration:none}.token.italic{font-style:italic}.token.bold,.token.important{font-weight:700}.token.important{color:#c94922}.token.entity{cursor:help}.markdown-section pre>code{-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;background-color:#f8f8f8;border-radius:2px;color:#525252;display:block;font-family:Roboto Mono,Monaco,courier,monospace;font-size:.8rem;line-height:inherit;margin:0 2px;max-width:inherit;overflow:inherit;padding:2.2em 5px;white-space:inherit}.markdown-section code:after,.markdown-section code:before{letter-spacing:.05rem}code .token{-moz-osx-font-smoothing:initial;-webkit-font-smoothing:initial;min-height:1.5rem}pre:after{color:#ccc;content:attr(data-lang);font-size:.6rem;font-weight:600;height:15px;line-height:15px;padding:5px 10px 0;position:absolute;right:0;text-align:right;top:0}
--------------------------------------------------------------------------------
/docs/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/favicon.ico
--------------------------------------------------------------------------------
/docs/guide/_sidebar.md:
--------------------------------------------------------------------------------
1 | - [**User Guide**](/guide/user-guide/overview)
2 | - [Overview](/guide/user-guide/overview)
3 | - [Quick Start](/guide/user-guide/quickstart)
4 | - [Specification](/guide/user-guide/spec)
5 | - [Performance](/guide/user-guide/performance)
6 | - [Get Involved](/guide/user-guide/get_involved)
7 |
8 | - [**Serving Models**](/guide/servers/overview.md)
9 | - [Overview](/guide/servers/overview.md)
10 | - [Installation](/guide/servers/installation)
11 | - [Supported Model Formats](/guide/servers/converting)
12 | - [Running GraphPipe Servers](/guide/servers/serving)
13 | - [`graphpipe-tf` Reference](/guide/servers/graphpipe-tf)
14 | - [`graphpipe-onnx` Reference](/guide/servers/graphpipe-onnx)
15 |
16 | - [**Using GraphPipe Clients**](/guide/clients/overview.md)
17 | - [Overview](/guide/clients/overview.md)
18 | - [Installation](/guide/clients/installation.md)
19 | - [Usage](/guide/clients/usage)
20 |
21 | - [**Examples**](/guide/examples/overview.md)
22 |
--------------------------------------------------------------------------------
/docs/guide/clients/_media/client_interaction.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/clients/_media/client_interaction.png
--------------------------------------------------------------------------------
/docs/guide/clients/installation.md:
--------------------------------------------------------------------------------
1 | # Client Installation
2 |
3 | Before making inference requests against a GraphPipe server, you need to
4 | install the appropriate GraphPipe client libraries for your language of choice.
5 |
6 | Don't see your favorite language? Please open an
7 | [issue](https://github.com/oracle/graphpipe/issues/) so that we can gauge which
8 | languages to support next!
9 |
10 |
11 | ::: tabs install
12 |
13 | - python
14 |
15 | ```bash
16 | pip install graphpipe
17 | ```
18 |
19 | - go
20 |
21 | ```bash
22 | go get github.com/oracle/graphpipe-go
23 | cd $GOPATH/src/github.com/oracle/graphpipe-go
24 | make install-govendor
25 | make deps
26 | ```
27 |
28 | - java
29 |
30 |
72 | Coming Soon!
73 |
74 | :::
75 |
76 | Once you've installed your client, continue to the
77 | [next section](guide/clients/usage.md) to see an example of how
78 | to make a basic request.
79 |
--------------------------------------------------------------------------------
/docs/guide/clients/overview.md:
--------------------------------------------------------------------------------
1 | # Overview
2 |
3 | In order to make it as easy as possible to integrate GraphPipe into your
4 | application, we provide efficient client implementations for major programming
5 | languages.
6 |
7 | In general, a machine learning client's workflow looks something like this:
8 |
9 | 
10 |
11 | It is important for each of the steps in the dotted box to be implemented as
12 | efficiently as possible. GraphPipe is implemented with this efficiency in
13 | mind.
14 |
15 | At present, we provide client support for python, go, and java.
16 |
--------------------------------------------------------------------------------
/docs/guide/clients/usage.md:
--------------------------------------------------------------------------------
1 | # Usage
2 |
3 | Here, we will briefly explore how to access a GraphPipe server
4 | from various clients. To keep things simple, we will begin by launching
5 | [`graphpipe-echo`](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-echo),
6 | a server that simply echoes back any GraphPipe Tensor that is sent to it.
7 |
8 | To start the server, use the following docker command:
9 |
10 | ```bash
11 | docker run -it --rm \
12 | -p 9000:9000 \
13 | sleepsonthefloor/graphpipe-echo \
14 | --listen=0.0.0.0:9000
15 | ```
16 |
17 | The server is now accessible on port 9000 of your local machine. You can make
18 | a request against it using any of the following languages:
19 |
20 | ::: tabs remoteinf
21 |
22 | - python
23 |
24 | ```python
25 | from graphpipe import remote
26 | import numpy as np
27 | request = np.array([[0.0, 1.0], [2.0, 3.0]])
28 | result = remote.execute("http://127.0.0.1:9000", request)
29 | print(result)
30 |
31 | ```
32 |
33 | - go
34 |
35 | ```go
36 | package main
37 |
38 | import (
39 | "fmt"
40 | "net/http"
41 |
42 | graphpipe "github.com/oracle/graphpipe-go"
43 | )
44 |
45 | func main() {
46 | uri := "http://127.0.0.1:9000"
47 | request := [][]float32{{0.0, 1.0}, {2.0, 3.0}}
48 | result, err := graphpipe.Remote(uri, request)
49 | if err != nil {
50 | panic(err)
51 | }
52 | fmt.Println(result)
53 | }
54 | ```
55 |
56 | - java
57 |
72 | Coming Soon!
73 |
74 | :::
75 |
76 |
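GraphPipe servers can also describe the inputs and outputs they expose. Below is a
minimal python sketch, mirroring the metadata example from the user guide, pointed
at whatever server you have listening on port 9000:

```python
from graphpipe import remote

metadata = remote.metadata("http://127.0.0.1:9000")
print(metadata.Outputs)  # describes the model's outputs
```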
--------------------------------------------------------------------------------
/docs/guide/examples/overview.md:
--------------------------------------------------------------------------------
1 | # Examples
2 | Here is a collection of links to example code related to GraphPipe:
3 |
4 | * Basic Serving Examples
5 | * [Simple Golang Identity Server](https://github.com/oracle/graphpipe-go)
6 | * [Simple Python Identity Server](https://github.com/oracle/graphpipe-py/tree/master/examples/identity_example)
7 | * [Simple Python SKLearn Server](https://github.com/oracle/graphpipe-py/tree/master/examples/sklearn_example)
8 | * [Simple Python Tensorflow Model Server](https://github.com/oracle/graphpipe-tf-py/blob/master/examples/model_server.py)
9 | * Production GraphPipe Server Source Code
10 | * [graphpipe-tf Source Code](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-tf)
11 | * [graphpipe-onnx Source Code](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-onnx)
12 | * Model Conversion Examples
13 | * [Keras to Graphdef Conversion Script](https://github.com/oracle/graphpipe-tf-py/blob/master/examples/convert.py)
14 | * [Tensorflow Graph to GraphDef](https://github.com/oracle/graphpipe-tf-py/blob/master/examples/tf_graph.py)
15 | * [Saving Pytorch Model as ONNX](https://pytorch.org/tutorials/advanced/super_resolution_with_caffe2.html)
16 | * End-to-end examples
17 | * [Jupyter Notebook: serving and querying VGG with GraphPipe](https://github.com/oracle/graphpipe-tf-py/blob/master/examples/RemoteModelWithGraphPipe.ipynb)
18 | * [Python Complete client/server example](https://github.com/oracle/graphpipe-tf-py/blob/master/examples/simple_request.py)
19 |
--------------------------------------------------------------------------------
/docs/guide/servers/_media/server_flow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/servers/_media/server_flow.png
--------------------------------------------------------------------------------
/docs/guide/servers/converting.md:
--------------------------------------------------------------------------------
1 | # Supported Model Formats
2 |
3 | In order to serve models using our example GraphPipe servers, your model must
4 | be in a supported format.
5 |
6 | ## Tensorflow Model Formats
7 |
8 | If you are using Tensorflow, we support the two most common model formats:
9 |
10 | * SavedModel format - the tensorflow-serving directory format ([see
11 | here](https://www.tensorflow.org/serving/serving_basic))
12 | * GraphDef (.pb) format. This is simply the graphdef serialized as a protobuf.
13 | If you are trying to productionize a Keras model, this is the easiest format
14 | to generate.
15 |
16 | ### Keras Model Conversion
17 |
18 | It is very common for data scientists to first develop models using Keras
19 | before attempting to deploy them. The graphpipe-tf-py repository provides a
20 | simple tool to convert your Keras .h5 model into GraphDef .pb format. It has
21 | been packaged into a docker container, which you can use like this:
22 |
23 | ```bash
24 | curl https://oracle.github.io/graphpipe/models/squeezenet.h5 > squeezenet.h5
25 | docker run -v $PWD:/tmp/ sleepsonthefloor/graphpipe-h5topb:latest squeezenet.h5 converted_squeezenet.pb
26 | ```
27 |
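If you would rather not use the container, what it does is essentially the
standard Keras-to-frozen-GraphDef export. The following is a rough sketch of
that process for a TensorFlow 1.x / Keras environment; it is illustrative only
and is not the exact code used by the conversion tool:

```python
# Rough sketch: freeze a Keras .h5 model into a GraphDef .pb
# (assumes TensorFlow 1.x and a model saved with model.save).
import tensorflow as tf
from keras import backend as K
from keras.models import load_model

K.set_learning_phase(0)  # inference mode
model = load_model("squeezenet.h5")
session = K.get_session()
output_names = [out.op.name for out in model.outputs]

# Convert variables to constants so the graph is self-contained
frozen = tf.graph_util.convert_variables_to_constants(
    session, session.graph_def, output_names)
with tf.gfile.GFile("converted_squeezenet.pb", "wb") as f:
    f.write(frozen.SerializeToString())
```
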
28 | ## Caffe2/ONNX/PyTorch Model Formats
29 |
30 | If you are using Caffe2 or PyTorch, you will likely be using one of the
31 | following formats:
32 |
33 | * ONNX (.onnx) + value_inputs.json
34 | * Caffe2 NetDef + value_inputs.json - in this case, your model has three files:
35 | init_net.pb, predict_net.pb, and value_inputs.json
36 |
37 | ### PyTorch Model Conversion
38 |
39 | If you are using PyTorch, see [this
40 | guide](https://pytorch.org/tutorials/advanced/super_resolution_with_caffe2.html)
41 | on how to convert your model into ONNX or Caffe2 format.
42 |
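As a quick illustration, exporting a PyTorch model to ONNX usually comes down to
a single `torch.onnx.export` call. The sketch below uses a trivial stand-in
model and an example input shape; substitute your own trained module, and note
that `graphpipe-onnx` additionally needs a value_inputs.json describing the
model's inputs:

```python
# Minimal ONNX export sketch; the model and input shape here are
# placeholders, not part of the GraphPipe tooling itself.
import torch
import torch.nn as nn

model = nn.Sequential(                     # stand-in for your trained model
    nn.Conv2d(3, 8, kernel_size=3),
    nn.ReLU(),
    nn.AdaptiveAvgPool2d(1),
    nn.Flatten(),
)
model.eval()

dummy_input = torch.randn(1, 3, 227, 227)  # must match your model's real input shape
torch.onnx.export(model, dummy_input, "model.onnx")
```
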
43 | ### Caffe2 Model Conversion
44 |
45 | You can find some tips on saving Caffe2 models to NetDef format (init_net.pb +
46 | predict_net.pb + value_inputs.json)
47 | [here](https://github.com/caffe2/caffe2/issues/642).
48 |
--------------------------------------------------------------------------------
/docs/guide/servers/graphpipe-onnx.md:
--------------------------------------------------------------------------------
1 | # graphpipe-onnx Reference
2 |
3 | ## Overview ##
4 |
5 | `graphpipe-onnx` is a simple ONNX/Caffe2 model server written in Go. It
6 | supports both the ONNX model format and the Caffe2 NetDef format.
7 |
8 | ## Building From Source ##
9 |
10 | Instructions for building `graphpipe-onnx` can be found on
11 | [github](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-onnx).
12 |
13 | ## Server Options ##
14 |
15 | The `graphpipe-onnx` binary has the following runtime flags:
16 |
17 | ```bash
18 | Required Flags for ONNX Models:
19 | -m, --model string ONNX model to load. Accepts local file or http(s) url.
20 | --value-inputs string value_inputs.json for the model. Accepts local file or http(s) url.
21 |
22 | Required Flags for Caffe2 Models:
23 | --init-net string init_net file to load
24 | --predict-net string predict_net file to load. Accepts local file or http(s) url.
25 | --value-inputs string value_inputs.json for the model. Accepts local file or http(s) url.
26 |
27 | Optional Flags:
28 | --cache enable results caching
29 | --cache-dir string directory for local cache state (default "~/.graphpipe")
30 | --disable-cuda disable Cuda
31 | --engine-count int number of caffe2 graph engines to create (default 1)
32 | -h, --help help for graphpipe-caffe2
33 | -l, --listen string listen string (default "127.0.0.1:9000")
34 | --profile string profile and write profiling output to this file
35 | -v, --verbose enable verbose output
36 | -V, --version show version
37 | ```
38 |
39 | Additionally, you may use the following environment variables:
40 |
41 | ```bash
42 | GP_OUTPUTS comma separated default outputs
43 | GP_INPUTS comma separated default inputs
44 | GP_MODEL ONNX model to load. Accepts local file or http(s) url.
45 | GP_CACHE enable results caching
46 | GP_INIT_NET init_net file to load. Accepts local file or http(s) url.
47 | GP_PREDICT_NET predict_net file to load. Accepts local file or http(s) url.
48 | GP_VALUE_INPUTS value_inputs.json file to load. Accepts local file or http(s) url.
49 | ```
50 |
--------------------------------------------------------------------------------
/docs/guide/servers/graphpipe-tf.md:
--------------------------------------------------------------------------------
1 | # graphpipe-tf Reference
2 |
3 | ## Overview ##
4 |
5 | `graphpipe-tf` is a simple tensorflow model server written in Go. It
6 | supports both the tensorflow SavedModel format and the GraphDef format.
7 |
8 | ## Building From Source ##
9 |
10 | Instructions for building `graphpipe-tf` can be found on
11 | [github](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-tf).
12 |
13 | ## Server Options ##
14 |
15 | The `graphpipe-tf` binary has the following runtime flags:
16 |
17 | ```bash
18 | Required Flags:
19 | -m, --model string tensorflow model to load. Accepts local file or http(s) url.
20 |
21 | Optional Flags:
22 | -c, --cache enable results caching
23 | -d, --cache-dir string directory for local cache state (default "~/.graphpipe")
24 | -h, --help help for graphpipe-tf
25 | -i, --inputs string comma separated default inputs
26 | -l, --listen string listen string (default "127.0.0.1:9000")
27 | -o, --outputs string comma separated default outputs
28 | -v, --verbose verbose output
29 | -V, --version show version
30 | ```
31 |
32 | Additionally, you may use the following environment variables:
33 |
34 | ```bash
35 | GP_OUTPUTS comma separated default outputs
36 | GP_INPUTS comma separated default inputs
37 | GP_MODEL tensorflow model to load. Accepts local file or http(s) url.
38 | GP_CACHE enable results caching
39 | ```
40 |
--------------------------------------------------------------------------------
/docs/guide/servers/installation.md:
--------------------------------------------------------------------------------
1 | # Server installation
2 | While we provide full source code for our servers so that you can build your
3 | own binaries for your preferred platform, the easiest way to get started is
4 | with our published Docker images. Note that while "official" images are coming
5 | soon, the following community images are available.
6 |
7 | ## Installation of CPU server
8 |
9 | ::: tabs installcpu
10 |
11 | - Tensorflow
12 |
13 | ```bash
14 | docker pull sleepsonthefloor/graphpipe-tf:cpu
15 | ```
16 |
17 | - ONNX/Caffe2
18 |
19 | ```bash
20 | docker pull sleepsonthefloor/graphpipe-onnx:cpu
21 | ```
22 |
23 | - Tensorflow + Oracle Linux
24 |
25 | ```bash
26 | docker pull sleepsonthefloor/graphpipe-tf:oraclelinux-cpu
27 | ```
28 |
29 | - ONNX/Caffe2 + Oracle Linux
30 |
31 | ```bash
32 | docker pull sleepsonthefloor/graphpipe-onnx:oraclelinux-cpu
33 | ```
34 | :::
35 |
36 |
37 | ## Installation of GPU server
38 | ::: tabs installgpu
39 |
40 | - Tensorflow
41 |
42 | ```bash
43 | docker pull sleepsonthefloor/graphpipe-tf:gpu
44 | ```
45 |
46 | - ONNX/Caffe2
47 |
48 | ```bash
49 | docker pull sleepsonthefloor/graphpipe-onnx:gpu
50 | ```
51 |
52 | :::
53 |
--------------------------------------------------------------------------------
/docs/guide/servers/overview.md:
--------------------------------------------------------------------------------
1 | # Serving Models with GraphPipe
2 |
3 | In addition to an efficient protocol, GraphPipe offers reference model servers
4 | written in go to simplify the process of deploying machine learning models. In
5 | our experience, converting existing models into a common format can involve
6 | lots of pitfalls. We therefore provide model servers that can natively run the
7 | most common ML model formats.
8 |
9 | The servers can be found in the cmd subdirectory of the
10 | [graphpipe-go](https://github.com/oracle/graphpipe-go)
11 | repository:
12 |
13 | * [`graphpipe-tf`](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-tf)
14 | uses libtensorflow to serve tensorflow models.
15 | * [`graphpipe-onnx`](https://github.com/oracle/graphpipe-go/tree/master/cmd/graphpipe-onnx)
16 | uses libcaffe2 to serve caffe2 and onnx models.
17 |
18 | ## Goals
19 |
20 | In designing the model servers we aimed to achieve the following:
21 |
22 | * Excellent performance
23 | * Simple, documented build process
24 | * Flexible code that is easy to work with
25 | * Support for the most common ML Frameworks
26 | * Optimized cpu and gpu support
27 |
28 | ## Overview of server functionality
29 | The basic mechanics of what our reference servers do are fairly straightforward.
30 |
31 | 
32 |
33 | In order to minimize overhead, we have attempted to make the steps illustrated
34 | above as efficient as possible. For example, because we use flatbuffers as the
35 | network transport format, de-serialization is effectively a pointer cast.
36 |
37 | ## Language Choice
38 |
39 | While go isn't a common language in the ML space, it is ideal for creating
40 | efficient servers. It is also an explicit and friendly language without many
41 | surprises, which makes it an excellent language for collaboration. These
42 | qualities make it an ideal choice for GraphPipe's model servers.
43 |
44 | The one drawback of go is that because existing ML Frameworks are written
45 | in C/C++, there is a slight overhead when context switching from go code into
46 | the framework backend. In practice, however, we have seen very little
47 | real-world performance impact due to this. During performance testing, our go
48 | code achieved virtually the same performance as an optimized build of
49 | tensorflow-serving, which is written in pure C++.
50 |
51 | ## CUDA GPU Acceleration
52 | For each of our reference servers, we provide docker images and source code for
53 | building binaries optimized to run with CUDA acceleration.
54 |
55 | ## MKL CPU Acceleration
56 | In order to maximize compatibility between our various server builds, and
57 | also to maximize performance, the CPU builds of our servers are compiled with
58 | [MKL](https://software.intel.com/en-us/mkl). When using Tensorflow, MKL
59 | provides the added benefit of supporting channels_first dimension ordering for
60 | convolutional operations, which is generally the preferred ordering when using GPU.
61 | Thus, when using the default configurations of GraphPipe servers, CPU and GPU
62 | versions can serve the same models and accept the same inputs.
63 |
64 | Of course, it could be that your application requires different optimizations
65 | or channel ordering; if this is the case, it should be straightforward to tweak
66 | the source code to build a custom version for your needs.
67 |
68 | ## Caching
69 |
70 | One of the advantages of a language like go is easy access to simple and
71 | powerful libraries. We were therefore able to add some shared code to create a
72 | row-level cache for both of the model servers. This optional row-level cache
73 | is based on [boltdb](https://github.com/coreos/bbolt) and can be very useful
74 | for models that receive many requests for the same data. The cache can often
75 | return data in a matter of microseconds, whereas a large model can take
76 | hundreds of milliseconds to process a request.
77 |
--------------------------------------------------------------------------------
/docs/guide/servers/serving.md:
--------------------------------------------------------------------------------
1 | # Running GraphPipe Servers
2 |
3 | This page provides an overview of how to serve models using our reference
4 | model servers, `graphpipe-tf` and `graphpipe-onnx`.
5 |
6 | ## Remote vs. Local Models
7 |
8 | Unless you are using the Tensorflow SavedModel format, where the model is
9 | stored in a directory instead of an individual file, you have the option to
10 | load your models over a remote http/https url. Here is an example of serving a
11 | model that is accessible from a remote url:
12 |
13 | ```bash
14 | docker run -it --rm \
15 | -p 9000:9000 \
16 | sleepsonthefloor/graphpipe-tf:cpu \
17 | --model=https://oracle.github.io/graphpipe/models/squeezenet.pb \
18 | --listen=0.0.0.0:9000
19 | ```
20 |
21 | Because our recommended configuration runs the server from a docker image, you
22 | will need to use a volume mount in order to serve local models. Here is an
23 | example of serving a local .pb model:
24 |
25 | ```bash
26 | curl https://oracle.github.io/graphpipe/models/squeezenet.pb > squeezenet.pb
27 | docker run -it --rm \
28 | -v "$PWD:/models/" \
29 | -p 9000:9000 \
30 | sleepsonthefloor/graphpipe-tf:cpu \
31 | --model=/models/squeezenet.pb \
32 | --listen=0.0.0.0:9000
33 | ```
34 |
35 | ## Serving Different Model Types
36 |
37 | ::: tabs cpu
38 |
39 | - Tensorflow
40 |
41 | ```bash
42 | docker run -it --rm \
43 | -e https_proxy=${https_proxy} \
44 | -p 9000:9000 \
45 | sleepsonthefloor/graphpipe-tf:cpu \
46 | --model=https://oracle.github.io/graphpipe/models/squeezenet.pb \
47 | --listen=0.0.0.0:9000
48 | ```
49 |
50 | - ONNX
51 |
52 | ```bash
53 | docker run -it --rm \
54 | -e https_proxy=${https_proxy} \
55 | -p 9000:9000 \
56 | sleepsonthefloor/graphpipe-onnx:cpu \
57 | --value-inputs=https://oracle.github.io/graphpipe/models/squeezenet.value_inputs.json \
58 | --model=https://oracle.github.io/graphpipe/models/squeezenet.onnx \
59 | --listen=0.0.0.0:9000
60 | ```
61 |
62 | - Caffe2
63 |
64 | ```bash
65 | docker run -it --rm \
66 | -e https_proxy=${https_proxy} \
67 | -p 9000:9000 \
68 | graphpipe-onnx:cpu \
69 | --value-inputs=https://oracle.github.io/graphpipe/models/squeezenet.value_inputs_caffe2.json \
70 | --init-net=https://oracle.github.io/graphpipe/models/squeezenet.init_net.pb \
71 | --predict-net=https://oracle.github.io/graphpipe/models/squeezenet.predict_net.pb \
72 | --listen=0.0.0.0:9000
73 | ```
74 |
75 | :::
76 |
77 | ## GPU Acceleration
78 |
79 | For those wishing to deploy on GPU-accelerated CUDA hardware, we provide GPU
80 | builds for each of our servers. In order to make this work correctly, you need
81 | to install [nvidia-docker](https://github.com/NVIDIA/nvidia-docker), as well as
82 | CUDA 9.0 and cuDNN 7.
83 |
84 | ::: tabs gpu
85 |
86 | - Tensorflow
87 | ```bash
88 | nvidia-docker run -it --rm \
89 | -e https_proxy=${https_proxy} \
90 | -p 9000:9000 \
91 | sleepsonthefloor/graphpipe-tf:gpu \
92 | --model=https://oracle.github.io/graphpipe/models/squeezenet.pb \
93 | --listen=0.0.0.0:9000
94 | ```
95 |
96 | - ONNX
97 | ```bash
98 | nvidia-docker run -it --rm \
99 | -e https_proxy=${https_proxy} \
100 | -p 9000:9000 \
101 | sleepsonthefloor/graphpipe-onnx:gpu \
102 | --value-inputs=https://oracle.github.io/graphpipe/models/squeezenet.value_inputs.json \
103 | --model=https://oracle.github.io/graphpipe/models/squeezenet.onnx \
104 | --listen=0.0.0.0:9000
105 | ```
106 |
107 | - Caffe2
108 | ```bash
109 | nvidia-docker run -it --rm \
110 | -e https_proxy=${https_proxy} \
111 | -p 9000:9000 \
112 | sleepsonthefloor/graphpipe-onnx:gpu \
113 | --value-inputs=https://oracle.github.io/graphpipe/models/squeezenet.value_inputs_caffe2.json \
114 | --init-net=https://oracle.github.io/graphpipe/models/squeezenet.init_net.pb \
115 | --predict-net=https://oracle.github.io/graphpipe/models/squeezenet.predict_net.pb \
116 | --listen=0.0.0.0:9000
117 | ```
118 |
119 | :::
120 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/_examples/_metadata_1.py:
--------------------------------------------------------------------------------
1 | from graphpipe import remote
2 |
3 | url = "http://localhost:9000"
4 | metadata = remote.metadata(url)
5 | print(metadata.Outputs)
6 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/_examples/_squeezenet_req.python:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | from PIL import Image, ImageOps
3 | import numpy as np
4 | import requests
5 |
6 | from graphpipe import remote
7 |
8 | data = np.array(Image.open("mug227.png"))
9 | data = data.reshape([1] + list(data.shape))
10 | data = np.rollaxis(data, 3, 1).astype(np.float32) # channels first
11 | print(data.shape)
12 |
13 | pred = remote.execute("http://127.0.0.1:9000", data)
14 | print("Expected 504 (Coffee mug), got: %s", np.argmax(pred, axis=1))
15 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/gparch.001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/gparch.001.jpg
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/gparch.002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/gparch.002.jpg
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/logo.png
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/mug.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/mug.png
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/mug227.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/mug227.png
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/perf.001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/perf.001.jpg
--------------------------------------------------------------------------------
/docs/guide/user-guide/_media/perf.002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/guide/user-guide/_media/perf.002.jpg
--------------------------------------------------------------------------------
/docs/guide/user-guide/get_involved.md:
--------------------------------------------------------------------------------
1 | # Getting Involved
2 |
3 | ## Repositories
4 |
5 | The GraphPipe code is divided into a few different projects:
6 |
7 | * [graphpipe](https://github.com/oracle/graphpipe) contains the flatbuffer
8 | spec and the documentation.
9 | * [graphpipe-go](https://github.com/oracle/graphpipe-go) contains go client
10 | and server code, as well as the reference servers.
11 | * [graphpipe-py](https://github.com/oracle/graphpipe-py) contains python
12 | client and server code.
13 | * [graphpipe-tf-py](https://github.com/oracle/graphpipe-tf-py) contains a
14 | GraphPipe remote operation for TensorFlow as well as some example code.
15 | * [graphpipe-java](https://github.com/oracle/graphpipe-java) __(COMING SOON)__
16 | contains the java client library.
17 |
18 | If you have a question or issue related to the code or one of our projects,
19 | please open an issue in the relevant repo, or talk to us on our [Slack
20 | Channel](https://join.slack.com/t/graphpipe/shared_invite/enQtNDE4MTUyODk2NzQzLTUwODlkZDRiYTI4NmE1OTA5NzRmNjk5MGZiY2M0ZDRiYzNiMTQ0ZmIxODYzZjY2NzRmNzM4NTI0OGVlZGYzZTA).
21 |
22 | ## Contributing
23 |
24 | All of the GraphPipe projects are licensed under the [UPL]. We welcome
25 | contributions. Pull requests can be made on github under the [Oracle
26 | Contributor Agreement] (OCA). If you find any bugs or have feature requests,
27 | please add them to our [issue
28 | tracker](https://github.com/oracle/graphpipe/issues).
29 |
30 | [UPL]: https://opensource.org/licenses/UPL
31 | [Oracle Contributor Agreement]: https://www.oracle.com/technetwork/community/oca-486395.html
32 |
33 | If you are unfamiliar with the UPL, it is a very permissive license that is
34 | similar to the MIT license. You can read more about it
35 | [here](https://oss.oracle.com/licenses/upl/).
36 |
37 | For pull requests to be accepted, the bottom of your commit message must have
38 | the following line using your name and e-mail address as it appears in the OCA
39 | Signatories list.
40 |
41 | Signed-off-by: Your Name
42 |
43 | This can be automatically added to pull requests by committing with:
44 |
45 | git commit --signoff
46 |
47 | Only pull requests from committers that can be verified as having signed the
48 | OCA can be accepted.
49 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/overview.md:
--------------------------------------------------------------------------------
1 | # Overview
2 |
3 | While surveying the existing landscape of Machine Learning model serving, we
4 | discovered:
5 |
6 | * Model serving network protocols are tied to underlying model implementations.
7 | If you have a TensorFlow model, for example, you need to use TensorFlow's
8 | protocol buffer server (tensorflow-serving) to perform remote inference.
9 | * PyTorch and Caffe2, on the other hand, do not provide an efficient model
10 | server in their codebase, but rely on tools like mxnet-model-server for
11 | remote inference. mxnet-model-server is written in Python and provides a JSON
12 | API without batch support. While this is fine for simple use cases, it is
13 | not suitable for high-throughput back-end infrastructure.
14 | * ONNX exists, but tackles the vendor-coupling problem by standardizing model
15 | formats rather than protocol formats. This is useful but challenging, as not
16 | all backend model formats have fully equivalent operations. This means a
17 | simple conversion doesn't always work, and sometimes a model rewrite is
18 | necessary.
19 | * For operators looking to sanely maintain infrastructure, having a standard
20 | way for front-end clients to talk to back-end machine-learning models,
21 | irrespective of model implementation, is important.
22 |
23 | In other words, serving models without GraphPipe is a huge pain:
24 |
25 | 
26 |
27 | In the code we are releasing we provide:
28 |
29 | * A minimalist machine learning transport specification based on [flatbuffers]
30 | * Simple, efficient reference model servers for [Tensorflow], [Caffe2], and [ONNX].
31 | * Efficient client implementations in Go, Python, and Java.
32 |
33 | [flatbuffers]: https://google.github.io/flatbuffers/
34 | [Tensorflow]: https://www.tensorflow.org
35 | [Caffe2]: https://caffe2.ai
36 | [ONNX]: https://onnx.ai
37 |
38 | With these tools, communication is streamlined and serving is simpler:
39 |
40 | 
41 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/performance.md:
--------------------------------------------------------------------------------
1 | # Performance
2 |
3 | First, we compare serialization and deserialization speed of float tensor data
4 | in Python using a custom ujson API, protocol buffers using a TensorFlow-serving
5 | predict request, and a GraphPipe remote request. The request consisted of about
6 | 19 million floating-point values (128 images of shape 224x224x3) and the
7 | response was approximately 3.2 million floating-point values (128 convolutional
8 | outputs of shape 7x7x512). The units on the left are in seconds.
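For a sense of scale, the arithmetic behind those element counts and the approximate float32 payload sizes is sketched below:

```python
# Element counts and approximate float32 payload sizes for the benchmark above.
request_elems = 128 * 224 * 224 * 3   # ~19.3 million values
response_elems = 128 * 7 * 7 * 512    # ~3.2 million values

bytes_per_float32 = 4
print("request:  %d values, ~%.0f MB" % (request_elems, request_elems * bytes_per_float32 / 1e6))
print("response: %d values, ~%.0f MB" % (response_elems, response_elems * bytes_per_float32 / 1e6))
```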
9 |
10 | 
11 |
12 | _GraphPipe is especially performant on the deserialize side, because
13 | flatbuffers provide access to underlying data without a memory copy._
14 |
15 | Second, we compare end-to-end throughput using a Python-JSON TensorFlow model
16 | server, TensorFlow-serving, and the GraphPipe-go TensorFlow model server. In
17 | each case the backend model was the same. Large requests were made to the
18 | server using 1 thread and then again with 5 threads. The units on the left are
19 | rows calculated by the model per second.
20 |
21 | 
22 |
23 | _Note that this test uses the recommended parameters for building
24 | TensorFlow-serving. Although the recommended build parameters do not perform
25 | well, we were ultimately able to discover compilation parameters that allow
26 | TensorFlow-serving to perform on par with our GraphPipe implementation. In
27 | other words, an optimized TensorFlow-serving build performs similarly to
28 | GraphPipe, although building TensorFlow-serving for optimal performance is
29 | neither documented nor easy._
30 |
31 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/quickstart.md:
--------------------------------------------------------------------------------
1 | # Getting Started
2 |
3 | To get started, let's see how to deploy a TensorFlow model and communicate with
4 | it via Python.
5 |
6 | First, launch a simple GraphPipe model server that is serving squeezenet on
7 | CPU:
8 |
9 | ```bash
10 | docker run -it --rm \
11 | -e https_proxy=${https_proxy} \
12 | -p 9000:9000 \
13 | sleepsonthefloor/graphpipe-tf:cpu \
14 | --model=https://oracle.github.io/graphpipe/models/squeezenet.pb \
15 | --listen=0.0.0.0:9000
16 | ```
17 |
18 | To see metadata about this model:
19 |
20 | ```bash
21 | curl localhost:9000
22 | ```
23 |
24 | To make a request, install the python client and dependencies:
25 |
26 | ```bash
27 | pip install graphpipe
28 | pip install pillow # needed for image manipulation
29 | ```
30 |
31 | Finally, let's test an image against the model. Right-click and download the
32 | [following image](https://oracle.github.io/graphpipe/guide/user-guide/_media/mug227.png):
33 |
34 | 
35 |
36 | After you download the image, run this script:
37 |
38 | [filename](_examples/_squeezenet_req.python ':include :type=code')
39 |
40 | You should see that your image was correctly classified as a Coffee mug.
41 | Congratulations!
42 |
--------------------------------------------------------------------------------
/docs/guide/user-guide/spec.md:
--------------------------------------------------------------------------------
1 | # Specification
2 |
3 | ## 1. Introduction
4 |
5 | The GraphPipe specification describes a transport protocol used to exchange
6 | tensors between a client and a model server. A client uses the GraphPipe
7 | protocol to send one or more tensors to a GraphPipe-compatible server and
8 | receives one or more tensors in response. A standard protocol allows client and
9 | server implementations to be completely decoupled.
10 |
11 | The GraphPipe protocol employs a low-overhead data format based on
12 | [flatbuffers], which implements highly efficient serialization/deserialization
13 | operations.
14 |
15 | [flatbuffers]: https://github.com/google/flatbuffers
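As a concrete illustration of this exchange, the minimal Python sketch below sends a single tensor to a GraphPipe server and reads back the returned tensor; the server address and input shape are assumptions for the example.

```python
# Minimal sketch of the request/response cycle: the client serializes a tensor
# into a flatbuffer Request, sends it to the server, and deserializes the
# tensor(s) in the response. Assumes a server is listening on localhost:9000.
import numpy as np
from graphpipe import remote

request_tensor = np.zeros((1, 3, 227, 227), dtype=np.float32)
response_tensor = remote.execute("http://127.0.0.1:9000", request_tensor)
print(response_tensor.shape)
```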
16 |
17 | ## 2. Terminology
18 |
19 | __Model__ Any machine learning entity or function that accepts input data and
20 | returns output data.
21 |
22 | __Server__ Software that provides access to a model by implementing the
23 | GraphPipe spec as detailed in [Section 4](guide/user-guide/spec?id=_4-server)
24 |
25 | __Tensor__ A multi-dimensional array of data with a specific shape and type.
26 |
27 | __Input__ Tensor data transmitted in a request to the server.
28 |
29 | __Output__ Tensor data transmitted in the response from the server.
30 |
31 |
32 | ## 3. Messages
33 |
34 | A flatbuffer specification includes a nested set of _tables_ or messages. These
35 | are analogous to classes or types in a programming language. The flatbuffer
36 | definitions include a number of messages to create a simple API around model
37 | inference. The messages are outlined below. Note that the `string` type in
38 | flatbuffers can hold any variable length binary data, including null, so it
39 | can be thought of as an arbitrarily-sized byte-array.
40 |
41 | ### 3.1 Tensor
42 |
43 | The `Tensor` message holds a tensor. The tensor has a type, which is an
44 | enumeration that is defined in the next section. It has a list of integers
45 | representing the shape of the tensor and two fields for storing data, depending
46 | on the type of the tensor. One of `data` and `string_val` MUST be empty.
47 |
48 | ```
49 | table Tensor {
50 | type:Type;
51 | shape:[int64];
52 | data:[uint8];
53 | string_val:[string];
54 | }
55 | ```
56 |
57 | | Field | Explanation |
58 | |------------|-------------|
59 | | type | The type of each element of the tensor. |
60 | | shape | Array of int64 values representing the size of each dimension of the tensor |
61 | | data | The raw data of the tensor in little-endian row-major order |
62 | | string_val | An array of raw byte strings, one per element (only used for the `String` type) |
63 |
64 | #### 3.1.1 Tensor Type
65 |
66 | GraphPipe currently supports the following tensor types:
67 |
68 | | Type | Id | Bytes | Description |
69 | |---------|----|-------|-------------|
70 | | Null | 0 | ??? | Type is unknown or not supported |
71 | | Uint8 | 1 | 1 | Unsigned 8-bit integer |
72 | | Int8 | 2 | 1 | Signed 8-bit integer |
73 | | Uint16 | 3 | 2 | Unsigned 16-bit integer |
74 | | Int16 | 4 | 2 | Signed 16-bit integer |
75 | | Uint32 | 5 | 4 | Unsigned 32-bit integer |
76 | | Int32 | 6 | 4 | Signed 32-bit integer |
77 | | Uint64 | 7 | 8 | Unsigned 64-bit integer |
78 | | Int64 | 8 | 8 | Signed 64-bit integer |
79 | | Float16 | 9 | 2 | Half-precision (16-bit) floating point number |
80 | | Float32 | 10 | 4 | Full-precision (32-bit) floating point number |
81 | | Float64 | 11 | 8 | Double-precision (64-bit) floating point number |
82 | | String | 12 | var | Raw bytes. Note that this can hold irregular data |
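To make the layout concrete, here is an illustrative sketch of how a numpy array maps onto the `Tensor` fields; the helper is not part of any GraphPipe client API, it only spells out the shape, element size, and byte layout described above.

```python
# Illustrative only: spell out how a numpy array corresponds to the Tensor
# fields. Real clients use generated flatbuffer code for this.
import numpy as np

FLOAT32 = 10  # Type id from the table above

def tensor_fields(arr):
    assert arr.dtype == np.float32
    return {
        "type": FLOAT32,
        "shape": list(arr.shape),                           # one int64 per dimension
        "data": arr.astype("<f4", order="C").tobytes(),     # little-endian, row-major bytes
        "string_val": [],                                   # unused for numeric types
    }

fields = tensor_fields(np.ones((2, 3), dtype=np.float32))
# 2 * 3 elements * 4 bytes per Float32 element = 24 bytes of data
print(fields["shape"], len(fields["data"]))
```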
83 |
84 | ### 3.2 Request
85 |
86 | The GraphPipe spec requires the server to respond to a `Request`
87 | message. This request message contains a union representing two different
88 | request types. The request is either an `InferRequest` or a `MetadataRequest`.
89 |
90 |
91 | ```
92 | union Req {InferRequest, MetadataRequest}
93 |
94 | table Request {
95 | req:Req;
96 | }
97 | ```
98 |
99 | #### 3.2.1 InferRequest
100 |
101 | The `InferRequest` accepts one or more tensors as well as a list of unicode
102 | strings representing the input names and a list of unicode strings representing
103 | the output names. The model server MAY include default input and output names
104 | that are used if none are specified. If input names are specified, the length
105 | of input_names and input_tensors MUST be the same.
106 |
107 | The model server MAY support arbitrary config data to change its behavior. This
108 | config data is sent to the server using the config field. This could be used to
109 | turn on debugging, for example. The format of the config field is defined by
110 | the model server. It could be a JSON-encoded string or any other binary data.
111 |
112 | ```
113 | table InferRequest {
114 | config:string; // optional
115 | input_names:[string]; // optional
116 | input_tensors:[Tensor];
117 | output_names:[string]; // optional
118 | }
119 | ```
120 |
121 | | Field | Explanation |
122 | |----------------|-------------|
123 | | config | Config data for the model server |
124 | | input_names | List of unicode strings representing inputs |
125 | | input_tensors | List of input tensors |
126 | | output_names | List of unicode strings representing outputs |
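For example, with the reference Python client, named inputs and outputs can be supplied roughly as sketched below; the `execute_multi` helper and the names `"input0"`/`"output0"` are assumptions for illustration, so check your client library and the server's metadata for the real identifiers.

```python
# Sketch of an InferRequest with explicit input and output names. The
# execute_multi helper and the tensor names here are assumptions for
# illustration; query the metadata endpoint for the names your model exposes.
import numpy as np
from graphpipe import remote

x = np.zeros((1, 3, 227, 227), dtype=np.float32)
outputs = remote.execute_multi(
    "http://127.0.0.1:9000",
    [x],          # input_tensors; length must match input_names when names are given
    ["input0"],   # input_names
    ["output0"],  # output_names
)
print([o.shape for o in outputs])
```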
127 |
128 | #### 3.2.2 MetadataRequest
129 |
130 | The `MetadataRequest` allows a client to request information about the server and
131 | the model. A `MetadataRequest` has no fields.
132 |
133 | ```
134 | table MetadataRequest {}
135 | ```
136 |
137 | ### 3.3 Responses
138 |
139 | The server MUST respond to an `InferRequest` with an `InferResponse` and a
140 | `MetadataRequest` with a `MetadataResponse`. In the case of a badly formatted
141 | Request with an invalid type, the server SHOULD respond with an `InferResponse`
142 | containing an `Error`. Note that there is no union response type because the
143 | response type can be determined from the request type.
144 |
145 | #### 3.3.1 InferResponse
146 |
147 | The `InferResponse` contains one output tensor per requested output name, or an
148 | error. If no output names are requested, the server may return one or more
149 | default output tensors. If no input names are included, the server may apply
150 | the input tensors to default inputs. The server MUST include data in only one
151 | of `output_tensors` and `errors`.
152 |
153 | ```
154 | table InferResponse {
155 | output_tensors:[Tensor];
156 | errors:[Error];
157 | }
158 | ```
159 |
160 | | Field | Explanation |
161 | |----------------|-------------|
162 | | output_tensors | Array of output tensors, one per requested output |
163 | | errors | Array of `Error` messages (defined below) |
164 |
165 | ##### 3.3.1.1 Error
166 |
167 | The `Error` type contains a representation of the error. The
168 | server SHOULD include a numeric code for each error so they can be
169 | differentiated without parsing the error string.
170 |
171 | ```
172 | table Error {
173 | code:int64;
174 | message:string;
175 | }
176 | ```
177 | | Field | Explanation |
178 | |------------|-------------|
179 | | code | A unique error code for this particular problem |
180 | | message | A human-friendly unicode error message describing the problem |
181 |
182 | #### 3.3.2 MetadataResponse
183 |
184 | The `MetadataResponse` contains information describing the types and shapes of
185 | the supported inputs and outputs. It allows a client to validate input and
186 | output data without sending a request through the model.
187 |
188 | Input and output names are unicode identifiers representing inputs and outputs
189 | to the model. It is not required for the server to support multiple inputs and
190 | outputs. For example, it is perfectly acceptable to have a simple server that
191 | accepts and returns a single tensor and requires no input or output names. On
192 | the other hand, you could have a complex convolutional model that accepts
193 | multiple inputs and allows the client to request output at one or more of the
194 | convolutional layers.
195 |
196 | ```
197 | table MetadataResponse {
198 | name:string; // optional
199 | version:string; // optional
200 | server:string; // optional
201 | description:string; //optional
202 | inputs:[IOMetadata]; // required
203 | outputs:[IOMetadata]; // required
204 | }
205 | ```
206 |
207 | | Field | Explanation |
208 | |-------------|-------------|
209 | | name | Name of the model being served |
210 | | version | Version of the model server |
211 | | server | Name of the model server |
212 | | description | Description of the model being served |
213 | | inputs | Array of `IOMetadata` (defined below) about the inputs |
214 | | outputs | Array of `IOMetadata` (defined below) about the outputs |
215 |
216 | ##### 3.3.2.1 IOMetadata
217 |
218 | `IOMetadata` contains information about a given input or output. The name is
219 | the unique unicode identifier for this particular input or output. This name is
220 | used when specifying `input_names` and `output_names` as part of an
221 | `InferRequest`. A negative one (-1) in the shape means the model accepts an
222 | arbitrary size input in that dimension. This is generally useful if the model
223 | supports a batch dimension.
224 |
225 | ```
226 | table IOMetadata {
227 | name:string; // required
228 | description:string; // optional
229 | shape:[int64]; // required
230 | type:Type; // required
231 | }
232 | ```
233 |
234 | | Field | Explanation |
235 | |-------------|-------------|
236 | | name | Name of the input or output |
237 | | description | Description of the input or output |
238 | | shape | Shape of the input or output (-1 represents any size) |
239 | | type | Type of the input or output |
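As an example of how a client can use this, the short sketch below fetches the metadata and inspects the advertised inputs and outputs before sending any inference request; the `Inputs` attribute is an assumption modeled on the `Outputs` accessor used elsewhere in this guide.

```python
# Sketch: inspect the model's advertised inputs/outputs before building an
# InferRequest. The `Inputs` attribute is assumed by analogy with `Outputs`;
# exact accessor names may vary by client version.
from graphpipe import remote

meta = remote.metadata("http://127.0.0.1:9000")
print(meta.Inputs)   # IOMetadata per input: name, shape, type, description
print(meta.Outputs)  # IOMetadata per output
# Remember: a -1 in a shape means that dimension (often the batch) may be any size.
```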
240 |
241 |
242 | ## 4. Server
243 |
244 | A GraphPipe server MUST accept flatbuffer-encoded `Request`s over a binary
245 | protocol and return an `InferResponse` or a `MetadataResponse`. This can be a
246 | standard tcp or unix socket, or it can be served over a higher level protocol
247 | like HTTP/HTTPS. If the server is serving over HTTP, it MUST accept the request
248 | as a POST where the body of the HTTP request contains only the
249 | flatbuffer-encoded request. It SHOULD also provide helpful information when
250 | the serving endpoint is queried with a GET. One method would be to serialize
251 | the `MetadataResponse` into json and return it in response to a GET.
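The sketch below exercises both behaviors from Python: the GET assumes the server follows the suggestion of returning JSON-serialized metadata (it is a SHOULD, not a MUST), while the POST goes through the reference client, which takes care of the flatbuffer encoding.

```python
# Sketch: poke a GraphPipe HTTP server with a GET and a POST. Assumes a server
# on localhost:9000; GET metadata is optional behavior, so it may not be JSON.
import numpy as np
import requests
from graphpipe import remote

base = "http://127.0.0.1:9000"

# GET: the reference servers return human-readable metadata here.
print(requests.get(base).text)

# POST: a flatbuffer-encoded Request; the client handles encoding/decoding.
result = remote.execute(base, np.zeros((1, 3, 227, 227), dtype=np.float32))
print(result.shape)
```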
252 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
GraphPipe -- Machine Learning Model Deployment Made Simple
--------------------------------------------------------------------------------
/docs/models/squeezenet.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/models/squeezenet.h5
--------------------------------------------------------------------------------
/docs/models/squeezenet.init_net.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/models/squeezenet.init_net.pb
--------------------------------------------------------------------------------
/docs/models/squeezenet.onnx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/models/squeezenet.onnx
--------------------------------------------------------------------------------
/docs/models/squeezenet.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/models/squeezenet.pb
--------------------------------------------------------------------------------
/docs/models/squeezenet.predict_net.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oracle/graphpipe/224c32130006d652a9c3bf205abd700ad67940b3/docs/models/squeezenet.predict_net.pb
--------------------------------------------------------------------------------
/docs/models/squeezenet.value_inputs.json:
--------------------------------------------------------------------------------
1 | {"data_0": [1, [1, 3, 227, 227]]}
2 |
--------------------------------------------------------------------------------
/docs/models/squeezenet.value_inputs_caffe2.json:
--------------------------------------------------------------------------------
1 | {"data": [1, [1, 3, 227, 227]]}
2 |
--------------------------------------------------------------------------------
/graphpipe.fbs:
--------------------------------------------------------------------------------
1 | /*
2 | ** Copyright © 2018, Oracle and/or its affiliates. All rights reserved.
3 | **
4 | ** Licensed under the Universal Permissive License v 1.0 as shown at
5 | ** http://oss.oracle.com/licenses/upl.
6 | */
7 | namespace graphpipe;
8 |
9 | /*
10 | Enumeration of supported types.
11 | */
12 | enum Type:uint8 {
13 | Null,
14 | Uint8,
15 | Int8,
16 | Uint16,
17 | Int16,
18 | Uint32,
19 | Int32,
20 | Uint64,
21 | Int64,
22 | Float16,
23 | Float32,
24 | Float64,
25 | String,
26 | }
27 |
28 | /*
29 | Tensor definition.
30 |
31 | type: defines what type of data this Tensor holds
32 | shape: an array that describes the shape of the Tensor (like [10, 3, 224, 224])
33 | data: stores the actual tensor data for all types but String
34 | string_val: holds the data for tensors of type String
35 | */
36 | table Tensor {
37 | type:Type;
38 | shape:[int64];
39 | data:[uint8];
40 | string_val:[string];
41 | }
42 |
43 | /*
44 | Req definition, which is a union of an InferRequest and a MetadataRequest.
45 | */
46 | union Req {InferRequest, MetadataRequest}
47 |
48 | /*
49 | Request definition, which is a container for one of the two allowed request types.
50 | req: a union representing an InferRequest or a MetadataRequest.
51 | */
52 | table Request {
53 | req:Req;
54 | }
55 |
56 | /*
57 | InferRequest definition, which is used to request data from a remote model.
58 | config: application-specific string used for request-specific model configuration.
59 | input_names: a list of input names
60 | input_tensors: a list of input tensors, associated with input_names
61 | output_names: a list of outputs to return in response to the provided inputs
62 | */
63 | table InferRequest {
64 | config:string; // optional
65 | input_names:[string]; // optional
66 | input_tensors:[Tensor];
67 | output_names:[string]; // optional
68 | }
69 |
70 | /*
71 | Error definition
72 | code: integer representation of the error
73 | message: Human-readable message description
74 | */
75 | table Error {
76 | code:int64;
77 | message:string;
78 | }
79 |
80 | /*
81 | InferResponse definition. Should contain either a list of tensors or a list
82 | of errors, but not both.
83 | output_tensors: a list of output_tensors, in the order requested by InferRequest.output_names
84 | errors: A list of errors that occurred during processing, if any
85 | */
86 | table InferResponse {
87 | output_tensors:[Tensor];
88 | errors:[Error];
89 | }
90 |
91 | /*
92 | MetadataRequest. Used to request metadata about a graphpipe model server.
93 | */
94 | table MetadataRequest {}
95 |
96 | /*
97 | IOMetadata definition. Provides info about inputs and outputs of a model.
98 | name: name of the input/output
99 | description: description of the input/output
100 | shape: input or output shape
101 | type: Type of the input/output
102 | */
103 | table IOMetadata {
104 | name:string; // required
105 | description:string; // optional
106 | shape:[int64]; // required
107 | type:Type; // required
108 | }
109 |
110 | /*
111 | MetadataResponse definition. Describes characteristics of the server and the model being served.
112 | name: name of the model being served
113 | version: version of the server
114 | server: name of the server
115 | description: description of the model being served
116 | inputs: metadata about the model's inputs
117 | outputs: metadata about the model's outputs
118 | */
119 | table MetadataResponse {
120 | name:string; // optional
121 | version:string; // optional
122 | server:string; // optional
123 | description:string; //optional
124 | inputs:[IOMetadata]; // required
125 | outputs:[IOMetadata]; // required
126 | }
127 |
128 | root_type Request;
129 |
--------------------------------------------------------------------------------