├── graphpipe ├── __init__.py ├── graphpipefb │ ├── __init__.py │ ├── Req.py │ ├── Type.py │ ├── MetadataRequest.py │ ├── Error.py │ ├── Request.py │ ├── InferResponse.py │ ├── IOMetadata.py │ ├── InferRequest.py │ ├── Tensor.py │ └── MetadataResponse.py ├── remote.py └── convert.py ├── MANIFEST.in ├── requirements.txt ├── test-requirements.txt ├── Makefile ├── examples ├── identity_example │ ├── README.md │ ├── client.py │ └── server.py └── sklearn_example │ ├── README.md │ ├── client.py │ └── server.py ├── CONTRIBUTING.md ├── tox.ini ├── README.md ├── setup.py ├── tests └── convert_test.py ├── .gitignore └── LICENSE.txt /graphpipe/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include requirements.txt 2 | include LICENSE.txt 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.13.3 2 | requests==2.18.4 3 | flatbuffers==1.9.0 4 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | flake8==2.5.0 2 | hacking<0.11,>=0.10.0 3 | pytest-cov==2.4.0 4 | testtools>=1.4.0 5 | numpy>=1.13.3 6 | requests==2.18.4 7 | flatbuffers==1.9.0 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | test: 2 | docker run -it --rm \ 3 | -v $(PWD):/src \ 4 | -e http_proxy=$(http_proxy) \ 5 | -e https_proxy=$(https_proxy) \ 6 | themattrix/tox-base \ 7 | tox 8 | #/bin/sh 9 | -------------------------------------------------------------------------------- 
/graphpipe/graphpipefb/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. 2 | # 3 | # Licensed under the Universal Permissive License v 1.0 as shown at 4 | # http://oss.oracle.com/licenses/upl. 5 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/Req.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | class Req(object): 6 | NONE = 0 7 | InferRequest = 1 8 | MetadataRequest = 2 9 | 10 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/Type.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | class Type(object): 6 | Null = 0 7 | Uint8 = 1 8 | Int8 = 2 9 | Uint16 = 3 10 | Int16 = 4 11 | Uint32 = 5 12 | Int32 = 6 13 | Uint64 = 7 14 | Int64 = 8 15 | Float16 = 9 16 | Float32 = 10 17 | Float64 = 11 18 | String = 12 19 | 20 | -------------------------------------------------------------------------------- /examples/identity_example/README.md: -------------------------------------------------------------------------------- 1 | # Identity Server Example 2 | 3 | This is a demo of a graphpipe-backed identity function, the most basic client/server example we could think of. Very simply, server.py echos incoming requests back to the client. To use it: 4 | 5 | ``` 6 | > python3 server.py 7 | Starting graphpipe identity server on port 10000... 8 | ``` 9 | 10 | And then use the client to contact the server: 11 | ``` 12 | > python3 client.py 13 | Hooray! 
import argparse

import numpy as np

from graphpipe import remote


# Shape of the random payload sent to (and echoed back by) the identity server.
SHAPE = (10, 1, 2, 3, 4)


def get_sample_data():
    """Return a random float64 array of shape ``SHAPE`` in [0, 1)."""
    return np.random.rand(*SHAPE)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", default="http://127.0.0.1:10000",
                        help="Url", type=str)
    args = parser.parse_args()
    X = get_sample_data()
    pred = remote.execute(args.url, X)
    # The identity server must echo the payload back unchanged.
    assert pred.shape == SHAPE
    assert np.allclose(X, pred)

    print('Hooray! We got our data back with shape: %s' % str(SHAPE))
import argparse

from sklearn import datasets
from sklearn.metrics import mean_squared_error, r2_score

from graphpipe import remote


def get_sample_data():
    """Return the last 20 rows of the diabetes dataset as ``(X, Y)``.

    These are the held-out samples the example server does not train on.
    """
    diabetes = datasets.load_diabetes()
    return diabetes.data[-20:], diabetes.target[-20:]


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # BUG FIX: the flag is a URL, not a port; help text said "Port".
    parser.add_argument("--url", default="http://127.0.0.1:10000",
                        help="Url", type=str)
    args = parser.parse_args()
    X, Y = get_sample_data()
    pred = remote.execute(args.url, X)
    print("Got back a response of shape: %s" % str(pred.shape))
    print("Mean squared error: %.2f" % mean_squared_error(Y, pred))
    print('Variance score: %.2f' % r2_score(Y, pred))
% args.port) 26 | while(True): 27 | httpd.handle_request() 28 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{3.5,3.6},pep8 3 | skipsdist = True 4 | 5 | [testenv] 6 | basepython = 7 | pep8: python 8 | py3.5: python3.5 9 | py3.6: python3.6 10 | 11 | passenv = 12 | DOCKER_HOST 13 | API_URL 14 | setenv = VIRTUAL_ENV={envdir} 15 | usedevelop = True 16 | install_command = pip install -U {opts} {packages} 17 | deps = -r{toxinidir}/test-requirements.txt 18 | commands = find . -type f -name "*.pyc" -delete 19 | whitelist_externals = find 20 | rm 21 | go 22 | docker 23 | [testenv:pep8] 24 | commands = flake8 25 | 26 | [testenv:venv] 27 | commands = {posargs} 28 | 29 | [testenv:py3.5] 30 | commands = pytest -vv --tb=long --capture=sys --cov=graphpipe --capture=fd {toxinidir}/tests 31 | 32 | [testenv:py3.6] 33 | commands = pytest -vv --tb=long --capture=sys --cov=graphpipe --capture=fd {toxinidir}/tests 34 | 35 | [flake8] 36 | ignore = H405,H404,H403,H401 37 | show-source = True 38 | exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,docs,venv,.venv,graphpipe/graphpipefb 39 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GraphPipe for python 2 | 3 | GraphPipe for python consists of the flatbuffer implementation (generated from 4 | the [graphpipe project](https://github.com/graphpipe)), utilities for 5 | converting between flatbuffers and python types, a client for making calls 6 | to graphpipe services, and some simple examples. 
import setuptools

from codecs import open
from os import path

# Resolve paths relative to this file so builds work from any CWD.
here = path.abspath(path.dirname(__file__))

# The README doubles as the PyPI long description.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

# Runtime dependencies live in requirements.txt (shipped via MANIFEST.in).
with open('requirements.txt') as f:
    requirements = f.read().splitlines()

setuptools.setup(
    name='graphpipe',
    version='1.0.4',
    description='Graphpipe client and helpers',
    long_description=long_description,
    long_description_content_type="text/markdown",
    author='OCI ML Team',
    install_requires=requirements,
    author_email='vish.ishaya@oracle.com',
    url='https://oracle.github.io/graphpipe',
    # BUG FIX: the setuptools keyword is ``classifiers`` (plural).
    # ``classifier`` is unknown and was silently ignored, so no trove
    # classifiers were ever published for this package.
    classifiers=[
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'Operating System :: POSIX :: Linux',
        'License :: OSI Approved :: Universal Permissive License (UPL)',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    packages=setuptools.find_packages(exclude=['contrib', 'docs', 'tests']),
)
the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | import flatbuffers 6 | 7 | class Request(object): 8 | __slots__ = ['_tab'] 9 | 10 | @classmethod 11 | def GetRootAsRequest(cls, buf, offset): 12 | n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) 13 | x = Request() 14 | x.Init(buf, n + offset) 15 | return x 16 | 17 | # Request 18 | def Init(self, buf, pos): 19 | self._tab = flatbuffers.table.Table(buf, pos) 20 | 21 | # Request 22 | def ReqType(self): 23 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) 26 | return 0 27 | 28 | # Request 29 | def Req(self): 30 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 31 | if o != 0: 32 | from flatbuffers.table import Table 33 | obj = Table(bytearray(), 0) 34 | self._tab.Union(obj, o) 35 | return obj 36 | return None 37 | 38 | def RequestStart(builder): builder.StartObject(2) 39 | def RequestAddReqType(builder, reqType): builder.PrependUint8Slot(0, reqType, 0) 40 | def RequestAddReq(builder, req): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(req), 0) 41 | def RequestEnd(builder): return builder.EndObject() 42 | -------------------------------------------------------------------------------- /examples/sklearn_example/server.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from http import server 3 | 4 | from sklearn import datasets, linear_model 5 | from sklearn.metrics import mean_squared_error, r2_score 6 | 7 | from graphpipe import convert 8 | 9 | 10 | def create_diabetes_model(): 11 | diabetes = datasets.load_diabetes() 12 | diabetes_X = diabetes.data 13 | print(diabetes_X.shape) 14 | 15 | diabetes_X_train = diabetes_X[:-20] 16 | diabetes_y_train = diabetes.target[:-20] 17 | 18 | model = linear_model.LinearRegression() 19 | 20 | 
class TestSimple(unittest.TestCase):
    """Smoke test: round-trip tensors, requests and responses through
    graphpipe.convert's serialize/save/load helpers (writes *.dat files
    into the CWD)."""

    def test_simple(self):
        b = flatbuffers.Builder(1024000)
        # NOTE(review): uniform((9, 9, 9)) passes the tuple as ``low``,
        # producing a length-3 array — this was probably meant to be
        # np.random.uniform(size=(9, 9, 9)). TODO confirm intent.
        t = np.random.uniform((9, 9, 9))

        def toot():
            """Stupid test function"""
            convert.make_tensor(b, t)

        toot()
        # String tensor round-trip (mixed bytes/str elements).
        convert.save_tensor(np.array([b"foo", "b"]), "strings.dat")
        foo = convert.load_tensor("strings.dat")
        print(foo)
        # Float tensor round-trip.
        convert.save_tensor(np.identity(3), "floats.dat")
        bar = convert.load_tensor("floats.dat")
        print(bar)
        i = ["some", "other"]
        o = ["third", "fourth"]
        # Infer-request round-trip with config, tensors and names.
        ir = convert.InferReq("conf", [foo, bar], i, o)
        convert.save_request(ir, "inferreq.dat")
        x = convert.load_request("inferreq.dat")
        print(x)
        # ``None`` request serializes as a metadata request.
        convert.save_request(None, "metadatareq.dat")
        x = convert.load_request("metadatareq.dat")
        print(x)
        # Infer-response round-trip (no errors).
        convert.save_infer_response([foo, bar], None, "inferresp.dat")
        x = convert.load_infer_response("inferresp.dat")
        print(x)
        # Metadata-response round-trip from the module-level ``metadata`` dict.
        convert.save_metadata_response(metadata, "metadataresp.dat")
        x = convert.load_metadata_response("metadataresp.dat")
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | *.pyc 103 | .testrepository 104 | .tox/* 105 | dist/* 106 | build/* 107 | html/* 108 | *.egg* 109 | cover/* 110 | .coverage 111 | rdserver.txt 112 | python-troveclient.iml 113 | 114 | # Files created by releasenotes build 115 | releasenotes/build 116 | .coverage.* 117 | *.json 118 | .cache 119 | *.log* 120 | *.csv 121 | venv 122 | .venv 123 | ChangeLog 124 | AUTHORS 125 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. 2 | 3 | This software is licensed to you under the Universal Permissive License (UPL). See below for license terms. 4 | ____________________________ 5 | The Universal Permissive License (UPL), Version 1.0 6 | Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. 
7 | 8 | Subject to the condition set forth below, permission is hereby granted to any person obtaining a copy of this software, associated documentation and/or data (collectively the "Software"), free of charge and under any and all copyright rights in the Software, and any and all patent rights owned or freely licensable by each licensor hereunder covering either (i) the unmodified Software as contributed to or provided by such licensor, or (ii) the Larger Works (as defined below), to deal in both 9 | 10 | (a) the Software, and 11 | (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if one is included with the Software (each a "Larger Work" to which the Software is contributed by such licensors), 12 | 13 | without restriction, including without limitation the rights to copy, create derivative works of, display, perform, and distribute the Software and make, use, sell, offer for sale, import, export, have made, and have sold the Software and the Larger Work(s), and to sublicense the foregoing rights on either these or other terms. 14 | 15 | This license is subject to the following condition: 16 | 17 | The above copyright notice and either this complete permission notice or at a minimum a reference to the UPL must be included in all copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/InferResponse.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | import flatbuffers 6 | 7 | class InferResponse(object): 8 | __slots__ = ['_tab'] 9 | 10 | @classmethod 11 | def GetRootAsInferResponse(cls, buf, offset): 12 | n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) 13 | x = InferResponse() 14 | x.Init(buf, n + offset) 15 | return x 16 | 17 | # InferResponse 18 | def Init(self, buf, pos): 19 | self._tab = flatbuffers.table.Table(buf, pos) 20 | 21 | # InferResponse 22 | def OutputTensors(self, j): 23 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | x = self._tab.Vector(o) 26 | x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 27 | x = self._tab.Indirect(x) 28 | from .Tensor import Tensor 29 | obj = Tensor() 30 | obj.Init(self._tab.Bytes, x) 31 | return obj 32 | return None 33 | 34 | # InferResponse 35 | def OutputTensorsLength(self): 36 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 37 | if o != 0: 38 | return self._tab.VectorLen(o) 39 | return 0 40 | 41 | # InferResponse 42 | def Errors(self, j): 43 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 44 | if o != 0: 45 | x = self._tab.Vector(o) 46 | x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 47 | x = self._tab.Indirect(x) 48 | from .Error import Error 49 | obj = Error() 50 | obj.Init(self._tab.Bytes, x) 51 | return obj 52 | return None 53 | 54 | # InferResponse 55 | def ErrorsLength(self): 56 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 57 | if o != 0: 58 | return self._tab.VectorLen(o) 59 | return 0 60 | 61 | def InferResponseStart(builder): builder.StartObject(2) 62 | def 
# Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Universal Permissive License v 1.0 as shown at
# http://oss.oracle.com/licenses/upl.

import requests

from graphpipe import convert


def execute(uri, inp, input_name=None, output_name=None):
    """Run a single-input inference against the graphpipe server at *uri*.

    Returns the single output when the server produces exactly one,
    otherwise the list of outputs unchanged.
    """
    res = execute_multi(uri,
                        [inp],
                        None if input_name is None else [input_name],
                        None if output_name is None else [output_name])
    if len(res) == 1:
        res = res[0]
    return res


def execute_multi(uri, inputs, input_names, output_names, config=None):
    """Run a multi-input inference against *uri*.

    Raises ``requests.HTTPError`` on a non-200 response and ``Exception``
    with the first server-reported error message, if any.
    """
    req = convert.InferReq(config, inputs, input_names, output_names)
    data = convert.serialize_infer_request(req)
    post_res = requests.post(uri, data=data)
    if post_res.status_code != 200:
        post_res.close()
        post_res.raise_for_status()
    output, errors = convert.deserialize_infer_response(post_res.content)
    if len(errors) != 0:
        raise Exception(errors[0]["message"])
    return output


# TODO(vish): cache metadata for uri
def metadata(uri):
    """Fetch and deserialize the model metadata from *uri*."""
    data = convert.serialize_metadata_request()
    post_res = requests.post(uri, data=data)
    if post_res.status_code != 200:
        post_res.close()
        post_res.raise_for_status()
    return convert.deserialize_metadata_response(post_res.content)


def get_input_names(uri):
    """Return the server's declared input tensor names."""
    m = metadata(uri)
    return [m.Inputs(i).Name().decode() for i in range(m.InputsLength())]


def get_output_names(uri):
    """Return the server's declared output tensor names."""
    m = metadata(uri)
    return [m.Outputs(i).Name().decode() for i in range(m.OutputsLength())]


def get_input_types(uri):
    """Return a numpy dtype per input, or None where the type is Null (0).

    BUG FIX: the guard previously read ``m.Outputs(i).Type()`` while
    indexing ``m.Inputs(i)`` — a copy-paste from get_output_types that
    misbehaved whenever input and output counts differed.
    """
    m = metadata(uri)
    return [convert.t_to_np[m.Inputs(i).Type()]
            if m.Inputs(i).Type() else None
            for i in range(m.InputsLength())]


def get_output_types(uri):
    """Return a numpy dtype per output, or None where the type is Null (0)."""
    m = metadata(uri)
    return [convert.t_to_np[m.Outputs(i).Type()]
            if m.Outputs(i).Type() else None
            for i in range(m.OutputsLength())]


def get_input_shapes(uri):
    """Return input shapes with dynamic (negative) dims mapped to None."""
    m = metadata(uri)
    return [[None if x < 0 else x
             for x in m.Inputs(i).ShapeAsNumpy().tolist()]
            for i in range(m.InputsLength())]


def get_output_shapes(uri):
    """Return output shapes with dynamic (negative) dims mapped to None."""
    m = metadata(uri)
    return [[None if x < 0 else x
             for x in m.Outputs(i).ShapeAsNumpy().tolist()]
            for i in range(m.OutputsLength())]
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | return self._tab.String(o + self._tab.Pos) 26 | return bytes() 27 | 28 | # IOMetadata 29 | def Description(self): 30 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 31 | if o != 0: 32 | return self._tab.String(o + self._tab.Pos) 33 | return bytes() 34 | 35 | # IOMetadata 36 | def Shape(self, j): 37 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 38 | if o != 0: 39 | a = self._tab.Vector(o) 40 | return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) 41 | return 0 42 | 43 | # IOMetadata 44 | def ShapeAsNumpy(self): 45 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 46 | if o != 0: 47 | return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) 48 | return 0 49 | 50 | # IOMetadata 51 | def ShapeLength(self): 52 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 53 | if o != 0: 54 | return self._tab.VectorLen(o) 55 | return 0 56 | 57 | # IOMetadata 58 | def Type(self): 59 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 60 | if o != 0: 61 | return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) 62 | return 0 63 | 64 | def IOMetadataStart(builder): builder.StartObject(4) 65 | def IOMetadataAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) 66 | def IOMetadataAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) 67 | def IOMetadataAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) 68 | def IOMetadataStartShapeVector(builder, numElems): return builder.StartVector(8, numElems, 8) 69 | def IOMetadataAddType(builder, type): 
builder.PrependUint8Slot(3, type, 0) 70 | def IOMetadataEnd(builder): return builder.EndObject() 71 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/InferRequest.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | import flatbuffers 6 | 7 | class InferRequest(object): 8 | __slots__ = ['_tab'] 9 | 10 | @classmethod 11 | def GetRootAsInferRequest(cls, buf, offset): 12 | n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) 13 | x = InferRequest() 14 | x.Init(buf, n + offset) 15 | return x 16 | 17 | # InferRequest 18 | def Init(self, buf, pos): 19 | self._tab = flatbuffers.table.Table(buf, pos) 20 | 21 | # InferRequest 22 | def Config(self): 23 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | return self._tab.String(o + self._tab.Pos) 26 | return bytes() 27 | 28 | # InferRequest 29 | def InputNames(self, j): 30 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 31 | if o != 0: 32 | a = self._tab.Vector(o) 33 | return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) 34 | return "" 35 | 36 | # InferRequest 37 | def InputNamesLength(self): 38 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 39 | if o != 0: 40 | return self._tab.VectorLen(o) 41 | return 0 42 | 43 | # InferRequest 44 | def InputTensors(self, j): 45 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 46 | if o != 0: 47 | x = self._tab.Vector(o) 48 | x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 49 | x = self._tab.Indirect(x) 50 | from .Tensor import Tensor 51 | obj = Tensor() 52 | obj.Init(self._tab.Bytes, x) 53 | return obj 54 | return None 55 | 56 | # InferRequest 57 | def InputTensorsLength(self): 58 | o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 59 | if o != 0: 60 | return self._tab.VectorLen(o) 61 | return 0 62 | 63 | # InferRequest 64 | def OutputNames(self, j): 65 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 66 | if o != 0: 67 | a = self._tab.Vector(o) 68 | return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) 69 | return "" 70 | 71 | # InferRequest 72 | def OutputNamesLength(self): 73 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 74 | if o != 0: 75 | return self._tab.VectorLen(o) 76 | return 0 77 | 78 | def InferRequestStart(builder): builder.StartObject(4) 79 | def InferRequestAddConfig(builder, config): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(config), 0) 80 | def InferRequestAddInputNames(builder, inputNames): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputNames), 0) 81 | def InferRequestStartInputNamesVector(builder, numElems): return builder.StartVector(4, numElems, 4) 82 | def InferRequestAddInputTensors(builder, inputTensors): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(inputTensors), 0) 83 | def InferRequestStartInputTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) 84 | def InferRequestAddOutputNames(builder, outputNames): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(outputNames), 0) 85 | def InferRequestStartOutputNamesVector(builder, numElems): return builder.StartVector(4, numElems, 4) 86 | def InferRequestEnd(builder): return builder.EndObject() 87 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/Tensor.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: 
graphpipe 4 | 5 | import flatbuffers 6 | 7 | class Tensor(object): 8 | __slots__ = ['_tab'] 9 | 10 | @classmethod 11 | def GetRootAsTensor(cls, buf, offset): 12 | n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) 13 | x = Tensor() 14 | x.Init(buf, n + offset) 15 | return x 16 | 17 | # Tensor 18 | def Init(self, buf, pos): 19 | self._tab = flatbuffers.table.Table(buf, pos) 20 | 21 | # Tensor 22 | def Type(self): 23 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) 26 | return 0 27 | 28 | # Tensor 29 | def Shape(self, j): 30 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 31 | if o != 0: 32 | a = self._tab.Vector(o) 33 | return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) 34 | return 0 35 | 36 | # Tensor 37 | def ShapeAsNumpy(self): 38 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 39 | if o != 0: 40 | return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) 41 | return 0 42 | 43 | # Tensor 44 | def ShapeLength(self): 45 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 46 | if o != 0: 47 | return self._tab.VectorLen(o) 48 | return 0 49 | 50 | # Tensor 51 | def Data(self, j): 52 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 53 | if o != 0: 54 | a = self._tab.Vector(o) 55 | return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) 56 | return 0 57 | 58 | # Tensor 59 | def DataAsNumpy(self): 60 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 61 | if o != 0: 62 | return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) 63 | return 0 64 | 65 | # Tensor 66 | def DataLength(self): 67 | o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 68 | if o != 0: 69 | return self._tab.VectorLen(o) 70 | return 0 71 | 72 | # Tensor 73 | def StringVal(self, j): 74 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 75 | if o != 0: 76 | a = self._tab.Vector(o) 77 | return self._tab.String(a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) 78 | return "" 79 | 80 | # Tensor 81 | def StringValLength(self): 82 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 83 | if o != 0: 84 | return self._tab.VectorLen(o) 85 | return 0 86 | 87 | def TensorStart(builder): builder.StartObject(4) 88 | def TensorAddType(builder, type): builder.PrependUint8Slot(0, type, 0) 89 | def TensorAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) 90 | def TensorStartShapeVector(builder, numElems): return builder.StartVector(8, numElems, 8) 91 | def TensorAddData(builder, data): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) 92 | def TensorStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) 93 | def TensorAddStringVal(builder, stringVal): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(stringVal), 0) 94 | def TensorStartStringValVector(builder, numElems): return builder.StartVector(4, numElems, 4) 95 | def TensorEnd(builder): return builder.EndObject() 96 | -------------------------------------------------------------------------------- /graphpipe/graphpipefb/MetadataResponse.py: -------------------------------------------------------------------------------- 1 | # automatically generated by the FlatBuffers compiler, do not modify 2 | 3 | # namespace: graphpipe 4 | 5 | import flatbuffers 6 | 7 | class MetadataResponse(object): 8 | __slots__ = ['_tab'] 9 | 10 | @classmethod 11 | def GetRootAsMetadataResponse(cls, buf, offset): 12 | n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) 13 | x = MetadataResponse() 14 | x.Init(buf, n + offset) 15 | return x 16 | 17 | # MetadataResponse 18 | def Init(self, buf, pos): 19 | self._tab = flatbuffers.table.Table(buf, pos) 20 | 21 | # MetadataResponse 22 | def Name(self): 23 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) 24 | if o != 0: 25 | return self._tab.String(o + self._tab.Pos) 26 | return bytes() 27 | 28 | # MetadataResponse 29 | def Version(self): 30 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) 31 | if o != 0: 32 | return self._tab.String(o + self._tab.Pos) 33 | return bytes() 34 | 35 | # MetadataResponse 36 | def Server(self): 37 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) 38 | if o != 0: 39 | return self._tab.String(o + self._tab.Pos) 40 | return bytes() 41 | 42 | # MetadataResponse 43 | def Description(self): 44 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) 45 | if o != 0: 46 | return self._tab.String(o + self._tab.Pos) 47 | return bytes() 48 | 49 | # MetadataResponse 50 | def Inputs(self, j): 51 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) 52 | if o != 0: 53 | x = self._tab.Vector(o) 54 | x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 55 | x = self._tab.Indirect(x) 56 | from .IOMetadata import IOMetadata 57 | obj = IOMetadata() 58 | obj.Init(self._tab.Bytes, x) 59 | return obj 60 | return None 61 | 62 | # MetadataResponse 63 | def InputsLength(self): 64 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) 65 | if o != 0: 66 | return self._tab.VectorLen(o) 67 | return 0 68 | 69 | # MetadataResponse 70 | def Outputs(self, j): 71 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) 72 | if o != 0: 73 | x = self._tab.Vector(o) 74 | x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 75 | x = self._tab.Indirect(x) 76 | from 
.IOMetadata import IOMetadata 77 | obj = IOMetadata() 78 | obj.Init(self._tab.Bytes, x) 79 | return obj 80 | return None 81 | 82 | # MetadataResponse 83 | def OutputsLength(self): 84 | o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) 85 | if o != 0: 86 | return self._tab.VectorLen(o) 87 | return 0 88 | 89 | def MetadataResponseStart(builder): builder.StartObject(6) 90 | def MetadataResponseAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) 91 | def MetadataResponseAddVersion(builder, version): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(version), 0) 92 | def MetadataResponseAddServer(builder, server): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(server), 0) 93 | def MetadataResponseAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) 94 | def MetadataResponseAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) 95 | def MetadataResponseStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) 96 | def MetadataResponseAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) 97 | def MetadataResponseStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) 98 | def MetadataResponseEnd(builder): return builder.EndObject() 99 | -------------------------------------------------------------------------------- /graphpipe/convert.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. 2 | # 3 | # Licensed under the Universal Permissive License v 1.0 as shown at 4 | # http://oss.oracle.com/licenses/upl. 
import ctypes

import flatbuffers
import numpy as np

from graphpipe.graphpipefb import Error
from graphpipe.graphpipefb import InferRequest
from graphpipe.graphpipefb import InferResponse
from graphpipe.graphpipefb import IOMetadata
from graphpipe.graphpipefb import MetadataRequest
from graphpipe.graphpipefb import MetadataResponse
from graphpipe.graphpipefb.Req import Req
from graphpipe.graphpipefb import Request
from graphpipe.graphpipefb import Tensor
from graphpipe.graphpipefb.Type import Type

# Wire-format Type enum -> numpy dtype. Type.String maps to dtype('O')
# because deserialized string tensors come back as object arrays.
t_to_np = {
    Type.Uint8: np.uint8().dtype,
    Type.Int8: np.int8().dtype,
    Type.Uint16: np.uint16().dtype,
    Type.Int16: np.int16().dtype,
    Type.Uint32: np.uint32().dtype,
    Type.Int32: np.int32().dtype,
    Type.Uint64: np.uint64().dtype,
    Type.Int64: np.int64().dtype,
    Type.Float16: np.float16().dtype,
    Type.Float32: np.float32().dtype,
    Type.Float64: np.float64().dtype,
    Type.String: np.dtype('O'),
}

# Inverse mapping: numpy dtype -> wire-format Type enum.
np_to_t = dict((v, k) for k, v in t_to_np.items())


def to_type(t):
    """Convert a numpy dtype to the graphpipe Type enum value.

    Currently only converts from the dtype of an np array.
    TODO(vish): handle other forms like np.float32 and Type.Float32.
    """
    return np_to_t[t]


# numpy dtype.kind codes whose data must travel through the Tensor
# string_val field rather than as raw bytes.
stringlike = [
    "S",
    "U",
    "a",
    "O",
]


def is_stringlike(ndarray):
    """Return True if ndarray holds string-like data, False otherwise.

    BUG FIX: the original returned True or None; callers only use it in
    boolean context, but an explicit bool is the safer contract.
    """
    return ndarray.dtype.kind in stringlike


def output(builder, obj):
    """Finish the flatbuffer rooted at obj and return the serialized bytes."""
    builder.Finish(obj)
    return builder.Output()


def serialize_tensor(ndarray):
    """Serialize a numpy array into a standalone Tensor flatbuffer."""
    # Initial builder size: payload plus a little slack for the table metadata.
    size = ndarray.nbytes + 100
    builder = flatbuffers.Builder(size)
    tensor = make_tensor(builder, ndarray)
    return output(builder, tensor)


def make_tensor(builder, ndarray):
    """Write ndarray into builder as a Tensor table; return its offset.

    String-like arrays go through the string_val vector; numeric arrays are
    copied as raw bytes into the data vector.
    """
    # Builders prepend, so vectors are written in reverse to keep order.
    ndim = len(ndarray.shape)
    Tensor.TensorStartShapeVector(builder, ndim)
    for i in ndarray.shape[::-1]:
        builder.PrependInt64(i)
    shape = builder.EndVector(ndim)

    if is_stringlike(ndarray):
        strs = []
        for s in ndarray.flat[::-1]:
            strs.append(builder.CreateString(s))
        Tensor.TensorStartStringValVector(builder, ndarray.size)
        for s in strs:
            builder.PrependUOffsetTRelative(s)
        strings = builder.EndVector(ndarray.size)
    else:
        n = ndarray.nbytes
        Tensor.TensorStartDataVector(builder, n)
        # Reserve n bytes in the builder buffer, then memmove the array's
        # bytes straight in. The original author measured the numpy
        # frombuffer-copy alternative at ~3x slower, hence the ctypes path.
        builder.head = builder.head - n
        if not ndarray.flags['C_CONTIGUOUS']:
            # memmove needs a contiguous source buffer.
            ndarray = np.ascontiguousarray(ndarray)
        dst = (ctypes.c_char * n).from_buffer(builder.Bytes, builder.head)
        raw_data = ndarray.__array_interface__['data'][0]
        src = ctypes.cast(raw_data, ctypes.POINTER(ctypes.c_char))
        ctypes.memmove(dst, src, n)
        data = builder.EndVector(n)

    Tensor.TensorStart(builder)
    Tensor.TensorAddShape(builder, shape)
    if is_stringlike(ndarray):
        Tensor.TensorAddType(builder, Type.String)
        Tensor.TensorAddStringVal(builder, strings)
    else:
        Tensor.TensorAddType(builder, to_type(ndarray.dtype))
        Tensor.TensorAddData(builder, data)
    return Tensor.TensorEnd(builder)


def deserialize_tensor(buf):
    """Parse a serialized Tensor flatbuffer into a numpy array."""
    tensor = Tensor.Tensor.GetRootAsTensor(buf, 0)
    return tensor_to_np(tensor)


def tensor_to_np(tensor):
    """Convert a parsed Tensor table into a numpy array (zero-copy view
    for numeric types; object array of bytes for string tensors)."""
    shape = tensor.ShapeAsNumpy()
    t = t_to_np[tensor.Type()]

    if t == np.dtype('O'):
        strs = [tensor.StringVal(i) for i in range(tensor.StringValLength())]
        return np.array(strs).reshape(shape)
    else:
        return tensor.DataAsNumpy().view(t).reshape(shape)


class InferReq(object):
    """Plain-Python mirror of an InferRequest: config string, input tensors
    (numpy arrays), and the input/output name lists."""

    __slots__ = ["config", "input_tensors", "input_names", "output_names"]

    def __init__(self, config=None, input_tensors=None,
                 input_names=None, output_names=None):
        self.config = config or ""
        self.input_tensors = input_tensors or []
        self.input_names = input_names or []
        self.output_names = output_names or []

    @property
    def __dict__(self):
        # Slotted classes have no instance __dict__; synthesize one so
        # repr() and dict-style introspection keep working.
        return {
            s: getattr(self, s) for s in self.__slots__ if hasattr(self, s)
        }

    def __repr__(self):
        return repr(self.__dict__)


# NOTE(vish): should this automatically convert lists into numpy arrays?
def serialize_infer_request(req):
    """Serialize an InferReq into a Request flatbuffer (bytes)."""
    # Guess at the builder size from the payload plus fixed slack.
    nbytes = sum(i.nbytes for i in req.input_tensors)
    nbytes += sum(len(i) for i in req.input_names)
    nbytes += sum(len(i) for i in req.output_names)
    nbytes += len(req.config)
    nbytes += 1024
    builder = flatbuffers.Builder(nbytes)
    r = make_infer_request(builder, req)
    return output(builder, r)


def serialize_metadata_request():
    """Serialize an (empty) metadata request into a Request flatbuffer."""
    builder = flatbuffers.Builder(1024)
    r = make_metadata_request(builder)
    return output(builder, r)


# NOTE(vish): if this is going to be passed to a backend, it may be
# more efficient to skip converting to numpy
def deserialize_request(buf):
    """Parse a Request flatbuffer into an InferReq, or None for other
    request types (e.g. metadata requests carry no payload).

    NOTE(review): config and the name lists come back as bytes, not str —
    confirm callers expect that.
    """
    req = Request.Request.GetRootAsRequest(buf, 0)
    if req.ReqType() == Req.InferRequest:
        r = InferRequest.InferRequest()
        r.Init(req.Req().Bytes, req.Req().Pos)

        ir = InferReq()
        ir.config = r.Config()
        ir.input_tensors = [tensor_to_np(r.InputTensors(i))
                            for i in range(r.InputTensorsLength())]
        ir.input_names = [r.InputNames(i)
                          for i in range(r.InputNamesLength())]
        ir.output_names = [r.OutputNames(i)
                           for i in range(r.OutputNamesLength())]
        return ir
    else:
        return None


def make_infer_request(builder, req):
    """Write an InferReq into builder as a Request table; return its offset."""
    config_fb = builder.CreateString(req.config)

    # Vectors are prepended, so iterate sources reversed to preserve order.
    tensors = [make_tensor(builder, t) for t in req.input_tensors[::-1]]
    InferRequest.InferRequestStartInputTensorsVector(builder, len(tensors))
    for t in tensors:
        builder.PrependUOffsetTRelative(t)
    inputs_fb = builder.EndVector(len(tensors))

    strs = [builder.CreateString(s) for s in req.input_names[::-1]]
    InferRequest.InferRequestStartInputNamesVector(builder, len(strs))
    for s in strs:
        builder.PrependUOffsetTRelative(s)
    input_names_fb = builder.EndVector(len(strs))

    strs = [builder.CreateString(s) for s in req.output_names[::-1]]
    InferRequest.InferRequestStartOutputNamesVector(builder, len(strs))
    for s in strs:
        builder.PrependUOffsetTRelative(s)
    output_names_fb = builder.EndVector(len(strs))

    InferRequest.InferRequestStart(builder)
    InferRequest.InferRequestAddConfig(builder, config_fb)
    InferRequest.InferRequestAddInputTensors(builder, inputs_fb)
    InferRequest.InferRequestAddInputNames(builder, input_names_fb)
    InferRequest.InferRequestAddOutputNames(builder, output_names_fb)
    infer_req = InferRequest.InferRequestEnd(builder)

    Request.RequestStart(builder)
    Request.RequestAddReqType(builder, Req.InferRequest)
    Request.RequestAddReq(builder, infer_req)

    return Request.RequestEnd(builder)


def make_metadata_request(builder):
    """Write an empty MetadataRequest wrapped in a Request; return its offset."""
    MetadataRequest.MetadataRequestStart(builder)
    metadata_req = MetadataRequest.MetadataRequestEnd(builder)

    Request.RequestStart(builder)
    Request.RequestAddReqType(builder, Req.MetadataRequest)
    Request.RequestAddReq(builder, metadata_req)
    return Request.RequestEnd(builder)


def write(buf, fname):
    """Write buf to fname, appending a .dat suffix if fname has none."""
    if "." not in fname:
        fname += ".dat"
    with open(fname, "wb") as f:
        f.write(buf)


def read(fname):
    """Read fname and return its contents as a mutable bytearray."""
    with open(fname, "rb") as f:
        buf = bytearray(f.read())
    return buf


def save_tensor(ndarray, fname):
    """Serialize ndarray as a Tensor flatbuffer and write it to fname."""
    write(serialize_tensor(ndarray), fname)


def load_tensor(fname):
    """Load a Tensor flatbuffer from fname as a numpy array."""
    return deserialize_tensor(read(fname))


def save_request(ir, fname):
    """Write ir (an InferReq) to fname; None writes a metadata request."""
    if ir is not None:
        buf = serialize_infer_request(ir)
    else:
        buf = serialize_metadata_request()
    write(buf, fname)


def load_request(fname):
    """Load a Request flatbuffer from fname (see deserialize_request)."""
    return deserialize_request(read(fname))


def make_error(builder, error):
    """Write an error dict ({"code": int, "message": str}, both optional)
    into builder as an Error table; return its offset.

    BUG FIX: the original called Error.AddCode(...) / Error.AddMessage(...),
    which do not exist in the generated module and omitted the builder;
    the generated helpers are ErrorAddCode(builder, ...) and
    ErrorAddMessage(builder, ...), matching every other generated module.
    """
    # Strings must be created before ErrorStart opens the table.
    m = builder.CreateString(error["message"]) if "message" in error else None
    Error.ErrorStart(builder)
    if "code" in error:
        Error.ErrorAddCode(builder, error["code"])
    if m is not None:
        Error.ErrorAddMessage(builder, m)
    return Error.ErrorEnd(builder)


def make_infer_response(builder, output_tensors, output_errors):
    """Write tensors and error dicts into builder as an InferResponse table;
    return its offset.

    BUG FIX: the original sized the errors vector with len(tensors) and
    prepended the stale tensor offset ``t`` inside the errors loop, which
    corrupted the errors vector whenever it was non-empty.
    """
    tensors = [make_tensor(builder, t) for t in output_tensors[::-1]]
    InferResponse.InferResponseStartOutputTensorsVector(builder, len(tensors))
    for t in tensors:
        builder.PrependUOffsetTRelative(t)
    outputs_fb = builder.EndVector(len(tensors))

    errors = [make_error(builder, e) for e in output_errors[::-1]]
    InferResponse.InferResponseStartErrorsVector(builder, len(errors))
    for e in errors:
        builder.PrependUOffsetTRelative(e)
    errors_fb = builder.EndVector(len(errors))
    InferResponse.InferResponseStart(builder)
    InferResponse.InferResponseAddOutputTensors(builder, outputs_fb)
    InferResponse.InferResponseAddErrors(builder, errors_fb)
    return InferResponse.InferResponseEnd(builder)


def get_string(builder, d, k):
    """CreateString d[k] in builder, or return None if k is absent/None."""
    val = d.get(k)
    if val is None:
        return None
    return builder.CreateString(val)
def make_io_metadata(builder, metadata):
    """Write one input/output description dict ("name", "description",
    "shape", "type") into builder as an IOMetadata table; return its offset.

    BUG FIX: the original passed name_fb to IOMetadataAddName even when
    get_string returned None (the description field WAS guarded); a None
    offset crashes the generated prepend helper. Now both optional strings
    are guarded consistently.
    """
    name_fb = get_string(builder, metadata, "name")
    description_fb = get_string(builder, metadata, "description")
    # Builders prepend, so the shape is written in reverse to keep order.
    ndim = len(metadata["shape"])
    IOMetadata.IOMetadataStartShapeVector(builder, ndim)
    for i in metadata["shape"][::-1]:
        builder.PrependInt64(i)
    shape_fb = builder.EndVector(ndim)
    IOMetadata.IOMetadataStart(builder)
    if name_fb is not None:
        IOMetadata.IOMetadataAddName(builder, name_fb)
    if description_fb is not None:
        IOMetadata.IOMetadataAddDescription(builder, description_fb)
    IOMetadata.IOMetadataAddType(builder, to_type(metadata["type"]))
    IOMetadata.IOMetadataAddShape(builder, shape_fb)
    return IOMetadata.IOMetadataEnd(builder)


def make_metadata_response(builder, metadata):
    """Write a model-metadata dict (optional "name"/"version"/"server"/
    "description" plus "inputs"/"outputs" lists of IO dicts) into builder
    as a MetadataResponse table; return its offset."""
    name_fb = get_string(builder, metadata, "name")
    version_fb = get_string(builder, metadata, "version")
    server_fb = get_string(builder, metadata, "server")
    description_fb = get_string(builder, metadata, "description")
    inputs = [make_io_metadata(builder, m) for m in metadata["inputs"]]
    outputs = [make_io_metadata(builder, m) for m in metadata["outputs"]]
    mr = MetadataResponse
    mr.MetadataResponseStartInputsVector(builder, len(inputs))
    for i in inputs:
        builder.PrependUOffsetTRelative(i)
    inputs_fb = builder.EndVector(len(inputs))
    mr.MetadataResponseStartOutputsVector(builder, len(outputs))
    for o in outputs:
        builder.PrependUOffsetTRelative(o)
    outputs_fb = builder.EndVector(len(outputs))
    mr.MetadataResponseStart(builder)
    if name_fb is not None:
        mr.MetadataResponseAddName(builder, name_fb)
    if version_fb is not None:
        mr.MetadataResponseAddVersion(builder, version_fb)
    if server_fb is not None:
        mr.MetadataResponseAddServer(builder, server_fb)
    if description_fb is not None:
        mr.MetadataResponseAddDescription(builder, description_fb)
    mr.MetadataResponseAddInputs(builder, inputs_fb)
    mr.MetadataResponseAddOutputs(builder, outputs_fb)
    return mr.MetadataResponseEnd(builder)


def serialize_infer_response(output_tensors, output_errors=None):
    """Serialize tensors plus error dicts into an InferResponse flatbuffer.

    BUG FIX: the size estimate read ``i.message``, but errors are dicts
    (make_error indexes error["message"]), so any non-empty error list
    raised AttributeError. Now sized via dict access, tolerating a missing
    "message" key the same way make_error does.
    """
    if output_tensors is None:
        output_tensors = []
    if output_errors is None:
        output_errors = []
    # Guess at the builder size from the payload plus fixed slack.
    nbytes = sum(i.nbytes for i in output_tensors)
    nbytes += sum(len(e.get("message", "")) + 8 for e in output_errors)
    nbytes += 1024
    builder = flatbuffers.Builder(nbytes)
    r = make_infer_response(builder, output_tensors, output_errors)
    return output(builder, r)


def serialize_metadata_response(metadata):
    """Serialize a model-metadata dict into a MetadataResponse flatbuffer."""
    builder = flatbuffers.Builder(4096)
    r = make_metadata_response(builder, metadata)
    return output(builder, r)


def error_to_dict(err):
    """Convert a parsed Error table into a {"code", "message"} dict."""
    return {"code": err.Code(), "message": err.Message()}


def deserialize_infer_response(buf):
    """Parse an InferResponse flatbuffer into (tensors, error dicts)."""
    r = InferResponse.InferResponse.GetRootAsInferResponse(buf, 0)
    output_tensors = [tensor_to_np(r.OutputTensors(i))
                      for i in range(r.OutputTensorsLength())]
    output_errors = [error_to_dict(r.Errors(i))
                     for i in range(r.ErrorsLength())]
    return output_tensors, output_errors


def deserialize_metadata_response(buf):
    """Parse a MetadataResponse flatbuffer; returns the raw table object."""
    # just give back the object
    return MetadataResponse.MetadataResponse.GetRootAsMetadataResponse(buf, 0)


def save_infer_response(output_tensors, output_errors, fname):
    """Serialize an infer response and write it to fname."""
    write(serialize_infer_response(output_tensors, output_errors), fname)


def save_metadata_response(metadata, fname):
    """Serialize a metadata response and write it to fname."""
    write(serialize_metadata_response(metadata), fname)


def load_infer_response(fname):
    """Load an InferResponse flatbuffer from fname (tensors, errors)."""
    return deserialize_infer_response(read(fname))


def load_metadata_response(fname):
    """Load a MetadataResponse flatbuffer table object from fname."""
    return deserialize_metadata_response(read(fname))
--------------------------------------------------------------------------------