├── .babelrc
├── .coveragerc
├── .editorconfig
├── .eslintrc
├── .gitignore
├── .postcssrc
├── .python-version
├── .pyup.yml
├── .travis.yml
├── CHANGES.txt
├── CONTRIBUTING.rst
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.rst
├── docs
└── _static
│ ├── aiohttp-icon-128x128.png
│ ├── list_models.png
│ └── one_model.png
├── examples
├── README.rst
├── multiple
│ ├── boston_gbr
│ │ ├── boston_gbr.pkl
│ │ ├── boston_gbr.py
│ │ └── boston_schema.json
│ ├── diabetes
│ │ ├── diabetes_lasso.json
│ │ ├── diabetes_lasso.pkl
│ │ └── diabetes_lasso.py
│ ├── instagram
│ │ ├── instagram.json
│ │ ├── instagram_inf.py
│ │ ├── instgram_rf.pkl
│ │ └── labelled_1000_inclprivate.csv
│ ├── iris_lr
│ │ ├── iris_lr.json
│ │ ├── iris_lr.pkl
│ │ └── iris_lr.py
│ ├── models.yml
│ ├── toxic_lr
│ │ ├── toxic_lr.json
│ │ ├── toxic_lr.pkl
│ │ └── toxic_lr.py
│ └── wine_rf
│ │ ├── wine_quality_rf.pkl
│ │ ├── wine_quality_schema.json
│ │ └── wine_rf.py
└── simple
│ ├── boston_gbr.pkl
│ ├── boston_gbr.py
│ ├── boston_schema.json
│ └── models.yml
├── mlserve
├── __init__.py
├── __main__.py
├── consts.py
├── exceptions.py
├── handlers.py
├── loaders.py
├── main.py
├── middleware.py
├── schema_builder.py
├── server_utils.py
├── static
│ ├── .gitkeep
│ ├── index.html
│ ├── ui.6157e984.js
│ ├── ui.899bd01b.map
│ └── ui.9042ff4d.css
├── stats.py
├── utils.py
└── worker.py
├── package.json
├── postcss.config.js
├── pyup.yml
├── requirements-dev.txt
├── setup.py
├── tests
├── conftest.py
├── data
│ ├── Auto.csv
│ ├── Credit.csv
│ ├── auto_schema.json
│ ├── boston.json
│ ├── boston_gbr.joblib
│ ├── boston_gbr.pkl
│ └── credit_schema.json
├── test_handlers.py
├── test_loaders.py
├── test_schema_builder.py
├── test_stats.py
└── test_utitls.py
├── ui
├── App
│ ├── AggStats.js
│ ├── AppHeader.js
│ ├── Home.js
│ ├── Model.js
│ ├── ModelList.js
│ └── ModelsContext.js
├── index.html
└── index.js
└── yarn.lock
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | [
4 | "env",
5 | {
6 | "useBuiltIns": true
7 | }
8 | ],
9 | "react"
10 | ],
11 | "plugins": [
12 | "transform-class-properties"
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | source = mlserve
4 | omit = site-packages, .tox
5 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # This file is for unifying the coding style for different editors and IDEs
2 | # editorconfig.org
3 |
4 | root = true
5 |
6 | [*]
7 | charset = utf-8
8 | end_of_line = lf
9 | indent_size = 2
10 | indent_style = space
11 | insert_final_newline = true
12 | trim_trailing_whitespace = true
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "env": {
4 | "es6": true,
5 | "browser": true
6 | },
7 | "extends": [
8 | "airbnb-base",
9 | "prettier"
10 | ],
11 | "plugins": [
12 | "json",
13 | "react",
14 | "prettier"
15 | ],
16 | "rules": {
17 | "no-unused-vars": [
18 | 0,
19 | {
20 | "varsIgnorePattern": "^h$"
21 | }
22 | ],
23 | "prettier/prettier": "error"
24 | },
25 | "parser": "babel-eslint",
26 | "parserOptions": {
27 | "ecmaVersion": 6,
28 | "sourceType": "module",
29 | "ecmaFeatures": {
30 | "jsx": true,
31 | "modules": true,
32 | "experimentalObjectRestSpread": true
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | pyvenv/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 |
26 | # PyInstaller
27 | # Usually these files are written by a python script from a template
28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest
30 | *.spec
31 |
32 | # Installer logs
33 | pip-log.txt
34 | pip-delete-this-directory.txt
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .coverage
40 | .cache
41 | .pytest_cache
42 | nosetests.xml
43 | coverage.xml
44 | cover
45 |
46 | # Translations
47 | *.mo
48 | *.pot
49 |
50 | # Django stuff:
51 | *.log
52 |
53 | # Sphinx documentation
54 | docs/_build/
55 |
56 | # PyBuilder
57 | target/
58 |
59 | # PyCharm
60 | .idea
61 | *.iml
62 | # rope
63 | *.swp
64 | .ropeproject
65 | tags
66 | node_modules/
67 |
68 | # mypy
69 | .mypy_cache/
70 |
--------------------------------------------------------------------------------
/.postcssrc:
--------------------------------------------------------------------------------
1 | {
2 | "modules": true,
3 | "plugins": {
4 | "autoprefixer": {
5 | "grid": true
6 | }
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.7.0
2 |
--------------------------------------------------------------------------------
/.pyup.yml:
--------------------------------------------------------------------------------
1 | label_prs: deps-update
2 | schedule: every week
3 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | sudo: required
3 |
4 |
5 | python:
6 | - '3.7-dev'
7 |
8 | install:
9 | - pip install --upgrade setuptools
10 | - pip install codecov
11 | - pip install -r requirements-dev.txt
12 |
13 | script:
14 | make cov_only
15 |
16 | after_success:
17 | codecov
18 |
--------------------------------------------------------------------------------
/CHANGES.txt:
--------------------------------------------------------------------------------
1 | CHANGES
2 | =======
3 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Setting Development Environment
5 | -------------------------------
6 |
7 | .. _GitHub: https://github.com/jettify/mlserve
8 |
9 | Thanks for your interest in contributing to ``mlserve``, there are multiple
10 | ways and places you can contribute, help on documentation and tests is very
11 | appreciated.
12 |
13 | To set up the development environment, first of all just clone the repository::
14 |
15 | $ git clone git@github.com:jettify/mlserve.git
16 |
17 | Create virtualenv with python3.7 (python 3.6 also supported). For example
18 | using *virtualenvwrapper* commands could look like::
19 |
20 | $ cd mlserve
21 | $ mkvirtualenv --python=`which python3.7` mlserve
22 |
23 |
24 | After that please install libraries required for development::
25 |
26 | $ pip install -r requirements-dev.txt
27 | $ pip install -e .
28 |
29 |
30 | Running Tests
31 | -------------
32 | Congratulations, you are ready to run the test suite::
33 |
34 | $ make cov
35 |
36 | To run an individual test use the following command::
37 |
38 | $ py.test -sv tests/test_utils.py -k test_name
39 |
40 |
41 |
42 | Reporting an Issue
43 | ------------------
44 | If you have found an issue with `mlserve` please do
45 | not hesitate to file an issue on the GitHub_ project. When filing your
46 | issue please make sure you can express the issue with a reproducible test
47 | case.
48 |
49 | When reporting an issue we also need as much information about your environment
50 | that you can include. We never know what information will be pertinent when
51 | trying narrow down the issue. Please include at least the following
52 | information:
53 |
54 | * Version of `mlserve` and `python`.
55 | * Versions of installed python libraries `pip freeze`.
56 | * Platform you're running on (OS X, Linux).
57 |
58 | .. _Docker: https://docs.docker.com/engine/installation/
59 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2018 Nikolay Novik
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
203 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include CHANGES.txt
3 | include README.rst
4 | graft mlserve
5 | global-exclude *.pyc *.swp
6 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Some simple testing tasks (sorry, UNIX only).
2 |
3 | FLAGS=
4 |
5 | flake: checkrst bandit pyroma
6 | flake8 mlserve tests examples setup.py demos
7 |
8 | test: flake
9 | py.test -s -v $(FLAGS) ./tests/
10 |
11 | vtest:
12 | py.test -s -v $(FLAGS) ./tests/
13 |
14 | checkrst:
15 | python setup.py check --restructuredtext
16 |
17 | bandit:
18 | bandit -r ./mlserve
19 |
20 | pyroma:
21 | pyroma -d .
22 |
23 | mypy:
24 | mypy mlserve --ignore-missing-imports --disallow-untyped-calls --no-site-packages --strict
25 |
26 | testloop:
27 | while true ; do \
28 | py.test -s -v $(FLAGS) ./tests/ ; \
29 | done
30 |
31 | cov cover coverage: flake checkrst
32 | py.test -s -v --cov-report term --cov-report html --cov mlserve ./tests
33 | @echo "open file://`pwd`/htmlcov/index.html"
34 |
35 | cov_only: flake
36 | py.test -s -v --cov-report term --cov-report html --cov mlserve ./tests
37 | @echo "open file://`pwd`/htmlcov/index.html"
38 |
39 | ci: flake mypy
40 | py.test -s -v --cov-report term --cov-report html --cov mlserve ./tests
41 | @echo "open file://`pwd`/htmlcov/index.html"
42 |
43 | clean:
44 | rm -rf `find . -name __pycache__`
45 | rm -f `find . -type f -name '*.py[co]' `
46 | rm -f `find . -type f -name '*~' `
47 | rm -f `find . -type f -name '.*~' `
48 | rm -f `find . -type f -name '@*' `
49 | rm -f `find . -type f -name '#*#' `
50 | rm -f `find . -type f -name '*.orig' `
51 | rm -f `find . -type f -name '*.rej' `
52 | rm -f .coverage
53 | rm -rf coverage
54 | rm -rf build
55 | rm -rf htmlcov
56 | rm -rf dist
57 |
58 | doc:
59 | make -C docs html
60 | @echo "open file://`pwd`/docs/_build/html/index.html"
61 |
62 | .PHONY: all flake test vtest cov clean doc ci
63 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | mlserve
2 | =======
3 | .. image:: https://travis-ci.com/ml-libs/mlserve.svg?branch=master
4 | :target: https://travis-ci.com/ml-libs/mlserve
5 | .. image:: https://codecov.io/gh/ml-libs/mlserve/branch/master/graph/badge.svg
6 | :target: https://codecov.io/gh/ml-libs/mlserve
7 | .. image:: https://api.codeclimate.com/v1/badges/1ff813d5cad2d702cbf1/maintainability
8 | :target: https://codeclimate.com/github/ml-libs/mlserve/maintainability
9 | :alt: Maintainability
10 | .. image:: https://img.shields.io/pypi/v/mlserve.svg
11 | :target: https://pypi.python.org/pypi/mlserve
12 |
13 | **mlserve** turns your python models into RESTful API, serves web page with
14 | form generated to match your input data.
15 |
16 | It may be useful if one wants to demonstrate created predictive model and
17 | quickly integrate into existing application. Additionally UI is provided for
18 | input data (based on training dataframe) and simple dashboard.
19 |
20 | Project is not complete but already usable, so there are no guarantees of API or UI
21 | backward compatibility.
22 |
23 | Online Demo
24 | ===========
25 |
26 | Several models deployed online using ``heroku.com/free`` free dynos.
27 | Free apps sleep automatically after 30 mins of inactivity so first request
28 | may take some time.
29 |
30 |
31 | * https://young-ridge-56019.herokuapp.com
32 |
33 | Full source code and instructions available here: https://github.com/ml-libs/mlserve-demo
34 |
35 | .. image:: https://raw.githubusercontent.com/ml-libs/mlserve/master/docs/_static/list_models.png
36 | :alt: mlserve models
37 |
38 | .. image:: https://raw.githubusercontent.com/ml-libs/mlserve/master/docs/_static/one_model.png
39 | :alt: one model
40 |
41 | Ideas
42 | -----
43 | **mlserve** is small and its design is based on the following ideas:
44 |
45 | - Simplicity and ease of use is primary objective.
46 | - Application consists of two processes: IO process that runs HTTP server
47 | and responsible for fetching and sending data, as well as serve UI, other
48 | process (worker) is doing CPU intensive work related to predictions
49 | calculations.
50 |
51 |
52 | Features
53 | ========
54 | * Model predictions serving via RESTful API endpoint.
55 | * Model predictions serving via generated UI.
56 | * Web page to simplify models usage.
57 | * Automatic UI generation to match your input data.
58 | * Simple dashboard for monitoring purposes.
59 |
60 |
61 | Installation
62 | ============
63 |
64 | Installation process is simple, just::
65 |
66 | $ pip install git+https://github.com/ml-libs/mlserve.git
67 |
68 | Example
69 | =======
70 |
71 | To deploy model just follow following simple steps:
72 |
73 | Save your model into pickle file:
74 |
75 | .. code:: python
76 |
77 | with open('boston_gbr.pkl', 'wb') as f:
78 | pickle.dump(clf, f)
79 |
80 | Use the `build_schema` function to build a UI representation of a pandas dataframe,
81 | and save it as a json file:
82 |
83 | .. code:: python
84 |
85 | import mlserve
86 |
87 | data_schema = mlserve.build_schema(df)
88 | with open('boston.json', 'w') as f:
89 | json.dump(data_schema, f)
90 |
91 | Create configuration file with following format::
92 |
93 | models:
94 | - name: "boston_regressor" # url friendly name
95 | description: "Boston GBR" # optional model description
96 | model_path: "boston_gbr.pkl" # path to your saved model
97 | data_schema_path: "boston.json" # path to data representation
98 | target: "target" # name of the target column
99 |
100 | Serve model::
101 |
102 | $ mlserve -c models.yaml
103 |
104 |
105 | That's it, the model is available through the REST API; you can test it with a curl command::
106 |
107 | $ curl --header "Content-Type: application/json" --request POST
108 | --data '[{"feature1": 1, "feature2": 2}]'
109 | http://127.0.0.1:9000/api/v1/models/boston_regressor/predict
110 |
111 |
112 | UI is available via http://127.0.0.1:9000
113 |
114 |
115 | Supported Frameworks
116 | ====================
117 | * Scikit-Learn
118 | * Keras (planning)
119 | * PyTorch (planning)
120 |
121 |
122 | Requirements
123 | ------------
124 |
125 | * Python_ 3.6+
126 | * aiohttp_
127 |
128 | .. _PEP492: https://www.python.org/dev/peps/pep-0492/
129 | .. _Python: https://www.python.org
130 | .. _aiohttp: https://github.com/aio-libs/aiohttp
131 | .. _asyncio: http://docs.python.org/3.6/library/asyncio.html
132 | .. _uvloop: https://github.com/MagicStack/uvloop
133 |
--------------------------------------------------------------------------------
/docs/_static/aiohttp-icon-128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/docs/_static/aiohttp-icon-128x128.png
--------------------------------------------------------------------------------
/docs/_static/list_models.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/docs/_static/list_models.png
--------------------------------------------------------------------------------
/docs/_static/one_model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/docs/_static/one_model.png
--------------------------------------------------------------------------------
/examples/README.rst:
--------------------------------------------------------------------------------
1 | Demos
2 | -----
3 | List of demos
4 |
--------------------------------------------------------------------------------
/examples/multiple/boston_gbr/boston_gbr.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/boston_gbr/boston_gbr.pkl
--------------------------------------------------------------------------------
/examples/multiple/boston_gbr/boston_gbr.py:
--------------------------------------------------------------------------------
# Train a Gradient Boosting regressor on the Boston housing dataset and
# export both the pickled model and a JSON data schema for mlserve.
import json
import numpy as np
import pandas as pd
import pickle
from sklearn import ensemble
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.metrics import mean_squared_error
from mlserve import build_schema

# Shuffle deterministically (fixed seed) so the train/test split is reproducible.
boston = datasets.load_boston()
X, y = shuffle(boston.data, boston.target, random_state=13)
X = X.astype(np.float32)

# Hold out the last 10% of rows as the test set.
offset = int(X.shape[0] * 0.9)
X_train, y_train = X[:offset], y[:offset]
X_test, y_test = X[offset:], y[offset:]

params = {
    'n_estimators': 500,
    'max_depth': 4,
    'min_samples_split': 2,
    'learning_rate': 0.01,
    'loss': 'ls',
}
clf = ensemble.GradientBoostingRegressor(**params)

clf.fit(X_train, y_train)

# Reuse the computed predictions instead of calling predict() a second time.
y_pred = clf.predict(X_test)
mse = mean_squared_error(y_test, y_pred)
print('MSE: %.4f' % mse)


# Full dataframe (features + target column) used to derive the UI schema.
columns = list(boston.feature_names) + ['target']
data = np.c_[boston.data, boston.target]
df = pd.DataFrame(data=data, columns=columns)


model_file = 'boston_gbr.pkl'
print('Writing model')
with open(model_file, 'wb') as f:
    pickle.dump(clf, f)


print('Writing dataset schema')
schema = build_schema(df)
# Text mode ('w'): json.dump writes str, not bytes.
with open('boston_schema.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/multiple/boston_gbr/boston_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "AGE": 65.2,
4 | "B": 396.9,
5 | "CHAS": 0.0,
6 | "CRIM": 0.00632,
7 | "DIS": 4.09,
8 | "INDUS": 2.31,
9 | "LSTAT": 4.98,
10 | "NOX": 0.538,
11 | "PTRATIO": 15.3,
12 | "RAD": 1.0,
13 | "RM": 6.575,
14 | "TAX": 296.0,
15 | "ZN": 18.0,
16 | "target": 24.0
17 | },
18 | "schema": {
19 | "properties": {
20 | "AGE": {
21 | "type": [
22 | "number"
23 | ]
24 | },
25 | "B": {
26 | "type": [
27 | "number"
28 | ]
29 | },
30 | "CHAS": {
31 | "type": [
32 | "number"
33 | ]
34 | },
35 | "CRIM": {
36 | "type": [
37 | "number"
38 | ]
39 | },
40 | "DIS": {
41 | "type": [
42 | "number"
43 | ]
44 | },
45 | "INDUS": {
46 | "type": [
47 | "number"
48 | ]
49 | },
50 | "LSTAT": {
51 | "type": [
52 | "number"
53 | ]
54 | },
55 | "NOX": {
56 | "type": [
57 | "number"
58 | ]
59 | },
60 | "PTRATIO": {
61 | "type": [
62 | "number"
63 | ]
64 | },
65 | "RAD": {
66 | "type": [
67 | "number"
68 | ]
69 | },
70 | "RM": {
71 | "type": [
72 | "number"
73 | ]
74 | },
75 | "TAX": {
76 | "type": [
77 | "number"
78 | ]
79 | },
80 | "ZN": {
81 | "type": [
82 | "number"
83 | ]
84 | },
85 | "target": {
86 | "type": [
87 | "number"
88 | ]
89 | }
90 | },
91 | "required": [
92 | "CRIM",
93 | "ZN",
94 | "INDUS",
95 | "CHAS",
96 | "NOX",
97 | "RM",
98 | "AGE",
99 | "DIS",
100 | "RAD",
101 | "TAX",
102 | "PTRATIO",
103 | "B",
104 | "LSTAT",
105 | "target"
106 | ],
107 | "type": "object"
108 | },
109 | "ui_schema": {}
110 | }
111 |
--------------------------------------------------------------------------------
/examples/multiple/diabetes/diabetes_lasso.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "age": 0.0380759064,
4 | "bmi": 0.0616962065,
5 | "bp": 0.021872355,
6 | "s1": -0.0442234984,
7 | "s2": -0.0348207628,
8 | "s3": -0.0434008457,
9 | "s4": -0.002592262,
10 | "s5": 0.0199084209,
11 | "s6": -0.0176461252,
12 | "sex": 0.0506801187,
13 | "target": 151.0
14 | },
15 | "schema": {
16 | "properties": {
17 | "age": {
18 | "type": [
19 | "number"
20 | ]
21 | },
22 | "bmi": {
23 | "type": [
24 | "number"
25 | ]
26 | },
27 | "bp": {
28 | "type": [
29 | "number"
30 | ]
31 | },
32 | "s1": {
33 | "type": [
34 | "number"
35 | ]
36 | },
37 | "s2": {
38 | "type": [
39 | "number"
40 | ]
41 | },
42 | "s3": {
43 | "type": [
44 | "number"
45 | ]
46 | },
47 | "s4": {
48 | "type": [
49 | "number"
50 | ]
51 | },
52 | "s5": {
53 | "type": [
54 | "number"
55 | ]
56 | },
57 | "s6": {
58 | "type": [
59 | "number"
60 | ]
61 | },
62 | "sex": {
63 | "type": [
64 | "number"
65 | ]
66 | },
67 | "target": {
68 | "type": [
69 | "number"
70 | ]
71 | }
72 | },
73 | "required": [
74 | "age",
75 | "sex",
76 | "bmi",
77 | "bp",
78 | "s1",
79 | "s2",
80 | "s3",
81 | "s4",
82 | "s5",
83 | "s6",
84 | "target"
85 | ],
86 | "type": "object"
87 | },
88 | "ui_schema": {}
89 | }
--------------------------------------------------------------------------------
/examples/multiple/diabetes/diabetes_lasso.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/diabetes/diabetes_lasso.pkl
--------------------------------------------------------------------------------
/examples/multiple/diabetes/diabetes_lasso.py:
--------------------------------------------------------------------------------
import pickle
import json

import numpy as np
import pandas as pd
from sklearn import datasets
from sklearn.linear_model import Lasso
from sklearn.model_selection import GridSearchCV
from mlserve import build_schema


# Train a Lasso regressor on (a subset of) the diabetes dataset and emit
# the two artifacts mlserve needs: a pickled model and a JSON data schema.
diabetes = datasets.load_diabetes()
X = diabetes.data[:150]
y = diabetes.target[:150]

lasso = Lasso(random_state=0)
alphas = np.logspace(-4, -0.5, 30)

tuned_parameters = [{'alpha': alphas}]
n_folds = 3

# Grid search over alpha; refit=True so ``clf`` is directly usable for
# prediction after fitting.
clf = GridSearchCV(lasso, tuned_parameters, cv=n_folds, refit=True)
clf.fit(X, y)

scores = clf.cv_results_['mean_test_score']

print(scores)


# Build the schema from the full dataset (features + target column).
columns = list(diabetes.feature_names) + ['target']
data = np.c_[diabetes.data, diabetes.target]
df = pd.DataFrame(data=data, columns=columns)


print('Writing model')
with open('diabetes_lasso.pkl', 'wb') as f:
    pickle.dump(clf, f)


print('Writing dataset schema')
schema = build_schema(df)
with open('diabetes_lasso.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/multiple/instagram/instagram.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "Number.of.followers": 177,
4 | "Number.of.people.they.follow": 198,
5 | "Number.of.posts": 0,
6 | "Private.account": true,
7 | "has_profile_picture": true,
8 | "rating": false
9 | },
10 | "schema": {
11 | "properties": {
12 | "Number.of.followers": {
13 | "type": [
14 | "integer"
15 | ]
16 | },
17 | "Number.of.people.they.follow": {
18 | "type": [
19 | "integer"
20 | ]
21 | },
22 | "Number.of.posts": {
23 | "type": [
24 | "integer"
25 | ]
26 | },
27 | "Private.account": {
28 | "type": [
29 | "boolean"
30 | ]
31 | },
32 | "has_profile_picture": {
33 | "type": [
34 | "boolean"
35 | ]
36 | },
37 | "rating": {
38 | "type": [
39 | "boolean"
40 | ]
41 | }
42 | },
43 | "required": [
44 | "Number.of.posts",
45 | "Number.of.people.they.follow",
46 | "Number.of.followers",
47 | "has_profile_picture",
48 | "Private.account",
49 | "rating"
50 | ],
51 | "type": "object"
52 | },
53 | "ui_schema": {
54 | "Number.of.followers": {},
55 | "Number.of.people.they.follow": {},
56 | "Number.of.posts": {},
57 | "Private.account": {"ui:widget": "radio"},
58 | "has_profile_picture": {"ui:widget": "radio"},
59 | "rating": {}
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/examples/multiple/instagram/instagram_inf.py:
--------------------------------------------------------------------------------
import json
import pandas as pd
import cloudpickle
from sklearn.ensemble import RandomForestClassifier
from mlserve import build_schema


# Feature columns used for training; ``rating`` is the label column.
columns = [
    'Number.of.posts',
    'Number.of.people.they.follow',
    'Number.of.followers',
    'has_profile_picture',
    'Private.account']
target_name = 'rating'

# Explicit dtypes so the schema builder emits integer/boolean JSON types
# instead of generic numbers.
dtypes = {
    'Number.of.posts': 'uint32',
    'Number.of.people.they.follow': 'uint32',
    'Number.of.followers': 'uint32',
    'has_profile_picture': 'bool',
    'Private.account': 'bool',
}

data = pd.read_csv('labelled_1000_inclprivate.csv', dtype=dtypes)

X_train = data[columns]
y_train = data[target_name]
# Features + label kept together for schema generation below.
original = pd.concat([X_train, y_train], axis=1)

rfc = RandomForestClassifier(n_estimators=100)
rfc.fit(X_train, y_train)
print(rfc.predict_proba(X_train))


print('Writing model')
# NOTE: the 'instgram' spelling matches the path referenced in models.yml.
with open('instgram_rf.pkl', 'wb') as f:
    cloudpickle.dump(rfc, f)


print('Writing dataset schema')
schema = build_schema(original)
with open('instagram.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/multiple/instagram/instgram_rf.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/instagram/instgram_rf.pkl
--------------------------------------------------------------------------------
/examples/multiple/iris_lr/iris_lr.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "petal length (cm)": 1.4,
4 | "petal width (cm)": 0.2,
5 | "sepal length (cm)": 5.1,
6 | "sepal width (cm)": 3.5,
7 | "target": 0.0
8 | },
9 | "schema": {
10 | "properties": {
11 | "petal length (cm)": {
12 | "type": [
13 | "number"
14 | ]
15 | },
16 | "petal width (cm)": {
17 | "type": [
18 | "number"
19 | ]
20 | },
21 | "sepal length (cm)": {
22 | "type": [
23 | "number"
24 | ]
25 | },
26 | "sepal width (cm)": {
27 | "type": [
28 | "number"
29 | ]
30 | },
31 | "target": {
32 | "type": [
33 | "number"
34 | ]
35 | }
36 | },
37 | "required": [
38 | "sepal length (cm)",
39 | "sepal width (cm)",
40 | "petal length (cm)",
41 | "petal width (cm)",
42 | "target"
43 | ],
44 | "type": "object"
45 | },
46 | "ui_schema": {}
47 | }
--------------------------------------------------------------------------------
/examples/multiple/iris_lr/iris_lr.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/iris_lr/iris_lr.pkl
--------------------------------------------------------------------------------
/examples/multiple/iris_lr/iris_lr.py:
--------------------------------------------------------------------------------
import pickle
import json
import numpy as np
import pandas as pd
from sklearn import linear_model, datasets
from sklearn.utils import shuffle
from mlserve import build_schema


# Train a logistic regression on the iris dataset and write the pickled
# model plus the JSON data schema consumed by mlserve.
iris = datasets.load_iris()

X, y = shuffle(iris.data, iris.target, random_state=13)

offset = int(X.shape[0] * 0.9)
X_train, y_train = X[:offset], y[:offset]
X_test, y_test = X[offset:], y[offset:]

logreg = linear_model.LogisticRegression(C=1e5)
# NOTE(review): the model is fit on the full dataset, so the train/test
# split above is unused — confirm whether that is intentional.
logreg.fit(X, y)

columns = list(iris.feature_names) + ['target']
data = np.c_[iris.data, iris.target]
df = pd.DataFrame(data=data, columns=columns)


print('Writing model')
with open('iris_lr.pkl', 'wb') as f:
    pickle.dump(logreg, f)


print('Writing dataset schema')
schema = build_schema(df)
with open('iris_lr.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/multiple/models.yml:
--------------------------------------------------------------------------------
1 | models:
2 | - name: "wine_quality"
3 | description: "Wine Quality Model"
4 | model_path: "wine_rf/wine_quality_rf.pkl"
5 | data_schema_path: "wine_rf/wine_quality_schema.json"
6 | target: "quality"
7 |
8 | - name: "boston_gradient_boosting_regressor"
9 | description: "Boston dataset with gradient boosting regressor"
10 | model_path: "boston_gbr/boston_gbr.pkl"
11 | data_schema_path: "boston_gbr/boston_schema.json"
12 | target: "target"
13 |
14 | - name: "iris_lr"
15 | description: "Iris dataset for logistic regression"
16 | model_path: "iris_lr/iris_lr.pkl"
17 | data_schema_path: "iris_lr/iris_lr.json"
18 | target: "target"
19 |
20 | - name: "students_lasso"
    description: "Students dataset with lasso regression"
22 | model_path: "students/students_lasso.pkl"
23 | data_schema_path: "students/students_lasso.json"
24 | target: "G3"
25 |
26 | - name: "diabetes_lasso"
27 | description: "Diabetes dataset with lasso"
28 | model_path: "diabetes/diabetes_lasso.pkl"
29 | data_schema_path: "diabetes/diabetes_lasso.json"
30 | target: "target"
31 |
32 | - name: "toxic_lr"
33 | description: "Toxic comments classification with TF/IDF and LogisticRegression"
34 | model_path: "toxic_lr/toxic_lr.pkl"
35 | data_schema_path: "toxic_lr/toxic_lr.json"
    target: ["identity_hate", "insult", "obscene", "severe_toxic", "toxic"]
37 |
38 | - name: "instagram_rf"
39 | description: "Classification fake user on instagram"
40 | model_path: "instagram/instgram_rf.pkl"
41 | data_schema_path: "instagram/instagram.json"
42 | target: ["rating"]
43 |
--------------------------------------------------------------------------------
/examples/multiple/toxic_lr/toxic_lr.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "comment_text": "Explanation\nWhy the edits made under my username Hardcore Metallica Fan were reverted? They weren't vandalisms, just closure on some GAs after I voted at New York Dolls FAC. And please don't remove the template from the talk page since I'm retired now.89.205.38.27",
4 | "identity_hate": 0,
5 | "insult": 0,
6 | "obscene": 0,
7 | "severe_toxic": 0,
8 | "toxic": 0
9 | },
10 | "schema": {
11 | "properties": {
12 | "comment_text": {
13 | "type": [
14 | "string"
15 | ]
16 | },
17 | "identity_hate": {
18 | "type": [
19 | "integer"
20 | ]
21 | },
22 | "insult": {
23 | "type": [
24 | "integer"
25 | ]
26 | },
27 | "obscene": {
28 | "type": [
29 | "integer"
30 | ]
31 | },
32 | "severe_toxic": {
33 | "type": [
34 | "integer"
35 | ]
36 | }
37 | },
38 | "required": [
39 | "comment_text",
40 | "severe_toxic",
41 | "obscene",
42 | "insult",
43 | "identity_hate"
44 | ],
45 | "type": "object"
46 | },
47 | "ui_schema": {
48 | "comment_text": {
49 | "ui:widget": "textarea",
50 | "ui:options": {
51 | "rows": 10
52 | }
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/examples/multiple/toxic_lr/toxic_lr.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/toxic_lr/toxic_lr.pkl
--------------------------------------------------------------------------------
/examples/multiple/toxic_lr/toxic_lr.py:
--------------------------------------------------------------------------------
1 | import json
2 | import pandas as pd
3 | import numpy as np
4 |
5 | import cloudpickle
6 | from sklearn.base import BaseEstimator, TransformerMixin
7 | from sklearn.feature_extraction.text import TfidfVectorizer
8 | from sklearn.linear_model import LogisticRegression
9 | from sklearn.model_selection import cross_val_score
10 | from sklearn.multioutput import MultiOutputClassifier
11 | from sklearn.pipeline import Pipeline
12 |
13 | from mlserve import build_schema
14 |
15 |
def read_data(dataset_path):
    """Load the toxic-comments CSV and split it into text and label frames.

    Returns a pair ``(text, targets)`` where ``text`` is a one-column
    DataFrame (``comment_text``) and ``targets`` holds the five label
    columns. Missing values are replaced with a single space.
    """
    label_columns = [
        'toxic', 'severe_toxic', 'obscene', 'insult', 'identity_hate',
    ]
    frame = pd.read_csv(dataset_path).fillna(' ')
    return frame[['comment_text']], frame[label_columns]
23 |
24 |
class ColumnSelector(BaseEstimator, TransformerMixin):
    """Pipeline step that selects a single column from a DataFrame."""

    def __init__(self, key):
        # key: name of the column extracted by ``transform``.
        self.key = key

    def fit(self, x, y=None):
        # Stateless transformer; nothing to learn.
        return self

    def transform(self, df):
        return df[self.key]
35 |
36 |
dataset_path = 'data/train.csv'
train, targets = read_data(dataset_path)
# Keep text + labels together for schema generation below.
original = pd.concat([train, targets], axis=1)

seed = 1234
word_vectorizer = TfidfVectorizer(
    sublinear_tf=True,
    strip_accents='unicode',
    analyzer='word',
    token_pattern=r'\w{1,}',
    stop_words='english',
    ngram_range=(1, 1),
    max_features=10000,
)

# One logistic regression per label column (multi-label classification).
logistic = LogisticRegression(C=0.1, solver='sag', random_state=seed)
classifier = MultiOutputClassifier(logistic)

pipeline = Pipeline(steps=[
    ('selector', ColumnSelector(key='comment_text')),
    ('word_tfidf', word_vectorizer),
    ('logistic', classifier)
])


pipeline.fit(train, targets)

scores = cross_val_score(
    pipeline,
    train,
    targets,
    cv=5,
    scoring='roc_auc')

score = np.mean(scores)
print(score)


print('Writing model')
# cloudpickle (not pickle) so the locally-defined ColumnSelector class
# serializes with the pipeline.
with open('toxic_lr.pkl', 'wb') as f:
    cloudpickle.dump(pipeline, f)


print('Writing dataset schema')
schema = build_schema(original)
with open('toxic_lr.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
84 |
--------------------------------------------------------------------------------
/examples/multiple/wine_rf/wine_quality_rf.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/multiple/wine_rf/wine_quality_rf.pkl
--------------------------------------------------------------------------------
/examples/multiple/wine_rf/wine_quality_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "alcohol": 9.4,
4 | "chlorides": 0.076,
5 | "citric acid": 0.0,
6 | "density": 0.9978,
7 | "fixed acidity": 7.4,
8 | "free sulfur dioxide": 11.0,
9 | "pH": 3.51,
10 | "quality": 5.0,
11 | "residual sugar": 1.9,
12 | "sulphates": 0.56,
13 | "total sulfur dioxide": 34.0,
14 | "volatile acidity": 0.7
15 | },
16 | "schema": {
17 | "properties": {
18 | "alcohol": {
19 | "type": [
20 | "number"
21 | ]
22 | },
23 | "chlorides": {
24 | "type": [
25 | "number"
26 | ]
27 | },
28 | "citric acid": {
29 | "type": [
30 | "number"
31 | ]
32 | },
33 | "density": {
34 | "type": [
35 | "number"
36 | ]
37 | },
38 | "fixed acidity": {
39 | "type": [
40 | "number"
41 | ]
42 | },
43 | "free sulfur dioxide": {
44 | "type": [
45 | "number"
46 | ]
47 | },
48 | "pH": {
49 | "type": [
50 | "number"
51 | ]
52 | },
53 | "quality": {
54 | "type": [
55 | "integer"
56 | ]
57 | },
58 | "residual sugar": {
59 | "type": [
60 | "number"
61 | ]
62 | },
63 | "sulphates": {
64 | "type": [
65 | "number"
66 | ]
67 | },
68 | "total sulfur dioxide": {
69 | "type": [
70 | "number"
71 | ]
72 | },
73 | "volatile acidity": {
74 | "type": [
75 | "number"
76 | ]
77 | }
78 | },
79 | "required": [
80 | "fixed acidity",
81 | "volatile acidity",
82 | "citric acid",
83 | "residual sugar",
84 | "chlorides",
85 | "free sulfur dioxide",
86 | "total sulfur dioxide",
87 | "density",
88 | "pH",
89 | "sulphates",
90 | "alcohol",
91 | "quality"
92 | ],
93 | "type": "object"
94 | },
95 | "ui_schema": {}
96 | }
--------------------------------------------------------------------------------
/examples/multiple/wine_rf/wine_rf.py:
--------------------------------------------------------------------------------
import pandas as pd

import mlserve
import pickle
import json
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import make_pipeline
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import mean_squared_error, r2_score


# Red wine quality dataset (UCI mirror); downloaded on every run.
dataset_url = (
    'http://mlr.cs.umass.edu/'
    'ml/machine-learning-databases/'
    'wine-quality/winequality-red.csv'
)
data = pd.read_csv(dataset_url, sep=';')


y = data.quality
X = data.drop('quality', axis=1)
# stratify=y keeps the quality-class distribution in both splits.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=123, stratify=y
)

pipeline = make_pipeline(
    StandardScaler(), RandomForestRegressor(n_estimators=100)
)

hyperparameters = {
    'randomforestregressor__max_features': ['auto', 'sqrt', 'log2'],
    'randomforestregressor__max_depth': [None, 5],
}

clf = GridSearchCV(pipeline, hyperparameters, cv=5)
clf.fit(X_train, y_train)

# Report held-out performance before persisting the model.
pred = clf.predict(X_test)
print(r2_score(y_test, pred))
print(mean_squared_error(y_test, pred))

model_file = 'wine_quality_rf.pkl'
print('Writing model')
with open(model_file, 'wb') as f:
    pickle.dump(clf, f)


print('Writing dataset schema')
schema = mlserve.build_schema(data)
with open('wine_quality_schema.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/simple/boston_gbr.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/examples/simple/boston_gbr.pkl
--------------------------------------------------------------------------------
/examples/simple/boston_gbr.py:
--------------------------------------------------------------------------------
import json
import numpy as np
import pandas as pd
import pickle
from sklearn import ensemble
from sklearn import datasets
from sklearn.utils import shuffle
from sklearn.metrics import mean_squared_error
from mlserve import build_schema

# Train a gradient boosting regressor on the Boston housing dataset and
# write the pickled model plus the JSON data schema used by mlserve.
boston = datasets.load_boston()
X, y = shuffle(boston.data, boston.target, random_state=13)
X = X.astype(np.float32)
offset = int(X.shape[0] * 0.9)
X_train, y_train = X[:offset], y[:offset]
X_test, y_test = X[offset:], y[offset:]

params = {
    'n_estimators': 500,
    'max_depth': 4,
    'min_samples_split': 2,
    'learning_rate': 0.01,
    'loss': 'ls',
}
clf = ensemble.GradientBoostingRegressor(**params)

clf.fit(X_train, y_train)

# NOTE(review): ``y_pred`` is unused; the mse line recomputes
# clf.predict(X_test) instead of reusing it.
y_pred = clf.predict(X_test)
mse = mean_squared_error(y_test, clf.predict(X_test))
print('MSE: %.4f' % mse)


columns = list(boston.feature_names) + ['target']
data = np.c_[boston.data, boston.target]
df = pd.DataFrame(data=data, columns=columns)


model_file = 'boston_gbr.pkl'
print('Writing model')
with open(model_file, 'wb') as f:
    pickle.dump(clf, f)


print('Writing dataset schema')
schema = build_schema(df)
with open('boston_schema.json', 'w') as f:
    json.dump(schema, f, indent=4, sort_keys=True)
--------------------------------------------------------------------------------
/examples/simple/boston_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "AGE": 65.2,
4 | "B": 396.9,
5 | "CHAS": 0.0,
6 | "CRIM": 0.00632,
7 | "DIS": 4.09,
8 | "INDUS": 2.31,
9 | "LSTAT": 4.98,
10 | "NOX": 0.538,
11 | "PTRATIO": 15.3,
12 | "RAD": 1.0,
13 | "RM": 6.575,
14 | "TAX": 296.0,
15 | "ZN": 18.0,
16 | "target": 24.0
17 | },
18 | "schema": {
19 | "properties": {
20 | "AGE": {
21 | "type": [
22 | "number"
23 | ]
24 | },
25 | "B": {
26 | "type": [
27 | "number"
28 | ]
29 | },
30 | "CHAS": {
31 | "type": [
32 | "number"
33 | ]
34 | },
35 | "CRIM": {
36 | "type": [
37 | "number"
38 | ]
39 | },
40 | "DIS": {
41 | "type": [
42 | "number"
43 | ]
44 | },
45 | "INDUS": {
46 | "type": [
47 | "number"
48 | ]
49 | },
50 | "LSTAT": {
51 | "type": [
52 | "number"
53 | ]
54 | },
55 | "NOX": {
56 | "type": [
57 | "number"
58 | ]
59 | },
60 | "PTRATIO": {
61 | "type": [
62 | "number"
63 | ]
64 | },
65 | "RAD": {
66 | "type": [
67 | "number"
68 | ]
69 | },
70 | "RM": {
71 | "type": [
72 | "number"
73 | ]
74 | },
75 | "TAX": {
76 | "type": [
77 | "number"
78 | ]
79 | },
80 | "ZN": {
81 | "type": [
82 | "number"
83 | ]
84 | },
85 | "target": {
86 | "type": [
87 | "number"
88 | ]
89 | }
90 | },
91 | "required": [
92 | "CRIM",
93 | "ZN",
94 | "INDUS",
95 | "CHAS",
96 | "NOX",
97 | "RM",
98 | "AGE",
99 | "DIS",
100 | "RAD",
101 | "TAX",
102 | "PTRATIO",
103 | "B",
104 | "LSTAT",
105 | "target"
106 | ],
107 | "type": "object"
108 | },
109 | "ui_schema": {}
110 | }
111 |
--------------------------------------------------------------------------------
/examples/simple/models.yml:
--------------------------------------------------------------------------------
1 | models:
2 | - name: "boston_gradient_boosting_regressor"
3 | description: "Boston dataset with gradient boosting regressor"
4 | model_path: "boston_gbr.pkl"
5 | data_schema_path: "boston_schema.json"
6 | target: "target"
7 |
--------------------------------------------------------------------------------
/mlserve/__init__.py:
--------------------------------------------------------------------------------
# Public package API: expose the schema builder used to generate the JSON
# data schema that drives the web UI forms.
from .schema_builder import build_schema


__all__ = ('build_schema',)
__version__ = '0.0.1a6'
6 |
--------------------------------------------------------------------------------
/mlserve/__main__.py:
--------------------------------------------------------------------------------
# Allow running the server via ``python -m mlserve``.
from mlserve.main import main


if __name__ == '__main__':
    main()
6 |
--------------------------------------------------------------------------------
/mlserve/consts.py:
--------------------------------------------------------------------------------
from pathlib import Path

# Application-level key under which per-model runtime stats are stored.
MODELS_KEY = 'models'
# Directory of the installed mlserve package (used to locate static assets).
PROJ_ROOT = Path(__file__).parent
5 |
--------------------------------------------------------------------------------
/mlserve/exceptions.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Optional, Dict, Any
3 |
4 | from aiohttp import web
5 |
6 |
# Server related exceptions
class RESTError(web.HTTPError):  # type: ignore
    """Base class for JSON API errors.

    Renders the error as a JSON body ``{"error": ..., "error_details": ...}``
    with ``Content-Type: application/json``. Subclasses override
    ``status_code`` and the default ``error`` message.
    """

    status_code = 500
    error = 'Unknown Error'

    def __init__(
        self: 'RESTError',
        message: Optional[str] = None,
        status_code: Optional[int] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        super().__init__(reason=message)

        # Bug fix: the parameter previously defaulted to the *typing object*
        # ``Optional[int]`` (written ``status_code=Optional[int]``), which is
        # always non-None, so every call without an explicit status silently
        # replaced the subclass status code with a non-integer. Defaulting to
        # ``None`` preserves the class-level ``status_code``.
        if status_code is not None:
            self.status_code = status_code

        if not message:
            message = self.error

        msg_dict: Dict[str, Any] = {'error': message}

        # Extra keyword arguments become structured error details.
        if kwargs:
            msg_dict['error_details'] = kwargs

        self.text = json.dumps(msg_dict)
        self.content_type = 'application/json'
34 |
35 |
class ObjectNotFound(RESTError):
    # 404: requested resource (e.g. a model name) does not exist.
    status_code = 404
    error = 'Object not found'


class UnprocessableEntity(RESTError):
    # 422: request was well-formed but semantically invalid.
    status_code = 422
    error = 'Unprocessable Entity'


# REST client related exceptions
class RestClientError(Exception):
    """Base exception class for RESTClient"""

    @property
    def status_code(self) -> int:
        # By convention the first positional argument is the HTTP status.
        r: int = self.args[0]
        return r


class PlainRestError(RestClientError):
    """Answer is not JSON, for example for 500 Internal Server Error"""

    @property
    def error_text(self) -> str:
        return str(self.args)
62 |
--------------------------------------------------------------------------------
/mlserve/handlers.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 |
4 | from concurrent.futures import ProcessPoolExecutor
5 | from functools import partial
6 | from pathlib import Path
7 | from typing import Callable, Dict, Any, Union, List
8 |
9 | from aiohttp import web
10 |
11 | from .consts import MODELS_KEY
12 | from .exceptions import ObjectNotFound
13 | from .stats import ModelStats, AggStats
14 | from .utils import ModelDescriptor
15 | from .worker import predict
16 |
17 |
def path_serializer(obj: Any) -> str:
    """``json.dumps`` default hook: serialize ``pathlib.Path`` as a string."""
    if not isinstance(obj, Path):
        raise TypeError('Type not serializable')
    return str(obj)
22 |
23 |
# JSON serializer shared by all handlers: pretty-printed, deterministic key
# order, and able to serialize pathlib.Path values via path_serializer.
jsonify = partial(
    json.dumps, indent=4, sort_keys=True, default=path_serializer
)
JsonResp = Callable[[Union[Dict[str, Any], List[Any]]], web.Response]
# aiohttp json_response pre-configured with the serializer above.
json_response: JsonResp = partial(web.json_response, dumps=jsonify)
29 |
30 |
class SiteHandler:
    """Serves the single-page UI (static files + index.html)."""

    def __init__(self, project_root: Path) -> None:
        # project_root: package directory containing the ``static`` folder.
        self._root = project_root
        self._loop = asyncio.get_event_loop()

    @property
    def project_root(self) -> Path:
        return self._root

    async def index(self, request: web.Request) -> web.FileResponse:
        # Every UI route serves the same index.html; routing is client-side.
        path = str(self._root / 'static' / 'index.html')
        return web.FileResponse(path)
44 |
45 |
def setup_app_routes(
    app: web.Application, handler: SiteHandler
) -> web.Application:
    """Register UI routes; all pages serve index.html (SPA routing)."""
    r = app.router
    h = handler
    path = str(handler.project_root / 'static')
    r.add_get('/', h.index, name='index')
    r.add_get('/models', h.index, name='index.models')
    r.add_get('/models/{model_name}', h.index, name='index.model.name')
    r.add_static('/static/', path=path, name='static')
    return app
57 |
58 |
class APIHandler:
    """JSON API: model listing, details, prediction and stats endpoints."""

    def __init__(
        self,
        app: web.Application,
        executor: ProcessPoolExecutor,
        project_root: Path,
        model_descs: List[ModelDescriptor],
    ) -> None:
        self._app = app
        # Predictions run in worker processes to keep the event loop free.
        self._executor = executor
        self._root = project_root
        self._loop = asyncio.get_event_loop()

        self._models = {m.name: m for m in model_descs}
        # One stats collector per model, stored on the app for middleware use.
        self._app[MODELS_KEY] = {m.name: ModelStats() for m in model_descs}

        # Pre-computed, name-sorted payload for the /models listing.
        result = sorted(self._models.values(), key=lambda v: v.name)
        self._models_list = [
            {'name': m.name, 'target': m.target} for m in result
        ]

    def validate_model_name(self, model_name: str) -> str:
        # Raises ObjectNotFound (HTTP 404) for unknown model names.
        if model_name not in self._models:
            msg = f'Model with name {model_name} not found.'
            raise ObjectNotFound(msg)
        return model_name

    async def model_list(self, request: web.Request) -> web.Response:
        return json_response(self._models_list)

    async def model_detail(self, request: web.Request) -> web.Response:
        model_name = request.match_info['model_name']
        self.validate_model_name(model_name)

        r = self._models[model_name].asdict()
        return json_response(r)

    async def model_predict(self, request: web.Request) -> web.Response:
        model_name = request.match_info['model_name']
        self.validate_model_name(model_name)
        target = self._models[model_name].target
        # Raw body is forwarded as-is; the worker does the JSON parsing.
        raw_data = await request.read()
        run = self._loop.run_in_executor
        # TODO: figure out if we need protect call with aiojobs
        r = await run(self._executor, predict, model_name, target, raw_data)
        # TODO: introduce exception in case of model failure to predict
        # msg = 'Model failed to predict'
        # raise UnprocessableEntity(msg, reason=str(e)) from e

        return json_response(r)

    async def model_stats(self, request: web.Request) -> web.Response:
        model_name = request.match_info['model_name']
        stats: ModelStats = request.app[MODELS_KEY][model_name]
        r = stats.formatted()
        return json_response(r)

    async def agg_stats(self, request: web.Request) -> web.Response:
        # Aggregate request stats across all registered models.
        stats_map: Dict[str, ModelStats] = request.app[MODELS_KEY]
        agg = AggStats.from_models_stats(stats_map)
        return json_response(agg.formatted())
120 |
121 |
def setup_api_routes(
    api: web.Application, handler: APIHandler
) -> web.Application:
    """Register the v1 JSON API routes on the given sub-application."""
    r = api.router
    h = handler
    r.add_get('/v1/agg_stats', h.agg_stats, name='stats.list')
    r.add_get('/v1/models', h.model_list, name='models.list')
    r.add_get('/v1/models/{model_name}', h.model_detail, name='models.detail')
    r.add_get(
        '/v1/models/{model_name}/stats', h.model_stats, name='models.stats'
    )
    # NOTE(review): /schema reuses model_detail, so it returns the full
    # model description rather than just the schema — confirm intended.
    r.add_get(
        '/v1/models/{model_name}/schema', h.model_detail, name='models.schema'
    )
    r.add_post(
        '/v1/models/{model_name}/predict',
        h.model_predict,
        name='models.predict',
    )
    return api
142 |
--------------------------------------------------------------------------------
/mlserve/loaders.py:
--------------------------------------------------------------------------------
1 | import pickle # nosec
2 | from typing import Any, Callable
3 |
4 |
def pickle_loader(model_path: str) -> Any:
    """Deserialize a pickled model/pipeline from ``model_path``.

    NOTE: unpickling executes arbitrary code; only load trusted files
    (hence the ``# nosec`` markers in this module).
    """
    with open(model_path, 'rb') as f:
        return pickle.load(f)  # nosec
9 |
10 |
def joblib_loader(model_path: str) -> Any:
    """Load a model serialized with joblib.

    NOTE(review): ``sklearn.externals.joblib`` was deprecated in
    scikit-learn 0.21 and removed in 0.23 — confirm the pinned sklearn
    version, or switch to the standalone ``joblib`` package.
    """
    from sklearn.externals import joblib
    with open(model_path, 'rb') as f:
        pipeline = joblib.load(f)  # nosec
    return pipeline
16 |
17 |
def get_loader(loader_name: str) -> Callable[[str], Any]:
    """Resolve a loader callable by name; unknown names fall back to pickle."""
    named_loaders = {'joblib': joblib_loader}
    return named_loaders.get(loader_name, pickle_loader)
22 |
--------------------------------------------------------------------------------
/mlserve/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import argparse
3 | import logging
4 | from typing import Any
5 |
6 | from aiohttp import web
7 |
8 | from .server_utils import init
9 | from .utils import load_model_config
10 |
11 |
12 | def _cli_parser() -> Any:
13 | parser = argparse.ArgumentParser()
14 | parser.add_argument('-c', '--config', help='Provide path to config file')
15 | parser.add_argument(
16 | '-H', '--host', help='Port for WEB/API', default='127.0.0.1'
17 | )
18 | parser.add_argument('-P', '--port', help='Port for WEB/API', default=9000)
19 | parser.add_argument('-w', '--workers', help='Number of workers', default=2)
20 | args = parser.parse_args()
21 | return args
22 |
23 |
def main() -> None:
    """CLI entry point: load the model config, build the app and serve it."""
    args = _cli_parser()
    logging.basicConfig(level=logging.DEBUG)
    loop = asyncio.get_event_loop()
    # Config path comes from -c/--config; parsed into model descriptors.
    model_conf = load_model_config(args.config)
    app = loop.run_until_complete(init(args.workers, model_conf))
    web.run_app(app, host=args.host, port=args.port)
31 |
--------------------------------------------------------------------------------
/mlserve/middleware.py:
--------------------------------------------------------------------------------
1 | import time
2 | from datetime import datetime
3 | from aiohttp.web import middleware
4 | from typing import Awaitable, Callable # flake8: noqa
5 | from aiohttp.web import HTTPException, Request, Application, Response
6 |
7 | from .handlers import APIHandler
8 | from .stats import ModelStats, RequestTiming
9 | from .consts import MODELS_KEY
10 |
11 |
12 | Handler = Callable[[Request], Awaitable[Response]]
13 |
14 |
def process_request(
    req: Request, resp: Response, ts: datetime, duration: float
) -> None:
    """Record one prediction request into the model's stats collector."""
    model_name = req.match_info['model_name']
    # Unknown model (e.g. a 404 response): nothing to record.
    if model_name not in req.app[MODELS_KEY]:
        return
    point = RequestTiming(resp.status, ts, duration)
    stats: ModelStats = req.app[MODELS_KEY][model_name]
    stats.log_data_point(point)
24 |
25 |
@middleware  # type: ignore
async def stats_middleware(request: Request, handler: Handler) -> Response:
    # Collect per-model timing/status statistics, but only for requests
    # routed to the 'models.predict' endpoint; anything else passes
    # straight through to the handler.
    if request.match_info.route.name == 'models.predict':
        ts = datetime.now()
        start = time.time()
        try:
            resp = await handler(request)
        except HTTPException as e:
            # aiohttp HTTP exceptions double as responses, so the error
            # status can be logged before the exception is re-raised.
            duration = time.time() - start
            process_request(request, e, ts, duration)
            raise
        else:
            duration = time.time() - start
            process_request(request, resp, ts, duration)

    else:
        resp = await handler(request)
    return resp
44 |
--------------------------------------------------------------------------------
/mlserve/schema_builder.py:
--------------------------------------------------------------------------------
1 | import json
2 | import pandas as pd
3 |
4 | from pandas.core.dtypes.common import (
5 | is_bool_dtype,
6 | is_categorical_dtype,
7 | is_datetime64_dtype,
8 | is_datetime64tz_dtype,
9 | is_integer_dtype,
10 | is_numeric_dtype,
11 | is_period_dtype,
12 | is_string_dtype,
13 | is_timedelta64_dtype,
14 | )
15 | from typing import Dict, Any, List, Optional
16 |
17 |
def as_json_table_type(x) -> str:
    """Map a pandas dtype to a JSON table schema type name.

    Datetime, timedelta and categorical dtypes are currently reported
    as 'string' (see inline TODOs); anything unrecognized is 'any'.
    """
    def _is_datetime_like(dtype) -> bool:
        return (
            is_datetime64_dtype(dtype)
            or is_datetime64tz_dtype(dtype)
            or is_period_dtype(dtype)
        )

    # Order matters: more specific predicates come first (e.g. integer
    # dtypes also satisfy is_numeric_dtype).
    checks = [
        (is_integer_dtype, 'integer'),
        (is_bool_dtype, 'boolean'),
        (is_numeric_dtype, 'number'),
        (_is_datetime_like, 'string'),     # TODO: return 'datetime'
        (is_timedelta64_dtype, 'string'),  # TODO: return 'duration'
        (is_categorical_dtype, 'string'),  # TODO: return 'any'
        (is_string_dtype, 'string'),
    ]
    for predicate, label in checks:
        if predicate(x):
            return label
    return 'any'
45 |
46 |
# react-jsonschema-form UI hint: render long free-text fields as a
# multi-line textarea instead of a single-line input.
text_area = {
    'ui:widget': 'textarea',
    'ui:options': {
        'rows': 10
    }
}


# react-jsonschema-form UI hint: render enumerations as radio buttons.
radio_button = {'ui:widget': 'radio'}
56 |
57 |
def make_field(arr: pd.Series):
    """Describe one column as a JSON schema field.

    Returns a ``(name, field, dtype, ui_schema)`` tuple where ``field``
    is the JSON schema fragment, ``dtype`` the (possibly inferred)
    dtype and ``ui_schema`` an optional widget hint for
    react-jsonschema-form.
    """
    ui_schema: Optional[Dict[str, Any]] = None
    add_types = []
    if arr.isnull().sum() > 0:
        # Infer the dtype from the non-null values and mark the field
        # as nullable in the schema.
        arr_no_na = arr.dropna()
        dtype = arr_no_na.infer_objects().dtypes
        add_types.append('null')
    else:
        dtype = arr.dtype

    if arr.name is None:
        name = 'values'
    else:
        name = arr.name
    json_type = as_json_table_type(dtype)
    field = {'type': [as_json_table_type(dtype)] + add_types}

    # Suggest a textarea widget for long free-text columns.  The extra
    # is_string_dtype(dtype) guard is required because datetime and
    # timedelta columns also map to JSON type 'string' but do not
    # support the ``.str`` accessor (it raises AttributeError).
    if (
        json_type == 'string'
        and is_string_dtype(dtype)
        and arr.str.len().mean() > 50
    ):
        ui_schema = text_area

    if is_categorical_dtype(arr):
        # Series vs. raw Categorical expose categories differently.
        if hasattr(arr, 'categories'):
            cats = arr.categories
        else:
            cats = arr.cat.categories
        field['enum'] = list(cats)

    # TODO: handle datetime properly (tz zone / period frequency
    # metadata were dropped when those types were mapped to 'string').
    return name, field, dtype, ui_schema
97 |
98 |
def build_schema(
    data: pd.DataFrame, include_example: bool=True
) -> Dict[str, Any]:
    """Build a JSON schema, UI hints and example payload from a frame.

    The first row of ``data`` (serialized through pandas) becomes the
    example payload when ``include_example`` is true.
    """
    form_data = json.loads(data.iloc[[0]].to_json(orient='records'))[0]

    fields = [make_field(column) for _, column in data.items()]

    properties = {}
    required: List[str] = []
    ui_schema: Dict[str, Any] = {}
    for field_name, spec, _dtype, ui_hint in fields:
        properties[field_name] = spec
        # Every column is currently required (null-aware filtering is
        # left commented out upstream).
        required.append(field_name)
        if ui_hint is not None:
            ui_schema[field_name] = ui_hint

    return {
        'schema': {
            'type': 'object',
            'properties': properties,
            'required': required,
        },
        'ui_schema': ui_schema,
        'example_data': form_data if include_example else {},
    }
124 |
--------------------------------------------------------------------------------
/mlserve/server_utils.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from functools import partial
3 | from concurrent.futures import ProcessPoolExecutor
4 | from typing import Any, List, Dict
5 |
6 | from aiohttp import web
7 |
8 | from .handlers import (
9 | APIHandler,
10 | SiteHandler,
11 | setup_api_routes,
12 | setup_app_routes,
13 | )
14 | from .consts import PROJ_ROOT
15 | from .middleware import stats_middleware
16 | from .utils import ModelDescriptor, load_models
17 | from .worker import warm, clean_worker
18 |
19 |
async def setup_executor(
    app: web.Application,
    max_workers: int,
    models: List[ModelDescriptor]
) -> ProcessPoolExecutor:
    """Create the process pool, warm models in the workers and register
    a cleanup hook that shuts the pool down with the application.
    """
    executor = ProcessPoolExecutor(max_workers=max_workers)
    loop = asyncio.get_event_loop()
    run = loop.run_in_executor

    # NOTE(review): run_in_executor gives no guarantee that each of the
    # max_workers tasks lands on a distinct worker process — TODO
    # confirm that every worker really gets warmed.
    warm_fn = partial(warm, cache=None, init_signals=True)
    warm_tasks = [run(executor, warm_fn, models) for _ in range(max_workers)]
    await asyncio.gather(*warm_tasks)

    async def close_executor(app: web.Application) -> None:
        # TODO: figure out timeout for shutdown
        cleanup = [run(executor, clean_worker) for _ in range(max_workers)]
        await asyncio.gather(*cleanup)
        executor.shutdown(wait=True)

    app.on_cleanup.append(close_executor)
    app['executor'] = executor
    return executor
42 |
43 |
async def init(
    max_workers: int, model_conf: Dict[str, Any]
) -> web.Application:
    """Assemble the application: site routes, executor and API sub-app."""
    # Web page related routes live on the root application.
    app = web.Application()
    setup_app_routes(app, SiteHandler(PROJ_ROOT))

    # API routes live on a sub-application with stats collection.
    api = web.Application(middlewares=[stats_middleware])
    models = load_models(model_conf['models'])
    executor = await setup_executor(app, max_workers, models)
    setup_api_routes(api, APIHandler(api, executor, PROJ_ROOT, models))

    app.add_subapp('/api', api)
    return app
61 |
--------------------------------------------------------------------------------
/mlserve/static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/mlserve/static/.gitkeep
--------------------------------------------------------------------------------
/mlserve/static/index.html:
--------------------------------------------------------------------------------
1 |
MLServe - easy model deployment
--------------------------------------------------------------------------------
/mlserve/stats.py:
--------------------------------------------------------------------------------
1 | from collections import deque
2 | from dataclasses import dataclass, field
3 | from datetime import datetime
4 | from statistics import mean
5 | from typing import Deque, Dict, Any
6 |
7 |
@dataclass
class RequestTiming:
    """Timing record for a single prediction request."""

    # HTTP status code of the response (>= 400 is counted as an error).
    status: int
    # Wall-clock time when handling started.
    start_time: datetime
    # Handling time in seconds.
    duration: float
13 |
14 |
def factory(maxlen: int=1000) -> Deque[RequestTiming]:
    """Create the bounded deque used to store recent request timings."""
    # maxlen bounds memory: oldest points are discarded automatically.
    return deque([], maxlen)
17 |
18 |
@dataclass
class _Stats:
    """Success/error counters plus a bounded window of recent timings."""

    success: int = 0
    error: int = 0
    timings: Deque[RequestTiming] = field(default_factory=factory)

    def mean_resp_time(self) -> float:
        """Average duration of successful (status < 400) requests,
        or 0 when there are none."""
        durations = [
            timing.duration
            for timing in self.timings
            if timing.status < 400
        ]
        if not durations:
            return 0
        return mean(durations)

    def formatted(self) -> Dict[str, Any]:
        """Serializable summary of the collected statistics."""
        # TODO: format time series datapoints
        return {
            'success': self.success,
            'error': self.error,
            'mean_resp_time': round(self.mean_resp_time(), 4),
        }
37 |
38 |
class ModelStats(_Stats):
    """Per-model statistics accumulator."""

    def log_data_point(self, t: RequestTiming) -> None:
        """Count the request as success or error and keep its timing."""
        if t.status >= 400:
            self.error += 1
        else:
            self.success += 1
        self.timings.append(t)
47 |
48 |
class AggStats(_Stats):
    """Statistics aggregated across every served model."""

    @classmethod
    def from_models_stats(cls, stats_map: Dict[str, ModelStats]) -> 'AggStats':
        """Merge all per-model stats into one aggregate snapshot."""
        agg = cls()
        collected = []
        for model_stat in stats_map.values():
            agg.success += model_stat.success
            agg.error += model_stat.error
            collected.extend(model_stat.timings)

        # TODO: implement merge of the already-sorted timing windows.
        # Keep only the most recent 1000 points ordered by start time.
        recent = sorted(collected, key=lambda v: v.start_time)[-1000:]
        agg.timings.extend(recent)
        return agg
64 |
--------------------------------------------------------------------------------
/mlserve/utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | import trafaret as t
5 | import yaml
6 |
7 | from dataclasses import dataclass, asdict
8 | from pathlib import Path
9 | from typing import Any, List, Dict
10 |
11 |
# Validation schema for a single model entry in the YAML config file.
ModelMeta = t.Dict(
    {
        t.Key('name'): t.String,
        t.Key('description'): t.String,
        t.Key('model_path'): t.String,
        t.Key('data_schema_path'): t.String,
        # One or more target column names; these are later removed from
        # the data schema by load_models().
        t.Key('target'): t.String | t.List(t.String),
        t.Key('loader', default='pickle'): t.Enum('pickle', 'joblib'),
    }
)
22 |
23 |
# TODO: rename to something more general
# Top-level config: server settings plus the list of models to serve.
ModelConfig = t.Dict({
    t.Key('host', default='127.0.0.1'): t.String,
    t.Key('port', default=9000): t.Int[0: 65535],
    t.Key('workers', default=2): t.Int[1:127],
    t.Key('models'): t.List(ModelMeta),
})
31 |
32 |
# Server-only view of the config; extra keys (e.g. 'models') are
# allowed and ignored.
ServerConfigTrafaret = t.Dict({
    t.Key('host', default='127.0.0.1'): t.String,
    t.Key('port', default=9000): t.Int[0: 65535],
    t.Key('workers', default=2): t.Int[1:127],
}).ignore_extra('*')
38 |
39 |
@dataclass(frozen=True)
class ServerConfig:
    """Immutable server settings (mirrors ServerConfigTrafaret)."""

    host: str
    port: int
    workers: int
45 |
46 |
@dataclass(frozen=True)
class ModelDescriptor:
    """Immutable description of a served model and its data schema."""

    name: str
    description: str
    # Target column names stripped out of the input schema.
    target: List[str]
    # Remaining schema property names the model expects as input.
    features: List[str]
    schema: Dict[Any, Any]
    model_path: Path
    # Sizes in bytes (taken from os.path.getsize in load_models).
    model_size: int
    data_schema_path: Path
    schema_size: int
    # Either 'pickle' or 'joblib' (see ModelMeta).
    loader: str

    def asdict(self) -> Dict[str, Any]:
        # Delegates to dataclasses.asdict imported at module level.
        return asdict(self)
62 |
63 |
def load_model_config(fname: Path) -> Dict[str, Any]:
    """Read a YAML config file and validate it with ModelConfig."""
    with open(fname, 'rt') as f:
        raw = yaml.safe_load(f)
    validated: Dict[str, Any] = ModelConfig(raw)
    return validated
69 |
70 |
def load_models(model_conf: List[Dict[str, str]]) -> List[ModelDescriptor]:
    """Build ModelDescriptor objects from validated config entries.

    Target columns are removed from each data schema so the generated
    form only asks for feature values.
    """
    result: List[ModelDescriptor] = []
    for m in model_conf:
        with open(m['data_schema_path'], 'rb') as f:
            schema = json.load(f)

        # 'target' may be a single name or a list of names.
        _target = m['target']
        target: List[str] = _target if isinstance(_target, list) else [_target]
        schema = drop_columns(schema, target)

        schema_size = os.path.getsize(m['data_schema_path'])
        model_size = os.path.getsize(m['model_path'])
        features = list(schema['schema']['properties'].keys())
        model_desc = ModelDescriptor(
            name=m['name'],
            description=m['description'],
            target=target,
            features=features,
            schema=schema,
            model_path=Path(m['model_path']),
            model_size=model_size,
            data_schema_path=Path(m['data_schema_path']),
            schema_size=schema_size,
            # Fall back to the ModelMeta default so hand-built dicts
            # that skipped trafaret validation do not raise KeyError.
            loader=m.get('loader', 'pickle'),
        )
        result.append(model_desc)
    return result
98 |
99 |
def drop_columns(schema: Dict[str, Any], columns: List[str]) -> Dict[str, Any]:
    """Remove the given columns from a schema document, in place.

    The 'properties', 'ui_schema', 'example_data' and 'required'
    sections are all updated; the mutated schema is returned for
    convenience.
    """
    properties = schema['schema']['properties']
    required = schema['schema']['required']
    for column in columns:
        properties.pop(column, None)
        schema['ui_schema'].pop(column, None)
        schema['example_data'].pop(column, None)
        if column in required:
            required.remove(column)
    return schema
109 |
--------------------------------------------------------------------------------
/mlserve/worker.py:
--------------------------------------------------------------------------------
1 | import json
2 | import signal
3 | from typing import Dict, Any, List, Optional
4 |
5 | import numpy as np
6 | import pandas as pd
7 |
8 | from .loaders import get_loader
9 | from .utils import ModelDescriptor
10 |
11 |
# TODO: add structural typing with predict method instead
# of Any
# Per-worker-process cache mapping model name -> loaded pipeline.
Cache = Dict[str, Any]
_models: Cache = {}
16 |
17 |
def clean_worker():
    """Restore default SIGINT handling and drop this worker's cache.

    Runs inside each pool worker just before executor shutdown.  The
    original declared ``global _model`` (typo), so the assignment below
    created a function-local name and the module-level cache was never
    actually released.
    """
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    global _models
    _models = None  # noqa
22 |
23 |
def warm(models: List[ModelDescriptor],
         cache: Optional[Cache]=None,
         init_signals: bool=False,
         ) -> bool:
    """Load every listed model into the cache (idempotent per name).

    With ``init_signals`` set, SIGINT is ignored so Ctrl-C is handled
    by the parent process only.  Falls back to the module-level
    ``_models`` cache when none is supplied.
    """
    global _models

    if init_signals:
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    target_cache = cache if cache is not None else _models

    for descriptor in models:
        load = get_loader(descriptor.loader)
        if descriptor.name in target_cache:
            continue
        target_cache[descriptor.name] = load(str(descriptor.model_path))
    return True
41 |
42 |
def format_result(
        pred: np.ndarray, target: List[str]) -> List[Dict[str, float]]:
    """Convert raw predictions into one ``{target: value}`` dict per
    sample.

    For a single target ``pred`` is a 1-D sequence of per-sample
    values; for multiple targets it is 2-D with one row per sample.
    The previous implementation wrapped the whole 1-D array in a list
    for the single-target case, which collapsed a batch of predictions
    into a single row holding only the first value.
    """
    if len(target) == 1:
        rows = [[value] for value in pred]
    else:
        rows = pred

    formated = []
    for row in rows:
        formated.append(dict(zip(target, row)))
    return formated
55 |
56 |
def predict(
    model_name: str, target: List[str], raw_data: bytes,
    cache: Optional[Cache]=None
) -> List[Dict[str, float]]:
    # Run a prediction for `model_name` on JSON-encoded row data.
    # `raw_data` must be a JSON document pandas can turn into a frame
    # (list of records or column mapping).  Falls back to the
    # module-level `_models` cache when none is supplied.
    cache = cache if cache is not None else _models
    # TODO: wrap this call into try except
    df = pd.DataFrame(json.loads(raw_data))
    model = cache[model_name]
    results: List[float]
    if hasattr(model, 'predict_proba'):
        # NOTE(review): .T[1] keeps only the probability of class
        # index 1, which looks like a binary-classifier assumption —
        # confirm behavior for multi-class models.
        results = model.predict_proba(df)
        results = np.array(results).T[1].tolist()
    else:
        results = model.predict(df)

    return format_result(results, target)
73 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mlserve-client",
3 | "version": "0.0.1",
4 | "engines": {
5 | "node": ">=8",
6 | "yarn": ">=1.9.0"
7 | },
8 | "scripts": {
9 | "build": "parcel build --out-dir mlserve/static ui/index.html --public-url /static/",
10 | "dev": "parcel ui/index.html",
11 | "prettier": "prettier --write 'ui/**/*.js' 'test/**/*.js'",
12 | "eslint": "eslint 'ui/**/*.js' 'test/**/*.js'",
13 | "lint": "eslint 'ui/**/*.js' 'test/**/*.js'",
14 | "eslint-check": "eslint --print-config .eslintrc.json | eslint-config-prettier-check",
15 | "pretest": "npm-run-all eslint-check eslint",
16 | "test": "echo \"Error: no test specified\" && exit 1"
17 | },
18 | "license": "MIT",
19 | "dependencies": {
20 | "babel-polyfill": "^6.26.0",
21 | "bootstrap": "^4.1.3",
22 | "react": "^16.0.0",
23 | "react-dom": "^16.0.0",
24 | "react-jsonschema-form": "^1.0.4",
25 | "react-router-dom": "^4.3.1",
26 | "react-vis": "^1.10.4",
27 | "reactstrap": "^6.3.1",
28 | "whatwg-fetch": "^2.0.4"
29 | },
30 | "devDependencies": {
31 | "autoprefixer": "8.5.0",
32 | "babel-eslint": "^9.0.0",
33 | "babel-plugin-transform-class-properties": "^6.24.1",
34 | "babel-preset-env": "1.7.0",
35 | "babel-preset-react": "^6.24.1",
36 | "eslint": "4.19.1",
37 | "eslint-config-airbnb-base": "12.1.0",
38 | "eslint-config-prettier": "2.9.0",
39 | "eslint-plugin-import": "2.12.0",
40 | "eslint-plugin-json": "1.2.0",
41 | "eslint-plugin-prettier": "2.6.0",
42 | "eslint-plugin-react": "7.8.2",
43 | "jest-cli": "22.4.4",
44 | "node-sass": "4.9.0",
45 | "npm-run-all": "4.1.3",
46 | "parcel": "^1.9.7",
47 | "parcel-bundler": "1.8.1",
48 | "postcss-modules": "1.1.0",
49 | "prettier": "1.12.1",
50 | "prettier-eslint": "8.8.1"
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
// PostCSS configuration: run autoprefixer over the bundled stylesheets
// (browser targets come from the usual browserslist resolution).
const autoPrefixer = require("autoprefixer");

module.exports = {
  plugins: [autoPrefixer]
};
6 |
--------------------------------------------------------------------------------
/pyup.yml:
--------------------------------------------------------------------------------
1 | label_prs: deps-update
2 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.4.4
2 | attrs==18.2.0
3 | bandit==1.5.1
4 | flake8==3.5.0
5 | flake8-bugbear==18.8.0
6 | flake8-builtins-unleashed==1.3.1
7 | flake8-class-newline==1.6.0
8 | flake8-comprehensions==1.4.1
9 | flake8-debugger==3.1.0
10 | flake8-mutable==1.2.0
11 | flake8-pyi==18.3.1
12 | flake8-quotes==1.0.0
13 | flake8-tuple==0.2.13
14 | ipdb==0.11
15 | jsonschema==2.6.0
16 | mypy==0.641
17 | pandas==0.23.4
18 | pyroma==2.4
19 | pytest-aiohttp==0.3.0
20 | pytest-asyncio==0.9.0
21 | pytest-cov==2.6.0
22 | pytest-sugar==0.9.1
23 | pytest==3.8.2
24 | trafaret==1.2.0
25 | scipy==1.1.0
26 | scikit-learn==0.20.0
27 | -e .
28 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import sys
4 | from setuptools import setup, find_packages
5 |
6 |
PY_VER = sys.version_info

# mlserve uses 3.6+ syntax (f-strings, dataclasses backport); refuse to
# install on older interpreters.
if not PY_VER >= (3, 6):
    raise RuntimeError('mlserve does not support Python earlier than 3.6')
11 |
12 |
def read(f):
    """Return the stripped contents of file ``f`` next to setup.py.

    Uses a context manager so the file handle is closed promptly
    (the original left closing to the garbage collector).
    """
    with open(os.path.join(os.path.dirname(__file__), f)) as fp:
        return fp.read().strip()
15 |
16 |
# Runtime dependencies; the dataclasses backport is only needed on
# Python 3.6.
install_requires = [
    'aiohttp>=3.0.0',
    'pandas',
    'jsonschema',
    'trafaret',
    'pyyaml',
    'dataclasses; python_version<"3.7"'
]
# Optional extras (none defined at the moment).
extras_require = {}
26 |
27 |
def read_version():
    """Extract ``__version__`` from mlserve/__init__.py without
    importing the package (imports could pull in dependencies)."""
    pattern = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
    init_py = os.path.join(os.path.dirname(__file__),
                           'mlserve', '__init__.py')
    with open(init_py) as f:
        for line in f:
            found = pattern.match(line)
            if found is not None:
                return found.group(1)
    raise RuntimeError('Cannot find version in mlserve/__init__.py')
40 |
41 |
# PyPI trove classifiers describing supported platforms and maturity.
classifiers = [
    'License :: OSI Approved :: Apache Software License',
    'Intended Audience :: Developers',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    'Operating System :: POSIX',
    'Development Status :: 2 - Pre-Alpha',
    'Framework :: AsyncIO',
]
52 |
53 |
# Package metadata; the version is parsed out of mlserve/__init__.py
# and the long description is assembled from README.rst + CHANGES.txt.
setup(name='mlserve',
      version=read_version(),
      description=('mlserve -- turns python model into RESTful API'
                   ' with automatically generated UI.'),
      long_description='\n\n'.join((read('README.rst'), read('CHANGES.txt'))),
      install_requires=install_requires,
      classifiers=classifiers,
      platforms=['POSIX'],
      author='Nikolay Novik',
      author_email='nickolainovik@gmail.com',
      url='https://github.com/ml-libs/mlserve',
      download_url='https://pypi.python.org/pypi/mlserve',
      license='Apache 2',
      packages=find_packages(),
      entry_points={'console_scripts': ['mlserve = mlserve.main:main']},
      extras_require=extras_require,
      keywords=['mlserve', 'serving', 'aiohttp'],
      zip_safe=True,
      include_package_data=True)
73 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import gc
3 |
4 | import pytest
5 |
6 | from mlserve.server_utils import init
7 | from mlserve.utils import ModelConfig
8 |
9 |
@pytest.fixture(scope='session')
def event_loop():
    """Session-wide event loop shared by all async fixtures/tests."""
    asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    # Collect lingering references before closing the loop.
    gc.collect()
    loop.close()
17 |
18 |
@pytest.fixture(scope='session')
def loop(event_loop):
    # Alias kept for fixtures/tests that expect the name ``loop``.
    return event_loop
22 |
23 |
@pytest.fixture(scope='session')
def model_conf():
    """Validated config with a single boston_gbr test model."""
    m = ModelConfig({
        'models': [
            {
                'name': 'boston_gbr_1',
                'description': 'model',
                'model_path': 'tests/data/boston_gbr.pkl',
                'data_schema_path': 'tests/data/boston.json',
                'target': 'target',
                'loader': 'pickle',
            }
        ]
    })
    return m
39 |
40 |
@pytest.fixture(scope='session')
def max_workers():
    # A single worker keeps the process pool cheap during tests.
    return 1
44 |
45 |
@pytest.fixture
def api(loop, aiohttp_client, max_workers, model_conf):
    """aiohttp test client wrapping a fully initialized application."""
    app = loop.run_until_complete(init(max_workers, model_conf))
    yield loop.run_until_complete(aiohttp_client(app))
    # Tear the application down after the test finishes.
    loop.run_until_complete(app.shutdown())
51 |
52 |
53 | pytest_plugins = []
54 |
--------------------------------------------------------------------------------
/tests/data/Auto.csv:
--------------------------------------------------------------------------------
1 | mpg,cylinders,displacement,horsepower,weight,acceleration,year,origin,name
2 | 18,8,307,130,3504,12,70,1,chevrolet chevelle malibu
3 | 15,8,350,165,3693,11.5,70,1,buick skylark 320
4 | 18,8,318,150,3436,11,70,1,plymouth satellite
5 | 16,8,304,150,3433,12,70,1,amc rebel sst
6 | 17,8,302,140,3449,10.5,70,1,ford torino
7 | 15,8,429,198,4341,10,70,1,ford galaxie 500
8 | 14,8,454,220,4354,9,70,1,chevrolet impala
9 | 14,8,440,215,4312,8.5,70,1,plymouth fury iii
10 | 14,8,455,225,4425,10,70,1,pontiac catalina
11 | 15,8,390,190,3850,8.5,70,1,amc ambassador dpl
12 | 15,8,383,170,3563,10,70,1,dodge challenger se
13 | 14,8,340,160,3609,8,70,1,plymouth 'cuda 340
14 | 15,8,400,150,3761,9.5,70,1,chevrolet monte carlo
15 | 14,8,455,225,3086,10,70,1,buick estate wagon (sw)
16 | 24,4,113,95,2372,15,70,3,toyota corona mark ii
17 | 22,6,198,95,2833,15.5,70,1,plymouth duster
18 | 18,6,199,97,2774,15.5,70,1,amc hornet
19 | 21,6,200,85,2587,16,70,1,ford maverick
20 | 27,4,97,88,2130,14.5,70,3,datsun pl510
21 | 26,4,97,46,1835,20.5,70,2,volkswagen 1131 deluxe sedan
22 | 25,4,110,87,2672,17.5,70,2,peugeot 504
23 | 24,4,107,90,2430,14.5,70,2,audi 100 ls
24 | 25,4,104,95,2375,17.5,70,2,saab 99e
25 | 26,4,121,113,2234,12.5,70,2,bmw 2002
26 | 21,6,199,90,2648,15,70,1,amc gremlin
27 | 10,8,360,215,4615,14,70,1,ford f250
28 | 10,8,307,200,4376,15,70,1,chevy c20
29 | 11,8,318,210,4382,13.5,70,1,dodge d200
30 | 9,8,304,193,4732,18.5,70,1,hi 1200d
31 | 27,4,97,88,2130,14.5,71,3,datsun pl510
32 | 28,4,140,90,2264,15.5,71,1,chevrolet vega 2300
33 | 25,4,113,95,2228,14,71,3,toyota corona
34 | 25,4,98,?,2046,19,71,1,ford pinto
35 | 19,6,232,100,2634,13,71,1,amc gremlin
36 | 16,6,225,105,3439,15.5,71,1,plymouth satellite custom
37 | 17,6,250,100,3329,15.5,71,1,chevrolet chevelle malibu
38 | 19,6,250,88,3302,15.5,71,1,ford torino 500
39 | 18,6,232,100,3288,15.5,71,1,amc matador
40 | 14,8,350,165,4209,12,71,1,chevrolet impala
41 | 14,8,400,175,4464,11.5,71,1,pontiac catalina brougham
42 | 14,8,351,153,4154,13.5,71,1,ford galaxie 500
43 | 14,8,318,150,4096,13,71,1,plymouth fury iii
44 | 12,8,383,180,4955,11.5,71,1,dodge monaco (sw)
45 | 13,8,400,170,4746,12,71,1,ford country squire (sw)
46 | 13,8,400,175,5140,12,71,1,pontiac safari (sw)
47 | 18,6,258,110,2962,13.5,71,1,amc hornet sportabout (sw)
48 | 22,4,140,72,2408,19,71,1,chevrolet vega (sw)
49 | 19,6,250,100,3282,15,71,1,pontiac firebird
50 | 18,6,250,88,3139,14.5,71,1,ford mustang
51 | 23,4,122,86,2220,14,71,1,mercury capri 2000
52 | 28,4,116,90,2123,14,71,2,opel 1900
53 | 30,4,79,70,2074,19.5,71,2,peugeot 304
54 | 30,4,88,76,2065,14.5,71,2,fiat 124b
55 | 31,4,71,65,1773,19,71,3,toyota corolla 1200
56 | 35,4,72,69,1613,18,71,3,datsun 1200
57 | 27,4,97,60,1834,19,71,2,volkswagen model 111
58 | 26,4,91,70,1955,20.5,71,1,plymouth cricket
59 | 24,4,113,95,2278,15.5,72,3,toyota corona hardtop
60 | 25,4,97.5,80,2126,17,72,1,dodge colt hardtop
61 | 23,4,97,54,2254,23.5,72,2,volkswagen type 3
62 | 20,4,140,90,2408,19.5,72,1,chevrolet vega
63 | 21,4,122,86,2226,16.5,72,1,ford pinto runabout
64 | 13,8,350,165,4274,12,72,1,chevrolet impala
65 | 14,8,400,175,4385,12,72,1,pontiac catalina
66 | 15,8,318,150,4135,13.5,72,1,plymouth fury iii
67 | 14,8,351,153,4129,13,72,1,ford galaxie 500
68 | 17,8,304,150,3672,11.5,72,1,amc ambassador sst
69 | 11,8,429,208,4633,11,72,1,mercury marquis
70 | 13,8,350,155,4502,13.5,72,1,buick lesabre custom
71 | 12,8,350,160,4456,13.5,72,1,oldsmobile delta 88 royale
72 | 13,8,400,190,4422,12.5,72,1,chrysler newport royal
73 | 19,3,70,97,2330,13.5,72,3,mazda rx2 coupe
74 | 15,8,304,150,3892,12.5,72,1,amc matador (sw)
75 | 13,8,307,130,4098,14,72,1,chevrolet chevelle concours (sw)
76 | 13,8,302,140,4294,16,72,1,ford gran torino (sw)
77 | 14,8,318,150,4077,14,72,1,plymouth satellite custom (sw)
78 | 18,4,121,112,2933,14.5,72,2,volvo 145e (sw)
79 | 22,4,121,76,2511,18,72,2,volkswagen 411 (sw)
80 | 21,4,120,87,2979,19.5,72,2,peugeot 504 (sw)
81 | 26,4,96,69,2189,18,72,2,renault 12 (sw)
82 | 22,4,122,86,2395,16,72,1,ford pinto (sw)
83 | 28,4,97,92,2288,17,72,3,datsun 510 (sw)
84 | 23,4,120,97,2506,14.5,72,3,toyouta corona mark ii (sw)
85 | 28,4,98,80,2164,15,72,1,dodge colt (sw)
86 | 27,4,97,88,2100,16.5,72,3,toyota corolla 1600 (sw)
87 | 13,8,350,175,4100,13,73,1,buick century 350
88 | 14,8,304,150,3672,11.5,73,1,amc matador
89 | 13,8,350,145,3988,13,73,1,chevrolet malibu
90 | 14,8,302,137,4042,14.5,73,1,ford gran torino
91 | 15,8,318,150,3777,12.5,73,1,dodge coronet custom
92 | 12,8,429,198,4952,11.5,73,1,mercury marquis brougham
93 | 13,8,400,150,4464,12,73,1,chevrolet caprice classic
94 | 13,8,351,158,4363,13,73,1,ford ltd
95 | 14,8,318,150,4237,14.5,73,1,plymouth fury gran sedan
96 | 13,8,440,215,4735,11,73,1,chrysler new yorker brougham
97 | 12,8,455,225,4951,11,73,1,buick electra 225 custom
98 | 13,8,360,175,3821,11,73,1,amc ambassador brougham
99 | 18,6,225,105,3121,16.5,73,1,plymouth valiant
100 | 16,6,250,100,3278,18,73,1,chevrolet nova custom
101 | 18,6,232,100,2945,16,73,1,amc hornet
102 | 18,6,250,88,3021,16.5,73,1,ford maverick
103 | 23,6,198,95,2904,16,73,1,plymouth duster
104 | 26,4,97,46,1950,21,73,2,volkswagen super beetle
105 | 11,8,400,150,4997,14,73,1,chevrolet impala
106 | 12,8,400,167,4906,12.5,73,1,ford country
107 | 13,8,360,170,4654,13,73,1,plymouth custom suburb
108 | 12,8,350,180,4499,12.5,73,1,oldsmobile vista cruiser
109 | 18,6,232,100,2789,15,73,1,amc gremlin
110 | 20,4,97,88,2279,19,73,3,toyota carina
111 | 21,4,140,72,2401,19.5,73,1,chevrolet vega
112 | 22,4,108,94,2379,16.5,73,3,datsun 610
113 | 18,3,70,90,2124,13.5,73,3,maxda rx3
114 | 19,4,122,85,2310,18.5,73,1,ford pinto
115 | 21,6,155,107,2472,14,73,1,mercury capri v6
116 | 26,4,98,90,2265,15.5,73,2,fiat 124 sport coupe
117 | 15,8,350,145,4082,13,73,1,chevrolet monte carlo s
118 | 16,8,400,230,4278,9.5,73,1,pontiac grand prix
119 | 29,4,68,49,1867,19.5,73,2,fiat 128
120 | 24,4,116,75,2158,15.5,73,2,opel manta
121 | 20,4,114,91,2582,14,73,2,audi 100ls
122 | 19,4,121,112,2868,15.5,73,2,volvo 144ea
123 | 15,8,318,150,3399,11,73,1,dodge dart custom
124 | 24,4,121,110,2660,14,73,2,saab 99le
125 | 20,6,156,122,2807,13.5,73,3,toyota mark ii
126 | 11,8,350,180,3664,11,73,1,oldsmobile omega
127 | 20,6,198,95,3102,16.5,74,1,plymouth duster
128 | 21,6,200,?,2875,17,74,1,ford maverick
129 | 19,6,232,100,2901,16,74,1,amc hornet
130 | 15,6,250,100,3336,17,74,1,chevrolet nova
131 | 31,4,79,67,1950,19,74,3,datsun b210
132 | 26,4,122,80,2451,16.5,74,1,ford pinto
133 | 32,4,71,65,1836,21,74,3,toyota corolla 1200
134 | 25,4,140,75,2542,17,74,1,chevrolet vega
135 | 16,6,250,100,3781,17,74,1,chevrolet chevelle malibu classic
136 | 16,6,258,110,3632,18,74,1,amc matador
137 | 18,6,225,105,3613,16.5,74,1,plymouth satellite sebring
138 | 16,8,302,140,4141,14,74,1,ford gran torino
139 | 13,8,350,150,4699,14.5,74,1,buick century luxus (sw)
140 | 14,8,318,150,4457,13.5,74,1,dodge coronet custom (sw)
141 | 14,8,302,140,4638,16,74,1,ford gran torino (sw)
142 | 14,8,304,150,4257,15.5,74,1,amc matador (sw)
143 | 29,4,98,83,2219,16.5,74,2,audi fox
144 | 26,4,79,67,1963,15.5,74,2,volkswagen dasher
145 | 26,4,97,78,2300,14.5,74,2,opel manta
146 | 31,4,76,52,1649,16.5,74,3,toyota corona
147 | 32,4,83,61,2003,19,74,3,datsun 710
148 | 28,4,90,75,2125,14.5,74,1,dodge colt
149 | 24,4,90,75,2108,15.5,74,2,fiat 128
150 | 26,4,116,75,2246,14,74,2,fiat 124 tc
151 | 24,4,120,97,2489,15,74,3,honda civic
152 | 26,4,108,93,2391,15.5,74,3,subaru
153 | 31,4,79,67,2000,16,74,2,fiat x1.9
154 | 19,6,225,95,3264,16,75,1,plymouth valiant custom
155 | 18,6,250,105,3459,16,75,1,chevrolet nova
156 | 15,6,250,72,3432,21,75,1,mercury monarch
157 | 15,6,250,72,3158,19.5,75,1,ford maverick
158 | 16,8,400,170,4668,11.5,75,1,pontiac catalina
159 | 15,8,350,145,4440,14,75,1,chevrolet bel air
160 | 16,8,318,150,4498,14.5,75,1,plymouth grand fury
161 | 14,8,351,148,4657,13.5,75,1,ford ltd
162 | 17,6,231,110,3907,21,75,1,buick century
163 | 16,6,250,105,3897,18.5,75,1,chevroelt chevelle malibu
164 | 15,6,258,110,3730,19,75,1,amc matador
165 | 18,6,225,95,3785,19,75,1,plymouth fury
166 | 21,6,231,110,3039,15,75,1,buick skyhawk
167 | 20,8,262,110,3221,13.5,75,1,chevrolet monza 2+2
168 | 13,8,302,129,3169,12,75,1,ford mustang ii
169 | 29,4,97,75,2171,16,75,3,toyota corolla
170 | 23,4,140,83,2639,17,75,1,ford pinto
171 | 20,6,232,100,2914,16,75,1,amc gremlin
172 | 23,4,140,78,2592,18.5,75,1,pontiac astro
173 | 24,4,134,96,2702,13.5,75,3,toyota corona
174 | 25,4,90,71,2223,16.5,75,2,volkswagen dasher
175 | 24,4,119,97,2545,17,75,3,datsun 710
176 | 18,6,171,97,2984,14.5,75,1,ford pinto
177 | 29,4,90,70,1937,14,75,2,volkswagen rabbit
178 | 19,6,232,90,3211,17,75,1,amc pacer
179 | 23,4,115,95,2694,15,75,2,audi 100ls
180 | 23,4,120,88,2957,17,75,2,peugeot 504
181 | 22,4,121,98,2945,14.5,75,2,volvo 244dl
182 | 25,4,121,115,2671,13.5,75,2,saab 99le
183 | 33,4,91,53,1795,17.5,75,3,honda civic cvcc
184 | 28,4,107,86,2464,15.5,76,2,fiat 131
185 | 25,4,116,81,2220,16.9,76,2,opel 1900
186 | 25,4,140,92,2572,14.9,76,1,capri ii
187 | 26,4,98,79,2255,17.7,76,1,dodge colt
188 | 27,4,101,83,2202,15.3,76,2,renault 12tl
189 | 17.5,8,305,140,4215,13,76,1,chevrolet chevelle malibu classic
190 | 16,8,318,150,4190,13,76,1,dodge coronet brougham
191 | 15.5,8,304,120,3962,13.9,76,1,amc matador
192 | 14.5,8,351,152,4215,12.8,76,1,ford gran torino
193 | 22,6,225,100,3233,15.4,76,1,plymouth valiant
194 | 22,6,250,105,3353,14.5,76,1,chevrolet nova
195 | 24,6,200,81,3012,17.6,76,1,ford maverick
196 | 22.5,6,232,90,3085,17.6,76,1,amc hornet
197 | 29,4,85,52,2035,22.2,76,1,chevrolet chevette
198 | 24.5,4,98,60,2164,22.1,76,1,chevrolet woody
199 | 29,4,90,70,1937,14.2,76,2,vw rabbit
200 | 33,4,91,53,1795,17.4,76,3,honda civic
201 | 20,6,225,100,3651,17.7,76,1,dodge aspen se
202 | 18,6,250,78,3574,21,76,1,ford granada ghia
203 | 18.5,6,250,110,3645,16.2,76,1,pontiac ventura sj
204 | 17.5,6,258,95,3193,17.8,76,1,amc pacer d/l
205 | 29.5,4,97,71,1825,12.2,76,2,volkswagen rabbit
206 | 32,4,85,70,1990,17,76,3,datsun b-210
207 | 28,4,97,75,2155,16.4,76,3,toyota corolla
208 | 26.5,4,140,72,2565,13.6,76,1,ford pinto
209 | 20,4,130,102,3150,15.7,76,2,volvo 245
210 | 13,8,318,150,3940,13.2,76,1,plymouth volare premier v8
211 | 19,4,120,88,3270,21.9,76,2,peugeot 504
212 | 19,6,156,108,2930,15.5,76,3,toyota mark ii
213 | 16.5,6,168,120,3820,16.7,76,2,mercedes-benz 280s
214 | 16.5,8,350,180,4380,12.1,76,1,cadillac seville
215 | 13,8,350,145,4055,12,76,1,chevy c10
216 | 13,8,302,130,3870,15,76,1,ford f108
217 | 13,8,318,150,3755,14,76,1,dodge d100
218 | 31.5,4,98,68,2045,18.5,77,3,honda accord cvcc
219 | 30,4,111,80,2155,14.8,77,1,buick opel isuzu deluxe
220 | 36,4,79,58,1825,18.6,77,2,renault 5 gtl
221 | 25.5,4,122,96,2300,15.5,77,1,plymouth arrow gs
222 | 33.5,4,85,70,1945,16.8,77,3,datsun f-10 hatchback
223 | 17.5,8,305,145,3880,12.5,77,1,chevrolet caprice classic
224 | 17,8,260,110,4060,19,77,1,oldsmobile cutlass supreme
225 | 15.5,8,318,145,4140,13.7,77,1,dodge monaco brougham
226 | 15,8,302,130,4295,14.9,77,1,mercury cougar brougham
227 | 17.5,6,250,110,3520,16.4,77,1,chevrolet concours
228 | 20.5,6,231,105,3425,16.9,77,1,buick skylark
229 | 19,6,225,100,3630,17.7,77,1,plymouth volare custom
230 | 18.5,6,250,98,3525,19,77,1,ford granada
231 | 16,8,400,180,4220,11.1,77,1,pontiac grand prix lj
232 | 15.5,8,350,170,4165,11.4,77,1,chevrolet monte carlo landau
233 | 15.5,8,400,190,4325,12.2,77,1,chrysler cordoba
234 | 16,8,351,149,4335,14.5,77,1,ford thunderbird
235 | 29,4,97,78,1940,14.5,77,2,volkswagen rabbit custom
236 | 24.5,4,151,88,2740,16,77,1,pontiac sunbird coupe
237 | 26,4,97,75,2265,18.2,77,3,toyota corolla liftback
238 | 25.5,4,140,89,2755,15.8,77,1,ford mustang ii 2+2
239 | 30.5,4,98,63,2051,17,77,1,chevrolet chevette
240 | 33.5,4,98,83,2075,15.9,77,1,dodge colt m/m
241 | 30,4,97,67,1985,16.4,77,3,subaru dl
242 | 30.5,4,97,78,2190,14.1,77,2,volkswagen dasher
243 | 22,6,146,97,2815,14.5,77,3,datsun 810
244 | 21.5,4,121,110,2600,12.8,77,2,bmw 320i
245 | 21.5,3,80,110,2720,13.5,77,3,mazda rx-4
246 | 43.1,4,90,48,1985,21.5,78,2,volkswagen rabbit custom diesel
247 | 36.1,4,98,66,1800,14.4,78,1,ford fiesta
248 | 32.8,4,78,52,1985,19.4,78,3,mazda glc deluxe
249 | 39.4,4,85,70,2070,18.6,78,3,datsun b210 gx
250 | 36.1,4,91,60,1800,16.4,78,3,honda civic cvcc
251 | 19.9,8,260,110,3365,15.5,78,1,oldsmobile cutlass salon brougham
252 | 19.4,8,318,140,3735,13.2,78,1,dodge diplomat
253 | 20.2,8,302,139,3570,12.8,78,1,mercury monarch ghia
254 | 19.2,6,231,105,3535,19.2,78,1,pontiac phoenix lj
255 | 20.5,6,200,95,3155,18.2,78,1,chevrolet malibu
256 | 20.2,6,200,85,2965,15.8,78,1,ford fairmont (auto)
257 | 25.1,4,140,88,2720,15.4,78,1,ford fairmont (man)
258 | 20.5,6,225,100,3430,17.2,78,1,plymouth volare
259 | 19.4,6,232,90,3210,17.2,78,1,amc concord
260 | 20.6,6,231,105,3380,15.8,78,1,buick century special
261 | 20.8,6,200,85,3070,16.7,78,1,mercury zephyr
262 | 18.6,6,225,110,3620,18.7,78,1,dodge aspen
263 | 18.1,6,258,120,3410,15.1,78,1,amc concord d/l
264 | 19.2,8,305,145,3425,13.2,78,1,chevrolet monte carlo landau
265 | 17.7,6,231,165,3445,13.4,78,1,buick regal sport coupe (turbo)
266 | 18.1,8,302,139,3205,11.2,78,1,ford futura
267 | 17.5,8,318,140,4080,13.7,78,1,dodge magnum xe
268 | 30,4,98,68,2155,16.5,78,1,chevrolet chevette
269 | 27.5,4,134,95,2560,14.2,78,3,toyota corona
270 | 27.2,4,119,97,2300,14.7,78,3,datsun 510
271 | 30.9,4,105,75,2230,14.5,78,1,dodge omni
272 | 21.1,4,134,95,2515,14.8,78,3,toyota celica gt liftback
273 | 23.2,4,156,105,2745,16.7,78,1,plymouth sapporo
274 | 23.8,4,151,85,2855,17.6,78,1,oldsmobile starfire sx
275 | 23.9,4,119,97,2405,14.9,78,3,datsun 200-sx
276 | 20.3,5,131,103,2830,15.9,78,2,audi 5000
277 | 17,6,163,125,3140,13.6,78,2,volvo 264gl
278 | 21.6,4,121,115,2795,15.7,78,2,saab 99gle
279 | 16.2,6,163,133,3410,15.8,78,2,peugeot 604sl
280 | 31.5,4,89,71,1990,14.9,78,2,volkswagen scirocco
281 | 29.5,4,98,68,2135,16.6,78,3,honda accord lx
282 | 21.5,6,231,115,3245,15.4,79,1,pontiac lemans v6
283 | 19.8,6,200,85,2990,18.2,79,1,mercury zephyr 6
284 | 22.3,4,140,88,2890,17.3,79,1,ford fairmont 4
285 | 20.2,6,232,90,3265,18.2,79,1,amc concord dl 6
286 | 20.6,6,225,110,3360,16.6,79,1,dodge aspen 6
287 | 17,8,305,130,3840,15.4,79,1,chevrolet caprice classic
288 | 17.6,8,302,129,3725,13.4,79,1,ford ltd landau
289 | 16.5,8,351,138,3955,13.2,79,1,mercury grand marquis
290 | 18.2,8,318,135,3830,15.2,79,1,dodge st. regis
291 | 16.9,8,350,155,4360,14.9,79,1,buick estate wagon (sw)
292 | 15.5,8,351,142,4054,14.3,79,1,ford country squire (sw)
293 | 19.2,8,267,125,3605,15,79,1,chevrolet malibu classic (sw)
294 | 18.5,8,360,150,3940,13,79,1,chrysler lebaron town @ country (sw)
295 | 31.9,4,89,71,1925,14,79,2,vw rabbit custom
296 | 34.1,4,86,65,1975,15.2,79,3,maxda glc deluxe
297 | 35.7,4,98,80,1915,14.4,79,1,dodge colt hatchback custom
298 | 27.4,4,121,80,2670,15,79,1,amc spirit dl
299 | 25.4,5,183,77,3530,20.1,79,2,mercedes benz 300d
300 | 23,8,350,125,3900,17.4,79,1,cadillac eldorado
301 | 27.2,4,141,71,3190,24.8,79,2,peugeot 504
302 | 23.9,8,260,90,3420,22.2,79,1,oldsmobile cutlass salon brougham
303 | 34.2,4,105,70,2200,13.2,79,1,plymouth horizon
304 | 34.5,4,105,70,2150,14.9,79,1,plymouth horizon tc3
305 | 31.8,4,85,65,2020,19.2,79,3,datsun 210
306 | 37.3,4,91,69,2130,14.7,79,2,fiat strada custom
307 | 28.4,4,151,90,2670,16,79,1,buick skylark limited
308 | 28.8,6,173,115,2595,11.3,79,1,chevrolet citation
309 | 26.8,6,173,115,2700,12.9,79,1,oldsmobile omega brougham
310 | 33.5,4,151,90,2556,13.2,79,1,pontiac phoenix
311 | 41.5,4,98,76,2144,14.7,80,2,vw rabbit
312 | 38.1,4,89,60,1968,18.8,80,3,toyota corolla tercel
313 | 32.1,4,98,70,2120,15.5,80,1,chevrolet chevette
314 | 37.2,4,86,65,2019,16.4,80,3,datsun 310
315 | 28,4,151,90,2678,16.5,80,1,chevrolet citation
316 | 26.4,4,140,88,2870,18.1,80,1,ford fairmont
317 | 24.3,4,151,90,3003,20.1,80,1,amc concord
318 | 19.1,6,225,90,3381,18.7,80,1,dodge aspen
319 | 34.3,4,97,78,2188,15.8,80,2,audi 4000
320 | 29.8,4,134,90,2711,15.5,80,3,toyota corona liftback
321 | 31.3,4,120,75,2542,17.5,80,3,mazda 626
322 | 37,4,119,92,2434,15,80,3,datsun 510 hatchback
323 | 32.2,4,108,75,2265,15.2,80,3,toyota corolla
324 | 46.6,4,86,65,2110,17.9,80,3,mazda glc
325 | 27.9,4,156,105,2800,14.4,80,1,dodge colt
326 | 40.8,4,85,65,2110,19.2,80,3,datsun 210
327 | 44.3,4,90,48,2085,21.7,80,2,vw rabbit c (diesel)
328 | 43.4,4,90,48,2335,23.7,80,2,vw dasher (diesel)
329 | 36.4,5,121,67,2950,19.9,80,2,audi 5000s (diesel)
330 | 30,4,146,67,3250,21.8,80,2,mercedes-benz 240d
331 | 44.6,4,91,67,1850,13.8,80,3,honda civic 1500 gl
332 | 40.9,4,85,?,1835,17.3,80,2,renault lecar deluxe
333 | 33.8,4,97,67,2145,18,80,3,subaru dl
334 | 29.8,4,89,62,1845,15.3,80,2,vokswagen rabbit
335 | 32.7,6,168,132,2910,11.4,80,3,datsun 280-zx
336 | 23.7,3,70,100,2420,12.5,80,3,mazda rx-7 gs
337 | 35,4,122,88,2500,15.1,80,2,triumph tr7 coupe
338 | 23.6,4,140,?,2905,14.3,80,1,ford mustang cobra
339 | 32.4,4,107,72,2290,17,80,3,honda accord
340 | 27.2,4,135,84,2490,15.7,81,1,plymouth reliant
341 | 26.6,4,151,84,2635,16.4,81,1,buick skylark
342 | 25.8,4,156,92,2620,14.4,81,1,dodge aries wagon (sw)
343 | 23.5,6,173,110,2725,12.6,81,1,chevrolet citation
344 | 30,4,135,84,2385,12.9,81,1,plymouth reliant
345 | 39.1,4,79,58,1755,16.9,81,3,toyota starlet
346 | 39,4,86,64,1875,16.4,81,1,plymouth champ
347 | 35.1,4,81,60,1760,16.1,81,3,honda civic 1300
348 | 32.3,4,97,67,2065,17.8,81,3,subaru
349 | 37,4,85,65,1975,19.4,81,3,datsun 210 mpg
350 | 37.7,4,89,62,2050,17.3,81,3,toyota tercel
351 | 34.1,4,91,68,1985,16,81,3,mazda glc 4
352 | 34.7,4,105,63,2215,14.9,81,1,plymouth horizon 4
353 | 34.4,4,98,65,2045,16.2,81,1,ford escort 4w
354 | 29.9,4,98,65,2380,20.7,81,1,ford escort 2h
355 | 33,4,105,74,2190,14.2,81,2,volkswagen jetta
356 | 34.5,4,100,?,2320,15.8,81,2,renault 18i
357 | 33.7,4,107,75,2210,14.4,81,3,honda prelude
358 | 32.4,4,108,75,2350,16.8,81,3,toyota corolla
359 | 32.9,4,119,100,2615,14.8,81,3,datsun 200sx
360 | 31.6,4,120,74,2635,18.3,81,3,mazda 626
361 | 28.1,4,141,80,3230,20.4,81,2,peugeot 505s turbo diesel
362 | 30.7,6,145,76,3160,19.6,81,2,volvo diesel
363 | 25.4,6,168,116,2900,12.6,81,3,toyota cressida
364 | 24.2,6,146,120,2930,13.8,81,3,datsun 810 maxima
365 | 22.4,6,231,110,3415,15.8,81,1,buick century
366 | 26.6,8,350,105,3725,19,81,1,oldsmobile cutlass ls
367 | 20.2,6,200,88,3060,17.1,81,1,ford granada gl
368 | 17.6,6,225,85,3465,16.6,81,1,chrysler lebaron salon
369 | 28,4,112,88,2605,19.6,82,1,chevrolet cavalier
370 | 27,4,112,88,2640,18.6,82,1,chevrolet cavalier wagon
371 | 34,4,112,88,2395,18,82,1,chevrolet cavalier 2-door
372 | 31,4,112,85,2575,16.2,82,1,pontiac j2000 se hatchback
373 | 29,4,135,84,2525,16,82,1,dodge aries se
374 | 27,4,151,90,2735,18,82,1,pontiac phoenix
375 | 24,4,140,92,2865,16.4,82,1,ford fairmont futura
376 | 36,4,105,74,1980,15.3,82,2,volkswagen rabbit l
377 | 37,4,91,68,2025,18.2,82,3,mazda glc custom l
378 | 31,4,91,68,1970,17.6,82,3,mazda glc custom
379 | 38,4,105,63,2125,14.7,82,1,plymouth horizon miser
380 | 36,4,98,70,2125,17.3,82,1,mercury lynx l
381 | 36,4,120,88,2160,14.5,82,3,nissan stanza xe
382 | 36,4,107,75,2205,14.5,82,3,honda accord
383 | 34,4,108,70,2245,16.9,82,3,toyota corolla
384 | 38,4,91,67,1965,15,82,3,honda civic
385 | 32,4,91,67,1965,15.7,82,3,honda civic (auto)
386 | 38,4,91,67,1995,16.2,82,3,datsun 310 gx
387 | 25,6,181,110,2945,16.4,82,1,buick century limited
388 | 38,6,262,85,3015,17,82,1,oldsmobile cutlass ciera (diesel)
389 | 26,4,156,92,2585,14.5,82,1,chrysler lebaron medallion
390 | 22,6,232,112,2835,14.7,82,1,ford granada l
391 | 32,4,144,96,2665,13.9,82,3,toyota celica gt
392 | 36,4,135,84,2370,13,82,1,dodge charger 2.2
393 | 27,4,151,90,2950,17.3,82,1,chevrolet camaro
394 | 27,4,140,86,2790,15.6,82,1,ford mustang gl
395 | 44,4,97,52,2130,24.6,82,2,vw pickup
396 | 32,4,135,84,2295,11.6,82,1,dodge rampage
397 | 28,4,120,79,2625,18.6,82,1,ford ranger
398 | 31,4,119,82,2720,19.4,82,1,chevy s-10
399 |
--------------------------------------------------------------------------------
/tests/data/Credit.csv:
--------------------------------------------------------------------------------
1 | "idx","Income","Limit","Rating","Cards","Age","Education","Gender","Student","Married","Ethnicity","Balance"
2 | "1",14.891,3606,283,2,34,11," Male","No","Yes","Caucasian",333
3 | "2",106.025,6645,483,3,82,15,"Female","Yes","Yes","Asian",903
4 | "3",104.593,7075,514,4,71,11," Male","No","No","Asian",580
5 | "4",148.924,9504,681,3,36,11,"Female","No","No","Asian",964
6 | "5",55.882,4897,357,2,68,16," Male","No","Yes","Caucasian",331
7 | "6",80.18,8047,569,4,77,10," Male","No","No","Caucasian",1151
8 | "7",20.996,3388,259,2,37,12,"Female","No","No","African American",203
9 | "8",71.408,7114,512,2,87,9," Male","No","No","Asian",872
10 | "9",15.125,3300,266,5,66,13,"Female","No","No","Caucasian",279
11 | "10",71.061,6819,491,3,41,19,"Female","Yes","Yes","African American",1350
12 | "11",63.095,8117,589,4,30,14," Male","No","Yes","Caucasian",1407
13 | "12",15.045,1311,138,3,64,16," Male","No","No","Caucasian",0
14 | "13",80.616,5308,394,1,57,7,"Female","No","Yes","Asian",204
15 | "14",43.682,6922,511,1,49,9," Male","No","Yes","Caucasian",1081
16 | "15",19.144,3291,269,2,75,13,"Female","No","No","African American",148
17 | "16",20.089,2525,200,3,57,15,"Female","No","Yes","African American",0
18 | "17",53.598,3714,286,3,73,17,"Female","No","Yes","African American",0
19 | "18",36.496,4378,339,3,69,15,"Female","No","Yes","Asian",368
20 | "19",49.57,6384,448,1,28,9,"Female","No","Yes","Asian",891
21 | "20",42.079,6626,479,2,44,9," Male","No","No","Asian",1048
22 | "21",17.7,2860,235,4,63,16,"Female","No","No","Asian",89
23 | "22",37.348,6378,458,1,72,17,"Female","No","No","Caucasian",968
24 | "23",20.103,2631,213,3,61,10," Male","No","Yes","African American",0
25 | "24",64.027,5179,398,5,48,8," Male","No","Yes","African American",411
26 | "25",10.742,1757,156,3,57,15,"Female","No","No","Caucasian",0
27 | "26",14.09,4323,326,5,25,16,"Female","No","Yes","African American",671
28 | "27",42.471,3625,289,6,44,12,"Female","Yes","No","Caucasian",654
29 | "28",32.793,4534,333,2,44,16," Male","No","No","African American",467
30 | "29",186.634,13414,949,2,41,14,"Female","No","Yes","African American",1809
31 | "30",26.813,5611,411,4,55,16,"Female","No","No","Caucasian",915
32 | "31",34.142,5666,413,4,47,5,"Female","No","Yes","Caucasian",863
33 | "32",28.941,2733,210,5,43,16," Male","No","Yes","Asian",0
34 | "33",134.181,7838,563,2,48,13,"Female","No","No","Caucasian",526
35 | "34",31.367,1829,162,4,30,10," Male","No","Yes","Caucasian",0
36 | "35",20.15,2646,199,2,25,14,"Female","No","Yes","Asian",0
37 | "36",23.35,2558,220,3,49,12,"Female","Yes","No","Caucasian",419
38 | "37",62.413,6457,455,2,71,11,"Female","No","Yes","Caucasian",762
39 | "38",30.007,6481,462,2,69,9,"Female","No","Yes","Caucasian",1093
40 | "39",11.795,3899,300,4,25,10,"Female","No","No","Caucasian",531
41 | "40",13.647,3461,264,4,47,14," Male","No","Yes","Caucasian",344
42 | "41",34.95,3327,253,3,54,14,"Female","No","No","African American",50
43 | "42",113.659,7659,538,2,66,15," Male","Yes","Yes","African American",1155
44 | "43",44.158,4763,351,2,66,13,"Female","No","Yes","Asian",385
45 | "44",36.929,6257,445,1,24,14,"Female","No","Yes","Asian",976
46 | "45",31.861,6375,469,3,25,16,"Female","No","Yes","Caucasian",1120
47 | "46",77.38,7569,564,3,50,12,"Female","No","Yes","Caucasian",997
48 | "47",19.531,5043,376,2,64,16,"Female","Yes","Yes","Asian",1241
49 | "48",44.646,4431,320,2,49,15," Male","Yes","Yes","Caucasian",797
50 | "49",44.522,2252,205,6,72,15," Male","No","Yes","Asian",0
51 | "50",43.479,4569,354,4,49,13," Male","Yes","Yes","African American",902
52 | "51",36.362,5183,376,3,49,15," Male","No","Yes","African American",654
53 | "52",39.705,3969,301,2,27,20," Male","No","Yes","African American",211
54 | "53",44.205,5441,394,1,32,12," Male","No","Yes","Caucasian",607
55 | "54",16.304,5466,413,4,66,10," Male","No","Yes","Asian",957
56 | "55",15.333,1499,138,2,47,9,"Female","No","Yes","Asian",0
57 | "56",32.916,1786,154,2,60,8,"Female","No","Yes","Asian",0
58 | "57",57.1,4742,372,7,79,18,"Female","No","Yes","Asian",379
59 | "58",76.273,4779,367,4,65,14,"Female","No","Yes","Caucasian",133
60 | "59",10.354,3480,281,2,70,17," Male","No","Yes","Caucasian",333
61 | "60",51.872,5294,390,4,81,17,"Female","No","No","Caucasian",531
62 | "61",35.51,5198,364,2,35,20,"Female","No","No","Asian",631
63 | "62",21.238,3089,254,3,59,10,"Female","No","No","Caucasian",108
64 | "63",30.682,1671,160,2,77,7,"Female","No","No","Caucasian",0
65 | "64",14.132,2998,251,4,75,17," Male","No","No","Caucasian",133
66 | "65",32.164,2937,223,2,79,15,"Female","No","Yes","African American",0
67 | "66",12,4160,320,4,28,14,"Female","No","Yes","Caucasian",602
68 | "67",113.829,9704,694,4,38,13,"Female","No","Yes","Asian",1388
69 | "68",11.187,5099,380,4,69,16,"Female","No","No","African American",889
70 | "69",27.847,5619,418,2,78,15,"Female","No","Yes","Caucasian",822
71 | "70",49.502,6819,505,4,55,14," Male","No","Yes","Caucasian",1084
72 | "71",24.889,3954,318,4,75,12," Male","No","Yes","Caucasian",357
73 | "72",58.781,7402,538,2,81,12,"Female","No","Yes","Asian",1103
74 | "73",22.939,4923,355,1,47,18,"Female","No","Yes","Asian",663
75 | "74",23.989,4523,338,4,31,15," Male","No","No","Caucasian",601
76 | "75",16.103,5390,418,4,45,10,"Female","No","Yes","Caucasian",945
77 | "76",33.017,3180,224,2,28,16," Male","No","Yes","African American",29
78 | "77",30.622,3293,251,1,68,16," Male","Yes","No","Caucasian",532
79 | "78",20.936,3254,253,1,30,15,"Female","No","No","Asian",145
80 | "79",110.968,6662,468,3,45,11,"Female","No","Yes","Caucasian",391
81 | "80",15.354,2101,171,2,65,14," Male","No","No","Asian",0
82 | "81",27.369,3449,288,3,40,9,"Female","No","Yes","Caucasian",162
83 | "82",53.48,4263,317,1,83,15," Male","No","No","Caucasian",99
84 | "83",23.672,4433,344,3,63,11," Male","No","No","Caucasian",503
85 | "84",19.225,1433,122,3,38,14,"Female","No","No","Caucasian",0
86 | "85",43.54,2906,232,4,69,11," Male","No","No","Caucasian",0
87 | "86",152.298,12066,828,4,41,12,"Female","No","Yes","Asian",1779
88 | "87",55.367,6340,448,1,33,15," Male","No","Yes","Caucasian",815
89 | "88",11.741,2271,182,4,59,12,"Female","No","No","Asian",0
90 | "89",15.56,4307,352,4,57,8," Male","No","Yes","African American",579
91 | "90",59.53,7518,543,3,52,9,"Female","No","No","African American",1176
92 | "91",20.191,5767,431,4,42,16," Male","No","Yes","African American",1023
93 | "92",48.498,6040,456,3,47,16," Male","No","Yes","Caucasian",812
94 | "93",30.733,2832,249,4,51,13," Male","No","No","Caucasian",0
95 | "94",16.479,5435,388,2,26,16," Male","No","No","African American",937
96 | "95",38.009,3075,245,3,45,15,"Female","No","No","African American",0
97 | "96",14.084,855,120,5,46,17,"Female","No","Yes","African American",0
98 | "97",14.312,5382,367,1,59,17," Male","Yes","No","Asian",1380
99 | "98",26.067,3388,266,4,74,17,"Female","No","Yes","African American",155
100 | "99",36.295,2963,241,2,68,14,"Female","Yes","No","African American",375
101 | "100",83.851,8494,607,5,47,18," Male","No","No","Caucasian",1311
102 | "101",21.153,3736,256,1,41,11," Male","No","No","Caucasian",298
103 | "102",17.976,2433,190,3,70,16,"Female","Yes","No","Caucasian",431
104 | "103",68.713,7582,531,2,56,16," Male","Yes","No","Caucasian",1587
105 | "104",146.183,9540,682,6,66,15," Male","No","No","Caucasian",1050
106 | "105",15.846,4768,365,4,53,12,"Female","No","No","Caucasian",745
107 | "106",12.031,3182,259,2,58,18,"Female","No","Yes","Caucasian",210
108 | "107",16.819,1337,115,2,74,15," Male","No","Yes","Asian",0
109 | "108",39.11,3189,263,3,72,12," Male","No","No","Asian",0
110 | "109",107.986,6033,449,4,64,14," Male","No","Yes","Caucasian",227
111 | "110",13.561,3261,279,5,37,19," Male","No","Yes","Asian",297
112 | "111",34.537,3271,250,3,57,17,"Female","No","Yes","Asian",47
113 | "112",28.575,2959,231,2,60,11,"Female","No","No","African American",0
114 | "113",46.007,6637,491,4,42,14," Male","No","Yes","Caucasian",1046
115 | "114",69.251,6386,474,4,30,12,"Female","No","Yes","Asian",768
116 | "115",16.482,3326,268,4,41,15," Male","No","No","Caucasian",271
117 | "116",40.442,4828,369,5,81,8,"Female","No","No","African American",510
118 | "117",35.177,2117,186,3,62,16,"Female","No","No","Caucasian",0
119 | "118",91.362,9113,626,1,47,17," Male","No","Yes","Asian",1341
120 | "119",27.039,2161,173,3,40,17,"Female","No","No","Caucasian",0
121 | "120",23.012,1410,137,3,81,16," Male","No","No","Caucasian",0
122 | "121",27.241,1402,128,2,67,15,"Female","No","Yes","Asian",0
123 | "122",148.08,8157,599,2,83,13," Male","No","Yes","Caucasian",454
124 | "123",62.602,7056,481,1,84,11,"Female","No","No","Caucasian",904
125 | "124",11.808,1300,117,3,77,14,"Female","No","No","African American",0
126 | "125",29.564,2529,192,1,30,12,"Female","No","Yes","Caucasian",0
127 | "126",27.578,2531,195,1,34,15,"Female","No","Yes","Caucasian",0
128 | "127",26.427,5533,433,5,50,15,"Female","Yes","Yes","Asian",1404
129 | "128",57.202,3411,259,3,72,11,"Female","No","No","Caucasian",0
130 | "129",123.299,8376,610,2,89,17," Male","Yes","No","African American",1259
131 | "130",18.145,3461,279,3,56,15," Male","No","Yes","African American",255
132 | "131",23.793,3821,281,4,56,12,"Female","Yes","Yes","African American",868
133 | "132",10.726,1568,162,5,46,19," Male","No","Yes","Asian",0
134 | "133",23.283,5443,407,4,49,13," Male","No","Yes","African American",912
135 | "134",21.455,5829,427,4,80,12,"Female","No","Yes","African American",1018
136 | "135",34.664,5835,452,3,77,15,"Female","No","Yes","African American",835
137 | "136",44.473,3500,257,3,81,16,"Female","No","No","African American",8
138 | "137",54.663,4116,314,2,70,8,"Female","No","No","African American",75
139 | "138",36.355,3613,278,4,35,9," Male","No","Yes","Asian",187
140 | "139",21.374,2073,175,2,74,11,"Female","No","Yes","Caucasian",0
141 | "140",107.841,10384,728,3,87,7," Male","No","No","African American",1597
142 | "141",39.831,6045,459,3,32,12,"Female","Yes","Yes","African American",1425
143 | "142",91.876,6754,483,2,33,10," Male","No","Yes","Caucasian",605
144 | "143",103.893,7416,549,3,84,17," Male","No","No","Asian",669
145 | "144",19.636,4896,387,3,64,10,"Female","No","No","African American",710
146 | "145",17.392,2748,228,3,32,14," Male","No","Yes","Caucasian",68
147 | "146",19.529,4673,341,2,51,14," Male","No","No","Asian",642
148 | "147",17.055,5110,371,3,55,15,"Female","No","Yes","Caucasian",805
149 | "148",23.857,1501,150,3,56,16," Male","No","Yes","Caucasian",0
150 | "149",15.184,2420,192,2,69,11,"Female","No","Yes","Caucasian",0
151 | "150",13.444,886,121,5,44,10," Male","No","Yes","Asian",0
152 | "151",63.931,5728,435,3,28,14,"Female","No","Yes","African American",581
153 | "152",35.864,4831,353,3,66,13,"Female","No","Yes","Caucasian",534
154 | "153",41.419,2120,184,4,24,11,"Female","Yes","No","Caucasian",156
155 | "154",92.112,4612,344,3,32,17," Male","No","No","Caucasian",0
156 | "155",55.056,3155,235,2,31,16," Male","No","Yes","African American",0
157 | "156",19.537,1362,143,4,34,9,"Female","No","Yes","Asian",0
158 | "157",31.811,4284,338,5,75,13,"Female","No","Yes","Caucasian",429
159 | "158",56.256,5521,406,2,72,16,"Female","Yes","Yes","Caucasian",1020
160 | "159",42.357,5550,406,2,83,12,"Female","No","Yes","Asian",653
161 | "160",53.319,3000,235,3,53,13," Male","No","No","Asian",0
162 | "161",12.238,4865,381,5,67,11,"Female","No","No","Caucasian",836
163 | "162",31.353,1705,160,3,81,14," Male","No","Yes","Caucasian",0
164 | "163",63.809,7530,515,1,56,12," Male","No","Yes","Caucasian",1086
165 | "164",13.676,2330,203,5,80,16,"Female","No","No","African American",0
166 | "165",76.782,5977,429,4,44,12," Male","No","Yes","Asian",548
167 | "166",25.383,4527,367,4,46,11," Male","No","Yes","Caucasian",570
168 | "167",35.691,2880,214,2,35,15," Male","No","No","African American",0
169 | "168",29.403,2327,178,1,37,14,"Female","No","Yes","Caucasian",0
170 | "169",27.47,2820,219,1,32,11,"Female","No","Yes","Asian",0
171 | "170",27.33,6179,459,4,36,12,"Female","No","Yes","Caucasian",1099
172 | "171",34.772,2021,167,3,57,9," Male","No","No","Asian",0
173 | "172",36.934,4270,299,1,63,9,"Female","No","Yes","Caucasian",283
174 | "173",76.348,4697,344,4,60,18," Male","No","No","Asian",108
175 | "174",14.887,4745,339,3,58,12," Male","No","Yes","African American",724
176 | "175",121.834,10673,750,3,54,16," Male","No","No","African American",1573
177 | "176",30.132,2168,206,3,52,17," Male","No","No","Caucasian",0
178 | "177",24.05,2607,221,4,32,18," Male","No","Yes","Caucasian",0
179 | "178",22.379,3965,292,2,34,14,"Female","No","Yes","Asian",384
180 | "179",28.316,4391,316,2,29,10,"Female","No","No","Caucasian",453
181 | "180",58.026,7499,560,5,67,11,"Female","No","No","Caucasian",1237
182 | "181",10.635,3584,294,5,69,16," Male","No","Yes","Asian",423
183 | "182",46.102,5180,382,3,81,12," Male","No","Yes","African American",516
184 | "183",58.929,6420,459,2,66,9,"Female","No","Yes","African American",789
185 | "184",80.861,4090,335,3,29,15,"Female","No","Yes","Asian",0
186 | "185",158.889,11589,805,1,62,17,"Female","No","Yes","Caucasian",1448
187 | "186",30.42,4442,316,1,30,14,"Female","No","No","African American",450
188 | "187",36.472,3806,309,2,52,13," Male","No","No","African American",188
189 | "188",23.365,2179,167,2,75,15," Male","No","No","Asian",0
190 | "189",83.869,7667,554,2,83,11," Male","No","No","African American",930
191 | "190",58.351,4411,326,2,85,16,"Female","No","Yes","Caucasian",126
192 | "191",55.187,5352,385,4,50,17,"Female","No","Yes","Caucasian",538
193 | "192",124.29,9560,701,3,52,17,"Female","Yes","No","Asian",1687
194 | "193",28.508,3933,287,4,56,14," Male","No","Yes","Asian",336
195 | "194",130.209,10088,730,7,39,19,"Female","No","Yes","Caucasian",1426
196 | "195",30.406,2120,181,2,79,14," Male","No","Yes","African American",0
197 | "196",23.883,5384,398,2,73,16,"Female","No","Yes","African American",802
198 | "197",93.039,7398,517,1,67,12," Male","No","Yes","African American",749
199 | "198",50.699,3977,304,2,84,17,"Female","No","No","African American",69
200 | "199",27.349,2000,169,4,51,16,"Female","No","Yes","African American",0
201 | "200",10.403,4159,310,3,43,7," Male","No","Yes","Asian",571
202 | "201",23.949,5343,383,2,40,18," Male","No","Yes","African American",829
203 | "202",73.914,7333,529,6,67,15,"Female","No","Yes","Caucasian",1048
204 | "203",21.038,1448,145,2,58,13,"Female","No","Yes","Caucasian",0
205 | "204",68.206,6784,499,5,40,16,"Female","Yes","No","African American",1411
206 | "205",57.337,5310,392,2,45,7,"Female","No","No","Caucasian",456
207 | "206",10.793,3878,321,8,29,13," Male","No","No","Caucasian",638
208 | "207",23.45,2450,180,2,78,13," Male","No","No","Caucasian",0
209 | "208",10.842,4391,358,5,37,10,"Female","Yes","Yes","Caucasian",1216
210 | "209",51.345,4327,320,3,46,15," Male","No","No","African American",230
211 | "210",151.947,9156,642,2,91,11,"Female","No","Yes","African American",732
212 | "211",24.543,3206,243,2,62,12,"Female","No","Yes","Caucasian",95
213 | "212",29.567,5309,397,3,25,15," Male","No","No","Caucasian",799
214 | "213",39.145,4351,323,2,66,13," Male","No","Yes","Caucasian",308
215 | "214",39.422,5245,383,2,44,19," Male","No","No","African American",637
216 | "215",34.909,5289,410,2,62,16,"Female","No","Yes","Caucasian",681
217 | "216",41.025,4229,337,3,79,19,"Female","No","Yes","Caucasian",246
218 | "217",15.476,2762,215,3,60,18," Male","No","No","Asian",52
219 | "218",12.456,5395,392,3,65,14," Male","No","Yes","Caucasian",955
220 | "219",10.627,1647,149,2,71,10,"Female","Yes","Yes","Asian",195
221 | "220",38.954,5222,370,4,76,13,"Female","No","No","Caucasian",653
222 | "221",44.847,5765,437,3,53,13,"Female","Yes","No","Asian",1246
223 | "222",98.515,8760,633,5,78,11,"Female","No","No","African American",1230
224 | "223",33.437,6207,451,4,44,9," Male","Yes","No","Caucasian",1549
225 | "224",27.512,4613,344,5,72,17," Male","No","Yes","Asian",573
226 | "225",121.709,7818,584,4,50,6," Male","No","Yes","Caucasian",701
227 | "226",15.079,5673,411,4,28,15,"Female","No","Yes","Asian",1075
228 | "227",59.879,6906,527,6,78,15,"Female","No","No","Caucasian",1032
229 | "228",66.989,5614,430,3,47,14,"Female","No","Yes","Caucasian",482
230 | "229",69.165,4668,341,2,34,11,"Female","No","No","African American",156
231 | "230",69.943,7555,547,3,76,9," Male","No","Yes","Asian",1058
232 | "231",33.214,5137,387,3,59,9," Male","No","No","African American",661
233 | "232",25.124,4776,378,4,29,12," Male","No","Yes","Caucasian",657
234 | "233",15.741,4788,360,1,39,14," Male","No","Yes","Asian",689
235 | "234",11.603,2278,187,3,71,11," Male","No","Yes","Caucasian",0
236 | "235",69.656,8244,579,3,41,14," Male","No","Yes","African American",1329
237 | "236",10.503,2923,232,3,25,18,"Female","No","Yes","African American",191
238 | "237",42.529,4986,369,2,37,11," Male","No","Yes","Asian",489
239 | "238",60.579,5149,388,5,38,15," Male","No","Yes","Asian",443
240 | "239",26.532,2910,236,6,58,19,"Female","No","Yes","Caucasian",52
241 | "240",27.952,3557,263,1,35,13,"Female","No","Yes","Asian",163
242 | "241",29.705,3351,262,5,71,14,"Female","No","Yes","Asian",148
243 | "242",15.602,906,103,2,36,11," Male","No","Yes","African American",0
244 | "243",20.918,1233,128,3,47,18,"Female","Yes","Yes","Asian",16
245 | "244",58.165,6617,460,1,56,12,"Female","No","Yes","Caucasian",856
246 | "245",22.561,1787,147,4,66,15,"Female","No","No","Caucasian",0
247 | "246",34.509,2001,189,5,80,18,"Female","No","Yes","African American",0
248 | "247",19.588,3211,265,4,59,14,"Female","No","No","Asian",199
249 | "248",36.364,2220,188,3,50,19," Male","No","No","Caucasian",0
250 | "249",15.717,905,93,1,38,16," Male","Yes","Yes","Caucasian",0
251 | "250",22.574,1551,134,3,43,13,"Female","Yes","Yes","Caucasian",98
252 | "251",10.363,2430,191,2,47,18,"Female","No","Yes","Asian",0
253 | "252",28.474,3202,267,5,66,12," Male","No","Yes","Caucasian",132
254 | "253",72.945,8603,621,3,64,8,"Female","No","No","Caucasian",1355
255 | "254",85.425,5182,402,6,60,12," Male","No","Yes","African American",218
256 | "255",36.508,6386,469,4,79,6,"Female","No","Yes","Caucasian",1048
257 | "256",58.063,4221,304,3,50,8," Male","No","No","African American",118
258 | "257",25.936,1774,135,2,71,14,"Female","No","No","Asian",0
259 | "258",15.629,2493,186,1,60,14," Male","No","Yes","Asian",0
260 | "259",41.4,2561,215,2,36,14," Male","No","Yes","Caucasian",0
261 | "260",33.657,6196,450,6,55,9,"Female","No","No","Caucasian",1092
262 | "261",67.937,5184,383,4,63,12," Male","No","Yes","Asian",345
263 | "262",180.379,9310,665,3,67,8,"Female","Yes","Yes","Asian",1050
264 | "263",10.588,4049,296,1,66,13,"Female","No","Yes","Caucasian",465
265 | "264",29.725,3536,270,2,52,15,"Female","No","No","African American",133
266 | "265",27.999,5107,380,1,55,10," Male","No","Yes","Caucasian",651
267 | "266",40.885,5013,379,3,46,13,"Female","No","Yes","African American",549
268 | "267",88.83,4952,360,4,86,16,"Female","No","Yes","Caucasian",15
269 | "268",29.638,5833,433,3,29,15,"Female","No","Yes","Asian",942
270 | "269",25.988,1349,142,4,82,12," Male","No","No","Caucasian",0
271 | "270",39.055,5565,410,4,48,18,"Female","No","Yes","Caucasian",772
272 | "271",15.866,3085,217,1,39,13," Male","No","No","Caucasian",136
273 | "272",44.978,4866,347,1,30,10,"Female","No","No","Caucasian",436
274 | "273",30.413,3690,299,2,25,15,"Female","Yes","No","Asian",728
275 | "274",16.751,4706,353,6,48,14," Male","Yes","No","Asian",1255
276 | "275",30.55,5869,439,5,81,9,"Female","No","No","African American",967
277 | "276",163.329,8732,636,3,50,14," Male","No","Yes","Caucasian",529
278 | "277",23.106,3476,257,2,50,15,"Female","No","No","Caucasian",209
279 | "278",41.532,5000,353,2,50,12," Male","No","Yes","Caucasian",531
280 | "279",128.04,6982,518,2,78,11,"Female","No","Yes","Caucasian",250
281 | "280",54.319,3063,248,3,59,8,"Female","Yes","No","Caucasian",269
282 | "281",53.401,5319,377,3,35,12,"Female","No","No","African American",541
283 | "282",36.142,1852,183,3,33,13,"Female","No","No","African American",0
284 | "283",63.534,8100,581,2,50,17,"Female","No","Yes","Caucasian",1298
285 | "284",49.927,6396,485,3,75,17,"Female","No","Yes","Caucasian",890
286 | "285",14.711,2047,167,2,67,6," Male","No","Yes","Caucasian",0
287 | "286",18.967,1626,156,2,41,11,"Female","No","Yes","Asian",0
288 | "287",18.036,1552,142,2,48,15,"Female","No","No","Caucasian",0
289 | "288",60.449,3098,272,4,69,8," Male","No","Yes","Caucasian",0
290 | "289",16.711,5274,387,3,42,16,"Female","No","Yes","Asian",863
291 | "290",10.852,3907,296,2,30,9," Male","No","No","Caucasian",485
292 | "291",26.37,3235,268,5,78,11," Male","No","Yes","Asian",159
293 | "292",24.088,3665,287,4,56,13,"Female","No","Yes","Caucasian",309
294 | "293",51.532,5096,380,2,31,15," Male","No","Yes","Caucasian",481
295 | "294",140.672,11200,817,7,46,9," Male","No","Yes","African American",1677
296 | "295",42.915,2532,205,4,42,13," Male","No","Yes","Asian",0
297 | "296",27.272,1389,149,5,67,10,"Female","No","Yes","Caucasian",0
298 | "297",65.896,5140,370,1,49,17,"Female","No","Yes","Caucasian",293
299 | "298",55.054,4381,321,3,74,17," Male","No","Yes","Asian",188
300 | "299",20.791,2672,204,1,70,18,"Female","No","No","African American",0
301 | "300",24.919,5051,372,3,76,11,"Female","No","Yes","African American",711
302 | "301",21.786,4632,355,1,50,17," Male","No","Yes","Caucasian",580
303 | "302",31.335,3526,289,3,38,7,"Female","No","No","Caucasian",172
304 | "303",59.855,4964,365,1,46,13,"Female","No","Yes","Caucasian",295
305 | "304",44.061,4970,352,1,79,11," Male","No","Yes","African American",414
306 | "305",82.706,7506,536,2,64,13,"Female","No","Yes","Asian",905
307 | "306",24.46,1924,165,2,50,14,"Female","No","Yes","Asian",0
308 | "307",45.12,3762,287,3,80,8," Male","No","Yes","Caucasian",70
309 | "308",75.406,3874,298,3,41,14,"Female","No","Yes","Asian",0
310 | "309",14.956,4640,332,2,33,6," Male","No","No","Asian",681
311 | "310",75.257,7010,494,3,34,18,"Female","No","Yes","Caucasian",885
312 | "311",33.694,4891,369,1,52,16," Male","Yes","No","African American",1036
313 | "312",23.375,5429,396,3,57,15,"Female","No","Yes","Caucasian",844
314 | "313",27.825,5227,386,6,63,11," Male","No","Yes","Caucasian",823
315 | "314",92.386,7685,534,2,75,18,"Female","No","Yes","Asian",843
316 | "315",115.52,9272,656,2,69,14," Male","No","No","African American",1140
317 | "316",14.479,3907,296,3,43,16," Male","No","Yes","Caucasian",463
318 | "317",52.179,7306,522,2,57,14," Male","No","No","Asian",1142
319 | "318",68.462,4712,340,2,71,16," Male","No","Yes","Caucasian",136
320 | "319",18.951,1485,129,3,82,13,"Female","No","No","Caucasian",0
321 | "320",27.59,2586,229,5,54,16," Male","No","Yes","African American",0
322 | "321",16.279,1160,126,3,78,13," Male","Yes","Yes","African American",5
323 | "322",25.078,3096,236,2,27,15,"Female","No","Yes","Caucasian",81
324 | "323",27.229,3484,282,6,51,11," Male","No","No","Caucasian",265
325 | "324",182.728,13913,982,4,98,17," Male","No","Yes","Caucasian",1999
326 | "325",31.029,2863,223,2,66,17," Male","Yes","Yes","Asian",415
327 | "326",17.765,5072,364,1,66,12,"Female","No","Yes","Caucasian",732
328 | "327",125.48,10230,721,3,82,16," Male","No","Yes","Caucasian",1361
329 | "328",49.166,6662,508,3,68,14,"Female","No","No","Asian",984
330 | "329",41.192,3673,297,3,54,16,"Female","No","Yes","Caucasian",121
331 | "330",94.193,7576,527,2,44,16,"Female","No","Yes","Caucasian",846
332 | "331",20.405,4543,329,2,72,17," Male","Yes","No","Asian",1054
333 | "332",12.581,3976,291,2,48,16," Male","No","Yes","Caucasian",474
334 | "333",62.328,5228,377,3,83,15," Male","No","No","Caucasian",380
335 | "334",21.011,3402,261,2,68,17," Male","No","Yes","African American",182
336 | "335",24.23,4756,351,2,64,15,"Female","No","Yes","Caucasian",594
337 | "336",24.314,3409,270,2,23,7,"Female","No","Yes","Caucasian",194
338 | "337",32.856,5884,438,4,68,13," Male","No","No","Caucasian",926
339 | "338",12.414,855,119,3,32,12," Male","No","Yes","African American",0
340 | "339",41.365,5303,377,1,45,14," Male","No","No","Caucasian",606
341 | "340",149.316,10278,707,1,80,16," Male","No","No","African American",1107
342 | "341",27.794,3807,301,4,35,8,"Female","No","Yes","African American",320
343 | "342",13.234,3922,299,2,77,17,"Female","No","Yes","Caucasian",426
344 | "343",14.595,2955,260,5,37,9," Male","No","Yes","African American",204
345 | "344",10.735,3746,280,2,44,17,"Female","No","Yes","Caucasian",410
346 | "345",48.218,5199,401,7,39,10," Male","No","Yes","Asian",633
347 | "346",30.012,1511,137,2,33,17," Male","No","Yes","Caucasian",0
348 | "347",21.551,5380,420,5,51,18," Male","No","Yes","Asian",907
349 | "348",160.231,10748,754,2,69,17," Male","No","No","Caucasian",1192
350 | "349",13.433,1134,112,3,70,14," Male","No","Yes","Caucasian",0
351 | "350",48.577,5145,389,3,71,13,"Female","No","Yes","Asian",503
352 | "351",30.002,1561,155,4,70,13,"Female","No","Yes","Caucasian",0
353 | "352",61.62,5140,374,1,71,9," Male","No","Yes","Caucasian",302
354 | "353",104.483,7140,507,2,41,14," Male","No","Yes","African American",583
355 | "354",41.868,4716,342,2,47,18," Male","No","No","Caucasian",425
356 | "355",12.068,3873,292,1,44,18,"Female","No","Yes","Asian",413
357 | "356",180.682,11966,832,2,58,8,"Female","No","Yes","African American",1405
358 | "357",34.48,6090,442,3,36,14," Male","No","No","Caucasian",962
359 | "358",39.609,2539,188,1,40,14," Male","No","Yes","Asian",0
360 | "359",30.111,4336,339,1,81,18," Male","No","Yes","Caucasian",347
361 | "360",12.335,4471,344,3,79,12," Male","No","Yes","African American",611
362 | "361",53.566,5891,434,4,82,10,"Female","No","No","Caucasian",712
363 | "362",53.217,4943,362,2,46,16,"Female","No","Yes","Asian",382
364 | "363",26.162,5101,382,3,62,19,"Female","No","No","African American",710
365 | "364",64.173,6127,433,1,80,10," Male","No","Yes","Caucasian",578
366 | "365",128.669,9824,685,3,67,16," Male","No","Yes","Asian",1243
367 | "366",113.772,6442,489,4,69,15," Male","Yes","Yes","Caucasian",790
368 | "367",61.069,7871,564,3,56,14," Male","No","Yes","Caucasian",1264
369 | "368",23.793,3615,263,2,70,14," Male","No","No","African American",216
370 | "369",89,5759,440,3,37,6,"Female","No","No","Caucasian",345
371 | "370",71.682,8028,599,3,57,16," Male","No","Yes","Caucasian",1208
372 | "371",35.61,6135,466,4,40,12," Male","No","No","Caucasian",992
373 | "372",39.116,2150,173,4,75,15," Male","No","No","Caucasian",0
374 | "373",19.782,3782,293,2,46,16,"Female","Yes","No","Caucasian",840
375 | "374",55.412,5354,383,2,37,16,"Female","Yes","Yes","Caucasian",1003
376 | "375",29.4,4840,368,3,76,18,"Female","No","Yes","Caucasian",588
377 | "376",20.974,5673,413,5,44,16,"Female","No","Yes","Caucasian",1000
378 | "377",87.625,7167,515,2,46,10,"Female","No","No","African American",767
379 | "378",28.144,1567,142,3,51,10," Male","No","Yes","Caucasian",0
380 | "379",19.349,4941,366,1,33,19," Male","No","Yes","Caucasian",717
381 | "380",53.308,2860,214,1,84,10," Male","No","Yes","Caucasian",0
382 | "381",115.123,7760,538,3,83,14,"Female","No","No","African American",661
383 | "382",101.788,8029,574,2,84,11," Male","No","Yes","Caucasian",849
384 | "383",24.824,5495,409,1,33,9," Male","Yes","No","Caucasian",1352
385 | "384",14.292,3274,282,9,64,9," Male","No","Yes","Caucasian",382
386 | "385",20.088,1870,180,3,76,16," Male","No","No","African American",0
387 | "386",26.4,5640,398,3,58,15,"Female","No","No","Asian",905
388 | "387",19.253,3683,287,4,57,10," Male","No","No","African American",371
389 | "388",16.529,1357,126,3,62,9," Male","No","No","Asian",0
390 | "389",37.878,6827,482,2,80,13,"Female","No","No","Caucasian",1129
391 | "390",83.948,7100,503,2,44,18," Male","No","No","Caucasian",806
392 | "391",135.118,10578,747,3,81,15,"Female","No","Yes","Asian",1393
393 | "392",73.327,6555,472,2,43,15,"Female","No","No","Caucasian",721
394 | "393",25.974,2308,196,2,24,10," Male","No","No","Asian",0
395 | "394",17.316,1335,138,2,65,13," Male","No","No","African American",0
396 | "395",49.794,5758,410,4,40,8," Male","No","No","Caucasian",734
397 | "396",12.096,4100,307,3,32,13," Male","No","Yes","Caucasian",560
398 | "397",13.364,3838,296,5,65,17," Male","No","No","African American",480
399 | "398",57.872,4171,321,5,67,12,"Female","No","Yes","Caucasian",138
400 | "399",37.728,2525,192,1,44,13," Male","No","Yes","Caucasian",0
401 | "400",18.701,5524,415,5,64,7,"Female","No","No","Asian",966
402 |
--------------------------------------------------------------------------------
/tests/data/auto_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "properties": {
3 | "acceleration": {
4 | "type": [
5 | "number"
6 | ]
7 | },
8 | "cylinders": {
9 | "type": [
10 | "integer"
11 | ]
12 | },
13 | "displacement": {
14 | "type": [
15 | "number"
16 | ]
17 | },
18 | "horsepower": {
19 | "type": [
20 | "string"
21 | ]
22 | },
23 | "mpg": {
24 | "type": [
25 | "number"
26 | ]
27 | },
28 | "name": {
29 | "type": [
30 | "string"
31 | ]
32 | },
33 | "origin": {
34 | "type": [
35 | "integer"
36 | ]
37 | },
38 | "weight": {
39 | "type": [
40 | "integer"
41 | ]
42 | },
43 | "year": {
44 | "type": [
45 | "integer"
46 | ]
47 | }
48 | },
49 | "required": [
50 | "mpg",
51 | "cylinders",
52 | "displacement",
53 | "horsepower",
54 | "weight",
55 | "acceleration",
56 | "year",
57 | "origin",
58 | "name"
59 | ],
60 | "type": "object"
61 | }
62 |
--------------------------------------------------------------------------------
/tests/data/boston.json:
--------------------------------------------------------------------------------
1 | {
2 | "example_data": {
3 | "AGE": 65.2,
4 | "B": 396.9,
5 | "CHAS": 0.0,
6 | "CRIM": 0.00632,
7 | "DIS": 4.09,
8 | "INDUS": 2.31,
9 | "LSTAT": 4.98,
10 | "NOX": 0.538,
11 | "PTRATIO": 15.3,
12 | "RAD": 1.0,
13 | "RM": 6.575,
14 | "TAX": 296.0,
15 | "ZN": 18.0,
16 | "target": 24.0
17 | },
18 | "schema": {
19 | "properties": {
20 | "AGE": {
21 | "type": [
22 | "number"
23 | ]
24 | },
25 | "B": {
26 | "type": [
27 | "number"
28 | ]
29 | },
30 | "CHAS": {
31 | "type": [
32 | "number"
33 | ]
34 | },
35 | "CRIM": {
36 | "type": [
37 | "number"
38 | ]
39 | },
40 | "DIS": {
41 | "type": [
42 | "number"
43 | ]
44 | },
45 | "INDUS": {
46 | "type": [
47 | "number"
48 | ]
49 | },
50 | "LSTAT": {
51 | "type": [
52 | "number"
53 | ]
54 | },
55 | "NOX": {
56 | "type": [
57 | "number"
58 | ]
59 | },
60 | "PTRATIO": {
61 | "type": [
62 | "number"
63 | ]
64 | },
65 | "RAD": {
66 | "type": [
67 | "number"
68 | ]
69 | },
70 | "RM": {
71 | "type": [
72 | "number"
73 | ]
74 | },
75 | "TAX": {
76 | "type": [
77 | "number"
78 | ]
79 | },
80 | "ZN": {
81 | "type": [
82 | "number"
83 | ]
84 | },
85 | "target": {
86 | "type": [
87 | "number"
88 | ]
89 | }
90 | },
91 | "required": [
92 | "CRIM",
93 | "ZN",
94 | "INDUS",
95 | "CHAS",
96 | "NOX",
97 | "RM",
98 | "AGE",
99 | "DIS",
100 | "RAD",
101 | "TAX",
102 | "PTRATIO",
103 | "B",
104 | "LSTAT",
105 | "target"
106 | ],
107 | "type": "object"
108 | },
109 | "ui_schema": {}
110 | }
111 |
--------------------------------------------------------------------------------
/tests/data/boston_gbr.joblib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/tests/data/boston_gbr.joblib
--------------------------------------------------------------------------------
/tests/data/boston_gbr.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ml-libs/mlserve/571152e4475738e0b01fcbde166d95a3636b3c5f/tests/data/boston_gbr.pkl
--------------------------------------------------------------------------------
/tests/data/credit_schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "properties": {
3 | "Age": {
4 | "type": [
5 | "integer"
6 | ]
7 | },
8 | "Balance": {
9 | "type": [
10 | "integer"
11 | ]
12 | },
13 | "Cards": {
14 | "type": [
15 | "integer"
16 | ]
17 | },
18 | "Education": {
19 | "type": [
20 | "integer"
21 | ]
22 | },
23 | "Ethnicity": {
24 | "enum": [
25 | "African American",
26 | "Asian",
27 | "Caucasian"
28 | ],
29 | "type": [
30 | "string"
31 | ]
32 | },
33 | "Gender": {
34 | "enum": [
35 | " Male",
36 | "Female"
37 | ],
38 | "type": [
39 | "string"
40 | ]
41 | },
42 | "Income": {
43 | "type": [
44 | "number"
45 | ]
46 | },
47 | "Limit": {
48 | "type": [
49 | "integer"
50 | ]
51 | },
52 | "Married": {
53 | "enum": [
54 | "No",
55 | "Yes"
56 | ],
57 | "type": [
58 | "string"
59 | ]
60 | },
61 | "Rating": {
62 | "type": [
63 | "integer"
64 | ]
65 | },
66 | "Student": {
67 | "enum": [
68 | "No",
69 | "Yes"
70 | ],
71 | "type": [
72 | "string"
73 | ]
74 | },
75 | "Unnamed: 0": {
76 | "type": [
77 | "integer"
78 | ]
79 | }
80 | },
81 | "required": [
82 | "Unnamed: 0",
83 | "Income",
84 | "Limit",
85 | "Rating",
86 | "Cards",
87 | "Age",
88 | "Education",
89 | "Gender",
90 | "Student",
91 | "Married",
92 | "Ethnicity",
93 | "Balance"
94 | ],
95 | "type": "object"
96 | }
97 |
--------------------------------------------------------------------------------
/tests/test_handlers.py:
--------------------------------------------------------------------------------
async def test_index_page(api):
    # The root URL must serve the bundled single-page UI with a 200.
    resp = await api.get('/')
    assert resp.status == 200
    assert await resp.text()
6 |
7 |
async def test_models_list(api):
    # The fixture configures exactly one model, so the listing endpoint
    # must return a one-element JSON array.
    resp = await api.get('/api/v1/models')
    assert resp.status == 200
    payload = await resp.json()
    assert isinstance(payload, list)
    assert len(payload) == 1
15 |
async def test_get_one_model(api):
    """Fetching a configured model returns its JSON description; an
    unknown model name yields a 404 with a JSON error body."""
    resp = await api.get('/api/v1/models/boston_gbr_1')
    assert resp.status == 200
    assert isinstance(await resp.json(), dict)

    resp = await api.get('/api/v1/models/no_such_model')
    assert resp.status == 404
    assert isinstance(await resp.json(), dict)
26 |
27 |
# Single Boston-housing observation in pandas ``to_dict()`` orientation
# (column -> {row_index: value}).  The values mirror the ``example_data``
# row in tests/data/boston.json, minus the ``target`` column.
preds = {
    'CRIM': {'0': 0.00632},
    'ZN': {'0': 18.0},
    'INDUS': {'0': 2.31},
    'CHAS': {'0': 0.0},
    'NOX': {'0': 0.538},
    'RM': {'0': 6.575},
    'AGE': {'0': 65.2},
    'DIS': {'0': 4.09},
    'RAD': {'0': 1.0},
    'TAX': {'0': 296.0},
    'PTRATIO': {'0': 15.3},
    'B': {'0': 396.9},
    'LSTAT': {'0': 4.98},
}
43 |
44 |
async def test_basic_predict(api):
    """POST /predict returns 404 for unknown models and a JSON list of
    predictions for a known model.

    Renamed from ``test_baic_predict`` to fix the typo; pytest discovers
    tests by the ``test_`` prefix, so nothing else references the name.
    """
    # Unknown model: the handler answers with a JSON error object.
    resp = await api.post('/api/v1/models/no_such_model/predict', json=preds)
    assert resp.status == 404
    body = await resp.json()
    assert isinstance(body, dict)

    # Known model: predictions come back as a JSON list.
    resp = await api.post('/api/v1/models/boston_gbr_1/predict', json=preds)
    assert resp.status == 200
    body = await resp.json()
    assert isinstance(body, list)
55 |
--------------------------------------------------------------------------------
/tests/test_loaders.py:
--------------------------------------------------------------------------------
1 | from mlserve.loaders import get_loader
2 |
3 |
def _check_loader(loader_name, model_path):
    # Shared assertion: the named loader deserializes the file into an
    # object exposing scikit-learn's ``predict`` interface.
    loader = get_loader(loader_name)
    model = loader(model_path)
    assert hasattr(model, 'predict')


def test_pickle_loader():
    # Pickle-serialized model round-trips through the 'pickle' loader.
    _check_loader('pickle', 'tests/data/boston_gbr.pkl')


def test_joblib_loader():
    # Joblib-serialized model round-trips through the 'joblib' loader.
    _check_loader('joblib', 'tests/data/boston_gbr.joblib')
16 |
--------------------------------------------------------------------------------
/tests/test_schema_builder.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import json
3 | import pandas as pd
4 | import numpy as np
5 | from jsonschema import validate
6 | from mlserve.schema_builder import build_schema
7 |
8 |
@pytest.fixture(scope='session')
def auto_dataset():
    # Session-scoped so the CSV is parsed once for the whole test run.
    return pd.read_csv('tests/data/Auto.csv')
14 |
15 |
@pytest.fixture(scope='session')
def credit_dataset():
    # Credit dataset with the string columns converted to pandas
    # categoricals so that build_schema can treat them as enums.
    df = pd.read_csv('tests/data/Credit.csv')
    for column in ('Gender', 'Ethnicity', 'Student', 'Married'):
        df[column] = df[column].astype('category')
    return df
25 |
26 |
27 | def assert_schema(df):
28 | desc = build_schema(df)
29 | s = desc['schema']
30 | ui_schema = desc['ui_schema']
31 | form_data = desc['example_data']
32 |
33 | for i in range(len(df)):
34 | row = json.loads(df.iloc[i].to_json())
35 | validate(row, s)
36 |
37 | assert set(form_data.keys()) == set(df.columns)
38 | assert ui_schema == {}
39 |
40 |
def test_basic(auto_dataset, credit_dataset):
    # Schemas built from both reference datasets must validate their rows.
    for df in (auto_dataset, credit_dataset):
        assert_schema(df)
44 |
45 |
def test_none():
    """Columns containing NaN must still produce a validating schema."""
    df = pd.DataFrame({
        'i': [3, 2, 1, np.nan],
        'f': [3.5, 2, 1, np.nan],
        'b': [True, False, True, np.nan],
        's': ['a', 'b', 'c', np.nan],
    })
    assert_schema(df)
55 |
56 |
def test_types():
    """One column per basic dtype: int, float, bool and string."""
    df = pd.DataFrame({
        'i': [3, 2, 1],
        'f': [3.5, 2, 1],
        'b': [True, False, True],
        's': ['a', 'b', 'c'],
    })
    assert_schema(df)
66 |
67 |
def test_text_area():
    # A long (80-char) string column must still yield a schema; the name
    # suggests this exercises the textarea-widget path — TODO confirm in
    # build_schema.
    df = pd.DataFrame([{'txt': 'a' * 80}])
    assert build_schema(df)
73 |
--------------------------------------------------------------------------------
/tests/test_stats.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from mlserve.stats import ModelStats, AggStats, RequestTiming
3 |
4 |
def test_request_timing_ctor():
    # RequestTiming exposes the HTTP status it was constructed with.
    timing = RequestTiming(200, datetime(2018, 8, 5, 12, 5, 47), 0.01)
    assert timing.status == 200
9 |
10 |
def test_model_stats_ctor():
    """A fresh ModelStats starts with zeroed counters and no timings."""
    stats = ModelStats()
    assert stats.success == 0
    assert stats.error == 0
    assert stats.mean_resp_time() == 0
    assert len(stats.timings) == 0
    assert stats.formatted() == {
        'success': 0,
        'error': 0,
        'mean_resp_time': 0,
    }
23 |
24 |
def test_model_stats_log_data_point():
    # Logging two successful timings updates the counters and the mean.
    stats = ModelStats()
    for minute in (5, 6):
        point = RequestTiming(200, datetime(2018, 8, 5, 12, minute, 47), 0.01)
        stats.log_data_point(point)

    assert stats.success == 2
    assert stats.error == 0
    assert stats.mean_resp_time() == 0.01
37 |
38 |
def test_agg_stats_ctor():
    """AggStats starts zeroed, both when constructed directly and when
    aggregated from empty per-model stats."""
    agg = AggStats()
    assert agg.success == 0
    assert agg.error == 0
    assert agg.mean_resp_time() == 0
    assert len(agg.timings) == 0
    assert agg.formatted() == {
        'success': 0,
        'error': 0,
        'mean_resp_time': 0,
    }

    # Aggregating two empty ModelStats keeps everything at zero.
    stats_map = {'name1': ModelStats(), 'name2': ModelStats()}
    agg = AggStats.from_models_stats(stats_map)
    assert agg.success == 0
    assert agg.error == 0
    assert agg.mean_resp_time() == 0
    assert len(agg.timings) == 0
62 |
--------------------------------------------------------------------------------
/tests/test_utitls.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 |
4 | import pytest
5 | from mlserve.utils import load_models, ModelMeta
6 | from mlserve.worker import warm, predict
7 |
8 |
def _boston_meta():
    # Single source of truth for the model metadata used by these tests;
    # previously this literal was duplicated in test_load_models and the
    # model_desc fixture.
    return ModelMeta({
        'name': 'boston_gbr_1',
        'description': 'model predicts',
        'model_path': 'tests/data/boston_gbr.pkl',
        'data_schema_path': 'tests/data/boston.json',
        'target': 'target',
    })


def test_load_models():
    """load_models turns raw metadata into a model description whose
    feature list is derived from the JSON schema (target excluded)."""
    result = load_models([_boston_meta()])
    expected_features = [
        'AGE',
        'B',
        'CHAS',
        'CRIM',
        'DIS',
        'INDUS',
        'LSTAT',
        'NOX',
        'PTRATIO',
        'RAD',
        'RM',
        'TAX',
        'ZN',
    ]
    assert len(result) == 1
    model_desc = result[0]
    assert model_desc.loader == 'pickle'
    assert model_desc.target == ['target']
    assert model_desc.name == 'boston_gbr_1'
    assert model_desc.features == expected_features
    assert model_desc.model_path == Path('tests/data/boston_gbr.pkl')
    assert model_desc.data_schema_path == Path('tests/data/boston.json')


@pytest.fixture
def model_desc():
    # Loaded model description shared by the worker tests below.
    result = load_models([_boston_meta()])
    assert len(result) == 1
    return result[0]
59 |
60 |
def test_warm_predict(model_desc):
    """warm() populates the model cache; predict() then runs the cached
    model on the schema's example row and returns a truthy result."""
    cache = {}
    warm([model_desc], cache)
    assert len(cache) == 1

    payload = json.dumps([model_desc.schema['example_data']])
    result = predict(model_desc.name, ['target'], payload, cache)
    assert result
70 |
--------------------------------------------------------------------------------
/ui/App/AggStats.js:
--------------------------------------------------------------------------------
1 | import "whatwg-fetch";
2 | import React, { Component } from "react";
3 | import { Container, Row, Col, Button, Card, CardTitle } from "reactstrap";
4 |
5 | export default class AggStats extends Component {
6 | state = {
7 | success: 0,
8 | error: 0,
9 | mean_resp_time: 0
10 | };
11 |
12 | fetchStats() {
13 | fetch("/api/v1/agg_stats", {
14 | method: "GET"
15 | })
16 | .then(response => response.text())
17 | .then(jsonData => JSON.parse(jsonData))
18 | .then(data => {
19 | this.setState({
20 | success: data.success,
21 | error: data.error,
22 | mean_resp_time: data.mean_resp_time
23 | });
24 | });
25 | }
26 |
27 | handleFetch = event => {
28 | event.preventDefault();
29 | this.fetchStats();
30 | };
31 |
32 | componentDidMount() {
33 | this.fetchStats();
34 | }
35 |
36 | render() {
37 | return (
38 |
39 |
42 |
43 |
51 |
52 | Successes
53 | {this.state.success}
54 |
55 |
56 |
64 |
65 | Mean Response Time
66 | {this.state.mean_resp_time}
67 |
68 |
69 |
77 |
78 | Errors
79 | {this.state.error}
80 |
81 |
82 |
83 |
84 | );
85 | }
86 | }
87 |
--------------------------------------------------------------------------------
/ui/App/AppHeader.js:
--------------------------------------------------------------------------------
1 | import "whatwg-fetch";
2 | import React from "react";
3 | import {
4 | Collapse,
5 | Navbar,
6 | NavbarToggler,
7 | NavbarBrand,
8 | Nav,
9 | NavItem,
10 | NavLink,
11 | UncontrolledDropdown,
12 | DropdownToggle,
13 | DropdownMenu,
14 | DropdownItem
15 | } from "reactstrap";
16 | import { withRouter } from "react-router-dom";
17 | import { ModelsConsumer } from "./ModelsContext";
18 |
19 | export default withRouter(
20 | class AppHeader extends React.Component {
21 | state = {
22 | isOpen: false,
23 | models: []
24 | };
25 |
26 | toggle = () => {
27 | this.setState({
28 | isOpen: !this.state.isOpen
29 | });
30 | };
31 |
32 | handleModelSelect = (modelName) => {
33 | this.props.history.push(`/models/${modelName}`);
34 | };
35 |
36 | render() {
37 | return (
38 |
39 |
40 | mlserve
41 |
42 |
43 |
71 |
72 |
73 |
74 | );
75 | }
76 | }
77 | );
78 |
--------------------------------------------------------------------------------
/ui/App/Home.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | import "bootstrap/dist/css/bootstrap.min.css";
4 |
5 | import AggStats from "./AggStats";
6 | import ModelList from "./ModelList";
7 |
8 | const Home = () => {
9 | return (
10 |
11 |
12 |
13 |
14 | );
15 | };
16 |
17 | export default Home;
18 |
--------------------------------------------------------------------------------
/ui/App/Model.js:
--------------------------------------------------------------------------------
1 | import "whatwg-fetch";
2 | import React, { Component } from "react";
3 | import {
4 | Button,
5 | Card,
6 | CardText,
7 | CardTitle,
8 | CardBody,
9 | Collapse,
10 | Container,
11 | FormGroup
12 | } from "reactstrap";
13 | import Form from "react-jsonschema-form";
14 | import {
15 | XAxis,
16 | YAxis,
17 | VerticalGridLines,
18 | HorizontalGridLines,
19 | LineMarkSeries,
20 | DiscreteColorLegend,
21 | FlexibleWidthXYPlot
22 | } from "react-vis";
23 |
// Reshape raw prediction rows into per-target series for plotting:
// for each target column name, emit a pair of
// [targetName, [{ y: rowValue, x: rowIndex }, ...]].
const reformatPlot = (target, rawPlot) => {
  return target.map((name) => {
    const series = rawPlot.map((row, index) => ({ y: row[name], x: index }));
    return [name, series];
  });
};
26 |
27 | export default class Model extends Component {
28 | state = {
29 | model: {
30 | schema: { schema: {}, ui_schema: {}, example_data: {} },
31 | description: "",
32 | target: []
33 | },
34 | predictions: [],
35 | plot: [],
36 | collapse: false,
37 | counter: 0
38 | };
39 | modelName = this.props.match.params.modelName;
40 |
41 | fetchStats = () => {
42 | fetch(`/api/v1/models/${this.modelName}`, {
43 | method: "GET"
44 | })
45 | .then(response => response.text())
46 | .then(jsonData => JSON.parse(jsonData))
47 | .then(payload => {
48 | this.setState({
49 | model: payload,
50 | formData: payload.schema.example_data
51 | });
52 | });
53 | };
54 |
55 | componentDidMount() {
56 | this.fetchStats();
57 | }
58 |
59 | handleSubmit = data => {
60 | fetch(`/api/v1/models/${this.modelName}/predict`, {
61 | method: "POST",
62 | body: JSON.stringify([data.formData])
63 | })
64 | .then(response => response.text())
65 | .then(jsonData => JSON.parse(jsonData))
66 | .then(payload => {
67 | const point = payload[0];
68 | this.setState({
69 | predictions: payload[0],
70 | plot: [...this.state.plot, point]
71 | });
72 | });
73 | };
74 |
75 | handleChange = data => {
76 | this.setState({ formData: data.formData });
77 | };
78 |
79 | handleError = err => {
80 | console.log(err);
81 | };
82 |
83 | toggle = () => {
84 | this.setState({ collapse: !this.state.collapse });
85 | };
86 |
87 | render() {
88 | return (
89 |
90 |
91 | Model {this.modelName}
92 | Model {this.state.model.description}
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 | {reformatPlot(this.state.model.target, this.state.plot).map(
101 | item => {
102 | const [_, series] = item;
103 | return (
104 |
109 | );
110 | }
111 | )}
112 |
113 |
114 |
115 |
116 |
117 |
118 |
121 |
122 |
123 |
124 |
125 |
126 |
127 | curl --header "Content-Type: application/json" --request
128 | POST --data '[{JSON.stringify(
129 | this.state.model.schema.example_data
130 | )}]' {window.location.origin}/api/v1/models/{this.modelName}/predict
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 | Predict With WEB UI
139 |
147 |
148 |
149 | {this.state.model.target.join(", ")}
150 | {JSON.stringify(this.state.predictions)}
151 |
152 |
153 |
154 |
155 | );
156 | }
157 | }
158 |
--------------------------------------------------------------------------------
/ui/App/ModelList.js:
--------------------------------------------------------------------------------
1 | import "whatwg-fetch";
2 | import React, { Component } from "react";
3 | import { ListGroupItem, ListGroup, Badge, Container } from "reactstrap";
4 | import { Link } from "react-router-dom";
5 | import { ModelsConsumer } from "./ModelsContext";
6 |
7 | export default class ModelsList extends Component {
8 | render() {
9 | return (
10 |
11 | Available Models
12 |
13 |
14 | {models =>
15 | models.map(model => (
16 |
17 |
24 | {model.name}
25 | {model.target.join(", ")}
26 |
27 |
28 | ))
29 | }
30 |
31 |
32 |
33 | );
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/ui/App/ModelsContext.js:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | const ModelsContext = React.createContext({});
4 |
5 | class ModelsProvider extends React.Component {
6 | state = { models: [], target: [] };
7 |
8 | async componentDidMount() {
9 | const response = await fetch("/api/v1/models", { method: "GET" });
10 | const jsonData = await response.text();
11 | const data = JSON.parse(jsonData);
12 | this.setState({ models: data });
13 | }
14 |
15 | render() {
16 | return (
17 |
18 | {this.props.children}
19 |
20 | );
21 | }
22 | }
23 |
24 | const ModelsConsumer = ModelsContext.Consumer;
25 | export { ModelsConsumer, ModelsProvider };
26 |
--------------------------------------------------------------------------------
/ui/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
9 | MLServe - easy model deployment
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/ui/index.js:
--------------------------------------------------------------------------------
1 | import "babel-polyfill";
2 | import ReactDOM from "react-dom";
3 | import React from "react";
4 | import { BrowserRouter, Route, Switch } from "react-router-dom";
5 |
6 | import "bootstrap/dist/css/bootstrap.min.css";
7 |
8 | import AppHeader from "./App/AppHeader";
9 | import Home from "./App/Home";
10 | import Model from "./App/Model";
11 | import { ModelsProvider } from "./App/ModelsContext";
12 |
13 | const App = () => (
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | );
22 |
23 | ReactDOM.render(
24 |
25 |
26 | ,
27 | document.getElementById("app")
28 | );
29 |
--------------------------------------------------------------------------------