├── Data_Science_With_Covid.ipynb
├── Excel Template_Ten Week Demo Schedule.xlsx
├── Ludwig.ipynb
├── README.md
├── chap05
│   ├── Makefile
│   ├── config.yaml
│   ├── requirements.txt
│   └── reuters-allcats.csv
├── chapter10
│   ├── .gitignore
│   ├── azure
│   │   ├── aci.py
│   │   └── scoring.py
│   ├── coreml
│   │   ├── Dockerfile
│   │   ├── README.md
│   │   ├── cli.py
│   │   └── requirements.txt
│   ├── onnx-checker.py
│   ├── tf
│   │   └── requirements.txt
│   └── torch
│       ├── README.md
│       ├── check.py
│       ├── convert.py
│       └── requirements.txt
├── chapter11
│   ├── carriage.csv
│   ├── linter-modularized
│   │   ├── .gitignore
│   │   ├── csv_linter
│   │   │   ├── __init__.py
│   │   │   ├── checks.py
│   │   │   └── main.py
│   │   ├── requirements.txt
│   │   └── setup.py
│   ├── linter
│   │   ├── .gitignore
│   │   ├── carriage.csv
│   │   ├── csv_linter.py
│   │   ├── requirements.txt
│   │   └── setup.py
│   ├── serverless
│   │   ├── cli
│   │   │   ├── setup.py
│   │   │   └── trigger.py
│   │   ├── foo.py
│   │   └── trigger.py
│   └── wine-ratings.csv
├── chapter2
│   ├── add.py
│   └── hello.sh
├── chapter3
│   ├── Dockerfile
│   ├── README.md
│   └── images
│       └── common-fly.jpg
├── chapter4
│   └── packaging-containers
│       ├── .gitignore
│       ├── Dockerfile
│       ├── requirements.txt
│       └── webapp
│           ├── app.py
│           └── minimal.py
├── chapter6
│   ├── multiple-loggers
│   │   └── http-app.py
│   ├── sagemaker
│   │   ├── capture.py
│   │   └── invoke.py
│   └── simple-logging
│       ├── describe.py
│       └── logging_describe.py
└── chapter7
    ├── batch.py
    ├── dyno.py
    └── nba_2017_players_with_salary_wiki_twitter.csv
/Excel Template_Ten Week Demo Schedule.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paiml/practical-mlops-book/5517dc375d23a511e2bf850f700c12a57a85fddd/Excel Template_Ten Week Demo Schedule.xlsx
--------------------------------------------------------------------------------
/Ludwig.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Ludwig.ipynb",
7 | "provenance": [],
8 | "authorship_tag": "ABX9TyOMzzeXx6IrOVDQ3KRcsRYk",
9 | "include_colab_link": true
10 | },
11 | "kernelspec": {
12 | "name": "python3",
13 | "display_name": "Python 3"
14 | }
15 | },
16 | "cells": [
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {
20 | "id": "view-in-github",
21 | "colab_type": "text"
22 | },
23 | "source": [
24 | "
"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {
30 | "id": "ZCkVd-evwGdR"
31 | },
32 | "source": [
33 | "# Ludwig AutoML with Text Categorization"
34 | ]
35 | },
36 | {
37 | "cell_type": "markdown",
38 | "metadata": {
39 | "id": "5szU_JcSwu_A"
40 | },
41 | "source": [
42 | "Uses data set from here: [http://boston.lti.cs.cmu.edu/classes/95-865-K/HW/HW2/](http://boston.lti.cs.cmu.edu/classes/95-865-K/HW/HW2/)"
43 | ]
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "metadata": {
48 | "id": "ubMkNKHUv41s"
49 | },
50 | "source": [
51 | "## Ingest"
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "metadata": {
57 | "id": "z5OztwvJ5ds_",
58 | "outputId": "c3a48e8c-af43-46ce-fbd3-fb22114b56ab",
59 | "colab": {
60 | "base_uri": "https://localhost:8080/"
61 | }
62 | },
63 | "source": [
64 | "!pip install -q ludwig"
65 | ],
66 | "execution_count": 1,
67 | "outputs": [
68 | {
69 | "output_type": "stream",
70 | "text": [
71 | "\u001b[K |████████████████████████████████| 235kB 5.7MB/s \n",
72 | "\u001b[K |████████████████████████████████| 9.5MB 7.1MB/s \n",
73 | "\u001b[K |████████████████████████████████| 706kB 29.2MB/s \n",
74 | "\u001b[?25h Building wheel for ludwig (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
75 | ],
76 | "name": "stdout"
77 | }
78 | ]
79 | },
80 | {
81 | "cell_type": "code",
82 | "metadata": {
83 | "colab": {
84 | "base_uri": "https://localhost:8080/"
85 | },
86 | "id": "25UIDqQbv2Xb",
87 | "outputId": "9e8000ab-c16a-4c89-e2b2-f214edec8436"
88 | },
89 | "source": [
90 | "!wget https://raw.githubusercontent.com/paiml/practical-mlops-book/main/chap05/config.yaml\n",
91 | "!wget https://raw.githubusercontent.com/paiml/practical-mlops-book/main/chap05/reuters-allcats.csv\n"
92 | ],
93 | "execution_count": null,
94 | "outputs": [
95 | {
96 | "output_type": "stream",
97 | "text": [
98 | "--2021-01-25 14:35:26-- https://raw.githubusercontent.com/paiml/practical-mlops-book/main/chap05/config.yaml\n",
99 | "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n",
100 | "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n",
101 | "HTTP request sent, awaiting response... 200 OK\n",
102 | "Length: 176 [text/plain]\n",
103 | "Saving to: ‘config.yaml’\n",
104 | "\n",
105 | "\rconfig.yaml 0%[ ] 0 --.-KB/s \rconfig.yaml 100%[===================>] 176 --.-KB/s in 0s \n",
106 | "\n",
107 | "2021-01-25 14:35:27 (7.99 MB/s) - ‘config.yaml’ saved [176/176]\n",
108 | "\n",
109 | "--2021-01-25 14:35:27-- https://raw.githubusercontent.com/paiml/practical-mlops-book/main/chap05/reuters-allcats.csv\n",
110 | "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 151.101.0.133, 151.101.64.133, 151.101.128.133, ...\n",
111 | "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|151.101.0.133|:443... connected.\n",
112 | "HTTP request sent, awaiting response... 200 OK\n",
113 | "Length: 3381488 (3.2M) [text/plain]\n",
114 | "Saving to: ‘reuters-allcats.csv’\n",
115 | "\n",
116 | "reuters-allcats.csv 100%[===================>] 3.22M --.-KB/s in 0.08s \n",
117 | "\n",
118 | "2021-01-25 14:35:27 (38.0 MB/s) - ‘reuters-allcats.csv’ saved [3381488/3381488]\n",
119 | "\n"
120 | ],
121 | "name": "stdout"
122 | }
123 | ]
124 | },
125 | {
126 | "cell_type": "markdown",
127 | "metadata": {
128 | "id": "iVs8bHF35Bn1"
129 | },
130 | "source": [
131 | "## Model"
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "metadata": {
137 | "colab": {
138 | "base_uri": "https://localhost:8080/"
139 | },
140 | "id": "4haCp4x55DBP",
141 | "outputId": "c80f760c-2f80-48c0-c04d-fdf95b5f05c8"
142 | },
143 | "source": [
144 | "!ludwig experiment \\\n",
145 | " --dataset reuters-allcats.csv \\\n",
146 | " --config_file config.yaml"
147 | ],
148 | "execution_count": null,
149 | "outputs": [
150 | {
151 | "output_type": "stream",
152 | "text": [
153 | "2021-01-25 14:35:35.946551: I tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library libcudart.so.10.1\n",
154 | "/usr/local/lib/python3.6/dist-packages/typeguard/__init__.py:804: UserWarning: no type annotations present -- not typechecking tensorflow_addons.layers.max_unpooling_2d.MaxUnpooling2D.__init__\n",
155 | " warn('no type annotations present -- not typechecking {}'.format(function_name(func)))\n",
156 | "███████████████████████\n",
157 | "█ █ █ █ ▜█ █ █ █ █ █\n",
158 | "█ █ █ █ █ █ █ █ █ █ ███\n",
159 | "█ █ █ █ █ █ █ █ █ ▌ █\n",
160 | "█ █████ █ █ █ █ █ █ █ █\n",
161 | "█ █ ▟█ █ █ █\n",
162 | "███████████████████████\n",
163 | "ludwig v0.3.2 - Experiment\n",
164 | "\n",
165 | "2021-01-25 14:35:37.626504: I tensorflow/compiler/jit/xla_cpu_device.cc:41] Not creating XLA devices, tf_xla_enable_xla_devices not set\n",
166 | "2021-01-25 14:35:37.652352: I tensorflow/stream_executor/platform/default/dso_loader.cc:49] Successfully opened dynamic library libcuda.so.1\n",
167 | "2021-01-25 14:35:37.713898: E tensorflow/stream_executor/cuda/cuda_driver.cc:328] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected\n",
168 | "2021-01-25 14:35:37.713957: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (8cdcad013cef): /proc/driver/nvidia/version does not exist\n",
169 | "Experiment name: experiment\n",
170 | "Model name: run\n",
171 | "Output directory: results/experiment_run\n",
172 | "\n",
173 | "\n",
174 | "ludwig_version: '0.3.2'\n",
175 | "command: ('/usr/local/bin/ludwig experiment --dataset reuters-allcats.csv --config_file '\n",
176 | " 'config.yaml')\n",
177 | "random_seed: 42\n",
178 | "dataset: 'reuters-allcats.csv'\n",
179 | "data_format: 'csv'\n",
180 | "config: { 'combiner': {'type': 'concat'},\n",
181 | " 'input_features': [ { 'column': 'text',\n",
182 | " 'encoder': 'parallel_cnn',\n",
183 | " 'level': 'word',\n",
184 | " 'name': 'text',\n",
185 | " 'proc_column': 'text_mZFLky',\n",
186 | " 'tied': None,\n",
187 | " 'type': 'text'}],\n",
188 | " 'output_features': [ { 'column': 'class',\n",
189 | " 'dependencies': [],\n",
190 | " 'loss': { 'class_similarities_temperature': 0,\n",
191 | " 'class_weights': 1,\n",
192 | " 'confidence_penalty': 0,\n",
193 | " 'labels_smoothing': 0,\n",
194 | " 'robust_lambda': 0,\n",
195 | " 'type': 'softmax_cross_entropy',\n",
196 | " 'weight': 1},\n",
197 | " 'name': 'class',\n",
198 | " 'proc_column': 'class_mZFLky',\n",
199 | " 'reduce_dependencies': 'sum',\n",
200 | " 'reduce_input': 'sum',\n",
201 | " 'top_k': 3,\n",
202 | " 'type': 'category'}],\n",
203 | " 'preprocessing': { 'audio': { 'audio_feature': {'type': 'raw'},\n",
204 | " 'audio_file_length_limit_in_s': 7.5,\n",
205 | " 'in_memory': True,\n",
206 | " 'missing_value_strategy': 'backfill',\n",
207 | " 'norm': None,\n",
208 | " 'padding_value': 0},\n",
209 | " 'bag': { 'fill_value': '',\n",
210 | " 'lowercase': False,\n",
211 | " 'missing_value_strategy': 'fill_with_const',\n",
212 | " 'most_common': 10000,\n",
213 | " 'tokenizer': 'space'},\n",
214 | " 'binary': { 'fill_value': 0,\n",
215 | " 'missing_value_strategy': 'fill_with_const'},\n",
216 | " 'category': { 'fill_value': '',\n",
217 | " 'lowercase': False,\n",
218 | " 'missing_value_strategy': 'fill_with_const',\n",
219 | " 'most_common': 10000},\n",
220 | " 'date': { 'datetime_format': None,\n",
221 | " 'fill_value': '',\n",
222 | " 'missing_value_strategy': 'fill_with_const'},\n",
223 | " 'force_split': False,\n",
224 | " 'h3': { 'fill_value': 576495936675512319,\n",
225 | " 'missing_value_strategy': 'fill_with_const'},\n",
226 | " 'image': { 'in_memory': True,\n",
227 | " 'missing_value_strategy': 'backfill',\n",
228 | " 'num_processes': 1,\n",
229 | " 'resize_method': 'interpolate',\n",
230 | " 'scaling': 'pixel_normalization'},\n",
231 | " 'numerical': { 'fill_value': 0,\n",
232 | " 'missing_value_strategy': 'fill_with_const',\n",
233 | " 'normalization': None},\n",
234 | " 'sequence': { 'fill_value': '',\n",
235 | " 'lowercase': False,\n",
236 | " 'missing_value_strategy': 'fill_with_const',\n",
237 | " 'most_common': 20000,\n",
238 | " 'padding': 'right',\n",
239 | " 'padding_symbol': '',\n",
240 | " 'sequence_length_limit': 256,\n",
241 | " 'tokenizer': 'space',\n",
242 | " 'unknown_symbol': '',\n",
243 | " 'vocab_file': None},\n",
244 | " 'set': { 'fill_value': '',\n",
245 | " 'lowercase': False,\n",
246 | " 'missing_value_strategy': 'fill_with_const',\n",
247 | " 'most_common': 10000,\n",
248 | " 'tokenizer': 'space'},\n",
249 | " 'split_probabilities': (0.7, 0.1, 0.2),\n",
250 | " 'stratify': None,\n",
251 | " 'text': { 'char_most_common': 70,\n",
252 | " 'char_sequence_length_limit': 1024,\n",
253 | " 'char_tokenizer': 'characters',\n",
254 | " 'char_vocab_file': None,\n",
255 | " 'fill_value': '',\n",
256 | " 'lowercase': True,\n",
257 | " 'missing_value_strategy': 'fill_with_const',\n",
258 | " 'padding': 'right',\n",
259 | " 'padding_symbol': '',\n",
260 | " 'pretrained_model_name_or_path': None,\n",
261 | " 'unknown_symbol': '',\n",
262 | " 'word_most_common': 20000,\n",
263 | " 'word_sequence_length_limit': 256,\n",
264 | " 'word_tokenizer': 'space_punct',\n",
265 | " 'word_vocab_file': None},\n",
266 | " 'timeseries': { 'fill_value': '',\n",
267 | " 'missing_value_strategy': 'fill_with_const',\n",
268 | " 'padding': 'right',\n",
269 | " 'padding_value': 0,\n",
270 | " 'timeseries_length_limit': 256,\n",
271 | " 'tokenizer': 'space'},\n",
272 | " 'vector': { 'fill_value': '',\n",
273 | " 'missing_value_strategy': 'fill_with_const'}},\n",
274 | " 'training': { 'batch_size': 128,\n",
275 | " 'bucketing_field': None,\n",
276 | " 'decay': False,\n",
277 | " 'decay_rate': 0.96,\n",
278 | " 'decay_steps': 10000,\n",
279 | " 'early_stop': 5,\n",
280 | " 'epochs': 100,\n",
281 | " 'eval_batch_size': 0,\n",
282 | " 'gradient_clipping': None,\n",
283 | " 'increase_batch_size_on_plateau': 0,\n",
284 | " 'increase_batch_size_on_plateau_max': 512,\n",
285 | " 'increase_batch_size_on_plateau_patience': 5,\n",
286 | " 'increase_batch_size_on_plateau_rate': 2,\n",
287 | " 'learning_rate': 0.001,\n",
288 | " 'learning_rate_warmup_epochs': 1,\n",
289 | " 'optimizer': { 'beta_1': 0.9,\n",
290 | " 'beta_2': 0.999,\n",
291 | " 'epsilon': 1e-08,\n",
292 | " 'type': 'adam'},\n",
293 | " 'reduce_learning_rate_on_plateau': 0,\n",
294 | " 'reduce_learning_rate_on_plateau_patience': 5,\n",
295 | " 'reduce_learning_rate_on_plateau_rate': 0.5,\n",
296 | " 'regularization_lambda': 0,\n",
297 | " 'regularizer': 'l2',\n",
298 | " 'staircase': False,\n",
299 | " 'validation_field': 'combined',\n",
300 | " 'validation_metric': 'loss'}}\n",
301 | "tf_version: '2.4.0'\n",
302 | "\n",
303 | "\n",
304 | "Using full raw csv, no hdf5 and json file with the same name have been found\n",
305 | "Building dataset (it may take a while)\n",
306 | "Writing preprocessed dataset cache\n",
307 | "Writing train set metadata\n",
308 | "Training set: 2868\n",
309 | "Validation set: 389\n",
310 | "Test set: 822\n",
311 | "2021-01-25 14:35:39.932229: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX512F\n",
312 | "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
313 | "2021-01-25 14:35:39.932413: I tensorflow/compiler/jit/xla_gpu_device.cc:99] Not creating XLA devices, tf_xla_enable_xla_devices not set\n",
314 | "\n",
315 | "╒══════════╕\n",
316 | "│ TRAINING │\n",
317 | "╘══════════╛\n",
318 | "\n",
319 | "\n",
320 | "Epoch 1\n",
321 | "Training: 0% 0/23 [00:00, ?it/s]2021-01-25 14:35:42.964572: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:116] None of the MLIR optimization passes are enabled (registered 2)\n",
322 | "2021-01-25 14:35:42.980281: I tensorflow/core/platform/profile_utils/cpu_utils.cc:112] CPU Frequency: 1999995000 Hz\n",
323 | "Training: 100% 23/23 [01:01<00:00, 2.67s/it]\n",
324 | "Evaluation train: 100% 23/23 [00:14<00:00, 1.60it/s]\n",
325 | "Evaluation vali : 100% 4/4 [00:02<00:00, 1.96it/s]\n",
326 | "Evaluation test : 100% 7/7 [00:04<00:00, 1.72it/s]\n",
327 | "Took 1m 21.9641s\n",
328 | "╒═════════╤════════╤════════════╤═════════════╕\n",
329 | "│ class │ loss │ accuracy │ hits_at_k │\n",
330 | "╞═════════╪════════╪════════════╪═════════════╡\n",
331 | "│ train │ 0.9258 │ 0.7148 │ 0.9826 │\n",
332 | "├─────────┼────────┼────────────┼─────────────┤\n",
333 | "│ vali │ 0.9134 │ 0.6992 │ 0.9692 │\n",
334 | "├─────────┼────────┼────────────┼─────────────┤\n",
335 | "│ test │ 0.9420 │ 0.7311 │ 0.9781 │\n",
336 | "╘═════════╧════════╧════════════╧═════════════╛\n",
337 | "╒════════════╤════════╕\n",
338 | "│ combined │ loss │\n",
339 | "╞════════════╪════════╡\n",
340 | "│ train │ 0.9376 │\n",
341 | "├────────────┼────────┤\n",
342 | "│ vali │ 0.9353 │\n",
343 | "├────────────┼────────┤\n",
344 | "│ test │ 1.0277 │\n",
345 | "╘════════════╧════════╛\n",
346 | "Validation loss on combined improved, model saved\n",
347 | "\n",
348 | "\n",
349 | "Epoch 2\n",
350 | "Training: 100% 23/23 [00:57<00:00, 2.51s/it]\n",
351 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.72it/s]\n",
352 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.19it/s]\n",
353 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.83it/s]\n",
354 | "Took 1m 16.7901s\n",
355 | "╒═════════╤════════╤════════════╤═════════════╕\n",
356 | "│ class │ loss │ accuracy │ hits_at_k │\n",
357 | "╞═════════╪════════╪════════════╪═════════════╡\n",
358 | "│ train │ 0.2788 │ 0.9355 │ 0.9857 │\n",
359 | "├─────────┼────────┼────────────┼─────────────┤\n",
360 | "│ vali │ 0.3220 │ 0.8869 │ 0.9846 │\n",
361 | "├─────────┼────────┼────────────┼─────────────┤\n",
362 | "│ test │ 0.3516 │ 0.8966 │ 0.9830 │\n",
363 | "╘═════════╧════════╧════════════╧═════════════╛\n",
364 | "╒════════════╤════════╕\n",
365 | "│ combined │ loss │\n",
366 | "╞════════════╪════════╡\n",
367 | "│ train │ 0.2821 │\n",
368 | "├────────────┼────────┤\n",
369 | "│ vali │ 0.3514 │\n",
370 | "├────────────┼────────┤\n",
371 | "│ test │ 0.4247 │\n",
372 | "╘════════════╧════════╛\n",
373 | "Validation loss on combined improved, model saved\n",
374 | "\n",
375 | "\n",
376 | "Epoch 3\n",
377 | "Training: 100% 23/23 [00:57<00:00, 2.50s/it]\n",
378 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.72it/s]\n",
379 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.21it/s]\n",
380 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.83it/s]\n",
381 | "Took 1m 16.5517s\n",
382 | "╒═════════╤════════╤════════════╤═════════════╕\n",
383 | "│ class │ loss │ accuracy │ hits_at_k │\n",
384 | "╞═════════╪════════╪════════════╪═════════════╡\n",
385 | "│ train │ 0.0985 │ 0.9749 │ 0.9857 │\n",
386 | "├─────────┼────────┼────────────┼─────────────┤\n",
387 | "│ vali │ 0.1960 │ 0.9306 │ 0.9846 │\n",
388 | "├─────────┼────────┼────────────┼─────────────┤\n",
389 | "│ test │ 0.2298 │ 0.9355 │ 0.9854 │\n",
390 | "╘═════════╧════════╧════════════╧═════════════╛\n",
391 | "╒════════════╤════════╕\n",
392 | "│ combined │ loss │\n",
393 | "╞════════════╪════════╡\n",
394 | "│ train │ 0.1016 │\n",
395 | "├────────────┼────────┤\n",
396 | "│ vali │ 0.2253 │\n",
397 | "├────────────┼────────┤\n",
398 | "│ test │ 0.2648 │\n",
399 | "╘════════════╧════════╛\n",
400 | "Validation loss on combined improved, model saved\n",
401 | "\n",
402 | "\n",
403 | "Epoch 4\n",
404 | "Training: 100% 23/23 [00:57<00:00, 2.50s/it]\n",
405 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.72it/s]\n",
406 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.21it/s]\n",
407 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.83it/s]\n",
408 | "Took 1m 16.5802s\n",
409 | "╒═════════╤════════╤════════════╤═════════════╕\n",
410 | "│ class │ loss │ accuracy │ hits_at_k │\n",
411 | "╞═════════╪════════╪════════════╪═════════════╡\n",
412 | "│ train │ 0.0598 │ 0.9812 │ 0.9941 │\n",
413 | "├─────────┼────────┼────────────┼─────────────┤\n",
414 | "│ vali │ 0.2486 │ 0.9203 │ 0.9717 │\n",
415 | "├─────────┼────────┼────────────┼─────────────┤\n",
416 | "│ test │ 0.2761 │ 0.9246 │ 0.9842 │\n",
417 | "╘═════════╧════════╧════════════╧═════════════╛\n",
418 | "╒════════════╤════════╕\n",
419 | "│ combined │ loss │\n",
420 | "╞════════════╪════════╡\n",
421 | "│ train │ 0.0658 │\n",
422 | "├────────────┼────────┤\n",
423 | "│ vali │ 0.3045 │\n",
424 | "├────────────┼────────┤\n",
425 | "│ test │ 0.2965 │\n",
426 | "╘════════════╧════════╛\n",
427 | "Last improvement of combined validation loss happened 1 epoch ago\n",
428 | "\n",
429 | "\n",
430 | "Epoch 5\n",
431 | "Training: 100% 23/23 [00:57<00:00, 2.50s/it]\n",
432 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.71it/s]\n",
433 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.20it/s]\n",
434 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.81it/s]\n",
435 | "Took 1m 16.6541s\n",
436 | "╒═════════╤════════╤════════════╤═════════════╕\n",
437 | "│ class │ loss │ accuracy │ hits_at_k │\n",
438 | "╞═════════╪════════╪════════════╪═════════════╡\n",
439 | "│ train │ 0.0425 │ 0.9847 │ 0.9944 │\n",
440 | "├─────────┼────────┼────────────┼─────────────┤\n",
441 | "│ vali │ 0.2227 │ 0.9306 │ 0.9743 │\n",
442 | "├─────────┼────────┼────────────┼─────────────┤\n",
443 | "│ test │ 0.2465 │ 0.9416 │ 0.9805 │\n",
444 | "╘═════════╧════════╧════════════╧═════════════╛\n",
445 | "╒════════════╤════════╕\n",
446 | "│ combined │ loss │\n",
447 | "╞════════════╪════════╡\n",
448 | "│ train │ 0.0463 │\n",
449 | "├────────────┼────────┤\n",
450 | "│ vali │ 0.2778 │\n",
451 | "├────────────┼────────┤\n",
452 | "│ test │ 0.2499 │\n",
453 | "╘════════════╧════════╛\n",
454 | "Last improvement of combined validation loss happened 2 epochs ago\n",
455 | "\n",
456 | "\n",
457 | "Epoch 6\n",
458 | "Training: 100% 23/23 [00:57<00:00, 2.50s/it]\n",
459 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.71it/s]\n",
460 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.21it/s]\n",
461 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.81it/s]\n",
462 | "Took 1m 16.7602s\n",
463 | "╒═════════╤════════╤════════════╤═════════════╕\n",
464 | "│ class │ loss │ accuracy │ hits_at_k │\n",
465 | "╞═════════╪════════╪════════════╪═════════════╡\n",
466 | "│ train │ 0.0288 │ 0.9906 │ 0.9965 │\n",
467 | "├─────────┼────────┼────────────┼─────────────┤\n",
468 | "│ vali │ 0.2748 │ 0.9100 │ 0.9614 │\n",
469 | "├─────────┼────────┼────────────┼─────────────┤\n",
470 | "│ test │ 0.2827 │ 0.9282 │ 0.9659 │\n",
471 | "╘═════════╧════════╧════════════╧═════════════╛\n",
472 | "╒════════════╤════════╕\n",
473 | "│ combined │ loss │\n",
474 | "╞════════════╪════════╡\n",
475 | "│ train │ 0.0323 │\n",
476 | "├────────────┼────────┤\n",
477 | "│ vali │ 0.3572 │\n",
478 | "├────────────┼────────┤\n",
479 | "│ test │ 0.2811 │\n",
480 | "╘════════════╧════════╛\n",
481 | "Last improvement of combined validation loss happened 3 epochs ago\n",
482 | "\n",
483 | "\n",
484 | "Epoch 7\n",
485 | "Training: 100% 23/23 [00:57<00:00, 2.51s/it]\n",
486 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.70it/s]\n",
487 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.19it/s]\n",
488 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.82it/s]\n",
489 | "Took 1m 16.8417s\n",
490 | "╒═════════╤════════╤════════════╤═════════════╕\n",
491 | "│ class │ loss │ accuracy │ hits_at_k │\n",
492 | "╞═════════╪════════╪════════════╪═════════════╡\n",
493 | "│ train │ 0.0248 │ 0.9878 │ 0.9972 │\n",
494 | "├─────────┼────────┼────────────┼─────────────┤\n",
495 | "│ vali │ 0.2584 │ 0.9280 │ 0.9666 │\n",
496 | "├─────────┼────────┼────────────┼─────────────┤\n",
497 | "│ test │ 0.2730 │ 0.9270 │ 0.9732 │\n",
498 | "╘═════════╧════════╧════════════╧═════════════╛\n",
499 | "╒════════════╤════════╕\n",
500 | "│ combined │ loss │\n",
501 | "╞════════════╪════════╡\n",
502 | "│ train │ 0.0281 │\n",
503 | "├────────────┼────────┤\n",
504 | "│ vali │ 0.3373 │\n",
505 | "├────────────┼────────┤\n",
506 | "│ test │ 0.2725 │\n",
507 | "╘════════════╧════════╛\n",
508 | "Last improvement of combined validation loss happened 4 epochs ago\n",
509 | "\n",
510 | "\n",
511 | "Epoch 8\n",
512 | "Training: 100% 23/23 [00:57<00:00, 2.50s/it]\n",
513 | "Evaluation train: 100% 23/23 [00:13<00:00, 1.72it/s]\n",
514 | "Evaluation vali : 100% 4/4 [00:01<00:00, 2.19it/s]\n",
515 | "Evaluation test : 100% 7/7 [00:03<00:00, 1.83it/s]\n",
516 | "Took 1m 16.5654s\n",
517 | "╒═════════╤════════╤════════════╤═════════════╕\n",
518 | "│ class │ loss │ accuracy │ hits_at_k │\n",
519 | "╞═════════╪════════╪════════════╪═════════════╡\n",
520 | "│ train │ 0.0227 │ 0.9927 │ 0.9972 │\n",
521 | "├─────────┼────────┼────────────┼─────────────┤\n",
522 | "│ vali │ 0.2478 │ 0.9357 │ 0.9666 │\n",
523 | "├─────────┼────────┼────────────┼─────────────┤\n",
524 | "│ test │ 0.2783 │ 0.9319 │ 0.9659 │\n",
525 | "╘═════════╧════════╧════════════╧═════════════╛\n",
526 | "╒════════════╤════════╕\n",
527 | "│ combined │ loss │\n",
528 | "╞════════════╪════════╡\n",
529 | "│ train │ 0.0266 │\n",
530 | "├────────────┼────────┤\n",
531 | "│ vali │ 0.3569 │\n",
532 | "├────────────┼────────┤\n",
533 | "│ test │ 0.2758 │\n",
534 | "╘════════════╧════════╛\n",
535 | "Last improvement of combined validation loss happened 5 epochs ago\n",
536 | "\n",
537 | "EARLY STOPPING due to lack of validation improvement, it has been 5 epochs since last validation improvement\n",
538 | "\n",
539 | "Best validation model epoch: 3\n",
540 | "Best validation model loss on validation set combined: 0.22534877061843872\n",
541 | "Best validation model loss on test set combined: 0.26480552554130554\n",
542 | "\n",
543 | "Finished: experiment_run\n",
544 | "Saved to: results/experiment_run\n",
545 | "Evaluation: 100% 7/7 [00:03<00:00, 1.82it/s]\n",
546 | "/usr/local/lib/python3.6/dist-packages/sklearn/metrics/_classification.py:1272: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
547 | " _warn_prf(average, modifier, msg_start, len(result))\n",
548 | "\n",
549 | "===== class =====\n",
550 | "accuracy: 0.9355231\n",
551 | "hits_at_k: 0.98540145\n",
552 | "loss: 0.22983195\n",
553 | "overall_stats: { 'avg_f1_score_macro': 0.4707728205334513,\n",
554 | " 'avg_f1_score_micro': 0.9355231143552312,\n",
555 | " 'avg_f1_score_weighted': 0.9252548584390955,\n",
556 | " 'avg_precision_macro': 0.46514839380709594,\n",
557 | " 'avg_precision_micro': 0.9355231143552312,\n",
558 | " 'avg_precision_weighted': 0.9355231143552312,\n",
559 | " 'avg_recall_macro': 0.47655637160159775,\n",
560 | " 'avg_recall_micro': 0.9355231143552312,\n",
561 | " 'avg_recall_weighted': 0.9355231143552312,\n",
562 | " 'kappa_score': 0.897569852629995,\n",
563 | " 'token_accuracy': 0.9355231143552312}\n",
564 | "per_class_stats: {: { 'accuracy': 1.0,\n",
565 | " 'f1_score': 0,\n",
566 | " 'fall_out': 0.0,\n",
567 | " 'false_discovery_rate': 1.0,\n",
568 | " 'false_negative_rate': 1.0,\n",
569 | " 'false_negatives': 0,\n",
570 | " 'false_omission_rate': 0.0,\n",
571 | " 'false_positive_rate': 0.0,\n",
572 | " 'false_positives': 0,\n",
573 | " 'hit_rate': 0,\n",
574 | " 'informedness': 0.0,\n",
575 | " 'markedness': 0.0,\n",
576 | " 'matthews_correlation_coefficient': 0,\n",
577 | " 'miss_rate': 1.0,\n",
578 | " 'negative_predictive_value': 1.0,\n",
579 | " 'positive_predictive_value': 0,\n",
580 | " 'precision': 0,\n",
581 | " 'recall': 0,\n",
582 | " 'sensitivity': 0,\n",
583 | " 'specificity': 1.0,\n",
584 | " 'true_negative_rate': 1.0,\n",
585 | " 'true_negatives': 822,\n",
586 | " 'true_positive_rate': 0,\n",
587 | " 'true_positives': 0},\n",
588 | " Neg-: { 'accuracy': 0.9440389294403893,\n",
589 | " 'f1_score': 0.9427860696517413,\n",
590 | " 'fall_out': 0.06367924528301883,\n",
591 | " 'false_discovery_rate': 0.06650246305418717,\n",
592 | " 'false_negative_rate': 0.047738693467336724,\n",
593 | " 'false_negatives': 19,\n",
594 | " 'false_omission_rate': 0.04567307692307687,\n",
595 | " 'false_positive_rate': 0.06367924528301883,\n",
596 | " 'false_positives': 27,\n",
597 | " 'hit_rate': 0.9522613065326633,\n",
598 | " 'informedness': 0.8885820612496445,\n",
599 | " 'markedness': 0.8878244600227361,\n",
600 | " 'matthews_correlation_coefficient': 0.8882031798608105,\n",
601 | " 'miss_rate': 0.047738693467336724,\n",
602 | " 'negative_predictive_value': 0.9543269230769231,\n",
603 | " 'positive_predictive_value': 0.9334975369458128,\n",
604 | " 'precision': 0.9334975369458128,\n",
605 | " 'recall': 0.9522613065326633,\n",
606 | " 'sensitivity': 0.9522613065326633,\n",
607 | " 'specificity': 0.9363207547169812,\n",
608 | " 'true_negative_rate': 0.9363207547169812,\n",
609 | " 'true_negatives': 397,\n",
610 | " 'true_positive_rate': 0.9522613065326633,\n",
611 | " 'true_positives': 379},\n",
612 | " Pos-earn: { 'accuracy': 0.9854014598540146,\n",
613 | " 'f1_score': 0.976190476190476,\n",
614 | " 'fall_out': 0.013986013986013957,\n",
615 | " 'false_discovery_rate': 0.03149606299212604,\n",
616 | " 'false_negative_rate': 0.016000000000000014,\n",
617 | " 'false_negatives': 4,\n",
618 | " 'false_omission_rate': 0.007042253521126751,\n",
619 | " 'false_positive_rate': 0.013986013986013957,\n",
620 | " 'false_positives': 8,\n",
621 | " 'hit_rate': 0.984,\n",
622 | " 'informedness': 0.970013986013986,\n",
623 | " 'markedness': 0.9614616834867471,\n",
624 | " 'matthews_correlation_coefficient': 0.9657283676058692,\n",
625 | " 'miss_rate': 0.016000000000000014,\n",
626 | " 'negative_predictive_value': 0.9929577464788732,\n",
627 | " 'positive_predictive_value': 0.968503937007874,\n",
628 | " 'precision': 0.968503937007874,\n",
629 | " 'recall': 0.984,\n",
630 | " 'sensitivity': 0.984,\n",
631 | " 'specificity': 0.986013986013986,\n",
632 | " 'true_negative_rate': 0.986013986013986,\n",
633 | " 'true_negatives': 564,\n",
634 | " 'true_positive_rate': 0.984,\n",
635 | " 'true_positives': 246},\n",
636 | " Pos-acq: { 'accuracy': 0.9635036496350365,\n",
637 | " 'f1_score': 0.9056603773584906,\n",
638 | " 'fall_out': 0.027027027027026973,\n",
639 | " 'false_discovery_rate': 0.11111111111111116,\n",
640 | " 'false_negative_rate': 0.07692307692307687,\n",
641 | " 'false_negatives': 12,\n",
642 | " 'false_omission_rate': 0.018181818181818188,\n",
643 | " 'false_positive_rate': 0.027027027027026973,\n",
644 | " 'false_positives': 18,\n",
645 | " 'hit_rate': 0.9230769230769231,\n",
646 | " 'informedness': 0.8960498960498962,\n",
647 | " 'markedness': 0.8707070707070708,\n",
648 | " 'matthews_correlation_coefficient': 0.8832875976696266,\n",
649 | " 'miss_rate': 0.07692307692307687,\n",
650 | " 'negative_predictive_value': 0.9818181818181818,\n",
651 | " 'positive_predictive_value': 0.8888888888888888,\n",
652 | " 'precision': 0.8888888888888888,\n",
653 | " 'recall': 0.9230769230769231,\n",
654 | " 'sensitivity': 0.9230769230769231,\n",
655 | " 'specificity': 0.972972972972973,\n",
656 | " 'true_negative_rate': 0.972972972972973,\n",
657 | " 'true_negatives': 648,\n",
658 | " 'true_positive_rate': 0.9230769230769231,\n",
659 | " 'true_positives': 144},\n",
660 | " Pos-coffee: { 'accuracy': 0.9841849148418491,\n",
661 | " 'f1_score': 0,\n",
662 | " 'fall_out': 0.0,\n",
663 | " 'false_discovery_rate': 1.0,\n",
664 | " 'false_negative_rate': 1.0,\n",
665 | " 'false_negatives': 13,\n",
666 | " 'false_omission_rate': 0.015815085158150888,\n",
667 | " 'false_positive_rate': 0.0,\n",
668 | " 'false_positives': 0,\n",
669 | " 'hit_rate': 0.0,\n",
670 | " 'informedness': 0.0,\n",
671 | " 'markedness': -0.015815085158150888,\n",
672 | " 'matthews_correlation_coefficient': 0,\n",
673 | " 'miss_rate': 1.0,\n",
674 | " 'negative_predictive_value': 0.9841849148418491,\n",
675 | " 'positive_predictive_value': 0,\n",
676 | " 'precision': 0,\n",
677 | " 'recall': 0.0,\n",
678 | " 'sensitivity': 0.0,\n",
679 | " 'specificity': 1.0,\n",
680 | " 'true_negative_rate': 1.0,\n",
681 | " 'true_negatives': 809,\n",
682 | " 'true_positive_rate': 0.0,\n",
683 | " 'true_positives': 0},\n",
684 | " Pos-gold: { 'accuracy': 0.9951338199513382,\n",
685 | " 'f1_score': 0,\n",
686 | " 'fall_out': 0.0,\n",
687 | " 'false_discovery_rate': 1.0,\n",
688 | " 'false_negative_rate': 1.0,\n",
689 | " 'false_negatives': 4,\n",
690 | " 'false_omission_rate': 0.0048661800486617945,\n",
691 | " 'false_positive_rate': 0.0,\n",
692 | " 'false_positives': 0,\n",
693 | " 'hit_rate': 0.0,\n",
694 | " 'informedness': 0.0,\n",
695 | " 'markedness': -0.0048661800486617945,\n",
696 | " 'matthews_correlation_coefficient': 0,\n",
697 | " 'miss_rate': 1.0,\n",
698 | " 'negative_predictive_value': 0.9951338199513382,\n",
699 | " 'positive_predictive_value': 0,\n",
700 | " 'precision': 0,\n",
701 | " 'recall': 0.0,\n",
702 | " 'sensitivity': 0.0,\n",
703 | " 'specificity': 1.0,\n",
704 | " 'true_negative_rate': 1.0,\n",
705 | " 'true_negatives': 818,\n",
706 | " 'true_positive_rate': 0.0,\n",
707 | " 'true_positives': 0},\n",
708 | " Pos-housing: { 'accuracy': 1.0,\n",
709 | " 'f1_score': 0,\n",
710 | " 'fall_out': 0.0,\n",
711 | " 'false_discovery_rate': 1.0,\n",
712 | " 'false_negative_rate': 1.0,\n",
713 | " 'false_negatives': 0,\n",
714 | " 'false_omission_rate': 0.0,\n",
715 | " 'false_positive_rate': 0.0,\n",
716 | " 'false_positives': 0,\n",
717 | " 'hit_rate': 0,\n",
718 | " 'informedness': 0.0,\n",
719 | " 'markedness': 0.0,\n",
720 | " 'matthews_correlation_coefficient': 0,\n",
721 | " 'miss_rate': 1.0,\n",
722 | " 'negative_predictive_value': 1.0,\n",
723 | " 'positive_predictive_value': 0,\n",
724 | " 'precision': 0,\n",
725 | " 'recall': 0,\n",
726 | " 'sensitivity': 0,\n",
727 | " 'specificity': 1.0,\n",
728 | " 'true_negative_rate': 1.0,\n",
729 | " 'true_negatives': 822,\n",
730 | " 'true_positive_rate': 0,\n",
731 | " 'true_positives': 0},\n",
732 | " Pos-heat: { 'accuracy': 0.9987834549878345,\n",
733 | " 'f1_score': 0,\n",
734 | " 'fall_out': 0.0,\n",
735 | " 'false_discovery_rate': 1.0,\n",
736 | " 'false_negative_rate': 1.0,\n",
737 | " 'false_negatives': 1,\n",
738 | " 'false_omission_rate': 0.0012165450121655041,\n",
739 | " 'false_positive_rate': 0.0,\n",
740 | " 'false_positives': 0,\n",
741 | " 'hit_rate': 0.0,\n",
742 | " 'informedness': 0.0,\n",
743 | " 'markedness': -0.0012165450121655041,\n",
744 | " 'matthews_correlation_coefficient': 0,\n",
745 | " 'miss_rate': 1.0,\n",
746 | " 'negative_predictive_value': 0.9987834549878345,\n",
747 | " 'positive_predictive_value': 0,\n",
748 | " 'precision': 0,\n",
749 | " 'recall': 0.0,\n",
750 | " 'sensitivity': 0.0,\n",
751 | " 'specificity': 1.0,\n",
752 | " 'true_negative_rate': 1.0,\n",
753 | " 'true_negatives': 821,\n",
754 | " 'true_positive_rate': 0.0,\n",
755 | " 'true_positives': 0}}\n",
756 | "Saved to: results/experiment_run\n"
757 | ],
758 | "name": "stdout"
759 | }
760 | ]
761 | },
762 | {
763 | "cell_type": "code",
764 | "metadata": {
765 | "id": "64YqpLy35w4n"
766 | },
767 | "source": [
768 | ""
769 | ],
770 | "execution_count": null,
771 | "outputs": []
772 | }
773 | ]
774 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## 🎓 Pragmatic AI Labs | Join 1M+ ML Engineers
2 |
3 | ### 🔥 Hot Course Offers:
4 | * 🤖 [Master GenAI Engineering](https://ds500.paiml.com/learn/course/0bbb5/) - Build Production AI Systems
5 | * 🦀 [Learn Professional Rust](https://ds500.paiml.com/learn/course/g6u1k/) - Industry-Grade Development
6 | * 📊 [AWS AI & Analytics](https://ds500.paiml.com/learn/course/31si1/) - Scale Your ML in Cloud
7 | * ⚡ [Production GenAI on AWS](https://ds500.paiml.com/learn/course/ehks1/) - Deploy at Enterprise Scale
8 | * 🛠️ [Rust DevOps Mastery](https://ds500.paiml.com/learn/course/ex8eu/) - Automate Everything
9 |
10 | ### 🚀 Level Up Your Career:
11 | * 💼 [Production ML Program](https://paiml.com) - Complete MLOps & Cloud Mastery
12 | * 🎯 [Start Learning Now](https://ds500.paiml.com) - Fast-Track Your ML Career
13 | * 🏢 Trusted by Fortune 500 Teams
14 |
15 | Learn end-to-end ML engineering from industry veterans at [PAIML.COM](https://paiml.com)
16 | ## Practical MLOps, an O'Reilly Book
17 |
18 | This is a public repo where code samples are stored for the book Practical MLOps.
19 |
20 | 
21 | * [Read Practical MLOps Online](https://learning.oreilly.com/library/view/practical-mlops/9781098103002/)
22 | * [Purchase Practical MLOps](https://www.amazon.com/Practical-MLOps-Operationalizing-Machine-Learning/dp/1098103017)
23 |
24 | ## Tentative Outline
25 |
26 | ### Chapter 1: Introduction to MLOps
27 | #### Source Code Chapter 1:
28 | * [Multi-cloud Github Actions Demo](https://github.com/noahgift/github-actions-demo)
29 |
30 | ### Chapter 2: MLOps Foundations
31 | #### Source Code Chapter 2:
32 |
33 | * https://github.com/noahgift/cloud-bash-essentials
34 | * https://github.com/noahgift/regression-concepts/blob/master/height_weight.ipynb
35 | * https://github.com/noahgift/or/blob/master/README.md#randomized-start-with-greedy-path-solution-for-tsp
36 |
37 | ### Chapter 3: Machine Learning Deployment In Production ~~Strategies~~
38 | #### Source Code Chapter 3:
39 |
40 | - [Logging Examples](https://github.com/paiml/practical-mlops-book/blob/master/chapter6)
41 | - [Multiple Loggers](https://github.com/paiml/practical-mlops-book/blob/master/chapter6/multiple-loggers)
42 | - [Simple Logging](https://github.com/paiml/practical-mlops-book/blob/master/chapter6/simple-logging)
43 |
44 |
45 | ### Chapter 4: Continuous Delivery for Machine Learning Models
46 | #### Source Code Chapter 4:
47 |
48 | ### Chapter 5: AutoML
49 | #### Source Code Chapter 5:
50 |
51 | * [Apple CreateML Walkthrough](https://github.com/noahgift/Apple-CreateML-AutoML-Recipes)
52 | * [Ludwig Text Classification](https://github.com/paiml/practical-mlops-book/blob/main/Ludwig.ipynb)
53 | * [FLAML Hello World](https://github.com/noahgift/flaml-nba)
54 | * [Model Explainability](https://github.com/noahgift/model-explainability)
55 |
56 | ### Chapter 6: Monitoring and Logging for Machine Learning
57 | #### Source Code Chapter 6:
58 |
59 | ### Chapter 7: MLOps for AWS
60 | #### Source Code Chapter 7:
61 |
62 | * [Continuous Delivery for Elastic Beanstalk](https://github.com/noahgift/Flask-Elastic-Beanstalk)
63 | * [ECS Fargate](https://github.com/noahgift/eks-fargate-tutorial)
64 | * [AWS ML Certification Exam Guide](https://noahgift.github.io/aws-ml-guide/intro)
65 | * [AWS Cloud Practitioner Exam Guide](https://awscp.noahgift.com/questions-answers)
66 | * [Free AWS Cloud Practitioner Course](https://store.paiml.com/aws-cloud-practitioner)
67 | * [Python MLOps Cookbook](https://github.com/noahgift/Python-MLOps-Cookbook)
68 | * [Container From Scratch](https://github.com/noahgift/container-from-scratch-python)
69 |
70 | ### Chapter 8: MLOps for Azure
71 | #### Source Code Chapter 8:
72 |
73 | ### Chapter 9: MLOps for GCP
74 | #### Source Code Chapter 9:
75 |
76 | * [Project Plan Template](https://github.com/paiml/practical-mlops-book/blob/main/Excel%20Template_Ten%20Week%20Demo%20Schedule.xlsx?raw=true)
77 | * [GCP from Zero](https://github.com/noahgift/gcp-from-zero)
78 | * [Kubernetes Hello World](https://github.com/noahgift/kubernetes-hello-world-python-flask)
79 | * [gcp-flask-ml-deploy](https://github.com/noahgift/gcp-flask-ml-deploy)
80 | * [serverless cookbook](https://github.com/noahgift/serverless-cookbook)
81 |
82 | ### Chapter 10: Machine Learning Interoperability
83 | #### Source Code Chapter 10:
84 |
85 | ### Chapter 11: Building MLOps command-line tools
86 | #### Source Code Chapter 11:
87 |
88 | ### Chapter 12: Machine Learning Engineering and MLOps Case Studies
89 | #### Source Code Chapter 12:
90 |
91 |
92 | ### Community Recipes
93 |
94 | This section includes community recipes. Many may be included in the book if timing works out.
95 |
96 | * [Jason Adams: FastAPI Sentiment Analysis with Kubernetes](https://github.com/Jason-Adam/sentiment-service)
97 | * [James Salafatinos: Tensorflow.js real-time image classification](https://github.com/james-salafatinos/webcam-ml)
98 | * [Nikhil Bhargava: Sneaker Price Predict](https://github.com/nikhil-bhargava/ids-706-fp)
99 | * [Medical Expenditures](https://github.com/joekrinke15/MLModelDeployment)
100 | * [Flask Salary Predictor](https://github.com/YisongZou/Flask-Salary-Predictor-with-Random-Forest-Algorithm)
101 | * [Covid Predictor](https://github.com/jingyi-xie/covid-prediction)
102 | * [Absenteeism at Work](https://github.com/shangwenyan/IDS721FinalProject)
103 | * [Chest X-Ray on Baidu](https://github.com/Valarzz/Lung-Health-System)
104 | * [Streamlit Traffic Detection](https://github.com/YUA1024/YUA1024)
105 |
106 | ### References
107 |
108 | * [Pragmatic AI](https://www.amazon.com/Pragmatic-AI-Introduction-Cloud-Based-Analytics/dp/0134863860)
109 | * [Python for DevOps](https://www.amazon.com/Python-DevOps-Ruthlessly-Effective-Automation/dp/149205769X)
110 | * [Cloud Computing for Data](https://paiml.com/docs/home/books/cloud-computing-for-data/)
111 |
112 | #### Next Steps: Take Coursera MLOps Course
113 |
114 | 
115 |
116 | * [Take the Specialization](https://www.coursera.org/learn/cloud-computing-foundations-duke?specialization=building-cloud-computing-solutions-at-scale)
117 | * [Cloud Computing Foundations](https://www.coursera.org/learn/cloud-computing-foundations-duke?specialization=building-cloud-computing-solutions-at-scale)
118 | * [Cloud Virtualization, Containers and APIs](https://www.coursera.org/learn/cloud-virtualization-containers-api-duke?specialization=building-cloud-computing-solutions-at-scale)
119 | * [Cloud Data Engineering](https://www.coursera.org/learn/cloud-data-engineering-duke?specialization=building-cloud-computing-solutions-at-scale)
120 | * [Cloud Machine Learning Engineering and MLOps](https://www.coursera.org/learn/cloud-machine-learning-engineering-mlops-duke?specialization=building-cloud-computing-solutions-at-scale)
121 |
122 |
123 | * [✨Pragmatic AI Labs builds courses on edX](https://insight.paiml.com/d69)
124 | * [ 💬 Join our Discord community](https://discord.gg/ZrjWxKay)
125 |
--------------------------------------------------------------------------------
/chap05/Makefile:
--------------------------------------------------------------------------------
1 | install:
2 | pip install --upgrade pip &&\
3 | pip install -r requirements.txt
4 |
--------------------------------------------------------------------------------
/chap05/config.yaml:
--------------------------------------------------------------------------------
1 | input_features:
2 | -
3 | name: text
4 | type: text
5 | level: word
6 | encoder: parallel_cnn
7 |
8 | output_features:
9 | -
10 | name: class
11 | type: category
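
This config is driven by the `ludwig experiment` command shown in Ludwig.ipynb. A minimal sketch of the equivalent programmatic route, assuming Ludwig's Python API (`ludwig.api.LudwigModel`) and the dataset downloaded as in the notebook:

    # Minimal sketch using Ludwig's Python API instead of the CLI.
    from ludwig.api import LudwigModel

    # Paths match chap05/config.yaml and the reuters-allcats.csv fetched in Ludwig.ipynb.
    model = LudwigModel(config="config.yaml")
    results = model.train(dataset="reuters-allcats.csv")
    # `results` holds training statistics; the exact return shape varies by Ludwig version.
    print(results)
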
--------------------------------------------------------------------------------
/chap05/requirements.txt:
--------------------------------------------------------------------------------
1 | ludwig
2 |
--------------------------------------------------------------------------------
/chapter10/.gitignore:
--------------------------------------------------------------------------------
1 | *onnx
2 |
--------------------------------------------------------------------------------
/chapter10/azure/aci.py:
--------------------------------------------------------------------------------
1 | from azureml.core.webservice import AciWebservice
2 |
3 | aciconfig = AciWebservice.deploy_configuration(
4 | cpu_cores=1,
5 | memory_gb=1,
6 | tags={"demo": "onnx"},
7 | description="web service for MNIST ONNX model",
8 | )
9 |
10 | from azureml.core.model import Model
11 |
12 | aci_service_name = "onnx-roberta-demo"
13 | aci_service = Model.deploy(ws, aci_service_name, [model], inference_config, aciconfig)
14 | aci_service.wait_for_deployment(True)
15 |
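
The snippet above references `ws`, `model`, and `inference_config` without defining them. A minimal sketch of the assumed setup with azureml-core; the model path, environment name, and package list are hypothetical:

    # Sketch of the objects aci.py assumes already exist (names here are hypothetical).
    from azureml.core import Workspace, Environment
    from azureml.core.conda_dependencies import CondaDependencies
    from azureml.core.model import Model, InferenceConfig

    ws = Workspace.from_config()  # loads an existing workspace from config.json

    # Register the ONNX model that scoring.py loads (local path is hypothetical).
    model = Model.register(
        workspace=ws,
        model_path="roberta-sequence-classification-9.onnx",
        model_name="roberta-sequence",
    )

    # Environment with the dependencies scoring.py imports.
    env = Environment("onnx-scoring-env")
    env.python.conda_dependencies = CondaDependencies.create(
        pip_packages=["azureml-defaults", "onnxruntime", "torch", "transformers", "numpy"]
    )
    inference_config = InferenceConfig(entry_script="scoring.py", environment=env)
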
--------------------------------------------------------------------------------
/chapter10/azure/scoring.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import os
3 | import numpy as np
4 | from transformers import RobertaTokenizer
5 | import onnxruntime
6 |
7 |
8 | def init():
9 | global session
10 | model = os.path.join(
11 | os.getenv("AZUREML_MODEL_DIR"), "roberta-sequence-classification-9.onnx"
12 | )
13 | session = onnxruntime.InferenceSession(model)
14 |
15 |
16 | def run(input_data_json):
17 | try:
18 | tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
19 | input_ids = torch.tensor(
20 | tokenizer.encode(input_data_json[0], add_special_tokens=True)
21 | ).unsqueeze(0)
22 |
23 | if input_ids.requires_grad:
24 | numpy_func = input_ids.detach().cpu().numpy()
25 | else:
26 | numpy_func = input_ids.cpu().numpy()
27 |
28 | inputs = {session.get_inputs()[0].name: numpy_func}
29 | out = session.run(None, inputs)
30 |
31 | return {"result": np.argmax(out)}
32 | except Exception as err:
33 | result = str(err)
34 | return {"error": result}
35 |
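
A small local smoke test for this scoring script, assuming the ONNX model file sits in the working directory (once deployed, Azure ML sets `AZUREML_MODEL_DIR` for you):

    # Local smoke test sketch; assumes roberta-sequence-classification-9.onnx is present locally.
    import os
    import scoring

    os.environ["AZUREML_MODEL_DIR"] = "."  # point init() at the local model file
    scoring.init()
    # run() expects a JSON-like list whose first element is the text to classify.
    print(scoring.run(["ONNX Runtime makes this model easy to serve"]))
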
--------------------------------------------------------------------------------
/chapter10/coreml/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8.8-slim
2 |
3 |
4 |
--------------------------------------------------------------------------------
/chapter10/coreml/README.md:
--------------------------------------------------------------------------------
1 | # Sources
2 |
3 | * ONNX to Core ML: https://coremltools.readme.io/docs/onnx-conversion (supports ONNX opset version 10 and older)
4 | * Core ML to ONNX: https://github.com/onnx/onnxmltools#coreml-to-onnx-conversion
5 | * ONNX model zoo for MNIST: https://github.com/onnx/models and https://github.com/onnx/models/tree/master/vision/classification/mnist
6 |
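
For the Core ML to ONNX direction linked above, a minimal sketch along the lines of the onnxmltools documentation; the file names are hypothetical:

    # Core ML -> ONNX sketch, following the onnxmltools documentation linked above.
    import coremltools
    import onnxmltools

    coreml_model = coremltools.utils.load_spec("example.mlmodel")  # hypothetical file
    onnx_model = onnxmltools.convert_coreml(coreml_model, "Example Model")
    onnxmltools.utils.save_model(onnx_model, "example.onnx")
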
--------------------------------------------------------------------------------
/chapter10/coreml/cli.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from coremltools.converters.onnx import convert
3 |
4 | def main(model_path):
5 | basename = model_path.split('.onnx')[0]
6 | model = convert(model_path, minimum_ios_deployment_target='13')
7 | model.short_description = "ONNX Model converted with coremltools"
8 | model.save(f"{basename}.mlmodel")
9 |
10 | if __name__ == '__main__':
11 | main(sys.argv[-1])
12 |
--------------------------------------------------------------------------------
/chapter10/coreml/requirements.txt:
--------------------------------------------------------------------------------
1 | coremltools
2 |
--------------------------------------------------------------------------------
/chapter10/onnx-checker.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import onnx
3 |
4 | def main(arguments):
5 | help_menu = """
6 | A command-line tool to quickly verify ONNX models using
7 | check_model()
8 | """
9 |
10 | if "--help" in arguments:
11 | print(help_menu)
12 | sys.exit(0)
13 |
14 | model = onnx.load(arguments[-1])
15 | onnx.checker.check_model(model)
16 | print(onnx.helper.printable_graph(model.graph))
17 |
18 |
19 | if __name__ == '__main__':
20 | main(sys.argv)
21 |
--------------------------------------------------------------------------------
/chapter10/tf/requirements.txt:
--------------------------------------------------------------------------------
1 | Keras-Preprocessing==1.1.2
2 | Markdown==3.3.4
3 | Werkzeug==1.0.1
4 | absl-py==0.12.0
5 | astunparse==1.6.3
6 | cachetools==4.2.1
7 | certifi==2020.12.5
8 | chardet==4.0.0
9 | flatbuffers==1.12
10 | gast==0.3.3
11 | google-auth-oauthlib==0.4.3
12 | google-auth==1.28.0
13 | google-pasta==0.2.0
14 | grpcio==1.32.0
15 | h5py==2.10.0
16 | idna==2.10
17 | numpy==1.19.5
18 | # numpy==1.20.1  (duplicate conflicting pin removed; numpy==1.19.5 above matches tensorflow 2.5.1)
19 | oauthlib==3.1.0
20 | onnx==1.8.1
21 | opt-einsum==3.3.0
22 | protobuf==3.15.6
23 | pyasn1-modules==0.2.8
24 | pyasn1==0.4.8
25 | requests-oauthlib==1.3.0
26 | requests==2.25.1
27 | rsa==4.7.2
28 | six==1.15.0
29 | tensorboard-plugin-wit==1.8.0
30 | tensorboard==2.4.1
31 | tensorflow-estimator==2.4.0
32 | tensorflow==2.5.1
33 | termcolor==1.1.0
34 | tf2onnx==1.8.4
35 | typing-extensions==3.7.4.3
36 | urllib3==1.26.5
37 | wrapt==1.12.1
38 |
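
This directory pins tf2onnx and TensorFlow but ships no conversion script. A rough, hypothetical sketch of a Keras-to-ONNX conversion, assuming tf2onnx's `convert.from_keras` API is available in the pinned version:

    # Hypothetical sketch: convert a Keras model to ONNX with tf2onnx (not part of the repo).
    import tensorflow as tf
    import tf2onnx

    model = tf.keras.applications.MobileNetV2(weights=None)  # any Keras model would do
    spec = (tf.TensorSpec((None, 224, 224, 3), tf.float32, name="input"),)
    onnx_model, _ = tf2onnx.convert.from_keras(model, input_signature=spec, output_path="model.onnx")
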
--------------------------------------------------------------------------------
/chapter10/torch/README.md:
--------------------------------------------------------------------------------
1 | https://github.com/onnx/tutorials/blob/master/tutorials/ExportModelFromPyTorchForWinML.md
2 |
--------------------------------------------------------------------------------
/chapter10/torch/check.py:
--------------------------------------------------------------------------------
1 | import onnx
2 |
3 | # Load the ONNX model
4 | model = onnx.load("resnet18.onnx")
5 |
6 | # Check that the IR is well formed
7 | onnx.checker.check_model(model)
8 |
9 | # Print a human readable representation of the graph
10 | print(onnx.helper.printable_graph(model.graph))
11 |
--------------------------------------------------------------------------------
/chapter10/torch/convert.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torchvision
3 |
4 | # dummy_in = torch.randn(10, 3, 224, 224)
5 | dummy_in = torch.randn(8, 3, 200, 200)
6 | model = torchvision.models.resnet18(pretrained=True)
7 | #
8 | # in_names = [ "actual_input_1" ] + [ "learned_%d" % i for i in range(16) ]
9 | in_names = ["learned_%d" % i for i in range(16)]
10 | out_names = ["output_1"]
11 | #
12 | torch.onnx.export(
13 | model,
14 | dummy_in,
15 | "resnet18.onnx",
16 | input_names=in_names,
17 | output_names=out_names,
18 | opset_version=7,
19 | verbose=True,
20 | )
21 | #
22 |
23 | import onnx
24 |
25 | # Load the ONNX model
26 | model = onnx.load("resnet18.onnx")
27 |
28 | # Check that the IR is well formed
29 | onnx.checker.check_model(model)
30 |
31 | # Print a human readable representation of the graph
32 | print(onnx.helper.printable_graph(model.graph))
33 |
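
Once `resnet18.onnx` has been exported by the script above, a minimal sketch of running it with onnxruntime (the same runtime used in chapter10/azure/scoring.py):

    # Sketch: run the exported resnet18.onnx with onnxruntime and a random batch.
    import numpy as np
    import onnxruntime

    session = onnxruntime.InferenceSession("resnet18.onnx")
    input_name = session.get_inputs()[0].name  # avoids guessing the exported input name
    dummy = np.random.randn(8, 3, 200, 200).astype(np.float32)  # matches dummy_in above
    outputs = session.run(None, {input_name: dummy})
    print(outputs[0].shape)  # (8, 1000) class scores for resnet18
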
--------------------------------------------------------------------------------
/chapter10/torch/requirements.txt:
--------------------------------------------------------------------------------
1 | torch
2 | torchvision
3 | onnx
4 |
--------------------------------------------------------------------------------
/chapter11/carriage.csv:
--------------------------------------------------------------------------------
1 | ,name,grape,region,variety,rating,notes
2 | 27932,J. Bookwalter Vintner's Select Cabernet Sauvignon 1996,,"Columbia Valley, Washington",Red Wine,90.0,"Aged in French, Hungarian, and American Oak barrels, this is a big voluptuous wine made in Bordeaux style that will age for years.
3 | Aromas of cherry, cedar, cassis, and chocolate, coupled with a velvety finish, give this wine a smooth, mouth filling memory of Cabernet Sauvignon."
4 |
5 |
--------------------------------------------------------------------------------
/chapter11/linter-modularized/.gitignore:
--------------------------------------------------------------------------------
1 | *.egg-info
2 | *.pyc
3 |
--------------------------------------------------------------------------------
/chapter11/linter-modularized/csv_linter/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paiml/practical-mlops-book/5517dc375d23a511e2bf850f700c12a57a85fddd/chapter11/linter-modularized/csv_linter/__init__.py
--------------------------------------------------------------------------------
/chapter11/linter-modularized/csv_linter/checks.py:
--------------------------------------------------------------------------------
1 |
2 | def carriage_returns(df):
3 | for index, row in df.iterrows():
4 | for column, field in row.iteritems():
5 | try:
6 | if "\r\n" in field:
7 | return index, column, field
8 | except TypeError:
9 | continue
10 |
11 |
12 | def unnamed_columns(df):
13 | bad_columns = []
14 | for key in df.keys():
15 | if "Unnamed" in key:
16 | bad_columns.append(key)
17 | return len(bad_columns)
18 |
19 |
20 | def zero_count_columns(df):
21 | bad_columns = []
22 | for key in df.keys():
23 | if df[key].count() == 0:
24 | bad_columns.append(key)
25 | return bad_columns
26 |
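
A short, self-contained illustration of calling these checks directly on a DataFrame; the sample data is made up:

    # Illustrative use of the three checks with a tiny, made-up DataFrame.
    import pandas as pd
    from csv_linter.checks import carriage_returns, unnamed_columns, zero_count_columns

    df = pd.DataFrame({
        "Unnamed: 0": [1, 2],               # triggers unnamed_columns()
        "notes": ["fine", "bad\r\nvalue"],  # triggers carriage_returns()
        "empty": [None, None],              # triggers zero_count_columns()
    })

    print(zero_count_columns(df))  # ['empty']
    print(unnamed_columns(df))     # 1
    print(carriage_returns(df))    # (1, 'notes', 'bad\r\nvalue')
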
--------------------------------------------------------------------------------
/chapter11/linter-modularized/csv_linter/main.py:
--------------------------------------------------------------------------------
1 | import click
2 | import pandas as pd
3 | from csv_linter.checks import carriage_returns, unnamed_columns, zero_count_columns
4 |
5 |
6 | #def carriage_returns(df):
7 | # for index, row in df.iterrows():
8 | # for column, field in row.iteritems():
9 | # try:
10 | # if "\r\n" in field:
11 | # return index, column, field
12 | # except TypeError:
13 | # continue
14 | #
15 | #
16 | #def unnamed_columns(df):
17 | # bad_columns = []
18 | # for key in df.keys():
19 | # if "Unnamed" in key:
20 | # bad_columns.append(key)
21 | # return len(bad_columns)
22 | #
23 | #
24 | #def zero_count_columns(df):
25 | # bad_columns = []
26 | # for key in df.keys():
27 | # if df[key].count() == 0:
28 | # bad_columns.append(key)
29 | # return bad_columns
30 | #
31 |
32 | @click.command()
33 | @click.argument('filename', type=click.Path(exists=True))
34 | def main(filename):
35 | df = pd.read_csv(filename)
36 | for column in zero_count_columns(df):
37 | click.echo(f"Warning: Column '{column}' has no items in it")
38 | unnamed = unnamed_columns(df)
39 | if unnamed:
40 | click.echo(f"Warning: found {unnamed} columns that are Unnamed")
41 | carriage_field = carriage_returns(df)
42 | if carriage_field:
43 | index, column, field = carriage_field
44 | click.echo((
45 | f"Warning: found carriage returns at index {index}"
46 | f" of column '{column}':")
47 | )
48 | click.echo(f" '{field[:50]}'")
49 |
--------------------------------------------------------------------------------
/chapter11/linter-modularized/requirements.txt:
--------------------------------------------------------------------------------
1 | click==7.1.2
2 | flake8==3.8.4
3 | mccabe==0.6.1
4 | numpy==1.19.4
5 | pandas==1.2.0
6 | pycodestyle==2.6.0
7 | pyflakes==2.2.0
8 | python-dateutil==2.8.1
9 | pytz==2020.5
10 | six==1.15.0
11 |
--------------------------------------------------------------------------------
/chapter11/linter-modularized/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name = 'csv-linter',
5 | description = 'lint csv files',
6 | packages = find_packages(),
7 | author = 'Alfredo Deza',
8 | entry_points="""
9 | [console_scripts]
10 | csv-linter=csv_linter.main:main
11 | """,
12 | install_requires = ['click==7.1.2', 'pandas==1.2.0'],
13 | version = '0.0.1',
14 | url = 'https://github.com/paiml/practical-mlops-book',
15 | )
16 |
--------------------------------------------------------------------------------
/chapter11/linter/.gitignore:
--------------------------------------------------------------------------------
1 | *.egg-info
2 | *.pyc
3 |
--------------------------------------------------------------------------------
/chapter11/linter/carriage.csv:
--------------------------------------------------------------------------------
1 | ,name,grape,region,variety,rating,notes
2 | 27932,J. Bookwalter Vintner's Select Cabernet Sauvignon 1996,,"Columbia Valley, Washington",Red Wine,90.0,"Aged in French, Hungarian, and American Oak barrels, this is a big voluptuous wine made in Bordeaux style that will age for years.
3 | Aromas of cherry, cedar, cassis, and chocolate, coupled with a velvety finish, give this wine a smooth, mouth filling memory of Cabernet Sauvignon."
4 |
5 |
--------------------------------------------------------------------------------
/chapter11/linter/csv_linter.py:
--------------------------------------------------------------------------------
1 | import click
2 | import pandas as pd
3 |
4 |
5 | def carriage_returns(df):
6 | for index, row in df.iterrows():
7 | for column, field in row.iteritems():
8 | try:
9 | if "\r\n" in field:
10 | return index, column, field
11 | except TypeError:
12 | continue
13 |
14 |
15 | def unnamed_columns(df):
16 | bad_columns = []
17 | for key in df.keys():
18 | if "Unnamed" in key:
19 | bad_columns.append(key)
20 | return len(bad_columns)
21 |
22 |
23 | def zero_count_columns(df):
24 | bad_columns = []
25 | for key in df.keys():
26 | if df[key].count() == 0:
27 | bad_columns.append(key)
28 | return bad_columns
29 |
30 |
31 | @click.command()
32 | @click.argument('filename', type=click.Path(exists=True))
33 | def main(filename):
34 | df = pd.read_csv(filename)
35 | for column in zero_count_columns(df):
36 | click.echo(f"Warning: Column '{column}' has no items in it")
37 | unnamed = unnamed_columns(df)
38 | if unnamed:
39 | click.echo(f"Warning: found {unnamed} columns that are Unnamed")
40 | carriage_field = carriage_returns(df)
41 | if carriage_field:
42 | index, column, field = carriage_field
43 | click.echo((
44 | f"Warning: found carriage returns at index {index}"
45 | f" of column '{column}':")
46 | )
47 | click.echo(f" '{field[:50]}'")
48 |
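
Click's test runner can exercise this command without installing the console script; a minimal sketch against the carriage.csv file in this directory:

    # Sketch: drive the linter with click's CliRunner instead of the installed console script.
    from click.testing import CliRunner
    from csv_linter import main

    runner = CliRunner()
    result = runner.invoke(main, ["carriage.csv"])
    print(result.output)  # expect warnings about an Unnamed column and carriage returns
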
--------------------------------------------------------------------------------
/chapter11/linter/requirements.txt:
--------------------------------------------------------------------------------
1 | click==7.1.2
2 | flake8==3.8.4
3 | mccabe==0.6.1
4 | numpy==1.19.4
5 | pandas==1.2.0
6 | pycodestyle==2.6.0
7 | pyflakes==2.2.0
8 | python-dateutil==2.8.1
9 | pytz==2020.5
10 | six==1.15.0
11 |
--------------------------------------------------------------------------------
/chapter11/linter/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name = 'csv-linter',
5 | description = 'lint csv files',
6 | packages = find_packages(),
7 | author = 'Alfredo Deza',
8 | entry_points="""
9 | [console_scripts]
10 | csv-linter=csv_linter:main
11 | """,
12 | install_requires = ['click==7.1.2', 'pandas==1.2.0'],
13 | version = '0.0.1',
14 | url = 'https://github.com/paiml/practical-mlops-book',
15 | )
16 |
--------------------------------------------------------------------------------
/chapter11/serverless/cli/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name = 'cloud-translate',
5 | description = "translate text with Google's cloud",
6 | packages = find_packages(),
7 | author = 'Alfredo Deza',
8 | entry_points="""
9 | [console_scripts]
10 | cloud-translate=trigger:main
11 | """,
12 | install_requires = ['click==7.1.2', 'requests'],
13 | version = '0.0.1',
14 | url = 'https://github.com/paiml/practical-mlops-book',
15 | )
16 |
--------------------------------------------------------------------------------
/chapter11/serverless/cli/trigger.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import requests
3 | import click
4 |
5 | url = 'https://us-central1-gcp-book-1.cloudfunctions.net/function-2'
6 |
7 |
8 | def token():
9 | proc = subprocess.Popen(
10 | ["gcloud", "auth", "print-identity-token"],
11 | stdout=subprocess.PIPE)
12 | out, err = proc.communicate()
13 | return out.decode('utf-8').strip('\n')
14 |
15 |
16 | @click.command()
17 | @click.argument('text', type=click.STRING)
18 | def main(text):
19 | resp = requests.post(
20 | url,
21 | json={"message": text},
22 | headers={"Authorization": f"Bearer {token()}"})
23 |
24 | click.echo(f"{resp.text}")
25 |
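26 | # Example usage (a sketch): install this directory with `pip install -e .` so the
27 | # `cloud-translate` console script declared in setup.py is available, then pass the text to send:
28 | #
29 | #   $ cloud-translate "hello from the command line"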
--------------------------------------------------------------------------------
/chapter11/serverless/foo.py:
--------------------------------------------------------------------------------
1 | from google.oauth2 import service_account
2 | from google.auth.transport.requests import AuthorizedSession
3 |
4 |
5 | url = 'https://us-central1-gcp-book-1.cloudfunctions.net/function-1'
6 |
7 |
8 | creds = service_account.IDTokenCredentials.from_service_account_file(
9 | 'service-account-credentials.json', target_audience=url)
10 |
11 | authed_session = AuthorizedSession(creds)
12 |
13 | # make authenticated request and print the response, status_code
14 | # import ipdb; ipdb.set_trace()  # optional debugging breakpoint
15 | resp = authed_session.post(url, data='{"message": "hello from the programming language"}')
16 | print(resp.status_code)
17 | print(resp.text)
18 |
19 |
20 | #import requests
21 | #
22 | #
23 | #METADATA_URL = 'http://metadata.google.internal/computeMetadata/v1/'
24 | #METADATA_HEADERS = {'Metadata-Flavor': 'Google'}
25 | #SERVICE_ACCOUNT = 'default'
26 | #
27 | #
28 | #def get_access_token():
29 | # url = '{}instance/service-accounts/{}/token'.format(
30 | # METADATA_URL, SERVICE_ACCOUNT)
31 | #
32 | # # Request an access token from the metadata server.
33 | # r = requests.get(url, headers=METADATA_HEADERS)
34 | # r.raise_for_status()
35 | #
36 | # # Extract the access token from the response.
37 | # access_token = r.json()['access_token']
38 | #
39 | # return access_token
40 | #
41 | #print(get_access_token())
42 |
--------------------------------------------------------------------------------
/chapter11/serverless/trigger.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import requests
3 |
4 | url = 'https://us-central1-gcp-book-1.cloudfunctions.net/function-2'
5 |
6 |
7 | def token():
8 | proc = subprocess.Popen(
9 | ["gcloud", "auth", "print-identity-token"],
10 | stdout=subprocess.PIPE)
11 | out, err = proc.communicate()
12 | return out.decode('utf-8').strip('\n')
13 |
14 |
15 | resp = requests.post(
16 | url,
17 | json={"message": "hello from a programming language"},
18 | headers={"Authorization": f"Bearer {token()}"})
19 |
20 | print(resp.text)
21 |
--------------------------------------------------------------------------------
/chapter2/add.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from random import choices
3 |
4 | def add(x,y):
5 | print(f"inside a function and adding {x}, {y}")
6 | return x+y
7 |
8 | # Send random numbers from 1-10, ten times, to the add function
9 | numbers = range(1, 11)
10 | for num in numbers:
11 | xx = choices(numbers)[0]
12 | yy = choices(numbers)[0]
13 | print(add(xx,yy))
14 |
15 |
--------------------------------------------------------------------------------
/chapter2/hello.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Hello World"
4 |
--------------------------------------------------------------------------------
/chapter3/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:stable
2 |
3 | RUN apt-get update && apt-get install -yq curl build-essential gnupg
4 |
5 | RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
6 |
7 | RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
8 |
9 | RUN apt-get update && apt-get install -yq edgetpu-compiler
10 |
11 | CMD ["/bin/bash"]
12 |
--------------------------------------------------------------------------------
/chapter3/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Coral
4 |
5 | 1. Go through the get started guide https://coral.ai/docs/accelerator/get-started
6 | 1. Find the iNat models at https://coral.ai/models/
7 | 1. Use the common-fly.jpg in this repo to run the classification (see the sketch at the end of this README)
8 |
9 |
10 | Note: Common Fly image via Pixabay License (Free for commercial use and no attribution required):
11 | https://pixabay.com/photos/macro-fly-nature-insect-bug-green-1802322/
12 |
13 |
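14 | A minimal classification sketch with the `pycoral` library (assumptions: `pycoral` and Pillow are installed, and an iNat insect model plus its label file were downloaded from https://coral.ai/models/; the filenames below are placeholders):
15 |
16 | ```python
17 | from PIL import Image
18 | from pycoral.adapters import classify, common
19 | from pycoral.utils.dataset import read_label_file
20 | from pycoral.utils.edgetpu import make_interpreter
21 |
22 | # Load the Edge TPU model and allocate its tensors
23 | interpreter = make_interpreter("mobilenet_v2_1.0_224_inat_insect_quant_edgetpu.tflite")
24 | interpreter.allocate_tensors()
25 |
26 | # Resize the image from this repo to the model's expected input size
27 | image = Image.open("images/common-fly.jpg").resize(common.input_size(interpreter))
28 | common.set_input(interpreter, image)
29 |
30 | # Run inference and print the top three labels with their scores
31 | interpreter.invoke()
32 | labels = read_label_file("inat_insect_labels.txt")
33 | for klass in classify.get_classes(interpreter, top_k=3):
34 |     print(labels.get(klass.id, klass.id), klass.score)
35 | ```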
--------------------------------------------------------------------------------
/chapter3/images/common-fly.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/paiml/practical-mlops-book/5517dc375d23a511e2bf850f700c12a57a85fddd/chapter3/images/common-fly.jpg
--------------------------------------------------------------------------------
/chapter4/packaging-containers/.gitignore:
--------------------------------------------------------------------------------
1 | *onnx
2 |
--------------------------------------------------------------------------------
/chapter4/packaging-containers/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8
2 |
3 | COPY ./requirements.txt /webapp/requirements.txt
4 |
5 | WORKDIR /webapp
6 |
7 | RUN pip install -r requirements.txt
8 |
9 | COPY webapp/* /webapp
10 |
11 | COPY roberta-sequence-classification-9.onnx /webapp
12 |
13 | ENTRYPOINT [ "python" ]
14 |
15 | CMD [ "app.py" ]
16 |
17 |
--------------------------------------------------------------------------------
/chapter4/packaging-containers/requirements.txt:
--------------------------------------------------------------------------------
1 | simpletransformers==0.4.0
2 | tensorboardX==1.9
3 | transformers==2.1.0
4 | flask==1.1.2
5 | torch==1.7.1
6 | onnxruntime==1.7.0
7 |
--------------------------------------------------------------------------------
/chapter4/packaging-containers/webapp/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request, jsonify
2 | import torch
3 | import numpy as np
4 | from transformers import RobertaTokenizer
5 | import onnxruntime
6 |
7 |
8 | app = Flask(__name__)
9 | tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
10 | session = onnxruntime.InferenceSession("roberta-sequence-classification-9.onnx")
11 |
12 |
13 | def to_numpy(tensor):
14 | return (
15 | tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy()
16 | )
17 |
18 |
19 | @app.route("/")
20 | def home():
21 | return "RoBERTa Sentiment Analysis Prediction Container"
22 |
23 |
24 | @app.route("/predict", methods=["POST"])
25 | def predict():
26 | """
27 | Input sample:
28 |
29 | [ "Containers are good" ]
30 |
31 | Output sample:
32 |
33 | {"positive": True}
34 | """
35 | input_ids = torch.tensor(
36 | tokenizer.encode(request.json[0], add_special_tokens=True)
37 | ).unsqueeze(
38 | 0
39 | )
40 |
41 | if input_ids.requires_grad:
42 | numpy_func = input_ids.detach().cpu().numpy()
43 | else:
44 | numpy_func = input_ids.cpu().numpy()
45 |
46 | inputs = {session.get_inputs()[0].name: numpy_func}
47 | out = session.run(None, inputs)
48 |
49 | result = np.argmax(out)
50 |
51 | return jsonify({"positive": bool(result)})
52 |
53 |
54 | if __name__ == "__main__":
55 | app.run(host="0.0.0.0", port=5000, debug=True)
56 |
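57 | # Example client call (a sketch, assuming the app/container is listening on localhost:5000):
58 | #
59 | #   import requests
60 | #   resp = requests.post("http://localhost:5000/predict", json=["Containers are good"])
61 | #   print(resp.json())  # e.g. {"positive": true}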
--------------------------------------------------------------------------------
/chapter4/packaging-containers/webapp/minimal.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request, jsonify
2 | import torch
3 | import numpy as np
4 | from transformers import RobertaTokenizer
5 | import onnxruntime
6 |
7 |
8 | app = Flask(__name__)
9 | tokenizer = RobertaTokenizer.from_pretrained("roberta-base")
10 | session = onnxruntime.InferenceSession(
11 | "roberta-sequence-classification-9.onnx"
12 | )
13 |
14 |
15 | @app.route("/predict", methods=["POST"])
16 | def predict():
17 | input_ids = torch.tensor(
18 | tokenizer.encode(request.json[0], add_special_tokens=True)
19 | ).unsqueeze(0)
20 |
21 | if input_ids.requires_grad:
22 | numpy_func = input_ids.detach().cpu().numpy()
23 | else:
24 | numpy_func = input_ids.cpu().numpy()
25 |
26 | inputs = {session.get_inputs()[0].name: numpy_func}
27 | out = session.run(None, inputs)
28 |
29 | result = np.argmax(out)
30 |
31 | return jsonify({"positive": bool(result)})
32 |
33 |
34 | if __name__ == "__main__":
35 | app.run(host="0.0.0.0", port=5000, debug=True)
36 |
--------------------------------------------------------------------------------
/chapter6/multiple-loggers/http-app.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import logging
3 |
4 | logging.basicConfig()
5 | root_logger = logging.getLogger()
6 |
7 | # Sets logging level for every single app, the "parent" logger
8 | root_logger.setLevel(logging.DEBUG)
9 |
10 | # new logger for this script
11 | logger = logging.getLogger('http-app')
12 |
13 |
14 | logger.info("About to send a request to example.com")
15 | requests.get('http://example.com')
16 |
17 | # fine tune the urllib logger:
18 | urllib_logger = logging.getLogger('urllib3')
19 | urllib_logger.setLevel(logging.ERROR)
20 | #
21 | logger.info("About to send another request to example.com")
22 | requests.get('http://example.com')
23 |
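24 | # Expected behavior: the first request prints urllib3 DEBUG output (inherited from the
25 | # DEBUG level on the root logger), while the second request is quiet because the urllib3
26 | # logger was raised to ERROR just before it.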
--------------------------------------------------------------------------------
/chapter6/sagemaker/capture.py:
--------------------------------------------------------------------------------
1 | from sagemaker.model_monitor import DataCaptureConfig
2 |
3 | s3_capture_path = "s3://monitoring/xgb-churn-data"
4 |
5 |
6 | data_capture_config = DataCaptureConfig(
7 | enable_capture=True, sampling_percentage=100, destination_s3_uri=s3_capture_path
8 | )
9 |
10 |
11 | from sagemaker.deserializers import CSVDeserializer
12 |
13 | predictor = model.deploy(
14 | initial_instance_count=1,
15 | instance_type="ml.m4.large",
16 | endpoint_name="xgb-churn-monitor",
17 | data_capture_config=data_capture_config,
18 | deserializer=CSVDeserializer(),
19 | )
20 |
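21 | # Note: `model` is assumed to be a SageMaker model/estimator trained earlier in the
22 | # chapter; deploying it with `data_capture_config` enables request/response capture
23 | # to the S3 path above.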
--------------------------------------------------------------------------------
/chapter6/sagemaker/invoke.py:
--------------------------------------------------------------------------------
1 | from sagemaker.predictor import Predictor
2 | from sagemaker.serializers import CSVSerializer
3 | from sagemaker.deserializers import CSVDeserializer
4 | import time
5 | endpoint_name="xgb-churn-monitor"
6 |
7 | predictor = Predictor(
8 | endpoint_name=endpoint_name,
9 | deserializer=CSVDeserializer(),
10 | serializer=CSVSerializer(),
11 | )
12 |
13 | # get a subset of test data for a quick test
14 | #!head -120 test_data/test-dataset-input-cols.csv > test_data/test_sample.csv
15 | print("Sending test traffic to the endpoint {}. \nPlease wait...".format(endpoint_name))
16 |
17 | with open("test_data/test_sample.csv", "r") as f:
18 | for row in f:
19 | payload = row.rstrip("\n")
20 | response = predictor.predict(data=payload)
21 | time.sleep(0.5)
22 |
23 | print("Done!")
24 |
--------------------------------------------------------------------------------
/chapter6/simple-logging/describe.py:
--------------------------------------------------------------------------------
1 | from os import path
2 | import sys
3 | import pandas as pd
4 |
5 | argument = sys.argv[-1]
6 |
7 | try:
8 | df = pd.read_csv(argument)
9 | print(df.describe())
10 | except Exception as error:
11 | print(f"Had a problem trying to read the CSV file: {error}")
12 |
--------------------------------------------------------------------------------
/chapter6/simple-logging/logging_describe.py:
--------------------------------------------------------------------------------
1 | from os import path
2 | import sys
3 | import pandas as pd
4 | import logging
5 |
6 | log_format = "[%(name)s][%(levelname)-6s] %(message)s"
7 | logging.basicConfig(format=log_format)
8 | logger = logging.getLogger("describe")
9 | logger.setLevel(logging.ERROR)
10 |
11 | argument = sys.argv[-1]
12 | logger.debug("processing input file: %s", argument)
13 |
14 | try:
15 | df = pd.read_csv(argument)
16 | print(df.describe())
17 | except Exception:
18 | logger.exception("Had a problem trying to read the CSV file")
19 |
20 | logger.info("the program continues, without issue")
21 |
--------------------------------------------------------------------------------
/chapter7/batch.py:
--------------------------------------------------------------------------------
1 | @cli.group()
2 | def run():
3 | """AWS Batch CLI"""
4 |
5 | @run.command("submit")
6 | @click.option("--queue", default="queue", help="Batch Queue")
7 | @click.option("--jobname", default="1", help="Name of Job")
8 | @click.option("--jobdef", default="test", help="Job Definition")
9 | @click.option("--cmd", default=["whoami"], help="Container Override Commands")
10 | def submit(queue, jobname, jobdef, cmd):
11 | """Submit a job to AWS Batch Service"""
12 |
13 | result = submit_job(
14 | job_name=jobname,
15 | job_queue=queue,
16 | job_definition=jobdef,
17 | command=cmd
18 | )
19 | click.echo(f"CLI: Run Job Called {jobname}")
20 | return result
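21 |
22 | # The snippet above assumes `import click`, a `cli` click group, and a `submit_job`
23 | # helper defined elsewhere. A minimal sketch of such a helper with boto3 might be:
24 | #
25 | #   import boto3
26 | #
27 | #   def submit_job(job_name, job_queue, job_definition, command):
28 | #       batch = boto3.client("batch")
29 | #       return batch.submit_job(
30 | #           jobName=job_name,
31 | #           jobQueue=job_queue,
32 | #           jobDefinition=job_definition,
33 | #           containerOverrides={"command": command},
34 | #       )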
--------------------------------------------------------------------------------
/chapter7/dyno.py:
--------------------------------------------------------------------------------
1 | def query_police_department_record_by_guid(guid):
2 | """Gets one record in the PD table by guid
3 |
4 | In [5]: rec = query_police_department_record_by_guid(
5 | "7e607b82-9e18-49dc-a9d7-e9628a9147ad"
6 | )
7 |
8 | In [7]: rec
9 | Out[7]:
10 | {'PoliceDepartmentName': 'Hollister',
11 | 'UpdateTime': 'Fri Mar 2 12:43:43 2018',
12 | 'guid': '7e607b82-9e18-49dc-a9d7-e9628a9147ad'}
13 | """
14 |
15 | db = dynamodb_resource()
16 | extra_msg = {"region_name": REGION, "aws_service": "dynamodb",
17 | "police_department_table":POLICE_DEPARTMENTS_TABLE,
18 | "guid":guid}
19 | log.info("Get PD record by GUID", extra=extra_msg)
20 | pd_table = db.Table(POLICE_DEPARTMENTS_TABLE)
21 | response = pd_table.get_item(
22 | Key={
23 | 'guid': guid
24 | }
25 | )
26 | return response['Item']
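27 |
28 | # This function assumes helpers defined elsewhere in the project: `dynamodb_resource()`,
29 | # `log`, `REGION`, and `POLICE_DEPARTMENTS_TABLE`. A minimal sketch of the resource
30 | # helper with boto3 might be:
31 | #
32 | #   import boto3
33 | #
34 | #   def dynamodb_resource():
35 | #       return boto3.resource("dynamodb", region_name=REGION)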
--------------------------------------------------------------------------------
/chapter7/nba_2017_players_with_salary_wiki_twitter.csv:
--------------------------------------------------------------------------------
1 | ,Rk,PLAYER,POSITION,AGE,MP,FG,FGA,FG%,3P,3PA,3P%,2P,2PA,2P%,eFG%,FT,FTA,FT%,ORB,DRB,TRB,AST,STL,BLK,TOV,PF,POINTS,TEAM,GP,MPG,ORPM,DRPM,RPM,WINS_RPM,PIE,PACE,W,SALARY_MILLIONS,PAGEVIEWS,TWITTER_FAVORITE_COUNT,TWITTER_RETWEET_COUNT
2 | 0,1,Russell Westbrook,PG,28,34.6,10.2,24.0,0.425,2.5,7.2,0.34299999999999997,7.7,16.8,0.45899999999999996,0.47600000000000003,8.8,10.4,0.845,1.7,9.0,10.7,10.4,1.6,0.4,5.4,2.3,31.6,OKC,81,34.6,6.74,-0.47,6.27,17.34,23.0,102.31,46,26.5,4279.0,2130.5,559.0
3 | 1,2,James Harden,PG,27,36.4,8.3,18.9,0.44,3.2,9.3,0.34700000000000003,5.1,9.6,0.53,0.525,9.2,10.9,0.847,1.2,7.0,8.1,11.2,1.5,0.5,5.7,2.7,29.1,HOU,81,36.4,6.38,-1.57,4.81,15.54,19.0,102.98,54,26.5,3279.0,969.0,321.5
4 | 2,4,Anthony Davis,C,23,36.1,10.3,20.3,0.505,0.5,1.8,0.299,9.7,18.6,0.524,0.518,6.9,8.6,0.802,2.3,9.5,11.8,2.1,1.3,2.2,2.4,2.2,28.0,NO,75,36.1,0.45,3.9,4.35,12.81,19.2,100.19,31,22.12,82.5,368.0,104.0
5 | 3,6,DeMarcus Cousins,C,26,34.2,9.0,19.9,0.452,1.8,5.0,0.361,7.2,14.8,0.483,0.498,7.2,9.3,0.772,2.1,8.9,11.0,4.6,1.4,1.3,3.7,3.9,27.0,NO/SAC,72,34.2,3.56,0.64,4.2,11.26,17.8,97.11,30,16.96,1625.5,102.0,91.5
6 | 4,7,Damian Lillard,PG,26,35.9,8.8,19.8,0.444,2.9,7.7,0.37,6.0,12.1,0.49200000000000005,0.516,6.5,7.3,0.895,0.6,4.3,4.9,5.9,0.9,0.3,2.6,2.0,27.0,POR,75,35.9,4.63,-1.49,3.14,10.72,15.9,99.68,38,24.33,1830.5,186.5,43.0
7 | 5,8,LeBron James,SF,32,37.8,9.9,18.2,0.5479999999999999,1.7,4.6,0.363,8.3,13.5,0.611,0.594,4.8,7.2,0.674,1.3,7.3,8.6,8.7,1.2,0.6,4.1,1.8,26.4,CLE,74,37.8,6.49,1.93,8.42,20.43,18.3,98.38,51,30.96,14704.0,5533.5,1501.5
8 | 6,9,Kawhi Leonard,SF,25,33.4,8.6,17.7,0.485,2.0,5.2,0.38,6.6,12.5,0.529,0.541,6.3,7.2,0.88,1.1,4.7,5.8,3.5,1.8,0.7,2.1,1.6,25.5,SA,74,33.4,5.83,1.25,7.08,15.53,17.4,95.79,54,17.64,2446.5,2701.5,716.5
9 | 7,10,Stephen Curry,PG,28,33.4,8.5,18.3,0.46799999999999997,4.1,10.0,0.41100000000000003,4.4,8.3,0.537,0.58,4.1,4.6,0.898,0.8,3.7,4.5,6.6,1.8,0.2,3.0,2.3,25.3,GS,79,33.4,7.27,0.14,7.41,18.8,15.1,105.08,65,12.11,17570.5,12278.0,2893.0
10 | 8,11,Kyrie Irving,PG,24,35.1,9.3,19.7,0.473,2.5,6.1,0.401,6.9,13.6,0.505,0.535,4.1,4.6,0.905,0.7,2.5,3.2,5.8,1.2,0.3,2.5,2.2,25.2,CLE,72,35.1,4.35,-2.3,2.05,8.28,13.5,99.12,47,17.64,4796.0,1541.0,695.0
11 | 9,12,Kevin Durant,SF,28,33.4,8.9,16.5,0.537,1.9,5.0,0.375,7.0,11.5,0.608,0.594,5.4,6.2,0.875,0.6,7.6,8.3,4.8,1.1,1.6,2.2,1.9,25.1,GS,62,33.4,4.41,1.33,5.74,12.24,18.6,103.71,51,26.5,6288.5,1425.5,366.0
12 | 10,13,Karl-Anthony Towns,C,21,37.0,9.8,18.0,0.542,1.2,3.4,0.36700000000000005,8.5,14.7,0.5820000000000001,0.5760000000000001,4.3,5.2,0.8320000000000001,3.6,8.7,12.3,2.7,0.7,1.3,2.6,2.9,25.1,MIN,82,37.0,3.54,-1.41,2.13,10.0,17.1,97.1,31,5.96,2046.5,0.0,105.0
13 | 11,14,Jimmy Butler,SF,27,37.0,7.5,16.5,0.455,1.2,3.3,0.36700000000000005,6.3,13.2,0.47700000000000004,0.49200000000000005,7.7,8.9,0.865,1.7,4.5,6.2,5.5,1.9,0.4,2.1,1.5,23.9,CHI,76,37.0,4.82,1.8,6.62,17.35,16.4,97.78,40,17.55,41.0,89.5,30.0
14 | 12,15,Paul George,SF,26,35.9,8.3,18.0,0.461,2.6,6.6,0.39299999999999996,5.7,11.4,0.501,0.534,4.5,5.0,0.898,0.8,5.8,6.6,3.3,1.6,0.4,2.9,2.7,23.7,IND,75,35.9,2.67,-0.09,2.58,9.72,13.6,98.13,39,18.31,1600.0,228.5,128.0
15 | 13,16,Andrew Wiggins,SF,21,37.2,8.6,19.1,0.452,1.3,3.5,0.35600000000000004,7.4,15.6,0.473,0.484,5.0,6.6,0.76,1.2,2.8,4.0,2.3,1.0,0.4,2.3,2.2,23.6,MIN,82,37.2,1.56,-3.16,-1.6,2.94,9.5,97.56,31,6.01,1778.5,182.0,54.5
16 | 14,17,Kemba Walker,PG,26,34.7,8.1,18.3,0.444,3.0,7.6,0.39899999999999997,5.1,10.7,0.47600000000000003,0.527,3.8,4.5,0.847,0.6,3.3,3.9,5.5,1.1,0.3,2.1,1.5,23.2,CHA,79,34.7,3.93,-1.25,2.68,9.95,13.7,98.19,36,12.0,640.5,59.5,32.5
17 | 15,18,Bradley Beal,SG,23,34.9,8.3,17.2,0.48200000000000004,2.9,7.2,0.40399999999999997,5.4,10.0,0.5379999999999999,0.5660000000000001,3.7,4.4,0.825,0.7,2.4,3.1,3.5,1.1,0.3,2.0,2.2,23.1,WSH,77,34.9,3.29,-1.04,2.25,9.26,11.9,100.11,48,22.12,413.5,55.5,40.5
18 | 16,19,John Wall,PG,26,36.4,8.3,18.4,0.451,1.1,3.5,0.327,7.2,14.9,0.48,0.48200000000000004,5.4,6.8,0.8009999999999999,0.8,3.4,4.2,10.7,2.0,0.6,4.1,1.9,23.1,WSH,78,36.4,3.52,-1.26,2.26,9.9,15.3,101.07,48,16.96,57.0,0.0,77.0
19 | 17,22,Carmelo Anthony,SF,32,34.3,8.1,18.8,0.433,2.0,5.7,0.359,6.1,13.1,0.466,0.488,4.1,4.9,0.833,0.8,5.1,5.9,2.9,0.8,0.5,2.1,2.7,22.4,NY,74,34.3,1.87,-1.75,0.12,5.26,11.8,98.48,29,24.56,3773.5,720.5,220.0
20 | 18,24,Klay Thompson,SG,26,34.0,8.3,17.6,0.46799999999999997,3.4,8.3,0.414,4.8,9.3,0.516,0.565,2.4,2.8,0.853,0.6,3.0,3.7,2.1,0.8,0.5,1.6,1.8,22.3,GS,78,34.0,2.78,-0.45,2.33,9.53,10.9,102.15,66,16.66,3708.0,797.0,225.0
21 | 19,25,Devin Booker,SG,20,35.0,7.8,18.3,0.423,1.9,5.2,0.363,5.9,13.2,0.447,0.475,4.7,5.7,0.8320000000000001,0.6,2.6,3.2,3.4,0.9,0.3,3.1,3.1,22.1,PHX,78,35.0,1.28,-2.58,-1.3,3.29,8.9,103.62,24,2.22,1210.5,1170.5,335.5
22 | 20,26,Gordon Hayward,SF,26,34.5,7.5,15.8,0.47100000000000003,2.0,5.1,0.39799999999999996,5.4,10.7,0.506,0.536,5.0,5.9,0.8440000000000001,0.7,4.7,5.4,3.5,1.0,0.3,1.9,1.6,21.9,UTAH,73,34.5,3.14,-0.08,3.06,9.45,15.8,93.44,46,16.07,689.0,319.0,50.5
23 | 21,27,Blake Griffin,PF,27,34.0,7.9,15.9,0.493,0.6,1.9,0.336,7.2,14.1,0.514,0.513,5.2,6.9,0.76,1.8,6.3,8.1,4.9,0.9,0.4,2.3,2.6,21.6,LAC,61,34.0,3.02,0.76,3.78,9.2,15.2,98.71,40,20.14,2257.0,269.0,98.5
24 | 22,28,Eric Bledsoe,PG,27,33.0,6.8,15.7,0.434,1.6,4.7,0.335,5.2,11.0,0.47700000000000004,0.485,5.9,6.9,0.847,0.8,4.1,4.8,6.3,1.4,0.5,3.4,2.5,21.1,PHX,66,33.0,2.33,-0.64,1.69,6.85,14.1,103.09,22,14.0,434.5,9.0,30.5
25 | 23,29,Mike Conley,PG,29,33.2,6.7,14.6,0.46,2.5,6.1,0.408,4.2,8.6,0.49700000000000005,0.545,4.6,5.3,0.8590000000000001,0.4,3.0,3.5,6.3,1.3,0.3,2.3,1.8,20.5,MEM,69,33.2,4.67,-0.2,4.47,10.5,16.0,94.64,35,26.54,11.0,257.5,90.0
26 | 24,31,Goran Dragic,PG,30,33.7,7.3,15.4,0.475,1.6,4.0,0.405,5.7,11.4,0.499,0.527,4.1,5.2,0.79,0.8,3.0,3.8,5.8,1.2,0.2,2.9,2.7,20.3,MIA,73,33.7,2.54,-1.62,0.92,6.3,13.0,98.16,40,15.89,19.0,62.5,45.0
27 | 25,32,Joel Embiid,C,22,25.4,6.5,13.8,0.466,1.2,3.2,0.36700000000000005,5.3,10.7,0.495,0.508,6.2,7.9,0.7829999999999999,2.0,5.9,7.8,2.1,0.9,2.5,3.8,3.6,20.2,PHI,31,25.4,-0.57,2.27,1.7,2.46,17.4,100.25,13,4.83,1075.5,6852.5,2941.0
28 | 26,33,Jabari Parker,PF,21,33.9,7.8,16.0,0.49,1.3,3.5,0.365,6.5,12.5,0.525,0.53,3.2,4.3,0.743,1.5,4.6,6.2,2.8,1.0,0.4,1.8,2.2,20.1,MIL,51,33.9,0.83,-1.61,-0.78,2.59,12.1,98.47,22,5.37,815.0,267.0,125.0
29 | 27,34,Marc Gasol,C,32,34.2,7.2,15.7,0.45899999999999996,1.4,3.6,0.38799999999999996,5.8,12.1,0.48,0.503,3.8,4.5,0.8370000000000001,0.8,5.5,6.3,4.6,0.9,1.3,2.2,2.3,19.5,MEM,74,34.2,1.86,0.72,2.58,8.69,15.1,94.43,40,21.17,825.0,0.0,75.5
30 | 28,36,Kevin Love,PF,28,31.4,6.2,14.5,0.42700000000000005,2.4,6.5,0.373,3.8,8.0,0.47100000000000003,0.51,4.3,4.9,0.871,2.5,8.6,11.1,1.9,0.9,0.4,2.0,2.1,19.0,CLE,60,31.4,2.77,2.26,5.03,9.76,14.8,99.66,40,21.17,2013.5,866.5,194.5
31 | 29,37,Zach LaVine,SG,21,37.2,6.9,15.1,0.45899999999999996,2.6,6.6,0.387,4.4,8.5,0.515,0.544,2.5,3.0,0.836,0.4,3.0,3.4,3.0,0.9,0.2,1.8,2.2,18.9,MIN,47,37.2,-0.62,-2.35,-2.97,0.17,9.5,96.38,16,2.24,1103.0,178.5,67.0
32 | 30,39,Dwyane Wade,SG,35,29.9,6.9,15.9,0.434,0.8,2.4,0.31,6.2,13.5,0.456,0.457,3.7,4.7,0.794,1.1,3.5,4.5,3.8,1.4,0.7,2.3,1.9,18.3,CHI,60,29.9,-0.45,-0.46,-0.91,2.52,12.1,98.2,29,23.2,4671.0,349.5,149.5
33 | 31,40,Danilo Gallinari,SF,28,33.9,5.3,11.9,0.447,2.0,5.1,0.389,3.3,6.8,0.491,0.531,5.5,6.1,0.902,0.6,4.5,5.2,2.1,0.6,0.2,1.3,1.5,18.2,DEN,63,33.9,2.67,0.21,2.88,8.39,11.4,100.53,31,15.05,350.0,29.0,6.0
34 | 32,41,Paul Millsap,PF,31,34.0,6.2,14.1,0.442,1.1,3.5,0.311,5.1,10.6,0.486,0.48100000000000004,4.5,5.9,0.768,1.6,6.1,7.7,3.7,1.3,0.9,2.3,2.7,18.1,ATL,69,34.0,1.23,3.35,4.58,11.51,12.6,100.22,40,20.07,436.5,60.0,32.0
35 | 33,42,Chris Paul,PG,31,31.5,6.1,12.9,0.47600000000000003,2.0,5.0,0.41100000000000003,4.1,7.9,0.518,0.555,3.8,4.3,0.892,0.7,4.3,5.0,9.2,2.0,0.1,2.4,2.4,18.1,LAC,61,31.5,5.16,2.76,7.92,13.48,18.2,98.19,43,22.87,2689.5,829.0,178.5
36 | 34,43,Kristaps Porzingis,PF,21,32.8,6.7,14.9,0.45,1.7,4.8,0.35700000000000004,5.0,10.2,0.493,0.507,3.0,3.8,0.7859999999999999,1.7,5.5,7.2,1.5,0.7,2.0,1.8,3.7,18.1,NY,66,32.8,-0.31,1.9,1.59,6.54,10.6,99.15,26,4.32,20.0,485.0,214.0
37 | 35,44,Derrick Rose,PG,28,32.5,7.2,15.3,0.47100000000000003,0.2,0.9,0.217,7.0,14.3,0.48700000000000004,0.47700000000000004,3.5,4.0,0.8740000000000001,1.0,2.8,3.8,4.4,0.7,0.3,2.3,1.3,18.0,NY,64,32.5,0.11,-2.36,-2.25,1.17,11.2,99.19,26,21.32,3273.5,1864.0,2504.5
38 | 36,45,Dennis Schroder,PG,23,31.5,6.9,15.4,0.451,1.3,3.7,0.34,5.7,11.6,0.48700000000000004,0.493,2.8,3.2,0.855,0.5,2.6,3.1,6.3,0.9,0.2,3.3,1.9,17.9,ATL,79,31.5,0.31,-2.76,-2.45,1.07,11.7,100.32,42,2.71,2.0,51.5,20.0
39 | 37,47,LaMarcus Aldridge,PF,31,32.4,6.9,14.6,0.47700000000000004,0.3,0.8,0.41100000000000003,6.6,13.8,0.48,0.488,3.1,3.8,0.812,2.4,4.9,7.3,1.9,0.6,1.2,1.4,2.2,17.3,SA,72,32.4,-0.09,1.05,0.96,5.79,12.2,94.47,52,20.58,958.0,195.0,63.0
40 | 38,48,Evan Fournier,SG,24,32.9,6.0,13.7,0.439,1.9,5.3,0.35600000000000004,4.1,8.4,0.491,0.508,3.3,4.1,0.805,0.6,2.4,3.1,3.0,1.0,0.1,2.1,2.6,17.2,ORL,68,32.9,0.07,-1.17,-1.1,2.89,8.5,99.6,23,17.0,253.5,21.0,10.0
41 | 39,50,George Hill,PG,30,31.5,5.9,12.4,0.47700000000000004,1.9,4.8,0.40299999999999997,4.0,7.6,0.523,0.5539999999999999,3.2,4.0,0.8009999999999999,0.5,2.9,3.4,4.2,1.0,0.2,1.7,2.3,16.9,UTAH,49,31.5,2.64,1.11,3.75,6.28,12.8,92.9,33,8.0,12.0,33.5,3.0
42 | 40,51,Nikola Jokic,C,21,27.9,6.8,11.7,0.578,0.6,1.9,0.324,6.2,9.8,0.628,0.605,2.6,3.1,0.825,2.9,6.9,9.8,4.9,0.8,0.8,2.3,2.9,16.7,DEN,73,27.9,4.44,2.29,6.73,13.18,16.7,100.76,37,1.36,8.0,0.0,1.0
43 | 41,53,Eric Gordon,SG,28,31.0,5.5,13.5,0.406,3.3,8.8,0.37200000000000005,2.2,4.7,0.46799999999999997,0.527,2.0,2.3,0.84,0.4,2.3,2.7,2.5,0.6,0.5,1.6,2.0,16.2,HOU,75,31.0,1.51,-0.48,1.03,6.36,8.2,103.07,50,12.39,346.0,55.0,26.0
44 | 42,54,Tobias Harris,PF,24,31.3,6.2,13.0,0.48100000000000004,1.3,3.8,0.34700000000000003,4.9,9.1,0.537,0.532,2.3,2.8,0.841,0.8,4.3,5.1,1.7,0.7,0.5,1.2,1.6,16.1,DET,82,31.3,1.19,-0.25,0.94,6.55,11.3,97.41,37,17.2,327.0,2.0,4.0
45 | 43,55,Victor Oladipo,SG,24,33.2,6.1,13.9,0.442,1.9,5.3,0.361,4.3,8.7,0.491,0.51,1.7,2.3,0.753,0.6,3.8,4.3,2.6,1.2,0.3,1.8,2.3,15.9,OKC,67,33.2,0.1,1.56,1.66,6.88,9.5,101.1,39,6.55,715.0,78.0,28.0
46 | 44,56,Dion Waiters,SG,25,30.1,6.1,14.4,0.424,1.8,4.7,0.395,4.3,9.7,0.43799999999999994,0.488,1.8,2.8,0.6459999999999999,0.4,2.9,3.3,4.3,0.9,0.4,2.2,2.1,15.8,MIA,46,30.1,0.12,-0.07,0.05,2.8,10.0,98.03,27,2.9,512.0,0.0,7.0
47 | 45,57,Wilson Chandler,SF,29,30.9,6.1,13.2,0.461,1.5,4.6,0.337,4.5,8.6,0.527,0.52,2.0,2.7,0.727,1.5,5.0,6.5,2.0,0.7,0.4,1.6,2.4,15.7,DEN,71,30.9,0.3,-1.54,-1.24,2.69,9.8,100.43,35,11.23,166.0,31.5,18.5
48 | 46,58,D'Angelo Russell,PG,20,28.7,5.6,13.8,0.405,2.1,6.1,0.35200000000000004,3.4,7.7,0.447,0.483,2.3,3.0,0.782,0.5,3.0,3.5,4.8,1.4,0.3,2.8,2.1,15.6,LAL,63,28.7,0.75,-2.45,-1.7,1.69,9.9,100.87,21,5.33,1327.0,0.0,232.0
49 | 47,59,Jrue Holiday,PG,26,32.7,6.0,13.3,0.45399999999999996,1.5,4.2,0.35600000000000004,4.6,9.1,0.498,0.51,1.8,2.5,0.708,0.7,3.3,3.9,7.3,1.5,0.7,2.9,2.0,15.4,NO,67,32.7,0.64,1.18,1.82,6.89,12.1,100.12,32,11.29,547.5,0.0,39.0
50 | 48,60,Jeff Teague,PG,28,32.4,4.9,11.1,0.442,1.1,3.1,0.35700000000000004,3.8,8.0,0.475,0.49200000000000005,4.4,5.1,0.867,0.4,3.6,4.0,7.8,1.2,0.4,2.6,2.0,15.3,IND,82,32.4,1.86,-0.22,1.64,8.08,13.1,98.98,42,8.8,12.0,17.5,13.5
51 | 49,61,Nicolas Batum,SG,28,34.0,5.1,12.7,0.40299999999999997,1.8,5.3,0.33299999999999996,3.4,7.4,0.45299999999999996,0.47200000000000003,3.2,3.7,0.856,0.6,5.6,6.2,5.9,1.1,0.4,2.5,1.4,15.1,CHA,77,34.0,1.3,-0.35,0.95,6.73,11.8,98.76,35,20.87,374.5,114.5,66.0
52 | 50,63,Gary Harris,SG,22,31.3,5.6,11.2,0.502,1.9,4.5,0.42,3.7,6.7,0.5579999999999999,0.586,1.8,2.4,0.7759999999999999,0.8,2.3,3.1,2.9,1.2,0.1,1.3,1.6,14.9,DEN,57,31.3,2.88,-2.37,0.51,4.29,9.1,100.41,29,1.66,187.5,6.0,2.5
53 | 51,65,Jordan Clarkson,SG,24,29.2,5.8,13.1,0.445,1.4,4.3,0.32899999999999996,4.4,8.7,0.503,0.5,1.6,2.0,0.7979999999999999,0.6,2.4,3.0,2.6,1.1,0.1,2.0,1.8,14.7,LAL,82,29.2,-0.63,-3.39,-4.02,-1.4,8.4,101.34,26,12.5,1386.5,55.5,179.5
54 | 52,66,Khris Middleton,SF,25,30.7,5.2,11.5,0.45,1.6,3.6,0.433,3.6,7.9,0.45899999999999996,0.518,2.8,3.2,0.88,0.4,3.9,4.2,3.4,1.4,0.2,2.2,2.7,14.7,MIL,29,30.7,0.14,1.26,1.4,2.44,10.4,94.63,19,15.2,255.5,16.0,9.0
55 | 53,67,Nikola Vucevic,C,26,28.8,6.4,13.7,0.46799999999999997,0.3,1.0,0.307,6.1,12.7,0.48100000000000004,0.48,1.4,2.1,0.669,2.3,8.0,10.4,2.8,1.0,1.0,1.6,2.4,14.6,ORL,75,28.8,-1.38,2.15,0.77,5.44,14.2,100.74,27,11.75,10.0,5.5,35.5
56 | 54,70,Jeremy Lin,PG,28,24.5,4.9,11.1,0.43799999999999994,1.6,4.3,0.37200000000000005,3.3,6.8,0.48,0.51,3.2,3.9,0.816,0.3,3.4,3.8,5.1,1.2,0.4,2.4,2.2,14.5,BKN,36,24.5,0.48,-0.03,0.45,2.21,13.1,106.86,13,11.48,3147.0,936.5,197.5
57 | 55,71,Myles Turner,C,20,31.4,5.5,10.7,0.511,0.5,1.4,0.348,5.0,9.3,0.536,0.534,3.0,3.7,0.809,1.7,5.6,7.3,1.3,0.9,2.1,1.3,3.2,14.5,IND,81,31.4,0.09,2.57,2.66,9.31,11.2,98.5,42,2.46,10.0,53.0,31.0
58 | 56,73,Enes Kanter,C,24,21.3,5.6,10.2,0.545,0.1,0.5,0.132,5.5,9.7,0.568,0.5489999999999999,3.1,4.0,0.7859999999999999,2.7,4.0,6.7,0.9,0.4,0.5,1.7,2.1,14.3,OKC,72,21.3,0.56,-1.24,-0.68,2.4,15.4,99.17,43,17.15,713.0,147.0,195.0
59 | 57,74,Dirk Nowitzki,PF,38,26.4,5.5,12.6,0.43700000000000006,1.5,3.9,0.37799999999999995,4.0,8.7,0.46299999999999997,0.495,1.8,2.1,0.875,0.4,6.1,6.5,1.5,0.6,0.7,0.9,2.1,14.2,DAL,54,26.4,-0.38,0.64,0.26,3.02,12.4,96.06,23,25.0,2960.0,1564.0,500.5
60 | 58,76,Zach Randolph,PF,35,24.5,5.9,13.2,0.449,0.3,1.3,0.223,5.6,11.9,0.474,0.46,1.9,2.6,0.731,2.5,5.7,8.2,1.7,0.5,0.1,1.4,1.9,14.1,MEM,73,24.5,-0.37,-1.05,-1.42,1.83,15.2,94.36,38,10.36,528.5,47.0,18.5
61 | 59,77,Rudy Gobert,C,24,33.9,5.1,7.7,0.6609999999999999,0.0,0.0,0.0,5.1,7.7,0.662,0.6609999999999999,3.8,5.9,0.653,3.9,8.9,12.8,1.2,0.6,2.6,1.8,3.0,14.0,UTAH,81,33.9,0.35,6.02,6.37,15.55,16.1,93.11,51,2.12,368.5,731.0,225.5
62 | 60,78,Al Horford,C,30,32.3,5.6,11.8,0.473,1.3,3.6,0.355,4.3,8.2,0.524,0.527,1.6,2.0,0.8,1.4,5.4,6.8,5.0,0.8,1.3,1.7,2.0,14.0,BOS,68,32.3,0.76,1.06,1.82,6.93,12.5,98.96,46,26.54,870.0,136.0,71.0
63 | 61,79,Marcus Morris,SF,27,32.5,5.3,12.7,0.418,1.5,4.5,0.331,3.8,8.2,0.466,0.47700000000000004,1.8,2.3,0.784,1.0,3.7,4.6,2.0,0.7,0.3,1.1,2.1,14.0,DET,79,32.5,-0.11,0.63,0.52,5.85,8.0,97.01,36,4.62,5.5,6.0,23.0
64 | 62,80,Markieff Morris,PF,27,31.2,5.3,11.7,0.457,0.9,2.6,0.36200000000000004,4.4,9.1,0.483,0.49700000000000005,2.4,2.8,0.8370000000000001,1.4,5.1,6.5,1.7,1.1,0.6,1.7,3.3,14.0,WSH,76,31.2,-0.55,2.41,1.86,7.61,8.9,100.12,45,7.4,331.5,9.0,18.0
65 | 63,81,Jae Crowder,SF,26,32.4,4.6,10.0,0.46299999999999997,2.2,5.5,0.39799999999999996,2.4,4.5,0.54,0.5720000000000001,2.4,3.0,0.8109999999999999,0.7,5.1,5.8,2.2,1.0,0.3,1.1,2.2,13.9,BOS,72,32.4,2.2,1.69,3.89,10.52,10.3,99.94,48,6.29,438.5,55.0,47.0
66 | 64,82,Kentavious Caldwell-Pope,SG,23,33.3,4.9,12.2,0.39899999999999997,2.0,5.8,0.35,2.9,6.5,0.442,0.48100000000000004,2.0,2.4,0.8320000000000001,0.7,2.5,3.3,2.5,1.2,0.2,1.1,1.6,13.8,DET,76,33.3,0.71,-1.31,-0.6,4.02,7.8,97.61,33,3.68,223.5,9.0,10.0
67 | 65,83,Will Barton,SG,26,28.4,4.9,11.1,0.44299999999999995,1.5,3.9,0.37,3.5,7.2,0.483,0.508,2.4,3.2,0.753,1.0,3.4,4.3,3.4,0.8,0.5,1.6,1.8,13.7,DEN,60,28.4,0.05,-1.2,-1.15,2.2,10.2,100.14,31,3.53,190.0,1.0,3.0
68 | 66,85,Tyler Johnson,PG,24,29.8,4.9,11.3,0.433,1.3,3.4,0.37200000000000005,3.6,7.9,0.46,0.49,2.7,3.5,0.768,0.7,3.3,4.0,3.2,1.2,0.6,1.2,2.4,13.7,MIA,73,29.8,0.13,0.18,0.31,4.72,10.8,98.21,35,5.63,11.0,3.0,6.0
69 | 67,88,Dwight Howard,C,31,29.7,5.2,8.3,0.633,0.0,0.0,0.0,5.2,8.3,0.635,0.633,3.1,5.7,0.5329999999999999,4.0,8.7,12.7,1.4,0.9,1.2,2.3,2.7,13.5,ATL,74,29.7,-1.7,2.6,0.9,5.65,15.1,99.8,37,23.18,2558.5,7.0,1.0
70 | 68,91,Darren Collison,PG,29,30.3,5.0,10.5,0.47600000000000003,1.1,2.6,0.41700000000000004,3.9,7.9,0.495,0.527,2.2,2.5,0.86,0.3,1.9,2.2,4.6,1.0,0.1,1.7,1.8,13.2,SAC,68,30.3,1.06,-1.47,-0.41,3.44,10.0,96.23,26,5.23,295.0,4.0,7.0
71 | 69,92,Julius Randle,PF,22,28.8,5.1,10.4,0.488,0.2,0.9,0.27,4.9,9.6,0.507,0.499,2.8,3.8,0.723,2.0,6.6,8.6,3.6,0.7,0.5,2.3,3.4,13.2,LAL,74,28.8,-0.79,-1.03,-1.82,1.83,11.3,100.9,24,3.27,519.0,102.0,59.0
72 | 70,93,Nick Young,SG,31,25.9,4.5,10.6,0.43,2.8,7.0,0.40399999999999997,1.7,3.5,0.48100000000000004,0.564,1.3,1.5,0.856,0.4,1.9,2.3,1.0,0.6,0.2,0.6,2.3,13.2,LAL,60,25.9,2.28,-1.63,0.65,3.82,7.5,100.59,19,5.44,27.0,45.5,14.0
73 | 71,99,Elfrid Payton,PG,22,29.4,5.2,11.1,0.47100000000000003,0.5,1.8,0.27399999999999997,4.8,9.3,0.509,0.493,1.8,2.6,0.6920000000000001,1.1,3.6,4.7,6.5,1.1,0.5,2.2,2.2,12.8,ORL,82,29.4,1.12,-0.78,0.34,5.38,11.7,100.23,29,2.61,4.0,19.0,8.0
74 | 72,101,Aaron Gordon,SF,21,28.7,4.9,10.8,0.45399999999999996,1.0,3.3,0.28800000000000003,4.0,7.5,0.528,0.499,2.0,2.7,0.7190000000000001,1.5,3.6,5.1,1.9,0.8,0.5,1.1,2.2,12.7,ORL,80,28.7,1.25,-0.78,0.47,5.32,8.8,99.7,29,4.35,666.0,42.5,16.0
75 | 73,105,Pau Gasol,C,36,25.4,4.7,9.4,0.502,0.9,1.6,0.5379999999999999,3.9,7.8,0.494,0.5479999999999999,2.0,2.9,0.7070000000000001,1.7,6.2,7.8,2.3,0.4,1.1,1.3,1.7,12.4,SA,64,25.4,0.86,1.49,2.35,5.48,14.3,95.48,47,15.5,1983.5,882.5,148.5
76 | 74,106,Jamal Crawford,SG,36,26.3,4.4,10.6,0.413,1.4,3.9,0.36,3.0,6.7,0.44299999999999995,0.479,2.1,2.5,0.857,0.2,1.4,1.6,2.6,0.7,0.2,1.6,1.4,12.3,LAC,82,26.3,-0.06,-3.18,-3.24,-0.16,7.9,98.53,51,13.25,1010.0,49.0,32.0
77 | 75,107,Austin Rivers,SG,24,27.8,4.4,9.9,0.442,1.5,4.0,0.371,2.9,5.8,0.491,0.518,1.8,2.6,0.691,0.3,1.9,2.2,2.8,0.6,0.1,1.6,2.5,12.0,LAC,74,27.8,-0.17,-1.73,-1.9,1.62,6.9,98.18,43,11.0,525.0,75.5,23.0
78 | 76,108,Jonas Valanciunas,C,24,25.8,4.9,8.8,0.557,0.0,0.0,0.5,4.9,8.8,0.557,0.5579999999999999,2.2,2.7,0.8109999999999999,2.8,6.7,9.5,0.7,0.5,0.8,1.3,2.7,12.0,TOR,80,25.8,-0.74,0.75,0.01,4.06,13.6,96.61,50,14.38,6.0,65.0,40.0
79 | 77,110,Trevor Ariza,SF,31,34.7,4.1,10.0,0.409,2.4,6.9,0.344,1.7,3.0,0.556,0.528,1.2,1.6,0.738,0.7,5.1,5.7,2.2,1.8,0.3,0.9,1.7,11.7,HOU,80,34.7,0.86,1.07,1.93,9.29,8.5,102.78,53,7.81,417.5,10.0,15.0
80 | 78,111,Frank Kaminsky,C,23,26.1,4.3,10.7,0.39899999999999997,1.5,4.7,0.32799999999999996,2.7,6.0,0.455,0.47100000000000003,1.6,2.1,0.7559999999999999,0.8,3.7,4.5,2.2,0.6,0.5,1.0,1.9,11.7,CHA,75,26.1,0.96,-0.51,0.45,4.4,8.5,98.15,31,2.73,517.0,112.5,29.0
81 | 79,112,Greg Monroe,C,26,22.5,4.8,9.0,0.5329999999999999,0.0,0.0,0.0,4.8,8.9,0.536,0.5329999999999999,2.2,3.0,0.741,2.1,4.5,6.6,2.3,1.1,0.5,1.7,2.1,11.7,MIL,81,22.5,1.08,0.47,1.55,5.34,13.9,96.98,42,17.15,271.0,9.0,7.0
82 | 80,113,Steven Adams,C,23,29.9,4.7,8.2,0.5710000000000001,0.0,0.0,0.0,4.7,8.2,0.5720000000000001,0.5710000000000001,2.0,3.2,0.611,3.5,4.2,7.7,1.1,1.1,1.0,1.8,2.4,11.3,OKC,80,29.9,-0.3,1.68,1.38,7.01,9.7,101.16,47,3.14,1132.0,36.0,99.0
83 | 81,115,Ricky Rubio,PG,26,32.9,3.5,8.7,0.402,0.8,2.6,0.306,2.7,6.1,0.44299999999999995,0.44799999999999995,3.4,3.8,0.8909999999999999,0.9,3.2,4.1,9.1,1.7,0.1,2.6,2.7,11.1,MIN,75,32.9,1.73,0.76,2.49,8.72,11.4,97.07,28,13.55,763.0,262.5,76.0
84 | 82,116,Jerryd Bayless,PG,28,23.7,3.7,10.7,0.344,0.7,1.7,0.4,3.0,9.0,0.33299999999999996,0.375,3.0,3.3,0.9,1.0,3.0,4.0,4.3,0.0,0.0,3.0,1.3,11.0,PHI,3,23.7,-2.23,-0.47,-2.7,0.02,9.7,97.84,1,9.42,188.0,2.0,5.0
85 | 83,119,Emmanuel Mudiay,PG,20,25.6,3.8,10.0,0.377,1.0,3.2,0.315,2.8,6.8,0.408,0.428,2.4,3.0,0.784,0.5,2.7,3.2,3.9,0.7,0.2,2.2,1.7,11.0,DEN,55,25.6,-1.52,-2.38,-3.9,-0.71,7.4,101.93,25,3.24,545.0,69.0,21.0
86 | 84,120,Terrence Ross,SF,25,25.1,4.2,9.6,0.43700000000000006,1.8,5.0,0.363,2.4,4.6,0.518,0.532,0.8,1.0,0.831,0.2,2.4,2.6,1.1,1.1,0.4,0.9,1.8,11.0,ORL/TOR,78,25.1,1.36,-1.23,0.13,4.06,7.8,99.82,39,10.0,446.5,0.0,0.0
87 | 85,122,Thaddeus Young,PF,28,30.2,4.9,9.3,0.527,0.6,1.6,0.381,4.3,7.7,0.557,0.56,0.6,1.2,0.523,1.8,4.3,6.1,1.6,1.5,0.4,1.3,1.8,11.0,IND,74,30.2,-1.09,1.94,0.85,5.66,9.7,98.97,40,14.15,269.0,0.0,1.5
88 | 86,123,J.J. Barea,PG,32,22.0,4.1,9.8,0.414,1.5,4.2,0.358,2.5,5.6,0.456,0.491,1.3,1.5,0.863,0.3,2.1,2.4,5.5,0.4,0.0,1.8,0.9,10.9,DAL,35,22.0,0.15,-1.57,-1.42,0.8,13.4,95.14,11,4.1,5.0,0.0,17.5
89 | 87,124,Justise Winslow,SF,20,34.7,4.4,12.5,0.35600000000000004,0.4,1.9,0.2,4.1,10.6,0.384,0.371,1.6,2.6,0.617,1.3,3.9,5.2,3.7,1.4,0.3,1.8,2.9,10.9,MIA,18,34.7,-4.08,0.16,-3.92,-0.31,5.9,97.53,4,2.59,573.0,88.5,33.5
90 | 88,125,Taj Gibson,PF,31,25.5,4.7,9.1,0.515,0.0,0.2,0.231,4.6,8.9,0.52,0.517,1.4,2.0,0.715,2.0,4.2,6.2,0.9,0.5,0.8,1.3,2.1,10.8,CHI/OKC,78,25.5,-1.76,2.02,0.26,4.29,9.7,98.39,41,8.95,231.0,52.0,17.0
91 | 89,126,Marcin Gortat,C,32,31.2,4.8,8.2,0.579,0.0,0.0,0.0,4.8,8.2,0.58,0.579,1.3,1.9,0.648,2.9,7.5,10.4,1.5,0.5,0.7,1.5,2.6,10.8,WSH,82,31.2,-1.25,1.18,-0.07,5.01,10.9,99.85,49,12.0,315.0,1.0,1.0
92 | 90,128,Courtney Lee,SG,31,31.9,4.2,9.1,0.456,1.4,3.5,0.401,2.8,5.6,0.49,0.5329999999999999,1.1,1.3,0.867,0.7,2.7,3.4,2.3,1.1,0.3,0.9,1.8,10.8,NY,77,31.9,0.58,-1.51,-0.93,3.4,7.7,97.86,29,11.24,337.5,2.0,1.0
93 | 91,129,Allen Crabbe,SG,24,28.5,3.8,8.2,0.46799999999999997,1.7,3.8,0.444,2.1,4.4,0.49,0.5720000000000001,1.3,1.6,0.847,0.2,2.6,2.9,1.2,0.7,0.3,0.8,2.2,10.7,POR,79,28.5,0.22,-2.87,-2.65,0.68,7.4,99.95,39,18.5,193.5,11.5,9.0
94 | 92,132,Buddy Hield,SG,23,23.0,4.0,9.4,0.426,1.8,4.6,0.391,2.2,4.7,0.461,0.523,0.8,0.9,0.8420000000000001,0.4,2.9,3.3,1.5,0.5,0.1,1.2,1.4,10.6,NO/SAC,82,23.0,-0.92,-2.28,-3.2,-0.09,8.4,98.9,31,3.52,811.5,512.0,333.0
95 | 93,133,Nikola Mirotic,PF,25,24.0,3.7,8.9,0.413,1.8,5.4,0.342,1.8,3.5,0.52,0.516,1.4,1.8,0.773,0.9,4.6,5.5,1.1,0.8,0.8,1.1,1.8,10.6,CHI,70,24.0,0.92,1.31,2.23,5.69,10.4,97.95,37,5.78,5.0,119.0,50.5
96 | 94,134,Marcus Smart,SG,22,30.4,3.4,9.5,0.359,1.2,4.2,0.28300000000000003,2.2,5.3,0.42,0.42200000000000004,2.6,3.2,0.812,1.0,2.9,3.9,4.6,1.6,0.4,2.0,2.4,10.6,BOS,79,30.4,0.26,0.49,0.75,5.98,8.1,100.29,51,3.58,399.5,1.0,9.0
97 | 95,135,Marco Belinelli,SG,30,24.0,3.6,8.3,0.429,1.4,3.8,0.36,2.2,4.5,0.488,0.512,2.0,2.3,0.893,0.2,2.2,2.4,2.0,0.6,0.1,0.9,1.2,10.5,CHA,74,24.0,0.67,-1.87,-1.2,2.15,8.8,97.98,34,6.33,282.5,0.0,15.0
98 | 96,136,Wayne Ellington,SG,29,24.2,3.7,9.0,0.41600000000000004,2.4,6.4,0.37799999999999995,1.3,2.6,0.509,0.55,0.6,0.7,0.86,0.3,1.8,2.1,1.1,0.6,0.1,0.5,1.1,10.5,MIA,62,24.2,2.26,-1.6,0.66,3.58,8.2,97.3,35,6.0,204.0,18.0,12.0
99 | 97,139,Tyreke Evans,SF,27,19.7,3.8,9.3,0.405,1.1,3.0,0.35600000000000004,2.7,6.3,0.429,0.462,1.7,2.3,0.75,0.3,3.1,3.4,3.1,0.9,0.2,1.5,1.5,10.3,NO/SAC,40,19.7,-1.52,0.02,-1.5,0.81,11.8,98.53,17,10.2,701.0,6.0,9.0
100 | 98,140,Cody Zeller,PF,24,27.8,4.1,7.1,0.5710000000000001,0.0,0.0,0.0,4.1,7.1,0.5720000000000001,0.5710000000000001,2.1,3.2,0.679,2.2,4.4,6.5,1.6,1.0,0.9,1.0,3.0,10.3,CHA,62,27.8,-0.07,3.42,3.35,6.97,9.6,97.57,33,5.32,376.5,90.0,29.0
101 | 99,142,Draymond Green,PF,26,32.5,3.6,8.6,0.418,1.1,3.5,0.308,2.5,5.1,0.494,0.48100000000000004,2.0,2.8,0.7090000000000001,1.3,6.6,7.9,7.0,2.0,1.4,2.4,2.9,10.2,GS,76,32.5,1.55,5.59,7.14,16.84,11.7,103.34,62,15.33,2946.0,627.0,168.5
102 | 100,144,Jusuf Nurkic,C,22,21.4,4.2,8.2,0.507,0.0,0.0,0.0,4.2,8.2,0.508,0.507,1.8,3.2,0.5710000000000001,2.4,4.8,7.2,1.9,0.8,1.1,2.2,2.5,10.2,DEN/POR,65,21.4,-2.11,2.73,0.62,3.32,11.7,100.65,34,1.92,3.0,45.0,43.5
103 | 101,145,Josh Richardson,SG,23,30.5,3.8,9.7,0.39399999999999996,1.4,4.3,0.33,2.4,5.4,0.444,0.467,1.1,1.5,0.779,0.7,2.5,3.2,2.6,1.1,0.7,1.2,2.5,10.2,MIA,53,30.5,-1.63,0.06,-1.57,1.57,6.9,97.52,23,1.47,192.0,42.0,10.0
104 | 102,146,Kyle Korver,SG,35,26.2,3.6,7.7,0.46399999999999997,2.4,5.4,0.451,1.1,2.3,0.494,0.621,0.6,0.6,0.905,0.1,2.7,2.8,1.6,0.5,0.3,1.0,1.6,10.1,ATL/CLE,67,26.2,-0.15,-2.61,-2.76,0.41,8.1,100.0,33,5.24,842.5,107.5,30.5
105 | 103,148,Trevor Booker,PF,29,24.7,4.3,8.3,0.516,0.4,1.1,0.321,3.9,7.2,0.546,0.537,1.0,1.5,0.6729999999999999,2.0,6.0,8.0,1.9,1.1,0.4,1.8,2.1,10.0,BKN,71,24.7,-2.29,2.0,-0.29,3.3,12.7,103.12,18,9.25,160.5,16.5,8.0
106 | 104,151,Maurice Harkless,SF,23,28.9,4.1,8.1,0.503,0.9,2.5,0.35100000000000003,3.2,5.6,0.5720000000000001,0.5579999999999999,1.0,1.6,0.621,1.6,2.8,4.4,1.1,1.1,0.9,1.1,2.8,10.0,POR,77,28.9,0.38,0.49,0.87,5.67,6.9,99.45,38,8.99,204.5,0.0,14.5
107 | 105,154,Jamal Murray,SG,19,21.5,3.6,8.9,0.40399999999999997,1.4,4.2,0.33399999999999996,2.2,4.7,0.466,0.483,1.3,1.5,0.883,0.5,2.1,2.6,2.1,0.6,0.3,1.4,1.5,9.9,DEN,82,21.5,0.06,-1.57,-1.51,1.86,7.8,101.15,40,3.21,411.0,0.0,62.5
108 | 106,157,Kenneth Faried,PF,27,21.2,3.7,6.8,0.5479999999999999,0.0,0.1,0.0,3.7,6.7,0.556,0.5479999999999999,2.1,3.1,0.693,3.0,4.6,7.6,0.9,0.7,0.7,1.0,2.0,9.6,DEN,61,21.2,-0.07,-1.55,-1.62,1.27,12.4,100.81,29,12.08,376.5,14.0,8.0
109 | 107,158,E'Twaun Moore,SG,27,24.9,3.9,8.5,0.457,1.1,2.8,0.37,2.8,5.6,0.501,0.519,0.8,1.0,0.77,0.5,1.6,2.1,2.2,0.7,0.4,0.8,1.8,9.6,NO,73,24.9,-0.24,-1.58,-1.82,1.5,7.3,100.16,32,8.08,151.0,0.0,10.0
110 | 108,160,Derrick Favors,PF,25,23.7,4.1,8.3,0.48700000000000004,0.1,0.2,0.3,4.0,8.1,0.491,0.49,1.3,2.2,0.615,1.8,4.3,6.1,1.1,0.9,0.8,1.2,2.1,9.5,UTAH,50,23.7,-2.76,2.06,-0.7,1.73,10.8,92.72,33,11.05,295.0,25.0,6.5
111 | 109,161,Patty Mills,PG,28,21.9,3.4,7.8,0.44,1.8,4.4,0.414,1.6,3.3,0.474,0.5579999999999999,0.8,1.0,0.825,0.3,1.5,1.8,3.5,0.8,0.0,1.3,1.4,9.5,SA,80,21.9,2.53,-1.26,1.27,4.86,10.0,98.97,60,3.58,522.0,112.0,19.0
112 | 110,162,Nik Stauskas,SG,23,27.4,3.1,7.9,0.396,1.7,4.5,0.368,1.5,3.4,0.433,0.5,1.5,1.9,0.813,0.3,2.6,2.8,2.4,0.6,0.4,1.6,1.8,9.5,PHI,80,27.4,-0.32,-2.25,-2.57,0.78,6.7,100.28,27,2.99,451.0,28.0,11.0
113 | 111,163,Michael Beasley,PF,28,16.7,3.9,7.3,0.532,0.3,0.8,0.419,3.5,6.5,0.545,0.5539999999999999,1.4,1.9,0.743,0.7,2.7,3.4,0.9,0.5,0.5,1.2,1.6,9.4,MIL,56,16.7,-1.7,-0.61,-2.31,0.48,11.6,97.91,23,1.4,939.0,8.0,5.0
114 | 112,164,Brandon Ingram,SF,19,28.8,3.5,8.7,0.402,0.7,2.4,0.294,2.8,6.3,0.44299999999999995,0.442,1.7,2.7,0.621,0.8,3.2,4.0,2.1,0.6,0.5,1.5,2.0,9.4,LAL,79,28.8,-1.76,-2.93,-4.69,-2.32,5.5,100.45,26,5.28,1203.0,194.0,100.0
115 | 113,166,Cory Joseph,PG,25,25.0,3.7,8.3,0.452,0.6,1.7,0.35600000000000004,3.1,6.6,0.47700000000000004,0.489,1.2,1.5,0.77,0.6,2.3,2.9,3.3,0.8,0.2,1.4,1.8,9.3,TOR,80,25.0,-0.5,-1.94,-2.44,0.86,8.9,97.43,49,7.32,516.5,48.5,15.0
116 | 114,167,Marquese Chriss,PF,19,21.3,3.5,7.7,0.449,0.9,2.7,0.321,2.6,5.0,0.52,0.506,1.4,2.2,0.624,1.2,3.1,4.2,0.7,0.8,0.9,1.3,3.2,9.2,PHX,82,21.3,-1.96,-1.82,-3.78,-0.76,6.3,103.56,24,2.94,252.0,0.0,22.0
117 | 115,168,Jeff Green,PF,30,22.2,3.2,8.1,0.39399999999999996,0.8,2.8,0.275,2.4,5.3,0.45799999999999996,0.442,2.1,2.4,0.863,0.6,2.5,3.1,1.2,0.5,0.2,1.1,1.5,9.2,ORL,69,22.2,-2.03,-1.31,-3.34,-0.21,7.7,98.84,26,15.0,14.0,57.5,28.5
118 | 116,169,Gerald Henderson,SG,29,23.2,3.3,7.7,0.423,0.8,2.4,0.353,2.4,5.3,0.455,0.478,1.8,2.3,0.8059999999999999,0.5,2.1,2.6,1.6,0.6,0.2,0.9,1.8,9.2,PHI,72,23.2,-1.51,-2.42,-3.93,-0.87,7.3,100.88,24,9.0,105.0,1.0,8.0
119 | 117,171,Michael Kidd-Gilchrist,SF,23,29.0,3.6,7.6,0.47700000000000004,0.0,0.1,0.111,3.6,7.5,0.483,0.478,1.9,2.4,0.784,1.9,5.0,7.0,1.4,1.0,1.0,0.7,2.3,9.2,CHA,81,29.0,-1.97,2.47,0.5,5.31,9.3,97.79,36,13.0,575.5,29.0,11.0
120 | 118,172,Jameer Nelson,PG,34,27.3,3.6,8.1,0.444,1.4,3.6,0.38799999999999996,2.2,4.4,0.489,0.531,0.6,0.8,0.7140000000000001,0.4,2.2,2.6,5.1,0.7,0.1,1.7,2.6,9.2,DEN,75,27.3,0.45,-0.44,0.01,4.17,7.5,100.06,34,4.54,369.0,0.0,9.0
121 | 119,173,Tony Allen,SG,35,27.0,3.9,8.4,0.461,0.2,0.8,0.278,3.6,7.6,0.479,0.473,1.1,1.8,0.615,2.3,3.2,5.5,1.4,1.6,0.4,1.4,2.5,9.1,MEM,71,27.0,-2.03,2.15,0.12,3.76,8.0,93.85,36,5.51,17.0,10.5,11.0
122 | 120,174,Channing Frye,C,33,18.9,3.2,7.0,0.45799999999999996,1.9,4.5,0.409,1.4,2.5,0.546,0.589,0.9,1.0,0.851,0.5,3.4,3.9,0.6,0.4,0.5,0.7,1.9,9.1,CLE,74,18.9,1.62,-0.58,1.04,3.7,10.1,98.14,44,7.81,445.0,8.0,4.0
123 | 121,177,Jodie Meeks,SG,29,20.5,2.9,7.1,0.402,1.6,3.8,0.409,1.3,3.3,0.395,0.512,1.8,2.1,0.878,0.1,2.0,2.1,1.3,0.9,0.1,1.0,1.1,9.1,ORL,36,20.5,-0.05,-1.6,-1.65,0.7,8.1,100.24,14,6.54,173.0,36.5,15.0
124 | 122,180,Kelly Olynyk,C,25,20.5,3.5,6.8,0.512,0.9,2.6,0.354,2.6,4.2,0.608,0.579,1.2,1.6,0.732,1.0,3.8,4.8,2.0,0.6,0.4,1.3,2.8,9.0,BOS,75,20.5,-0.31,0.81,0.5,3.54,11.0,99.85,49,3.09,457.0,56.5,16.0
125 | 123,181,Evan Turner,SF,28,25.5,3.6,8.5,0.426,0.5,1.8,0.263,3.1,6.7,0.47,0.45399999999999996,1.3,1.6,0.825,0.6,3.2,3.8,3.2,0.8,0.4,1.5,1.9,9.0,POR,65,25.5,-3.04,0.15,-2.89,0.25,8.8,99.29,34,16.39,564.0,17.0,13.0
126 | 124,182,DeMarre Carroll,SF,30,26.1,3.1,7.6,0.4,1.5,4.4,0.341,1.5,3.2,0.483,0.499,1.2,1.6,0.7609999999999999,0.9,2.9,3.8,1.0,1.1,0.4,0.8,2.0,8.9,TOR,72,26.1,0.0,0.83,0.83,4.66,7.1,96.82,44,14.2,363.5,29.5,8.0
127 | 125,185,Al-Farouq Aminu,SF,26,29.1,3.0,7.6,0.39299999999999996,1.1,3.5,0.33,1.9,4.2,0.445,0.46799999999999997,1.6,2.2,0.706,1.3,6.1,7.4,1.6,1.0,0.7,1.5,1.7,8.7,POR,61,29.1,-1.86,3.13,1.27,4.86,9.2,98.38,33,7.68,330.5,33.0,9.0
128 | 126,187,Nerlens Noel,C,22,20.5,3.6,6.1,0.595,0.0,0.0,0.0,3.6,6.0,0.597,0.595,1.5,2.2,0.6940000000000001,1.8,3.9,5.8,1.0,1.3,1.0,1.0,2.5,8.7,DAL/PHI,51,20.5,-1.33,2.58,1.25,2.95,12.7,98.69,24,4.38,663.0,105.0,64.0
129 | 127,188,Marreese Speights,C,29,15.7,3.0,6.7,0.445,1.3,3.4,0.37200000000000005,1.7,3.3,0.52,0.539,1.5,1.7,0.8759999999999999,1.1,3.5,4.5,0.8,0.3,0.5,0.8,2.8,8.7,LAC,82,15.7,0.67,0.69,1.36,3.66,11.4,97.43,51,1.4,564.5,80.0,12.0
130 | 128,193,Tyson Chandler,C,34,27.6,3.3,4.9,0.6709999999999999,0.0,0.0,,3.3,4.9,0.6709999999999999,0.6709999999999999,1.9,2.6,0.7340000000000001,3.3,8.2,11.5,0.6,0.7,0.5,1.4,2.7,8.4,PHX,47,27.6,-1.06,2.2,1.14,3.55,13.1,101.88,15,12.42,832.0,29.0,9.0
131 | 129,195,Troy Daniels,SG,25,17.7,2.8,7.4,0.374,2.1,5.3,0.389,0.7,2.1,0.336,0.513,0.6,0.8,0.7959999999999999,0.3,1.2,1.5,0.7,0.3,0.1,0.7,1.3,8.2,MEM,67,17.7,-0.02,-2.1,-2.12,0.72,6.5,95.85,33,3.33,106.0,30.0,10.0
132 | 130,197,Willy Hernangomez,C,22,18.4,3.4,6.5,0.529,0.1,0.2,0.267,3.4,6.3,0.5379999999999999,0.5329999999999999,1.3,1.7,0.728,2.4,4.6,7.0,1.3,0.6,0.5,1.4,2.1,8.2,NY,72,18.4,-0.75,0.4,-0.35,2.37,13.3,99.12,27,1.44,2.0,0.0,55.5
133 | 131,198,Caris LeVert,SF,22,21.7,3.0,6.7,0.45,1.0,3.2,0.321,2.0,3.4,0.5710000000000001,0.528,1.2,1.6,0.72,0.4,2.9,3.3,1.9,0.9,0.1,1.0,1.6,8.2,BKN,57,21.7,-0.46,-0.84,-1.3,1.53,8.6,104.52,15,1.56,198.5,0.0,28.0
134 | 132,200,Willie Cauley-Stein,C,23,18.9,3.4,6.4,0.53,0.0,0.0,0.0,3.4,6.4,0.532,0.53,1.3,2.0,0.669,1.1,3.4,4.5,1.1,0.7,0.6,0.9,2.0,8.1,SAC,75,18.9,-2.07,0.9,-1.17,1.72,10.6,96.91,29,3.55,456.5,0.0,17.0
135 | 133,202,Ben McLemore,SG,23,19.3,3.0,6.9,0.43,1.1,2.8,0.382,1.9,4.1,0.462,0.507,1.1,1.5,0.753,0.3,1.8,2.1,0.8,0.5,0.1,1.0,1.8,8.1,SAC,61,19.3,-2.88,-1.58,-4.46,-0.99,6.0,97.04,27,4.01,272.5,30.0,17.0
136 | 134,203,Tristan Thompson,C,25,29.9,3.4,5.6,0.6,0.0,0.0,0.0,3.4,5.6,0.604,0.6,1.4,2.7,0.498,3.7,5.5,9.2,1.0,0.5,1.1,0.8,2.3,8.1,CLE,78,29.9,-1.31,1.31,0.0,4.62,8.9,98.28,50,15.33,1365.5,368.0,211.0
137 | 135,204,Vince Carter,SF,40,24.6,2.6,6.7,0.39399999999999996,1.5,4.1,0.37799999999999995,1.1,2.7,0.418,0.508,1.2,1.6,0.765,0.5,2.6,3.1,1.8,0.8,0.5,0.7,2.2,8.0,MEM,73,24.7,1.36,-0.06,1.3,4.81,7.8,95.02,36,4.26,2891.5,0.0,7.0
138 | 136,205,Alex Len,C,23,20.3,3.0,6.0,0.49700000000000005,0.0,0.2,0.25,2.9,5.9,0.503,0.5,1.9,2.7,0.721,2.0,4.6,6.6,0.6,0.5,1.3,1.3,3.1,8.0,PHX,77,20.3,-3.62,1.28,-2.34,0.81,9.5,103.52,21,4.82,270.0,10.5,6.0
139 | 137,207,Langston Galloway,PG,25,20.2,2.8,7.4,0.38,1.6,4.2,0.39,1.2,3.2,0.36700000000000005,0.49,0.7,0.9,0.797,0.4,1.7,2.1,1.3,0.6,0.1,0.6,1.3,7.9,NO/SAC,74,20.2,0.1,-1.65,-1.55,1.46,6.7,99.48,29,5.2,146.0,14.5,2.0
140 | 138,208,Shelvin Mack,PG,26,21.9,3.1,6.9,0.446,0.7,2.2,0.308,2.4,4.7,0.51,0.495,1.0,1.4,0.688,0.4,1.9,2.3,2.8,0.8,0.1,1.6,1.8,7.8,UTAH,55,21.9,-1.51,-1.28,-2.79,0.25,7.9,93.44,34,2.43,121.5,6.0,3.0
141 | 139,210,Rajon Rondo,PG,30,26.7,3.3,8.1,0.408,0.7,1.9,0.376,2.6,6.2,0.418,0.45299999999999996,0.4,0.7,0.6,1.1,4.1,5.1,6.7,1.4,0.2,2.4,2.1,7.8,CHI,69,26.7,-1.35,-0.49,-1.84,1.51,10.6,97.96,34,14.0,1765.0,185.5,74.0
142 | 140,211,Garrett Temple,SG,30,26.6,2.8,6.6,0.424,1.3,3.4,0.373,1.6,3.3,0.47600000000000003,0.519,0.9,1.1,0.784,0.5,2.3,2.8,2.6,1.3,0.4,1.2,2.2,7.8,SAC,65,26.6,-0.14,-0.46,-0.6,2.67,7.1,97.51,25,8.0,100.0,2.0,1.0
143 | 141,213,Matthew Dellavedova,PG,26,26.1,2.7,7.0,0.39,1.0,2.8,0.36700000000000005,1.7,4.2,0.40399999999999997,0.46299999999999997,1.1,1.3,0.8540000000000001,0.3,1.6,1.9,4.7,0.7,0.0,1.8,2.0,7.6,MIL,76,26.1,-0.88,-1.01,-1.89,1.54,6.6,97.5,39,9.61,910.5,175.0,30.0
144 | 142,214,Luol Deng,SF,31,26.5,2.9,7.6,0.387,0.9,2.9,0.309,2.0,4.6,0.436,0.447,0.8,1.1,0.73,1.1,4.1,5.3,1.3,0.9,0.4,0.8,1.1,7.6,LAL,56,26.5,-1.87,1.54,-0.33,2.69,7.7,100.3,18,18.0,838.0,22.0,13.5
145 | 143,215,Andre Iguodala,SF,33,26.3,2.9,5.5,0.528,0.8,2.3,0.36200000000000004,2.0,3.1,0.6509999999999999,0.605,0.9,1.3,0.706,0.7,3.3,4.0,3.4,1.0,0.5,0.8,1.3,7.6,GS,76,26.3,2.01,1.52,3.53,8.72,10.2,101.92,64,11.13,1748.5,300.5,43.0
146 | 144,216,Manu Ginobili,SG,39,18.7,2.5,6.4,0.39,1.3,3.3,0.392,1.2,3.1,0.387,0.491,1.2,1.6,0.804,0.4,1.9,2.3,2.7,1.2,0.2,1.4,1.7,7.5,SA,69,18.7,1.43,1.0,2.43,4.64,8.7,100.94,51,14.0,53.0,299.0,66.0
147 | 145,224,Danny Green,SG,29,26.6,2.6,6.6,0.392,1.7,4.6,0.379,0.9,2.0,0.42,0.523,0.4,0.5,0.8440000000000001,0.5,2.8,3.3,1.8,1.0,0.8,1.1,1.8,7.3,SA,68,26.6,-0.45,1.71,1.26,4.83,6.9,94.47,51,10.0,6.0,0.0,15.0
148 | 146,225,David Lee,PF,33,18.7,3.1,5.3,0.59,0.0,0.0,,3.1,5.3,0.59,0.59,1.0,1.4,0.708,1.9,3.7,5.6,1.6,0.4,0.5,1.0,1.6,7.3,SA,79,18.7,-0.38,1.5,1.12,3.98,12.6,98.36,58,1.55,42.0,56.5,35.0
149 | 147,227,Thabo Sefolosha,SF,32,25.7,2.8,6.4,0.441,0.7,1.9,0.342,2.1,4.4,0.484,0.49200000000000005,0.9,1.2,0.733,0.9,3.5,4.4,1.7,1.5,0.5,0.9,1.6,7.2,ATL,62,25.7,-1.93,2.27,0.34,3.55,8.6,99.53,32,3.85,280.0,28.5,17.5
150 | 148,229,Justin Anderson,SF,23,16.4,2.5,5.9,0.424,0.8,2.6,0.299,1.7,3.3,0.522,0.49,1.4,1.7,0.7909999999999999,0.9,2.3,3.3,0.9,0.5,0.3,0.9,1.5,7.1,DAL/PHI,75,16.4,-1.36,-2.54,-3.9,-0.59,9.1,97.35,24,1.51,4.0,5.0,10.0
151 | 149,232,Joe Ingles,SF,29,24.0,2.5,5.5,0.452,1.5,3.4,0.441,1.0,2.1,0.47100000000000003,0.589,0.6,0.8,0.735,0.3,2.9,3.2,2.7,1.2,0.1,1.3,2.0,7.1,UTAH,82,24.0,2.29,0.26,2.55,6.68,8.9,94.04,51,2.15,178.0,86.5,4.0
152 | 150,241,Jared Dudley,PF,31,21.3,2.5,5.4,0.45399999999999996,1.2,3.2,0.379,1.3,2.2,0.5589999999999999,0.565,0.7,1.0,0.662,0.5,3.0,3.5,1.9,0.7,0.3,1.1,2.4,6.8,PHX,64,21.3,-0.67,1.89,1.22,3.94,7.9,103.23,17,10.47,184.0,31.0,4.0
153 | 151,243,Patrick Patterson,PF,27,24.6,2.4,5.9,0.401,1.4,3.9,0.37200000000000005,0.9,2.0,0.45799999999999996,0.523,0.7,0.9,0.7170000000000001,1.0,3.6,4.5,1.2,0.6,0.4,0.6,1.8,6.8,TOR,65,24.6,1.05,1.26,2.31,5.4,7.5,97.3,42,6.05,8.0,53.5,8.0
154 | 152,244,Bobby Portis,PF,21,15.6,2.9,5.9,0.488,0.5,1.5,0.33299999999999996,2.4,4.4,0.541,0.531,0.6,0.9,0.6609999999999999,1.2,3.5,4.6,0.5,0.3,0.2,0.6,1.5,6.8,CHI,64,15.6,-1.94,-0.5,-2.44,0.43,11.0,97.38,30,1.45,193.0,37.0,73.0
155 | 153,244,Bobby Portis,PF,21,15.6,2.9,5.9,0.488,0.5,1.5,0.33299999999999996,2.4,4.4,0.541,0.531,0.6,0.9,0.6609999999999999,1.2,3.5,4.6,0.5,0.3,0.2,0.6,1.5,6.8,CHI,64,15.6,-1.94,-0.5,-2.44,0.43,11.0,97.38,30,1.52,193.0,37.0,73.0
156 | 154,246,Brandan Wright,PF,29,16.0,3.0,4.8,0.615,0.0,0.0,0.0,3.0,4.8,0.619,0.615,0.8,1.3,0.657,1.1,1.7,2.8,0.5,0.4,0.7,0.4,1.5,6.8,MEM,28,16.0,-0.86,0.4,-0.46,0.71,11.6,93.54,14,5.71,152.5,0.0,6.0
157 | 155,247,Alec Burks,SG,25,15.5,2.4,5.9,0.39899999999999997,0.6,1.8,0.32899999999999996,1.8,4.1,0.43,0.45,1.4,1.9,0.769,0.4,2.5,2.9,0.7,0.4,0.1,0.8,1.2,6.7,UTAH,42,15.5,-2.47,-0.94,-3.41,-0.12,8.7,95.94,26,10.15,111.0,10.0,15.0
158 | 156,251,Dwight Powell,C,25,17.3,2.5,4.9,0.515,0.3,1.0,0.284,2.2,3.9,0.5710000000000001,0.542,1.4,1.8,0.759,1.2,2.8,4.0,0.6,0.8,0.5,0.4,1.8,6.7,DAL,77,17.3,-0.66,-0.51,-1.17,1.58,10.7,93.78,30,8.38,213.0,0.0,10.0
159 | 157,253,Jaylen Brown,SF,20,17.2,2.5,5.4,0.45399999999999996,0.6,1.7,0.341,1.9,3.7,0.507,0.508,1.1,1.6,0.685,0.6,2.2,2.8,0.8,0.4,0.2,0.9,1.8,6.6,BOS,78,17.2,-2.95,-0.42,-3.37,-0.21,7.1,99.02,50,4.74,441.0,0.0,100.0
160 | 158,254,Michael Carter-Williams,PG,25,18.8,2.5,6.8,0.366,0.3,1.4,0.23399999999999999,2.2,5.4,0.401,0.391,1.3,1.7,0.753,0.5,2.9,3.4,2.5,0.8,0.5,1.5,2.3,6.6,CHI,45,18.8,-2.82,1.12,-1.7,0.77,7.0,98.47,25,3.18,645.0,0.0,5.0
161 | 159,255,Dante Cunningham,SF,29,25.0,2.6,5.4,0.485,1.1,2.7,0.392,1.6,2.7,0.579,0.584,0.2,0.4,0.593,0.8,3.3,4.2,0.6,0.6,0.4,0.4,1.5,6.6,NO,66,25.0,-0.37,-0.56,-0.93,2.29,6.8,99.77,28,2.98,97.5,0.0,3.0
162 | 160,256,Kosta Koufos,C,27,20.0,3.0,5.5,0.551,0.0,0.0,0.0,3.0,5.5,0.552,0.551,0.5,0.9,0.613,1.7,4.0,5.7,0.7,0.5,0.7,0.9,2.4,6.6,SAC,71,20.0,-2.78,2.83,0.05,2.79,9.3,96.17,27,8.05,189.0,37.0,8.0
163 | 161,257,Andre Roberson,SF,25,30.1,2.7,5.9,0.46399999999999997,0.6,2.3,0.245,2.2,3.5,0.609,0.513,0.6,1.4,0.423,1.2,3.9,5.1,1.0,1.2,1.0,0.6,2.6,6.6,OKC,79,30.1,-1.4,2.64,1.24,6.73,5.9,101.08,46,2.18,7.5,10.0,8.0
164 | 162,259,Sam Dekker,SF,22,18.4,2.6,5.6,0.473,0.8,2.4,0.321,1.9,3.1,0.591,0.5429999999999999,0.5,0.9,0.5589999999999999,1.2,2.5,3.7,1.0,0.5,0.3,0.5,1.1,6.5,HOU,77,18.4,0.05,-1.42,-1.37,1.64,8.7,102.25,52,1.72,331.0,139.0,13.0
165 | 163,260,Amir Johnson,PF,29,20.1,2.7,4.6,0.5760000000000001,0.3,0.8,0.409,2.3,3.8,0.612,0.612,0.8,1.3,0.67,1.5,3.1,4.6,1.8,0.6,0.8,1.0,2.6,6.5,BOS,80,20.1,0.22,3.58,3.8,7.02,9.1,97.94,51,12.0,282.0,31.5,6.0
166 | 164,267,Kyle O'Quinn,C,26,15.6,2.7,5.2,0.521,0.0,0.2,0.11800000000000001,2.7,5.0,0.5379999999999999,0.523,0.8,1.1,0.7709999999999999,2.0,3.6,5.6,1.5,0.5,1.3,1.0,2.2,6.3,NY,79,15.6,-1.28,1.16,-0.12,2.31,13.2,97.82,29,3.92,152.0,0.0,7.0
167 | 165,270,Dante Exum,PG,21,18.6,2.3,5.5,0.42700000000000005,0.7,2.3,0.295,1.7,3.2,0.519,0.488,0.9,1.1,0.795,0.5,1.5,2.0,1.7,0.3,0.2,1.2,2.2,6.2,UTAH,66,18.6,-2.14,-0.65,-2.79,0.25,5.4,94.42,40,3.94,483.0,48.5,28.0
168 | 166,272,Spencer Hawes,PF,28,14.8,2.5,5.1,0.484,0.5,1.5,0.309,2.0,3.6,0.556,0.529,0.8,1.0,0.846,0.7,2.8,3.5,1.5,0.3,0.6,0.9,1.4,6.2,MIL/CHA,54,14.8,-2.13,-0.87,-3.0,0.06,10.9,100.06,27,6.35,195.0,12.0,4.0
169 | 167,273,Trey Lyles,PF,21,16.3,2.2,6.2,0.36200000000000004,0.9,2.9,0.319,1.3,3.3,0.4,0.436,0.8,1.1,0.722,0.7,2.6,3.3,1.0,0.4,0.3,0.9,1.4,6.2,UTAH,71,16.3,-2.48,-0.23,-2.71,0.29,7.6,94.43,43,2.34,334.5,48.0,12.0
170 | 168,275,Chandler Parsons,SF,28,19.9,2.2,6.5,0.33799999999999997,0.7,2.7,0.26899999999999996,1.5,3.8,0.38799999999999996,0.39399999999999996,1.0,1.3,0.8140000000000001,0.2,2.3,2.5,1.6,0.6,0.1,0.7,1.5,6.2,MEM,34,19.9,-3.51,-0.33,-3.84,-0.29,5.6,94.04,18,22.12,555.5,59.0,16.0
171 | 169,278,Troy Williams,SF,22,18.6,2.4,5.6,0.43700000000000006,0.6,2.1,0.29,1.8,3.5,0.524,0.491,0.7,1.1,0.6559999999999999,0.5,1.8,2.3,0.8,0.9,0.3,1.1,2.0,6.2,HOU/MEM,30,18.6,-3.06,-0.45,-3.51,-0.14,5.5,98.77,19,1.58,72.0,5.0,9.0
172 | 170,280,JaVale McGee,C,29,9.6,2.7,4.1,0.652,0.0,0.0,0.0,2.7,4.1,0.6579999999999999,0.652,0.7,1.4,0.505,1.3,1.9,3.2,0.2,0.2,0.9,0.5,1.4,6.1,GS,77,9.6,0.83,0.11,0.94,2.04,12.5,105.1,62,1.4,804.5,0.0,32.0
173 | 171,289,Jerian Grant,PG,24,16.3,2.0,4.8,0.425,0.8,2.1,0.366,1.3,2.7,0.473,0.507,1.0,1.2,0.89,0.3,1.5,1.8,1.9,0.7,0.1,0.7,1.5,5.9,CHI,63,16.3,0.5,1.07,1.57,3.03,8.4,97.0,29,1.64,371.0,25.0,32.5
174 | 172,290,Andrew Harrison,PG,22,20.5,1.6,5.0,0.325,0.6,2.2,0.276,1.0,2.8,0.363,0.385,2.1,2.7,0.763,0.3,1.6,1.9,2.8,0.7,0.3,1.2,2.7,5.9,MEM,72,20.5,-1.2,-0.38,-1.58,1.38,5.3,94.76,38,1.31,5.0,0.0,17.0
175 | 173,291,Domantas Sabonis,PF,20,20.1,2.4,5.9,0.39899999999999997,0.6,2.0,0.321,1.7,4.0,0.43799999999999994,0.452,0.5,0.8,0.657,0.6,3.0,3.6,1.0,0.5,0.4,1.0,2.5,5.9,OKC,81,20.1,-3.9,1.86,-2.04,1.14,4.9,99.61,46,2.44,550.5,0.0,82.0
176 | 174,292,Quincy Acy,PF,26,14.7,1.8,4.5,0.41200000000000003,1.0,2.4,0.41100000000000003,0.9,2.1,0.413,0.521,1.2,1.6,0.75,0.5,2.5,3.0,0.5,0.4,0.4,0.6,1.8,5.8,DAL/BKN,38,14.7,-2.38,-0.21,-2.59,0.01,8.3,101.09,11,1.79,179.0,4.0,4.0
177 | 175,301,Ian Mahinmi,C,30,17.9,2.1,3.6,0.586,0.0,0.0,,2.1,3.6,0.586,0.586,1.4,2.4,0.573,1.5,3.3,4.8,0.6,1.1,0.8,1.1,2.9,5.6,WSH,31,17.9,-2.22,3.11,0.89,1.44,8.2,98.9,19,15.94,162.5,10.0,5.0
178 | 176,302,Georgios Papagiannis,C,19,16.1,2.5,4.6,0.5489999999999999,0.0,0.1,0.0,2.5,4.5,0.56,0.5489999999999999,0.5,0.6,0.857,1.1,2.8,3.9,0.9,0.1,0.8,1.1,2.0,5.6,SAC,22,16.1,-2.47,-0.3,-2.77,0.08,8.2,96.93,7,2.2,172.0,7.0,3.0
179 | 177,303,Delon Wright,PG,24,16.5,1.8,4.3,0.42200000000000004,0.4,1.1,0.33299999999999996,1.4,3.2,0.45299999999999996,0.466,1.6,2.0,0.764,0.6,1.2,1.8,2.1,1.0,0.4,0.9,1.1,5.6,TOR,27,16.5,-1.28,0.98,-0.3,0.77,10.5,94.48,19,1.58,202.5,11.5,14.0
180 | 178,306,Anthony Morrow,SG,31,14.6,2.0,5.1,0.389,0.8,2.7,0.308,1.2,2.4,0.479,0.47,0.7,0.8,0.919,0.2,0.4,0.6,0.5,0.5,0.0,0.1,0.9,5.5,CHI/OKC,49,14.6,-0.66,-1.46,-2.12,0.46,5.5,99.75,25,3.49,177.5,0.0,5.0
181 | 179,307,Terry Rozier,PG,22,17.1,2.0,5.6,0.36700000000000005,0.8,2.4,0.318,1.3,3.1,0.405,0.43700000000000006,0.7,0.9,0.773,0.5,2.5,3.1,1.8,0.6,0.1,0.6,0.9,5.5,BOS,74,17.1,-1.05,-0.01,-1.06,1.65,9.1,98.27,47,1.91,208.0,2.0,6.0
182 | 180,309,Malcolm Delaney,PG,27,17.1,2.0,5.3,0.374,0.4,1.5,0.23600000000000002,1.6,3.8,0.428,0.40700000000000003,1.0,1.3,0.8059999999999999,0.1,1.5,1.7,2.6,0.5,0.0,1.3,1.5,5.4,ATL,73,17.1,-2.64,-0.25,-2.89,0.18,6.4,98.7,37,2.5,183.5,1.0,1.0
183 | 181,311,Meyers Leonard,PF,24,16.5,2.0,5.1,0.386,1.0,2.9,0.34700000000000003,1.0,2.2,0.436,0.484,0.5,0.5,0.875,0.4,2.8,3.2,1.0,0.2,0.4,0.5,2.3,5.4,POR,74,16.5,-1.5,0.26,-1.24,1.48,6.1,99.44,37,9.21,276.0,149.0,31.0
184 | 182,314,Chasson Randle,PG,23,11.5,1.5,3.8,0.408,0.6,1.8,0.34,0.9,2.0,0.47100000000000003,0.49,1.6,1.7,0.953,0.2,1.0,1.2,1.3,0.3,0.1,0.7,1.5,5.3,NY/PHI,26,11.5,-0.74,-1.36,-2.1,0.2,8.4,98.49,8,1.31,82.0,13.5,7.0
185 | 183,318,Randy Foye,SG,33,18.6,1.7,4.7,0.363,1.0,2.9,0.33,0.7,1.8,0.418,0.466,0.8,0.9,0.857,0.1,2.1,2.2,2.0,0.5,0.1,1.2,1.4,5.2,BKN,69,18.6,-2.7,-1.42,-4.12,-0.85,5.7,103.71,17,2.5,267.5,2.0,4.0
186 | 184,319,Cameron Payne,PG,22,14.9,2.1,6.2,0.332,0.9,2.8,0.314,1.2,3.5,0.34600000000000003,0.402,0.2,0.3,0.625,0.1,1.5,1.5,1.8,0.5,0.1,0.9,1.7,5.2,CHI/OKC,31,14.9,-4.14,-0.31,-4.45,-0.39,4.1,98.86,16,2.11,324.5,38.5,21.0
187 | 185,323,Luis Scola,PF,36,12.8,2.0,4.2,0.47,0.5,1.4,0.34,1.5,2.8,0.535,0.526,0.7,1.0,0.6759999999999999,1.4,2.4,3.9,1.0,0.4,0.1,0.9,1.8,5.1,BKN,36,12.8,-0.8,-0.29,-1.09,0.62,8.9,102.58,7,5.5,416.0,16.0,60.0
188 | 186,326,Denzel Valentine,SG,23,17.1,1.8,5.1,0.354,1.3,3.6,0.35100000000000003,0.5,1.4,0.363,0.48100000000000004,0.2,0.3,0.778,0.2,2.5,2.6,1.1,0.5,0.1,0.9,1.5,5.1,CHI,57,17.1,-2.21,0.13,-2.08,0.64,5.8,97.94,28,2.09,403.0,23.0,24.0
189 | 187,327,Anthony Bennett,PF,23,11.5,1.7,4.0,0.413,0.6,2.1,0.271,1.1,1.9,0.568,0.484,1.1,1.6,0.722,1.1,2.3,3.4,0.5,0.2,0.1,0.5,0.8,5.0,BKN,23,11.5,-0.76,-1.29,-2.05,0.19,9.4,104.03,3,1.02,19.0,9.0,10.5
190 | 188,328,Aaron Brooks,PG,32,13.8,1.9,4.6,0.40299999999999997,0.7,2.0,0.375,1.1,2.6,0.424,0.483,0.5,0.6,0.8,0.3,0.8,1.1,1.9,0.4,0.1,1.0,1.4,5.0,IND,65,13.8,-1.81,-1.47,-3.28,-0.09,6.2,96.55,36,2.7,10.0,1.0,3.0
191 | 189,329,Trey Burke,PG,24,12.3,2.0,4.5,0.455,0.5,1.2,0.44299999999999995,1.5,3.2,0.45899999999999996,0.516,0.4,0.5,0.759,0.1,0.7,0.8,1.8,0.2,0.1,0.8,0.9,5.0,WSH,57,12.3,-2.15,-3.09,-5.24,-0.92,8.1,96.37,35,3.39,429.5,41.0,14.0
192 | 190,330,Joakim Noah,C,31,22.1,2.2,4.4,0.49,0.0,0.0,0.0,2.2,4.4,0.493,0.49,0.7,1.7,0.436,3.5,5.2,8.8,2.2,0.7,0.8,1.3,2.8,5.0,NY,46,22.1,-2.23,1.17,-1.06,1.32,9.4,98.45,17,17.0,1202.5,0.0,87.0
193 | 191,335,Mario Hezonja,SF,21,14.8,1.8,5.1,0.355,0.7,2.2,0.299,1.1,2.9,0.39799999999999996,0.42,0.6,0.8,0.8,0.3,1.9,2.2,1.0,0.5,0.2,0.9,1.1,4.9,ORL,65,14.8,-4.4,-2.2,-6.6,-2.09,5.9,97.48,20,3.91,396.5,272.0,207.0
194 | 192,337,Tiago Splitter,C,32,9.5,1.8,3.9,0.452,0.3,0.8,0.33299999999999996,1.5,3.1,0.48,0.484,1.1,1.4,0.818,1.0,1.8,2.8,0.5,0.1,0.1,0.8,1.0,4.9,PHI,8,9.5,-1.97,0.69,-1.28,0.09,10.6,99.11,1,8.55,274.5,8.0,9.0
195 | 193,341,Kevin Seraphin,PF,27,11.4,2.2,4.0,0.551,0.0,0.0,0.0,2.2,4.0,0.556,0.551,0.3,0.4,0.636,0.8,2.1,2.9,0.5,0.1,0.4,0.6,1.3,4.7,IND,49,11.4,-2.94,-1.07,-4.01,-0.31,9.5,96.87,23,1.8,16.0,4.5,8.0
196 | 194,342,Isaiah Canaan,SG,25,15.2,1.6,4.4,0.364,0.6,2.4,0.266,1.0,2.0,0.48100000000000004,0.436,0.8,0.8,0.909,0.2,1.1,1.3,0.9,0.6,0.0,0.5,0.9,4.6,CHI,39,15.2,-3.0,-1.12,-4.12,-0.37,5.6,96.46,19,1.02,180.5,24.5,8.0
197 | 195,343,Boris Diaw,PF,34,17.6,2.0,4.5,0.446,0.3,1.1,0.247,1.7,3.4,0.512,0.47700000000000004,0.4,0.5,0.743,0.6,1.5,2.2,2.3,0.2,0.1,1.2,1.1,4.6,UTAH,73,17.6,-2.58,-0.5,-3.08,0.03,7.1,93.08,45,7.0,489.5,136.5,13.0
198 | 196,345,Kris Humphries,PF,31,12.3,1.6,3.8,0.40700000000000003,0.3,1.0,0.35200000000000004,1.2,2.9,0.425,0.451,1.1,1.5,0.78,1.1,2.6,3.7,0.5,0.3,0.4,0.5,1.2,4.6,ATL,56,12.3,-2.3,0.41,-1.89,0.54,10.5,98.12,31,4.0,2940.5,67.0,9.0
199 | 197,348,Corey Brewer,SF,30,15.6,1.8,4.2,0.42200000000000004,0.3,1.4,0.22899999999999998,1.5,2.8,0.522,0.461,0.6,0.8,0.735,0.4,1.6,2.0,1.2,0.7,0.2,0.7,1.6,4.5,HOU/LAL,82,15.6,-1.75,1.0,-0.75,1.99,5.7,101.21,47,7.61,314.0,51.5,15.5
200 | 198,349,Reggie Bullock,SF,25,15.1,1.7,4.1,0.42200000000000004,0.9,2.4,0.384,0.8,1.8,0.473,0.531,0.2,0.2,0.7140000000000001,0.4,1.6,2.1,0.9,0.6,0.1,0.3,0.7,4.5,DET,31,15.1,-0.98,0.57,-0.41,0.79,8.1,98.05,13,2.26,125.0,20.0,13.0
201 | 199,353,Stanley Johnson,SF,20,17.8,1.7,4.7,0.353,0.6,2.0,0.292,1.1,2.7,0.39799999999999996,0.415,0.5,0.7,0.679,0.5,2.0,2.5,1.4,0.7,0.3,0.9,1.6,4.4,DET,77,17.8,-2.34,-0.04,-2.38,0.63,4.8,96.99,35,2.97,10.0,0.0,19.0
202 | 200,358,Noah Vonleh,PF,21,17.1,1.8,3.6,0.48100000000000004,0.1,0.3,0.35,1.7,3.4,0.49200000000000005,0.494,0.8,1.3,0.638,1.8,3.5,5.2,0.4,0.4,0.4,0.9,2.1,4.4,POR,74,17.1,-4.02,1.03,-2.99,0.11,7.3,98.18,38,2.75,218.0,23.5,12.0
203 | 201,360,Tyler Ennis,PG,22,11.1,1.7,3.9,0.433,0.5,1.3,0.386,1.2,2.6,0.457,0.498,0.4,0.5,0.84,0.2,0.7,0.8,1.6,0.5,0.0,0.8,1.1,4.3,HOU/LAL,53,11.1,-1.36,-0.51,-1.87,0.49,7.0,100.23,28,1.73,18.0,11.0,6.0
204 | 202,363,Brandon Rush,SG,31,21.9,1.5,4.0,0.374,0.9,2.4,0.386,0.6,1.6,0.35600000000000004,0.49200000000000005,0.3,0.4,0.722,0.3,1.8,2.1,1.0,0.5,0.5,0.6,0.9,4.2,MIN,47,21.9,-3.13,-0.73,-3.86,-0.48,4.3,96.76,15,3.5,412.5,14.0,1.5
205 | 203,370,Thon Maker,C,19,9.9,1.5,3.2,0.45899999999999996,0.5,1.3,0.37799999999999995,1.0,1.9,0.514,0.536,0.6,0.9,0.653,0.7,1.3,2.0,0.4,0.2,0.5,0.3,1.5,4.0,MIL,57,9.9,-1.23,0.17,-1.06,0.7,7.2,94.0,31,2.57,976.0,0.0,39.5
206 | 204,373,Anthony Brown,SF,24,14.5,1.6,4.5,0.36,0.6,2.5,0.259,1.0,2.1,0.478,0.43,0.0,0.0,,0.7,2.3,3.0,0.7,0.5,0.1,0.5,1.4,3.9,NO/ORL,11,14.5,-1.42,-0.34,-1.76,0.08,5.8,94.67,3,0.06,11.0,3.0,3.0
207 | 205,374,Jarell Martin,PF,22,13.3,1.4,3.6,0.384,0.2,0.6,0.36,1.2,3.0,0.389,0.414,1.0,1.2,0.8,1.0,2.9,3.9,0.2,0.4,0.2,0.7,2.2,3.9,MEM,42,13.3,-4.23,1.17,-3.06,0.02,6.9,97.39,23,1.47,80.5,2.0,4.0
208 | 206,375,Malik Beasley,SG,20,7.5,1.5,3.3,0.452,0.4,1.3,0.321,1.1,2.0,0.5329999999999999,0.514,0.4,0.5,0.8,0.2,0.5,0.8,0.5,0.3,0.0,0.4,0.5,3.8,DEN,22,7.5,-1.74,0.21,-1.53,0.18,8.6,103.72,12,1.63,159.0,0.0,14.0
209 | 207,376,Kris Dunn,PG,22,17.1,1.5,4.0,0.377,0.3,0.9,0.28800000000000003,1.2,3.1,0.40399999999999997,0.41100000000000003,0.5,0.8,0.61,0.3,1.8,2.1,2.4,1.0,0.5,1.1,2.3,3.8,MIN,78,17.1,-2.64,-0.41,-3.05,0.06,5.1,96.78,30,3.87,597.5,165.0,47.0
210 | 208,377,Jonas Jerebko,PF,29,15.8,1.5,3.4,0.435,0.6,1.7,0.34600000000000003,0.9,1.7,0.523,0.521,0.3,0.5,0.703,0.8,2.7,3.5,0.9,0.3,0.2,0.5,1.6,3.8,BOS,78,15.8,-0.89,-0.14,-1.03,1.63,6.9,98.94,51,5.0,215.5,7.0,17.0
211 | 209,379,Tyus Jones,PG,20,12.9,1.3,3.0,0.414,0.4,1.2,0.35600000000000004,0.8,1.8,0.45399999999999996,0.486,0.6,0.7,0.767,0.2,1.0,1.1,2.6,0.8,0.1,0.6,0.8,3.5,MIN,60,12.9,0.11,-0.34,-0.23,1.4,9.6,97.51,23,1.47,472.0,0.0,14.0
212 | 210,381,Adreian Payne,PF,25,7.5,1.3,3.0,0.426,0.2,0.8,0.2,1.1,2.2,0.513,0.45399999999999996,0.8,1.1,0.737,0.5,1.3,1.8,0.4,0.4,0.4,0.4,1.8,3.5,MIN,18,7.5,-1.69,1.36,-0.33,0.24,8.2,96.52,5,2.02,166.0,0.0,13.0
213 | 211,383,Rashad Vaughn,SG,20,11.2,1.4,3.8,0.365,0.6,2.0,0.321,0.8,1.8,0.413,0.449,0.0,0.1,0.4,0.1,1.1,1.2,0.6,0.5,0.2,0.3,0.8,3.5,MIL,41,11.2,-3.44,-0.89,-4.33,-0.35,5.2,96.36,18,1.81,128.0,2.0,7.0
214 | 212,384,Tyler Zeller,C,27,10.3,1.5,3.1,0.494,0.0,0.0,0.0,1.5,3.1,0.49700000000000005,0.494,0.4,0.8,0.564,0.8,1.6,2.4,0.8,0.1,0.4,0.4,1.2,3.5,BOS,51,10.3,-2.5,0.2,-2.3,0.28,8.5,99.46,32,8.0,336.0,15.0,14.0
215 | 213,386,Dragan Bender,PF,19,13.3,1.3,3.7,0.354,0.7,2.3,0.27699999999999997,0.7,1.4,0.483,0.441,0.1,0.3,0.364,0.5,1.9,2.4,0.5,0.2,0.5,0.7,1.7,3.4,PHX,43,13.3,-3.2,-0.68,-3.88,-0.28,3.3,100.06,13,4.28,495.0,,
216 | 214,389,Jared Sullinger,PF,24,10.7,1.4,4.3,0.319,0.3,1.6,0.16699999999999998,1.1,2.6,0.414,0.35100000000000003,0.4,0.7,0.5,0.9,1.5,2.5,0.3,0.4,0.1,0.4,1.5,3.4,TOR,11,10.7,-2.7,0.9,-1.8,0.1,2.7,96.58,2,5.63,368.5,11.0,2.0
217 | 215,395,Henry Ellenson,PF,20,7.7,1.2,3.4,0.359,0.5,1.8,0.28600000000000003,0.7,1.5,0.44799999999999995,0.43799999999999994,0.2,0.4,0.5,0.4,1.7,2.2,0.4,0.1,0.1,0.7,0.3,3.2,DET,19,7.7,-2.32,0.38,-1.94,0.1,7.6,95.74,8,1.7,147.0,6.5,5.0
218 | 216,396,Paul Pierce,SF,39,11.1,1.1,2.8,0.4,0.6,1.7,0.349,0.5,1.1,0.48100000000000004,0.507,0.4,0.5,0.769,0.0,1.9,1.9,0.4,0.2,0.2,0.6,1.6,3.2,LAC,25,11.1,-2.04,1.24,-0.8,0.4,4.4,97.71,16,3.53,1671.0,346.5,142.0
219 | 217,399,Briante Weber,PG,24,10.3,1.3,3.0,0.41700000000000004,0.1,0.5,0.1,1.2,2.5,0.48,0.425,0.6,0.8,0.688,0.4,0.9,1.3,1.1,0.6,0.1,0.4,0.8,3.1,GS/CHA,20,10.3,-2.02,-1.29,-3.31,-0.02,7.2,91.12,12,1.47,81.5,29.5,14.0
220 | 218,400,Jarrett Jack,PG,33,16.5,1.0,1.5,0.667,0.0,0.5,0.0,1.0,1.0,1.0,0.667,1.0,1.0,1.0,0.0,0.0,0.0,2.5,1.0,0.0,1.5,2.0,3.0,NO,2,16.5,-1.85,0.01,-1.84,0.03,5.1,97.67,1,0.06,245.5,11.5,7.0
221 | 219,405,Andrew Bogut,C,32,21.6,1.4,3.0,0.469,0.0,0.0,0.0,1.4,3.0,0.475,0.469,0.1,0.4,0.273,2.1,6.0,8.1,1.8,0.5,0.9,1.6,3.2,2.9,CLE/DAL,27,21.6,-4.01,4.34,0.33,1.2,9.2,93.14,8,0.24,1435.5,57.0,4.0
222 | 220,408,Fred VanVleet,PG,22,7.9,1.1,3.0,0.35100000000000003,0.3,0.8,0.379,0.8,2.2,0.341,0.401,0.5,0.6,0.818,0.1,1.0,1.1,0.9,0.4,0.1,0.4,1.0,2.9,TOR,37,7.9,-2.44,0.83,-1.61,0.28,7.8,97.63,25,1.31,116.5,0.0,17.5
223 | 221,415,Kyle Singler,SF,28,12.0,1.1,2.6,0.41,0.2,1.2,0.18899999999999997,0.8,1.4,0.5870000000000001,0.452,0.4,0.5,0.765,0.3,1.3,1.5,0.3,0.2,0.2,0.3,0.9,2.8,OKC,32,12.0,-3.25,0.9,-2.35,0.19,4.6,99.19,15,4.84,354.0,9.0,6.0
224 | 222,418,Omer Asik,C,30,15.5,1.0,2.1,0.47700000000000004,0.0,0.0,,1.0,2.1,0.47700000000000004,0.47700000000000004,0.7,1.3,0.59,1.5,3.7,5.3,0.5,0.2,0.3,0.5,1.6,2.7,NO,31,15.5,-2.97,1.94,-1.03,0.65,7.3,99.64,12,9.9,8.0,,
225 | 223,422,Tomas Satoransky,SG,25,12.6,1.1,2.6,0.415,0.2,0.6,0.243,0.9,1.9,0.473,0.446,0.4,0.6,0.6970000000000001,0.4,1.0,1.5,1.6,0.5,0.1,0.7,1.1,2.7,WSH,57,12.6,-2.1,1.08,-1.02,0.93,6.4,95.69,33,2.87,4.0,7.0,8.5
226 | 224,425,Andrew Nicholson,PF,27,9.0,1.1,2.9,0.387,0.1,0.7,0.185,1.0,2.2,0.452,0.41,0.2,0.4,0.643,0.4,1.2,1.6,0.3,0.4,0.2,0.4,1.4,2.6,BKN/WSH,38,9.0,-3.54,-0.67,-4.21,-0.23,3.1,97.45,20,6.09,3.0,5.0,5.0
227 | 225,428,DeAndre Liggins,SG,28,12.5,0.9,2.3,0.387,0.3,0.7,0.37,0.6,1.5,0.396,0.447,0.4,0.6,0.625,0.3,1.4,1.7,0.9,0.8,0.2,0.7,1.3,2.5,CLE/DAL,62,12.5,-3.13,0.76,-2.37,0.37,4.6,97.12,40,1.58,150.5,2.0,2.0
228 | 226,431,Miles Plumlee,C,28,10.8,1.0,2.0,0.478,0.0,0.0,,1.0,2.0,0.478,0.478,0.6,0.9,0.6409999999999999,0.8,1.3,2.1,0.5,0.4,0.3,0.7,1.6,2.5,MIL/CHA,45,10.8,-2.79,0.52,-2.27,0.25,4.0,95.64,19,12.5,362.0,11.0,3.0
229 | 227,432,Mike Scott,PF,28,10.8,0.9,3.3,0.28800000000000003,0.2,1.5,0.14800000000000002,0.7,1.8,0.406,0.322,0.4,0.4,0.875,0.6,1.5,2.1,0.9,0.2,0.2,0.4,0.7,2.5,ATL,18,10.8,-2.92,0.26,-2.66,0.06,5.8,97.93,7,3.33,7.0,2.0,1.0
230 | 228,435,Greivis Vasquez,PG,30,13.0,0.7,2.7,0.25,0.3,1.0,0.33299999999999996,0.3,1.7,0.2,0.313,0.7,1.0,0.667,0.0,0.7,0.7,1.7,0.3,0.3,0.3,2.0,2.3,BKN,3,13.0,-1.77,-0.38,-2.15,0.03,0.9,102.41,1,4.35,39.0,10.0,8.5
231 | 229,437,James Young,SG,21,7.6,0.9,2.0,0.431,0.4,1.2,0.34299999999999997,0.4,0.8,0.565,0.534,0.2,0.3,0.667,0.2,0.7,0.9,0.1,0.3,0.1,0.1,0.5,2.3,BOS,29,7.6,-1.88,-0.28,-2.16,0.14,6.4,100.01,16,1.83,12.0,0.0,12.0
232 | 230,442,Rakeem Christmas,PF,25,7.6,0.7,1.5,0.442,0.0,0.0,,0.7,1.5,0.442,0.442,0.7,1.0,0.7240000000000001,0.9,1.0,1.9,0.1,0.1,0.2,0.3,1.3,2.0,IND,29,7.6,-2.2,0.5,-1.7,0.2,4.9,98.43,14,1.47,185.0,13.0,3.0
233 | 231,446,Udonis Haslem,C,36,8.1,0.7,1.4,0.478,0.0,0.2,0.0,0.7,1.3,0.55,0.478,0.6,0.9,0.6,0.5,1.8,2.3,0.4,0.4,0.1,0.5,1.4,1.9,MIA,16,8.1,-2.79,0.73,-2.06,0.09,7.3,98.53,6,4.0,401.0,28.5,18.0
234 | 232,452,Lamar Patterson,SG,25,8.0,0.6,3.0,0.2,0.2,1.2,0.16699999999999998,0.4,1.8,0.222,0.233,0.4,0.6,0.667,0.2,1.2,1.4,1.2,0.2,0.0,0.8,1.2,1.8,ATL,5,8.0,-2.23,0.62,-1.61,0.03,-0.4,93.96,1,0.25,82.0,4.0,4.0
235 | 233,454,Cole Aldrich,C,28,8.6,0.7,1.4,0.523,0.0,0.0,,0.7,1.4,0.523,0.523,0.2,0.4,0.682,0.8,1.7,2.5,0.4,0.4,0.4,0.3,1.4,1.7,MIN,62,8.6,-1.82,1.83,0.01,1.04,7.6,97.42,23,7.64,220.5,22.0,9.0
236 | 234,455,Nick Collison,PF,36,6.4,0.7,1.2,0.609,0.0,0.1,0.0,0.7,1.1,0.636,0.609,0.3,0.4,0.625,0.5,1.1,1.6,0.6,0.1,0.1,0.2,0.9,1.7,OKC,20,6.4,-2.18,1.12,-1.06,0.17,12.0,97.52,10,3.75,283.5,10.0,12.0
237 | 235,457,Bruno Caboclo,SF,21,4.4,0.7,1.8,0.375,0.2,0.7,0.33299999999999996,0.4,1.1,0.4,0.43799999999999994,0.0,0.0,,0.6,0.6,1.1,0.4,0.2,0.1,0.2,0.4,1.6,TOR,9,4.4,-1.76,0.11,-1.65,0.04,9.4,95.38,8,1.59,270.5,11.0,8.0
238 | 236,459,Ryan Kelly,PF,25,6.9,0.5,1.8,0.28600000000000003,0.3,0.6,0.4,0.3,1.1,0.222,0.35700000000000004,0.3,0.4,0.833,0.1,1.1,1.1,0.5,0.3,0.3,0.3,0.3,1.6,ATL,16,6.9,-1.94,1.45,-0.49,0.08,7.3,96.26,6,1.58,23.0,17.0,10.0
239 | 237,460,Jordan Mickey,PF,22,5.6,0.6,1.4,0.441,0.0,0.0,0.0,0.6,1.3,0.455,0.441,0.3,0.6,0.5710000000000001,0.5,0.8,1.4,0.3,0.1,0.2,0.4,0.5,1.5,BOS,25,5.6,-2.62,-0.15,-2.77,0.03,6.0,97.66,9,1.47,129.5,0.0,3.0
240 | 238,475,Alonzo Gee,SF,29,6.8,0.2,1.1,0.214,0.0,0.2,0.0,0.2,0.8,0.273,0.214,0.4,0.7,0.556,0.3,0.8,1.2,0.5,0.4,0.1,0.3,1.1,0.8,DEN,13,6.8,-2.11,0.43,-1.68,0.08,0.8,104.33,4,0.06,109.0,,
241 |
--------------------------------------------------------------------------------