├── LICENSE.md ├── USECASE.md ├── digits_recognizer_notebook.ipynb ├── digits_recognizer_pipeline.ipynb ├── images ├── .DS_Store ├── app-overview.jpg ├── kf_central_dashboard.png ├── kf_kfp_config.png ├── kf_notebook.png ├── kserve.png ├── kubeflow_workflow.png ├── kubernetes_workflow.png ├── pipeline.png ├── test-inference.png └── youtube.png ├── kfp_examples ├── .DS_Store ├── kfpv1_examples.ipynb └── kfpv2_examples.ipynb ├── kserve_python_test.ipynb ├── kubeflow_configs ├── .DS_Store ├── access_kfp_from_jupyter_notebook.yaml ├── create_kserve_inference.yaml ├── proxy-fix-notebooks.yaml └── set-minio-kserve-secret.yaml ├── readme.md ├── tf_saved_model ├── .DS_Store └── detect-digits │ ├── keras_metadata.pb │ ├── saved_model.pb │ └── variables │ ├── variables.data-00000-of-00001 │ └── variables.index └── web_app ├── index.html ├── js_code.js ├── stylesheet.css └── tf.min.js /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright 2022 Cisco Systems. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /USECASE.md: -------------------------------------------------------------------------------- 1 | Recognizing Digits with Kubeflow 2 | ===================================== 3 | MLOps Workflow Demo 4 | 5 | The [MNIST database of handwritten digits](http://yann.lecun.com/exdb/mnist/) is the Hello-World of deep learning and therefore the best example to focus not on the ML model itself, but on creating the ML pipeline. The goal here is to create an automated ML pipeline for getting the data, data pre-processing, and creating and serving the ML model. You can see an overview of the digits recognizer application below. 
6 | 7 | ![](images/app-overview.jpg) 8 | 9 | [![youtube](images/youtube.png)](https://youtu.be/6wWdNg0GMV4) 10 | *Check out the [Walk-through Video](https://youtu.be/6wWdNg0GMV4)!* 11 | 12 | ## Related Sandbox 13 | [Cisco Intersight](https://devnetsandbox.cisco.com/RM/Diagram/Index/a63216d2-e891-4856-9f27-309ca61ec862?diagramType=Topology) 14 | -------------------------------------------------------------------------------- /digits_recognizer_notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "id": "c6q8_gU4RClu" 7 | }, 8 | "source": [ 9 | "# Exploring the MNIST Digits Dataset" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": { 15 | "id": "gRRWy7XpRHbq" 16 | }, 17 | "source": [ 18 | "## Importing Data" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": null, 24 | "metadata": { 25 | "colab": { 26 | "base_uri": "https://localhost:8080/" 27 | }, 28 | "id": "X2D5_56r3cSM", 29 | "outputId": "a2ed720b-afc4-495b-c57a-54f10eda2bcb" 30 | }, 31 | "outputs": [], 32 | "source": [ 33 | "import numpy as np\n", 34 | "import pandas as pd\n", 35 | "import matplotlib.pyplot as plt\n", 36 | "from tensorflow import keras\n", 37 | "import tensorflow as tf\n", 38 | "\n", 39 | "(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": { 45 | "id": "Z2FU9cyERJZ3" 46 | }, 47 | "source": [ 48 | "## Exploring Data" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 2, 54 | "metadata": { 55 | "colab": { 56 | "base_uri": "https://localhost:8080/" 57 | }, 58 | "id": "boaW64qdDryf", 59 | "outputId": "a15994bc-87a4-42c8-f105-07b8256952ba" 60 | }, 61 | "outputs": [ 62 | { 63 | "name": "stdout", 64 | "output_type": "stream", 65 | "text": [ 66 | "x_train shape: (60000, 28, 28)\n", 67 | "y_train shape: (60000,)\n", 68 | "x_test shape: (10000, 28, 28)\n", 69 | "y_test shape: (10000,)\n" 70 | ] 71 | } 72 | ], 73 | "source": [ 74 | "# check shape of the data\n", 75 | "\n", 76 | "print(f\"x_train shape: {x_train.shape}\")\n", 77 | "print(f\"y_train shape: {y_train.shape}\")\n", 78 | "\n", 79 | "print(f\"x_test shape: {x_test.shape}\")\n", 80 | "print(f\"y_test shape: {y_test.shape}\")" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 3, 86 | "metadata": { 87 | "colab": { 88 | "base_uri": "https://localhost:8080/", 89 | "height": 282 90 | }, 91 | "id": "F5DH7XCyDx6p", 92 | "outputId": "8b30fe19-cd81-4773-a47c-2592922866f5" 93 | }, 94 | "outputs": [ 95 | { 96 | "data": { 97 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAAAN8klEQVR4nO3df6jVdZ7H8ddrbfojxzI39iZOrWOEUdE6i9nSyjYRTj8o7FYMIzQ0JDl/JDSwyIb7xxSLIVu6rBSDDtXYMus0UJHFMNVm5S6BdDMrs21qoxjlphtmmv1a9b1/3K9xp+75nOs53/PD+34+4HDO+b7P93zffPHl99f53o8jQgAmvj/rdQMAuoOwA0kQdiAJwg4kQdiBJE7o5sJsc+of6LCI8FjT29qy277C9lu237F9ezvfBaCz3Op1dtuTJP1B0gJJOyW9JGlRROwozMOWHeiwTmzZ50l6JyLejYgvJf1G0sI2vg9AB7UT9hmS/jjq/c5q2p+wvcT2kO2hNpYFoE0dP0EXEeskrZPYjQd6qZ0t+y5JZ4x6/51qGoA+1E7YX5J0tu3v2j5R0o8kbaynLQB1a3k3PiIO2V4q6SlJkyQ9EBFv1NYZgFq1fOmtpYVxzA50XEd+VAPg+EHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEi0P2Yzjw6RJk4r1U045paPLX7p0acPaSSedVJx39uzZxfqtt95arN9zzz0Na4sWLSrO+/nnnxfrK1euLNbvvPPOYr0X2gq77fckHZB0WNKhiJhbR1MA6lfHlv3SiPiwhu8B0EEcswNJtBv2kPS07ZdtLxnrA7aX2B6yPdTmsgC0od3d+PkRscv2X0h6xvZ/R8Tm0R+IiHWS1kmS7WhzeQBa1NaWPSJ2Vc97JD0maV4dTQGoX8thtz3Z9pSjryX9QNL2uhoDUK92duMHJD1m++j3/HtE/L6WriaYM888s1g/8cQTi/WLL764WJ8/f37D2tSpU4vzXn/99cV6L+3cubNYX7NmTbE+ODjYsHbgwIHivK+++mqx/sILLxTr/ajlsEfEu5L+qsZeAHQQl96AJAg7kARhB5Ig7EAShB1IwhHd+1HbRP0F3Zw5c4r1TZs2Feudvs20Xx05cqRYv/nmm4v1Tz75pOVlDw8PF+sfffRRsf7WW2+1vOxOiwiPNZ0tO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kwXX2GkybNq1Y37JlS7E+a9asOtupVbPe9+3bV6xfeumlDWtffvllcd6svz9oF9fZgeQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJhmyuwd69e4v1ZcuWFetXX311sf7KK68U683+pHLJtm3bivUFCxYU6wcPHizWzzvvvIa12267rTgv6sWWHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeS4H72PnDyyScX682GF167dm3D2uLFi4vz3njjjcX6hg0binX0n5bvZ7f9gO09trePmjbN9jO2366eT62zWQD1G89u/K8kXfG1abdLejYizpb0bPUeQB9rGvaI2Czp678HXShpffV6vaRr620LQN1a/W38QEQcHSzrA0kDjT5oe4mkJS0uB0BN2r4RJiKidOItItZJWidxgg7opVYvve22PV2Squc99bUEoBNaDftGSTdVr2+S9Hg97QDolKa78bY3SPq+pNNs75T0c0krJf3W9mJJ70v6YSebnOj279/f1vwff/xxy/PecsstxfrDDz9crDcbYx39o2nYI2JRg9JlNfcCoIP4uSyQBGEHkiDsQBKEHUiCsANJcIvrBDB58uSGtSeeeKI47yWXXFKsX3nllcX6008/Xayj+xiyGUiOsANJEHYgCcIOJEHYgSQIO5AEYQeS4Dr7BHfWWWcV61u3bi3W9+3bV6w/99xzxfrQ0FDD2n333Vect5v/NicSrrMDyRF2IAnCDiRB2IEkCDuQBGEHkiDsQBJcZ09ucHCwWH/wwQeL9SlTprS87OXLlxfrDz30ULE+PDxcrGfFdXYgOcIOJEHYgSQIO5AEYQeSIOxAEoQdSILr7Cg6//zzi/XVq1cX65dd1vpgv2vXri3WV6xYUazv2rWr5WUfz1q+zm77Adt7bG8fNe0O27tsb6seV9XZLID6jWc3/leSrhhj+r9ExJzq8bt62wJQt6Zhj4jNkvZ2oRcAHdTOCbqltl+rdvNPbfQh20tsD9lu/MfIAHRcq2H/haSzJM2RNCxpVaMPRsS6iJgbEXNbXBaAGrQU9ojYHRGHI+KIpF9KmldvWwDq1lLYbU8f9XZQ0vZGnwXQH5peZ7e9QdL3JZ0mabekn1fv50gKSe9J+mlENL25mOvsE8/UqVOL9WuuuaZhrdm98vaYl4u/smnTpmJ9wYIFxfpE1eg6+wnjmHHRGJPvb7sjAF3Fz2WBJAg7kARhB5Ig7EAShB1Igltc0TNffPFFsX7CCeWLRYcOHSrWL7/88oa1559/vjjv8Yw/JQ0kR9iBJAg7kARhB5Ig7EAShB1IgrADSTS96w25XXDBBcX6DTfcUKxfeOGFDWvNrqM3s2PHjmJ98+bNbX3/RMOWHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeS4Dr7BDd79uxifenSpcX6ddddV6yffvrpx9zTeB0+fLhYHx4u//XyI0eO1NnOcY8tO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kwXX240Cza9mLFo010O6IZtfRZ86c2UpLtRgaGirWV6xYUaxv3LixznYmvKZbdttn2H7O9g7bb9i+rZo+zfYztt+unk/tfLsAWjWe3fhDkv4+Is6V9DeSbrV9rqTbJT0bEWdLerZ6D6BPNQ17RAxHxNbq9QFJb0qaIWmhpPXVx9ZLurZDPQKowTEds9ueKel7krZIGoiIoz9O/kDSQIN5lkha0kaPAGow7rPxtr8t6RFJP4uI/aNrMTI65JiDNkbEuoiYGxFz2+oUQFvGFXbb39JI0H8dEY9Wk3fbnl7Vp0va05kWAdSh6W68bUu6X9KbEbF6VGmjpJskrayeH+9IhxPAwMCYRzhfOffcc4v1e++9t1g/55xzjrmnumzZsqVYv/vuuxvWHn+8/E+GW1TrNZ5j9r+V9GNJr9veVk1brpGQ/9b2YknvS/phRzoEUIumYY+I/5I05uDuki6rtx0AncLPZYEkCDuQBGEHkiDsQBKEHUiCW1zHadq0aQ1ra9euLc47Z86cYn3WrFmttFSLF198sVhftWpVsf7UU08V65999tkx94TOYMsOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0mkuc5+0UUXFevLli0r1ufNm9ewNmPGjJZ6qsunn37asLZmzZrivHfddVexfvDgwZZ6Qv9hyw4kQdiBJAg7kARhB5Ig7EAShB1IgrADSaS5zj44ONhWvR07duwo1p988sli/dChQ8V66Z7zffv2FedFHmzZgS
QIO5AEYQeSIOxAEoQdSIKwA0kQdiAJR0T5A/YZkh6SNCApJK2LiH+1fYekWyT9b/XR5RHxuybfVV4YgLZFxJijLo8n7NMlTY+IrbanSHpZ0rUaGY/9k4i4Z7xNEHag8xqFfTzjsw9LGq5eH7D9pqTe/mkWAMfsmI7Zbc+U9D1JW6pJS22/ZvsB26c2mGeJ7SHbQ+21CqAdTXfjv/qg/W1JL0haERGP2h6Q9KFGjuP/SSO7+jc3+Q5244EOa/mYXZJsf0vSk5KeiojVY9RnSnoyIs5v8j2EHeiwRmFvuhtv25Lul/Tm6KBXJ+6OGpS0vd0mAXTOeM7Gz5f0n5Jel3Skmrxc0iJJczSyG/+epJ9WJ/NK38WWHeiwtnbj60LYgc5reTcewMRA2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSKLbQzZ/KOn9Ue9Pq6b1o37trV/7kuitVXX29peNCl29n/0bC7eHImJuzxoo6Nfe+rUvid5a1a3e2I0HkiDsQBK9Dvu6Hi+/pF9769e+JHprVVd66+kxO4Du6fWWHUCXEHYgiZ6E3fYVtt+y/Y7t23vRQyO237P9uu1tvR6frhpDb4/t7aOmTbP9jO23q+cxx9jrUW932N5Vrbtttq/qUW9n2H7O9g7bb9i+rZre03VX6Ksr663rx+y2J0n6g6QFknZKeknSoojY0dVGGrD9nqS5EdHzH2DY/jtJn0h66OjQWrb/WdLeiFhZ/Ud5akT8Q5/0doeOcRjvDvXWaJjxn6iH667O4c9b0Yst+zxJ70TEuxHxpaTfSFrYgz76XkRslrT3a5MXSlpfvV6vkX8sXdegt74QEcMRsbV6fUDS0WHGe7ruCn11RS/CPkPSH0e936n+Gu89JD1t+2XbS3rdzBgGRg2z9YGkgV42M4amw3h309eGGe+bddfK8Oft4gTdN82PiL+WdKWkW6vd1b4UI8dg/XTt9BeSztLIGIDDklb1splqmPFHJP0sIvaPrvVy3Y3RV1fWWy/CvkvSGaPef6ea1hciYlf1vEfSYxo57Ognu4+OoFs97+lxP1+JiN0RcTgijkj6pXq47qphxh+R9OuIeLSa3PN1N1Zf3VpvvQj7S5LOtv1d2ydK+pGkjT3o4xtsT65OnMj2ZEk/UP8NRb1R0k3V65skPd7DXv5Evwzj3WiYcfV43fV8+POI6PpD0lUaOSP/P5L+sRc9NOhrlqRXq8cbve5N0gaN7Nb9n0bObSyW9OeSnpX0tqT/kDStj3r7N40M7f2aRoI1vUe9zdfILvprkrZVj6t6ve4KfXVlvfFzWSAJTtABSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBL/DyJ7caZa7LphAAAAAElFTkSuQmCC", 98 | "text/plain": [ 99 | "
" 100 | ] 101 | }, 102 | "metadata": { 103 | "needs_background": "light" 104 | }, 105 | "output_type": "display_data" 106 | }, 107 | { 108 | "name": "stdout", 109 | "output_type": "stream", 110 | "text": [ 111 | "correct number: 5\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "# visualize single data instances\n", 117 | "\n", 118 | "img_no = 0 #change the number to display other examples\n", 119 | "\n", 120 | "first_number = x_train[img_no]\n", 121 | "plt.imshow(first_number, cmap='gray') # visualize the numbers in gray mode\n", 122 | "plt.show()\n", 123 | "print(f\"correct number: {y_train[img_no]}\")" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": { 129 | "id": "jygNEJ4dRm3o" 130 | }, 131 | "source": [ 132 | "## Preparing Data & Splitting" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 3, 138 | "metadata": { 139 | "colab": { 140 | "base_uri": "https://localhost:8080/" 141 | }, 142 | "id": "7B7d7i3uThoq", 143 | "outputId": "20ca74ea-0844-4a93-c7bd-8e48d454d65d" 144 | }, 145 | "outputs": [ 146 | { 147 | "name": "stdout", 148 | "output_type": "stream", 149 | "text": [ 150 | "train X shape: (60000, 28, 28, 1)\n", 151 | "test X shape: (10000, 28, 28, 1)\n" 152 | ] 153 | } 154 | ], 155 | "source": [ 156 | "# reshaping the data\n", 157 | "# reshaping pixels in a 28x28px image with greyscale, canal = 1. This is needed for the Keras API\n", 158 | "x_train = x_train.reshape(-1,28,28,1)\n", 159 | "x_test = x_test.reshape(-1,28,28,1)\n", 160 | "\n", 161 | "# normalizing the data\n", 162 | "# each pixel has a value between 0-255. Here we divide by 255, to get values from 0-1\n", 163 | "x_train = x_train / 255\n", 164 | "x_test = x_test / 255\n", 165 | "\n", 166 | "print(f\"train X shape: {x_train.shape}\")\n", 167 | "print(f\"test X shape: {x_test.shape}\")" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": null, 173 | "metadata": { 174 | "id": "S6tti0-ke81d" 175 | }, 176 | "outputs": [], 177 | "source": [ 178 | "# new functions" 179 | ] 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": { 184 | "id": "dOtAx2G9hD2W" 185 | }, 186 | "source": [ 187 | "## Model Building" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 4, 193 | "metadata": { 194 | "colab": { 195 | "base_uri": "https://localhost:8080/" 196 | }, 197 | "id": "DI795SvBhGPW", 198 | "outputId": "b66e6a8c-4dba-4f0d-8675-f5bd9f3ff594" 199 | }, 200 | "outputs": [ 201 | { 202 | "name": "stdout", 203 | "output_type": "stream", 204 | "text": [ 205 | "Model: \"sequential\"\n", 206 | "_________________________________________________________________\n", 207 | "Layer (type) Output Shape Param # \n", 208 | "=================================================================\n", 209 | "conv2d (Conv2D) (None, 26, 26, 64) 640 \n", 210 | "_________________________________________________________________\n", 211 | "max_pooling2d (MaxPooling2D) (None, 13, 13, 64) 0 \n", 212 | "_________________________________________________________________\n", 213 | "conv2d_1 (Conv2D) (None, 11, 11, 64) 36928 \n", 214 | "_________________________________________________________________\n", 215 | "max_pooling2d_1 (MaxPooling2 (None, 5, 5, 64) 0 \n", 216 | "_________________________________________________________________\n", 217 | "conv2d_2 (Conv2D) (None, 3, 3, 64) 36928 \n", 218 | "_________________________________________________________________\n", 219 | "max_pooling2d_2 (MaxPooling2 (None, 1, 1, 64) 0 \n", 220 | 
"_________________________________________________________________\n", 221 | "flatten (Flatten) (None, 64) 0 \n", 222 | "_________________________________________________________________\n", 223 | "dense (Dense) (None, 64) 4160 \n", 224 | "_________________________________________________________________\n", 225 | "dense_1 (Dense) (None, 32) 2080 \n", 226 | "_________________________________________________________________\n", 227 | "dense_2 (Dense) (None, 10) 330 \n", 228 | "=================================================================\n", 229 | "Total params: 81,066\n", 230 | "Trainable params: 81,066\n", 231 | "Non-trainable params: 0\n", 232 | "_________________________________________________________________\n" 233 | ] 234 | } 235 | ], 236 | "source": [ 237 | "model = keras.models.Sequential()\n", 238 | "model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(28,28,1)))\n", 239 | "model.add(keras.layers.MaxPool2D(2, 2))\n", 240 | "\n", 241 | "model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))\n", 242 | "model.add(keras.layers.MaxPool2D(2, 2))\n", 243 | "\n", 244 | "model.add(keras.layers.Conv2D(64, (3, 3), activation='relu'))\n", 245 | "model.add(keras.layers.MaxPool2D(2, 2))\n", 246 | "\n", 247 | "model.add(keras.layers.Flatten())\n", 248 | "model.add(keras.layers.Dense(64, activation='relu'))\n", 249 | "\n", 250 | "model.add(keras.layers.Dense(32, activation='relu'))\n", 251 | "\n", 252 | "model.add(keras.layers.Dense(10, activation='softmax')) #output are 10 classes, numbers from 0-9\n", 253 | "\n", 254 | "#show model summary - how it looks\n", 255 | "model.summary()" 256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": 5, 261 | "metadata": { 262 | "id": "UpdaKIGoxHiC" 263 | }, 264 | "outputs": [], 265 | "source": [ 266 | "#compile the model - we want to have a multiple outcome\n", 267 | "model.compile(optimizer=\"adam\",\n", 268 | " loss=\"sparse_categorical_crossentropy\",\n", 269 | " metrics=['accuracy'])" 270 | ] 271 | }, 272 | { 273 | "cell_type": "code", 274 | "execution_count": 6, 275 | "metadata": { 276 | "colab": { 277 | "base_uri": "https://localhost:8080/" 278 | }, 279 | "id": "qHaJJk-gxJLq", 280 | "outputId": "f2d4c7d6-b164-4c54-9ab7-a648169e345b" 281 | }, 282 | "outputs": [ 283 | { 284 | "name": "stdout", 285 | "output_type": "stream", 286 | "text": [ 287 | "1875/1875 [==============================] - 461s 245ms/step - loss: 0.2054 - accuracy: 0.9356\n" 288 | ] 289 | } 290 | ], 291 | "source": [ 292 | "#fit the model and return the history while training\n", 293 | "history = model.fit(\n", 294 | " x=x_train,\n", 295 | " y=y_train,\n", 296 | " epochs=1\n", 297 | ")" 298 | ] 299 | }, 300 | { 301 | "cell_type": "markdown", 302 | "metadata": { 303 | "id": "rvcNPDwWQhib" 304 | }, 305 | "source": [ 306 | "## Model Evaluation" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": 7, 312 | "metadata": {}, 313 | "outputs": [ 314 | { 315 | "name": "stdout", 316 | "output_type": "stream", 317 | "text": [ 318 | "INFO:tensorflow:Assets written to: models/detect-digits/assets\n" 319 | ] 320 | } 321 | ], 322 | "source": [ 323 | "keras.models.save_model(model,\"models/detect-digits\")" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": 11, 329 | "metadata": {}, 330 | "outputs": [], 331 | "source": [ 332 | "from minio import Minio\n", 333 | "import os\n", 334 | "\n", 335 | "minio_client = Minio(\n", 336 | " \"100.65.11.110:9000\",\n", 337 | " access_key=\"minio\",\n", 338 | " 
secret_key=\"minio123\",\n", 339 | " secure=False\n", 340 | " )\n", 341 | "minio_bucket = \"mlpipeline\"\n", 342 | "\n", 343 | "\n", 344 | "import glob\n", 345 | "\n", 346 | "def upload_local_directory_to_minio(local_path, bucket_name, minio_path):\n", 347 | " assert os.path.isdir(local_path)\n", 348 | "\n", 349 | " for local_file in glob.glob(local_path + '/**'):\n", 350 | " local_file = local_file.replace(os.sep, \"/\") # Replace \\ with / on Windows\n", 351 | " if not os.path.isfile(local_file):\n", 352 | " upload_local_directory_to_minio(\n", 353 | " local_file, bucket_name, minio_path + \"/\" + os.path.basename(local_file))\n", 354 | " else:\n", 355 | " remote_path = os.path.join(\n", 356 | " minio_path, local_file[1 + len(local_path):])\n", 357 | " remote_path = remote_path.replace(\n", 358 | " os.sep, \"/\") # Replace \\ with / on Windows\n", 359 | " minio_client.fput_object(bucket_name, remote_path, local_file)\n", 360 | " \n", 361 | "upload_local_directory_to_minio(\"models/detect-digits\",minio_bucket,\"/models/detect-digits\")" 362 | ] 363 | }, 364 | { 365 | "cell_type": "code", 366 | "execution_count": 89, 367 | "metadata": { 368 | "id": "ZsrEjddE1bbf" 369 | }, 370 | "outputs": [], 371 | "source": [ 372 | "#load model if already trained\n", 373 | "model = keras.models.load_model(\"detect-digits.h5\")\n", 374 | "#keras.models.save_model(model,\"detect-digits.h5\")" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": 90, 380 | "metadata": { 381 | "colab": { 382 | "base_uri": "https://localhost:8080/" 383 | }, 384 | "id": "bypXyDxMQj0u", 385 | "outputId": "2f679161-a921-4abd-bac5-b2436e9bcbe8" 386 | }, 387 | "outputs": [ 388 | { 389 | "name": "stdout", 390 | "output_type": "stream", 391 | "text": [ 392 | "313/313 [==============================] - 21s 66ms/step - loss: 0.0853 - accuracy: 0.9743\n" 393 | ] 394 | } 395 | ], 396 | "source": [ 397 | "# Test the model against the test dataset\n", 398 | "# Returns the loss value & metrics values for the model in test mode.\n", 399 | "model_loss, model_accuracy = model.evaluate(x=x_test,y=y_test)" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": 91, 405 | "metadata": { 406 | "colab": { 407 | "base_uri": "https://localhost:8080/", 408 | "height": 297 409 | }, 410 | "id": "wVohS46mxRDe", 411 | "outputId": "b64db43b-07f7-4b2e-e801-e7791484a6e7" 412 | }, 413 | "outputs": [ 414 | { 415 | "data": { 416 | "text/plain": [ 417 | "[Text(0.5, 15.0, 'Predicted'), Text(33.0, 0.5, 'Actual')]" 418 | ] 419 | }, 420 | "execution_count": 91, 421 | "metadata": {}, 422 | "output_type": "execute_result" 423 | }, 424 | { 425 | "data": { 426 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAEGCAYAAAB1iW6ZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8rg+JYAAAACXBIWXMAAAsTAAALEwEAmpwYAABD2klEQVR4nO3dd3wU1drA8d+zSSCFEpAOCiLYUUoooqII0gQpKhawXRVFLuq1dy5YXvSqoNd7pQgqLTQp0pEmRUqAhN4C0gmhBUKoSc77x05iwAQ2yc5kM/f58pkPm9md88yZ3T2ZnDlzHjHGoJRSqvDzFPQOKKWU8g9t0JVSyiW0QVdKKZfQBl0ppVxCG3SllHKJ4ILegZycnvCpY8Nvij/yH6dCqUIkJMi5r8f5tFRH4ogjUbycHD+Xem5fvqt2/vAOn3c5pEx1Jw+lzwK2QVdKKUelpxX0HuSbNuhKKQVg0gt6D/JNG3SllAJI1wZdKaVcwegZulJKuYRDF6btpA26UkqBKy6KFrpx6COXbOSB/pPp1G8SIxZvvOC5YYs2UPudnziWcgaAabE7eOjrX3iw/2Se+G46Ww4c9cs+tGxxNxvWL2TzxsW8+UYPv5SZE4/HQ8yKWUye+JOtcZyqU5UqlZgzexxr18xnTdw8ev79Gdtigb31qlKlIjNnjmb16jmsWvUrPXo8DcB7773C9u3LWbZsOsuWTadly6Z+juvcMdy2dRmxq+ewMmY2y5ZOty0OOPu9ypZJ930JUIXqDD0+4RgTYrYx4sX7CAny0OOHOTS5vgpXlSlBQlIKS7ftp2JkRObrK5cuxpBuLSkRVpTFW/by0YSljOhxX772wePx8M3Xn9CqzaPs3XuAZUunM2XqbDZt2pbf6mXrpZ7PsnnzNkoUL25L+eBsnVJTU3njzd7Exq2nWLEIViyfyZy5C22JZXe9UlPTePvtj4mz6vL771OZO3cxAP/+9xD69x/klzh/jevcMQRofu9DHDlyzJayMzj9vcqWCy6K2naGLiLXi8hbIvKNtbwlIjfkp8wdh45T68oyhBUJJjjIQ72ryzN3w24AvpgWwyut613w+tpVy1EirCgAt1xVloMnUvITHoAG9euwfftO/vhjN+fPn2fs2Mnc365lvsvNTuXKFWnTuhlDh0bbUn4GJ+uUkJBIbNx6AE6eTGHz5m1UrlTBllh21yshIZG4C+oST6VK5f1W/qXiOnUMneLkZzAnxqT7vAQqWxp0EXkLGI33xrQV1iJAtIi8nddya5SPZPUfiSSlnOH0uVQWb9nHweMpzN+4m7IlwrmuYukct50Ys407rq2S19CZKlWuwJ69+zN/3rvvAJVs+jJ99WVv3n7nY9JtPnNwsk5ZVa1ahdq33szyFbG2lO9kva66qgq1a99ETEwcAC+88AQrVsxkwIB/ERlZwpaYYP8xNMYwY3o0y5fN4NlnutgSAwruM3iB9HTflwBl1xn6M0B9Y0xfY8wIa+kLNLCey5aIdBORlSKycsjsFX95vnq5SJ6+62a6D/2VHj/8ynWVSnEuNY0h89fx4r21c9yZmO0HmLQynpdb1c1/zRxyX5vmJCYeZnXsuoLeFVtERIQzdsxgXn29F8nJJwt6d/IlIiKc6OgBvPFGH5KTTzJ48AhuvLEJDRu2JiEhkb59P7Atrt3H8O6mHWnQsBVt23Wle/enuOOOhrbECQhp531fApRdDXo6UCmb9RWt57JljBlkjIkyxkQ906JBtq/pWL8m0T3bMfT51hQPK8o15SPZd+wknb/+hdafjSfxxCke/fdUDiefBmDrgaP0nvA7/R9vSmREaL4rtn9fAldW+bNqVSpXZP/+hHyXe7HGjaNo17YF8VuXMXLEf2na9HZ++vEbv8cB5+qUITg4mHFjBhMdPZFJk2bYFseJegUHBxMdPYAxYyYxefJMABITD5Oeno4xhqFDo4mKutWvMTPiOnIMreN16NARJk2eQf36te2J4/BnMFsuuChqV4P+CjBXRGaIyCBrmQnMBV7OT8FHT3ob6gNJJ5m3YRft6tZg/vsPM+OtB5nx1oOUKxFOdM+2lCkexoGkk7w2YgEfd76TqmVL5rtSADEr46hR42qqVbuSkJAQOnduz5Sps/1Sdlbvvd+XatWjqHFtI7p0fZH585fw5FMv+T0OOFenDIMHfcmmzfH0/9qei4YZnKjXgAGfs2VLPN98833mugoVymU+bt++JRs3bvFrTHDmGIaHh1GsWETm43ub38WGDf6vCzj/GcyWC7pcbBnlYoyZKSLX4u1iqWyt3gfEGGPyNdjztZELOH7qLMEeD+/c34gSYUVyfO2guWtJOnWWTycvAyDY42HU39vmJzxpaWm8/Mr7TJ82iiCPhx9/GsPGjVvzVWZBc7JOtzeuz+NdH2Ttuo2sjPF+YT/4oC8zZs7zeyy769W4cRRdujzAunWbWLbMO6SvV69/0bnz/dxyy40YY9i1ay89e77rt5jg3DEsX74s48cNASAoOIjRoycxe/YCv8bIEBDfqwA+8/aVBGqSaJ0+VxU0nT43fwrb9Lln187yeZeL3tJSp89VSqlAZdID92Knr7RBV0opCOi+cV9pg66UUuCKPnRt0JVSClwxOZc26EopBa44Qy90sy0qpZQt/DgOXUSGikiiiKzPsq60iPwqItus/0tZ68Wa7ypeRNaKSN0s2zxpvX6biDx52biBOmwxuEhlx3bs9P5FToUirNKdjsVS6mI6bDFnZ5aM9HmXQ2/vcsl4ItIEOAkMM8bcbK37HDhqjOlrzWlVyhjzloi0AXoCbYCGwNfGmIYiUhpYCUThPZyrgHrGmBynvtQzdKWUAr+eoRtjFgIXJ2BoD2QkNvgJ6JBl/TDjtQyIFJGKQEvgV2PMUasR/xVodam42oeulFJAbm5iF5FuQLcsqwYZYy43D0N5Y8wB63ECkDHXcmVgT5bX7bXW5bQ+R9qgK6UU5GocutV453kiHWOMERG/90ppl4tSSoETsy0etLpSsP5PtNbvA67M8roq1rqc1udIG3SllAInZlv8BcgYqfIkMDnL+ies0S6NgONW18wsoIWIlLJGxLSw1uVIu1yUUgrAjxOkiUg0cDdQRkT2Ar2AvsBYEXkG2AV0tl4+He8Il3jgFPA0gDHmqIh8BMRYr+tjjLlkpnvXnKH7I2P4+59+RZP7HqFD1xcy182at4j2XZ6n1h1tWL/pz+k8f1+xms5/60nHx7vT+W89Wb4q7i/l/f3Nf15QVl44mQnd4/EQs2IWkyf+dPkX54OTdXIq1uBBX7J/7xriYufaFqMgYm3buozY1XNYGTObZUun2xrLyc9FtvzY5WKMedQYU9EYE2KMqWKMGWKMOWKMaWaMqWmMaZ7ROFujW3oYY64xxtQyxqzMUs5QY0wNa/nhcnFd0aBnZAxv264rtW5tysMPd+CGG2rmupwObe5lwFcfX7CuRvWq9P/0A+rVvvmC9aUiS/DtZ/9k4vDv+OT913inzxcXPP/rgiWEh4flvjJZ+Ktevnqp57Ns3mxvlnUn6+RkrGHDxnJfW/tybhZULI
Dm9z5EVP0WNLqtjW0xnP6sZ8sFCS5c0aD7K2N4VO1alCxR/IJ111S7iqur/jW59A3X1qBc2SsAqHF1Vc6cPcu5c+cAOHXqNMPGTOD5Jx/JQ23+5GQm9MqVK9KmdTOGDo22pfwMTtbJyViLFi/n6LEkW8ouyFhOcfK9ypE26LknIk/7u8yCzhj+64LF3HhdDYoU8WZP+vfgYTz5SCdCQ/OXw9TJen31ZW/efudj0m3+sDpZp4L+XLiBMYYZ06NZvmwGzz5j318FAfFeaU7RPOmd0xMi0k1EVorIyvT0FCf3Kc/id+ziq/8O5cM3egKweet29uw7QPO7bi/gPfPdfW2ak5h4mNWx6wp6V1SAubtpRxo0bEXbdl3p3v0p7rijYUHvkn3SUn1fApQto1xEZG1OT/Hn3VF/kXWwfm7mcimojOEJiYd4+d2P+PSD17nKih+3YRMbNm+jxQNPkpaWxpFjx3nq72/y47ef57p8p+rVuHEU7dq2oHWrewgNLUqJEsX56cdvbElK7eR7FRCZ5Au5jON16NARJk2eQf36tVm8eLn/4wTCexXAXSm+susMvTzwBNAum+WIv4MVRMbwE8knefGNXrzywtPUveWmzPWPdGzL/F9GMvvnnxj23ZdUu7JynhpzcK5e773fl2rVo6hxbSO6dH2R+fOX2NKYg7PvVUBkki/EwsPDKFYsIvPxvc3vYsOGLbbECoj3ygVdLnaNQ58KFDPGxF38hIgs8Hcwf2UMf6NXX2Ji15KUdIJmHbry4jOPU7JEMf6v33ccTTrOi2/04vqa1RnU7xOif57Cnr37GfDDKAb8MAqAQf0/4YpSkQFXr0DiZJ2cjDVi+H+4q8ltlClTmp07VtK7zxf88OPoQh2rfPmyjB83BICg4CBGj57E7NkL/B4HAuSz7oIzdJ0+F50+V/3v0Olzc3Z6bB+fdzms84dOHkqf6Z2iSikFEKAnt7mhDbpSSgGkBu7oFV9pg66UUhDQFzt9pQ26UkqBKy6KaoOulFKgfehKKeUaeobuDk4OJUyelePMB35XvGUvx2Kp/PGIM6PgPOLcbB+p6b7n6AwI2qArpZQ7mLRC9gsoG9qgK6UU6Bm6Ukq5hg5bVEopl0jXUS5KKeUO2uWilFIu4YKLoq7IKQrOZUK3I87Iuat4oM8PdOo9lBFzvQm/t+xN5InPRvBgnx946T8TOHn6LABLN+7k0U+H8WCfH3j002Gs2LzLL/tQtGhRli6ZyqqVv7Imbh69PnzNL+Vmx8ns7k7Fsvv4DRr4BXv3xBG7ek7mugc63Udc7FzOnN5N3bq3+CVOlSoVmTVrNLGxc1m9eg49evwNgFKlSjJt2kjWr/+NadNGEhlZ0i/xsnLqO5wjzSkaOJzKhO7vOPH7DjFhyVpGvN2Vse8/xaJ129mdeIzew2fxUse7GP/h09xTuyY//RoDQKliYXz9YifGf/g0Hz3Zmvd+mO6X/Th79izNW3SmXtS91ItqQcsWd9OwQV2/lJ2Vk9ndnYxl9/EbNnwcbdt1vWDdho1b6Pzwcyxa5L8MQqmpabz11sfUqdOMJk3a88ILT3D99TV5/fUezJ+/hJtvvov585fw+usv+i1mBqe+wzlKN74vAcq2Bl1ErheRZiJS7KL1reyI51QmdH/H2ZFwlFrVKhJWJITgIA/1al7J3Nit7D54lHo1qwDQ6IaqzF3tnez/+qvKUy7Se0ivqVSGs+dTOXfeP7PEpaScAiAkJJjgkBDsmCvfyezuTmeSt/P4LV68nGMXfe42b45n69YdfosBkJCQSFzcegBOnkxh8+Z4KleuQLt29zJixHgARowYz/33t/BrXHDuO5wjF2QssqVBF5GXgMlAT2C9iLTP8vSndsQsrGpUKsPq+L0knTzN6XPnWbx+BwePJVO9Uhnmr4kH4NfVW0g4duIv285ZvZUbripHkRD/XArxeDysjJnNgX1rmTt3IStiYv1SblZOZnd3OpO8E8fPSVWrVqF27ZtYsSKWcuXKkJCQCHgb/XLlyhTw3tlAz9Bz9BxQzxjTAbgb+EBEXraey/EeZxHpJiIrRWRlenqKTbsWWKpXvIKnWzag+zfj6PHNeK67shwej4feT7Ri7G+xPPrpMFLOnCMkOOiC7eL3H+brib/xfhf/nSmlp6cTVb8FVa+Oon5UHW666Tq/lf2/wE3HLyIinOjogbz+em+Sk0/+5XkXzGP1FyY93eclUNk1ysVjjDkJYIzZKSJ3A+NFpCqXaNCNMYOAQeBsCrqC1vH2W+h4u/ei1jeTFlI+sjhXV7iCAS93BmDXwaMsWvfnn9YHjyXz6oBJfPRUG64sW8rv+3P8+AkW/LbEe0HRz0mBnczuXlCZ5O08fk4IDg5m9OiBjB49kcmTZwKQmHiYChXKkZCQSIUK5Th06HAB76UNdJRLjg6KSO2MH6zGvS1QBqhlU8xC6+gJ718jB46eYF7sNlo3uCFzXXq6YfD0pTzUpDYAJ06doee3P/NyxybUqVHFb/tQpkxpSpYsAUBoaCjNmzVhy5btfis/g5PZ3Z2M5dTxc8LAgf9i8+Z4vvnm+8x1U6f+SteuDwLQteuDTJnya0Htnn1c0OVi1xn6E8AFV+qMManAEyIy0I6ATmVCtyPOa4Mmc/zkGYKDPLzzaHNKhIcycu4qxvzm7YNtVqcm7RvfDMCYBbHsPpTEwGm/M3Da7wAMeOkhSpeIyNc+VKxYnqFD+hMU5MHj8TB+/BSmTZ9z+Q1zycns7k7Gsvv4DR/2LU2sz92O7TH0+ehLjh1Nol+/jyhbtjSTJ/3EmrUbaNu26+ULu4TGjevTpcsDrFu3ieXLZwDw4Yef88UX/2XkyO946qmH2b17H126dPdHtS7g1Hc4RwHcleIrsWMkgz+4tctFp89V2dHpc/MZ69y+fB/AlA8f8bnNiegz2pk3LJdcMw5dKaXyxY/DFkXkHyKyQUTWi0i0iISKyNUislxE4kVkjIgUsV5b1Po53nq+Wl6roA26UkqB3/rQRaQy8BIQZYy5GQgCHgE+A/oZY2oAx4BnrE2eAY5Z6/tZr8sTbdCVUgowqWk+Lz4IBsJEJBgIBw4A9wDjred/AjpYj9tbP2M930wkb31w2qArpRTk6gw96z0z1tItoxhjzD7gC2A33ob8OLAKSLIGhwDsBSpbjysDe6xtU63XX5GXKuhsi0opBbm6pT/rPTMXE5FSeM+6rwaSgHGALVOeXEzP0JVSCvw5Dr058Icx5pAx5jwwAbgdiLS6YACqAPusx/uAKwGs50sCR/JSBT1Dd5iTQwmT53ziSJzizd9zJI6bpTs0fDjdFP67Ie1i/HfD0G6gkYiEA6eBZsBKYD7wIDAaeBLvfFcAv1g/L7Wen2fyOJ5cG3SllALw7WLnZRljlovIeGA13hssY/F2z0wDRovIx9a6IdYmQ4DhIhIPHMU7IiZPtEFXSinw6y39xphewMV/ju8AGmTz2jPAQ/6Iqw26UkpBQM/R4itt0JVSCmxJ6OI0bdCVUgpccYbummGLbkw87K9YvX6cRtNXv+aBXoMz1x1POc3zX0XT7
r0BPP9VNCdSTgPes5TPomfT7t3veOif37Np15/zh//y+1ravTeAdu8N4Jff1+a5ToU5ofeluCUh9cU8Hg8xK2YxeeJPl39xPhR8kujCP32uKxp0tyYe9les+xvX4r8vP3zBuqEzltLwhmpM+eQFGt5QjaEzlgGweL03SfUvn7zAB4+35pOR3gQHx1NOM3DKEka8+yQj332SgVOWZP4SyK3CmtD7UtyUkPpiL/V8ls2bt9lWfoaCThJtUtN9XgKVKxp0tyYe9lesetdeRYmI0AvWLYjbRrvbvLlG2t1Wi/lxWzPXt210MyLCLddUJvnUWQ4lneT39TtodGM1SkaEUSIijEY3VmPJ+rwlKC6sCb0vxU0JqbOqXLkibVo3Y+jQaFvKz6rAk0Sn52IJULY16CLSQETqW49vFJFXRaSNHbHcmnjYzlhHTqRQNrIYAGVKRnDEypCUeCyZCqVLZL6ufKniJCYlk5h0kgqlLl7/11yT/6vcmpD6qy978/Y7H5PuguQPl2PSjc9LoLKlQReRXsA3wHci8n/At0AE8LaI5Hhb4f9ikuhAICLkcXI3VUCcSEh9X5vmJCYeZnXsOr+XHZC0Dz1HD+Kdu6AJ0APoYIz5CGgJPJzTRsaYQcaYKGNMlMfje0o1tyYetjPWFSUiOGSdYR9KOknp4uEAlCtVnISjJzJfd/BYMuUii1MushgJxy5eX8wv++IGgZCQ2t8aN46iXdsWxG9dxsgR/6Vp09v56cdv/B4nYGiXS45SjTFpxphTwHZjzAkAY8xpbDgcbk08bGesu26tyZSl3jOvKUvXcXftmpnrpy5bjzGGtdv3USysKGUji9H45uos3fAHJ1JOcyLlNEs3/EHjm6v7ZV/cwI0Jqd97vy/VqkdR49pGdOn6IvPnL+HJp17ye5xA4YYuF7vGoZ8TkXCrQa+XsVJESmJDg+7WxMP+ivX2oEms3LqbpJOnafHGt3S//07+1roRbw6cxMTFa6h0RUk+f74DAHfWuobF67bT7r0BhBYJofdT9wFQMiKMbm1vp8snPwLQrd0dlIwIy1O9CnNC75y4KSF1QSnoJNEmNXAbal/ZkiRaRIoaY85ms74MUNEYc9lOObcmiXaSzrao/lf4I0n00fZ3+dzmlJ78W0BedLLlDD27xtxafxg4bEdMpZTKj1zktwhYeuu/UkpBQF/s9JU26EophZ6hK6WUa2Smby7EtEFXSin0DF0ppVxDG3QV0JwaTpg88Q1H4gAU7/gvx2I5OS7NjWN0A3Jc36WYQrfHf6ENulJKoWfoSinlGiZdz9CVUsoV0tO0QVdKKVfQLhellHIJ7XJRSimXsCmLn6NckVMUnMu47tZYdsQZuXAtD3w+mk6fjWbEb2sy10cvWkeHvtF0+mw0/aYsBWDf0RM0fHMQnb8YS+cvxvLxuN/8sg9Ovlfbti4jdvUcVsbMZtnS6bbGcrJeHo+HmBWzmDzxJ1vjvPzSc8TFzSM2di7Dh/+HokWL2hrvYiZdfF4ClSvO0DMyrrdq8yh79x5g2dLpTJk6m02b/J+p3I2x7IgTf+AIE5ZtZMQrDxASFESPQVNpcmM1DiadZMH6Pxj7emeKBAdxNPlU5jZVypRg7Oud/VElwNn3KkPzex/iyJFjtpUPztfrpZ7PsnnzNkoUL25L+QCVKlWgR4+/ccutTTlz5gyjRg3g4c7tGTZ8rG0xL+aGi6KOnaGLyDC7ynYy47obY9kRZ8fBJGpdVZ6wIiEEB3mod00l5q7bwdjfN/B0s7oUCQ4CyEx9Zwcn3ysnOVmvypUr0qZ1M4YOjbal/KyCg4MJCwslKCiI8LAw9h+wP4VfVq4+QxeRf3OJG9iMMTnmohKRXy5eBTQVkUhr2/tzt5uXll3G9Qb16/gzhKtj2RGnRsXSfDtjOUkpZygaEsTiTbu58cqy7DqUxOod+/l2+nKKBgfxj/sbc/NV5QDYdzSZh78cR7GiIfRo04C61StdJorz9boUYwwzpkdjjGHw4BF8P2SkLXGcrNdXX/bm7Xc+pnhxe/PH7t+fQL9+A9ixfQWnT59hzpzfmDNnoa0xL2ZcfqfoynyUWwXYCHyP95eCAFHAl5faSES6Ad0AJKgkuUkUrQJL9fKleLppHboPnEJYkRCuq3wFHhHS0tM5ceosw1/uxPrdibw5bDbT3utC2RIRzPzgcSIjQtm45xD/+GEGP7/5CMVCixR0VXx2d9OO7N+fQNmyVzBzxmg2b4ln8eLlBb1beXZfm+YkJh5mdew67mpym62xIiNL0q5dS2pe24ikpBOMHj2Qxx7rxKhRE2yNm5Wrhy0aY/JzBSQKeBl4D3jDGBMnIqeNMZe80mWMGQQMgtyloHMy47obY9kVp2OjG+jY6AYAvpm2jPKRxdiZmESzWtUREWpVLY9HhGMpZyhdLCyzG+bGK8tS5YqS7DqUxE1XlstzfCffKyCz7EOHjjBp8gzq169tS4PuVL0aN46iXdsWtG51D6GhRSlRojg//fiNLYmimzW7k507d3P48FEAJk2awW2Nohxt0NP9eIZu9UZ8D9yM96T2b8AWYAxQDdgJdDbGHBMRAb4G2gCngKeMMavzEveyfegiUlZEvhCR6SIyL2O51DbGmHRjTD/gaeA9EfkWGy/AOplx3Y2x7IqTccHzwLFk5q37g9Z1a9K01tXExO8DYFdiEufT0igVEcrRk6dJS/eeIu09coLdh45TpXSJfMV38r0KDw+jWLGIzMf3Nr+LDRu22BLLqXq9935fqlWPosa1jejS9UXmz19iS2MOsGf3Pho0rEtYWCgA9zS9g82b7bt4nR1jxOfFB18DM40x1wO3ApuAt4G5xpiawFzrZ4DWQE1r6QZ8l9c6+NLIjsT7W+U+4AXgSeCQL4UbY/YCD4nIfcCJvO7k5TiZcd2NseyK89qPszh+6izBHg/vdLqTEmFF6dDgenqNns8Dn48mJCiIjx69BxFh9fb9/HdmDMFBHjwivP9QE0pGhAZkvbJTvnxZxo8bAkBQcBCjR09i9uwFtsRysl5OWRETy4QJ01ixYhapqamsidvA4O/tuQaRE3+NchGRkkAT4CkAY8w54JyItAfutl72E7AAeAtoDwwzxhhgmYhEikhFY8yBXMc2lxlNLyKrjDH1RGStMeYWa12MMaZ+boPlRm66XFTB0ulz88+NH3Ynj9/5c/vyHW7jNff5/DbctGP681jX+yyDrC5jRKQ23q7jjXjPzlfh7YLeZ4yJtF4jwDFjTKSITAX6GmMWW8/NBd4yxuT6OqYvZ+jnrf8PWGfa+4HSuQ2klFKBLDd96Fmv92UjGKgL9DTGLBeRr/mzeyVjeyMifv897kuD/rH1J8RrwL+BEsA//L0jSilVkPw4bHEvsNcYk3FFfDzeBv1gRleKiFQEEq3n9wFXZtm+irUu1y57UdQYM9UYc9wYs94Y09QYU88Yc/E4c6WUKtSM8X25dDkmAdgjItdZq5rh7X75Be81SKz/J1uPfwGeEK9GwPG89J+DD2foIvID2XTxGWP+lpeASikViPw5bBHoCYwUkSLADrwj/jzA
WBF5BtgFZMxzMR3vkMV4vMMWn85rUF+6XKZmeRwKdMTbj66UUq6R7sdb+o0xcXjvx7lYs2xeawC/zLB22QbdGPNz1p9FJBpY7I/gSikVKPx8hl4g8nKzT00g77fvKcc49fF0cijhib5tHIsV+c4Mx2Jdbviwv3jEuUYrvZBNMO72uVwAEJFkLuxDT8A7GF4ppVzjf+IM3Rhj3yTISikVIArX3xPZ82Uul7m+rFNKqcIsLd3j8xKoLjUfeigQDpQRkVL82SVbAqjswL4ppZRjXDB77iW7XJ4HXgEq4Z2LIKNBPwF8a+9uKaWUs4yjs8/Y41LzoX8NfC0iPY0x/3Zwn5RSynHpLuhE96UzKD0jdRyAiJQSkRft26XcK1q0KEuXTGXVyl9ZEzePXh++Zms8JzOuOxXLyYzrdtQpuG5zQp/oTegT/6RIm+cgKJgiLZ8m9Jn/I7Trh4R2/RAp650uIziqRea60Cf+SdgrAyE097lNBw38gr174ohdPSdzXalSkUyfPooNGxYxffooIiNL+qV+WQ0e9CX7964hLtaeS1nZ1eufvV5n1cpfiVkxi2nTRlKxYnm/x3Xye5WddMTnJVD50qA/Z4xJyvjBGHMMeM62PcqDs2fP0rxFZ+pF3Uu9qBa0bHE3DRvUtSVWRsb1tu26UuvWpjz8cAduuKFmoY6VkXG9UaM21KnTjKCgIB7u3N7vccCeOkmxSILrNOPMqI85M+yfIB6CrmsAwPmF4zgzog9nRvTBHNoDQOrK2Znrzi+eQPrerXDmVK7jDhs+jrbtul6w7s03ejB/3hJuuulO5s9bYkvDNGzYWO5r28Xv5WaWn029vvxqAPWi7qV+g5ZMnz6X9957xa8xnfxe5cQgPi+BypcGPciauxcAEQkCcpXoUUTuEJFXRaRFbnfQVykp3i9kSEgwwSEhtt2o4WTGdSdjOZVx3bY6eTwQHALiQUKKYFKSfNos6PoGpG5ZkaeQixcv59ixC+O0a9eC4SPGATB8xDjuv9//79eixcs5elFcf8quXsnJJzMfR4SH+f375eRnPSdpiM9LoPKlQZ8JjBGRZiLSDIgGLnkLnYisyPL4ObwXUYsDvUTk7Rw3zAePx8PKmNkc2LeWuXMXsiIm1o4w2WZcr1SpQqGOlTXj+p7dsZw4ccK2jOt21MmcTCJ15WzCnv2MsOe/wJw9TfqujQCE3N6R0Md7EXJXZwi66JJRcBGCqt1M2rZV+YqfVblyZUhI8M6KmpCQSLlyZfxWdkHr0/tNtsev4NFHO9K79xd+LdvJ71VO0nOxBCpfGvS3gHl408+9AKwDwi6zTUiWx92Ae40xvYEWQI5/K4pINxFZKSIr09NTfNi1P6WnpxNVvwVVr46iflQdbrrpustvpIALM65fVbUu4RHhPPZYp4LeLd8VDSfomtqcHvIOpwe9ASFFCLqhIecWT+DMjx9wZtQnSGgEwfVbXbBZUPVbSN8Xn6fuFl85dUu/Ez7s9TnX1GhAdPREXuye5wkBA9b/RINujEkHluPNUt0AuAdvwtNLlmtdPL0Cb5q7Q1ZZKUDqJWINMsZEGWOiPJ4IH6twoePHT7DgtyW0bHF3nra/HCczyTsVK2vG9dTU1MyM63awo05BV92AOXEYTp+E9DTStsXiqXgNpBz3viAtldQNSwiqcPWF2+WjuyUniYmHqVDBO9VRhQrlOHToiF/LDwTRoyfSsWNrv5bp5PcqJ67uQxeRa0Wkl4hsxpupaDeAleTicuPQS+Idu74SKG1l50BEimHDnFFlypSmZElvhvjQ0FCaN2vCli3b/R0GcDaTvFOxnMy4bkedTPJRPBWqQ7D30k7QVddjjiZAxJ8jTIJq1CH9cJYkMEXCCKpyLWnxcfmKfbEpU3/l8a4PAfB414eYMsWez4bTatT485dhu3Yt/f79cvJ7lZN08X0JVJe6sWgzsAhoa4yJBxARn1LPGWOq5fBUOt751P2qYsXyDB3Sn6AgDx6Ph/HjpzBt+pzLb5gHTmZcdyqWkxnX7ahTesIfpG1bRWjX9yE9nfTE3aSuW0jRji8j4d5ziPRDezg3Z0TmNkE16pC2cwOknstz3OHDvqVJk9soU6Y0O7bH0OejL/nXv75l1KgBPPX0I+zevZfHHuuer7plZ8Tw/3CXFXfnjpX07vMFP/w42m/lZ1ev1q3u4dprq5Oebti9ey89/v6O3+KBs9+rnATycERfSU59fCLSAXgEuB3vhdHRwPfGmKuz3cDPgotUdk/nYwFx6uPp5Bvl1ulznZpq1q3T56ae25fvik2o8JjPO9wpYVRAtv45drkYYyYZYx4Brgfm450GoJyIfGfn8EOllCoI6SI+L4HKl4uiKcaYUcaYdnizUcei86ErpVzG5GIJVLmaB9IYc8waifKXvHhKKVWYuWHYYl5S0CmllOsE8ugVX2mDrpRSENC39PtKG3SllELP0FWAC+SLN3lV4u3pjsVKHtPTsVjFH3Ym5YCTQwkLm0DuG/eVNuhKKYU7ToC0QVdKKbTLRSmlXEO7XJRSyiXS9AxdKaXcwQ1n6Lm6U1QppdzK33eKikiQiMSKyFTr56tFZLmIxIvIGBEpYq0vav0cbz1fLa91cE2Dbncm9AxVqlRizuxxrF0znzVx8+j592dsjedUJnSn4hQtWpSlS6ayauWvrImbR68PX7Mtlh2fiZGLN/BAv4l0+moiIxZvuOC5YQvXU/vtHziWcgaA5DPneOnHOXTuP4lOX01k0kr/zTHvxPvl5HsFzn0Gc2LDXC4vc2EyoM+AfsaYGsAxIKPxeAY4Zq3vZ70uT1zToNudCT1Damoqb7zZm1tubcrtd7Sje/enbMtO7lQmdCczrp89e5bmLTpTL+pe6kW1oGWLu2nYoK4tsfz9mYhPOMaEmK2M6NGOsS+3Z9HmPew+fAKAhKSTLN22j4qRf2baGrN0E9XLl2TsKx34vltrvpq2gvOpafneD6feLyffKyc/gznxZ4ILEakC3Ad8b/0seLO9jbde8hPQwXrc3voZ6/lm1utzzZYGXUQaikgJ63GYiPQWkSki8pmIlLzc9nlhdyb0DAkJicTGrQfg5MkUNm/eRmWbktk6lQnd6YzrKSneHJ4hIcEEh4TYlnfT35+JHYlJ1LqyLGFFggkO8lDv6grM3bALgC+mruCV1vXJOgu9IKScTcUYw+lz5ykZXpQgT/6/ck6+X069V05/BrOTmy6XrPmPraXbRcX1B97kzx6aK4AkY0xGCs69QGXrcWVgD4D1/HHr9blm1xn6UCAj8+7XeFPSfWat+8GmmI6rWrUKtW+9meUrYm0p36lM6E5nXPd4PKyMmc2BfWuZO3chK2LsOX7+VqNCKVbvPEhSyhlOn0tl8Za9HExKYf6GXZQtEc51lUpf8PpHGt/AH4lJ3PvpGB7sP4k32jXE48n/UAon3y+n3iunP4PZScvFkjX/sbUMyihHRNoCicaYVY5WAPtGuXiy/CaKMsZk/J22WETictrI+i3XDUCCSpLXRNFOiIgIZ+yYwbz6ei+Sk08W9O4UKunp6UTVb0HJkiX4edw
QbrrpOjZs2FLQu3VZ1ctF8vRdteg+dDZhIcFcV7E059LSGLJgLd8989ezyd+37uO6iqUZ/Fwr9hxJ5oUhs6hbrTzFQosUwN7nTWF9r/LCjzcW3Q7cLyJtgFCgBN4T20gRCbbaxipARpLbfcCVwF4RCcZ7Apyn7OJ2naGvF5GnrcdrRCQKvImngfM5bZT1t14gN+bBwcGMGzOY6OiJTJpkX5oypzKhF1TG9ePHT7DgtyW0bHG37bH8pWP9a4nueT9DX2hD8bCiXFMukn1HT9K5/2Ra9x1H4okUHv3mFw4nn2Lyym00u7kqIsJVZUpQuVQx/jh0PN/7UBDvl93vVUF9BrPy1ygXY8w7xpgqVm7lR4B5xpgueDO/PWi97ElgsvX4F+tnrOfnmTz2bdnVoD8L3CUi24EbgaUisgMYbD1XqA0e9CWbNsfT/+tBl39xPjiVCd3JjOtlypSmZMkSAISGhtK8WRO/Z5C309GTpwE4kHSSeRt20a5eDeZ/8Cgz3n6IGW8/RLkSEUS/dD9liodTMTKC5fEHADiSfJqdh09QpXTxfO+DU++Xk++Vk5/BnDiQsegt4FURicfbRz7EWj8EuMJa/yrwdl4D2NLlYow5DjxlXRi92oqz1xhz0I54YH8m9Ay3N67P410fZO26jayM8X7gPvigLzNmzvN7LKcyoTuZcb1ixfIMHdKfoCAPHo+H8eOnMG36HFti2fGZeG3EfI6fOkOwx8M77RtRIqxojq99rlltPhy3iAf7TcQAr7SOolREaL7ig3Pvl5PvlZOfwZyk2zA9lzFmAbDAerwDaJDNa84AD/kjnth11Tq/gotUDswdU/8z3Dh9rlulntuX7x7wPlW7+NzmfLhrZEBOFKC3/iulFO649V8bdKWUQqfPVUop17CjD91p2qArpRSasUgppVxD+9CVUsol0lxwjq4NusOCPUGOxUpNz//Mfv/LnBxKmDyjlyNxSrbp40icwkjP0JVSyiX0oqhSSrlE4W/OtUFXSilAu1yUUso19KKoUkq5hBv60F2TU9SpBLN2J4keOPBf7N69mlWrfs1cd8stN/Lbb5NYvnwGS5ZMJSrqVr/GBGcTAjv1XjmVODyDv+s1ct5qHvjoRzp99CMj5nmT32zek8jjn4+i86fDeKzvCNbtPHDBNut3JlDv71/x6+q8z1Q4aOAX7N0TR+zqP2dWfKDTfcTFzuXM6d3UrXtLnsu+lL///RliV88hLnYuPXvam3w9Ow5Mn2s7VzToTiaYtTtJ9PDh47j//icuWPfpp+/yySf9adiwNX36fMmnn77rt3gZnEoI7OR75VTicPB/veL3H2bCkrWMeKsLY999gkXrdrA78Rj9Jy7k+ftuY+y7T9C9bWP6T1yYuU1aejpfT1pIoxuq5asuw4aPo227rhes27BxC50ffo5Fi5bnq+yc3HTjdTzzt0dpfHtb6kW1oE2b5lxzTTVbYuUkHePzEqhc0aA7mWDW7iTRixev4NhFiY2NMZQo4U2MULJkcQ4csGdaeScSAjv5XjmVOBz8X68dCUeoVa0iYUVCvAmpa1Zhbtw2RCDl9FkATp4+S9mSxTK3iV4QS7M6NSldPDxfdVm8ePlfPoObN8ezdeuOfJV7KddfX4MVK+I4ffoMaWlpLFq4jA4dWtsWLzv+ylhUkGxp0EXkJRG50o6ys1NQCWbtThKd4fXXe/N///cu8fHL+L//e58PPvjMljhOJAQOhGTAdvB3vWpULMPq7ftIOnma0+fOs3jDHxw8lswbDzal38SFtHx3IF9NWMhL7e8E4GBSMvPj4ul8Z+38VqVAbNi4hTvuaEDp0pGEhYXSqtU9VMmSks4JJhf/ApVdZ+gfActFZJGIvCgiZX3ZSES6ichKEVmZnp5i0675h5NJort1e5w33uhDjRqNePPNPgwY8C9b4mQkBK56dRT1o+pw003X2RJHXV71ilfw9L316f7v8fT49meuq1IOj8fDuEVreP3Bu5n16fO8/uDd9B4xC4B/jVvAyx3vxOMpnHPAbt4cz7+++C/Tp41i6pQRrFm7gbQ0Z+90TsP4vAQquxr0HXizWn8E1AM2ishMEXlSRHJMqpjXJNFOJ5h1Kkl0hq5dH8iM8/PP9lwUzcrOhMCBkAzYDnbUq+PttYh+53GGvvoIxcOLUrVcKaYs20Cz2t6++RZ1r2X9Lm+MjbsTeGvINFq/P5g5sVv5dPQc5sVty1d8p/3442ga3daGZs0fJOnYcbZts6+LJzva5ZIzY4xJN8bMNsY8A1QC/gu0wtvY+5XTCWadShKd4cCBgzRp0giApk1vJz5+p99jOJUQOBCSAdvBjnodTfZe0zhw9ATz4rbRuv71lC1ZjJXb9gKwYsturiobCcD0j55jxsfepXmda3n3kebcU9uei812KVv2CgCuvLISHTq0ZvToSY7GTzfG5yVQ2TUO/YK/+4wx54FfgF9EJH9XbLLhZIJZu5NEDxv2b+688zbKlClFfPxyPv74K1588W2++OKfBAcHcebMWXr0yHNS8Bw5lRDYyffKqcThYE+9Xhv0C8dTThMcFMQ7DzejRHgoH3a5l8/HzSct3VAkJIgPurTwUw3+NHzYtzSxjtuO7TH0+ehLjh1Nol+/jyhbtjSTJ/3EmrUbaNu26+ULy4UxowdxxRWlOH8+lZdefo/jx0/4tfzLCdxm2ne2JIkWkWuNMfn6NLs1SbTOtqiyo7Mt5s+5s3vzffHgsaodfW5zRu2aGJAXK2w5Q89vY66UUk4L5NErvtJb/5VSCkjVBl0ppdxBz9CVUsolAnk4oq+0QVdKKbBlqgunaYOulFK4Y/pcbdAdpkMJVXaKt+7tSJzkyW85EgegeHt75hyySyDf0u8rbdCVUgp3nKG7YvpcpZTKL2OMz8uliMiVIjJfRDaKyAYRedlaX1pEfhWRbdb/paz1IiLfiEi8iKwVkTwnItAGXSml8OvkXKnAa8aYG4FGQA8RuRF4G5hrjKkJzLV+BmgN1LSWbsB3ea2DNuhKKYX/5kM3xhwwxqy2HicDm4DKQHvgJ+tlPwEdrMftgWHGaxkQKSIV81IHbdCVUorcpaDLmrvBWrplV6aIVAPqAMuB8saYjCSwCUB563FlYE+WzfZa63JNL4oqpRSQZny/tcgYMwi45PzZIlIM+Bl4xRhzQuTP+byMMUZE/H4V1jVn6G7MJF+lSiXmzB7H2jXzWRM3j55/ty8TulPHz8k6gXP1cjKWHZ/Bkb+t4YG+0XTqO4oRC9Zkro9euJYOn46kU99R9PvldwCmrdxC589HZy51/vEfNu89lO99cPK7lR1/pqATkRC8jflIY8wEa/XBjK4U6/9Ea/0+IGvKzirWulxzRYPu1kzyqampvPFmb265tSm339GO7t2fsqVeTh4/p+oEztarMH8G4w8cYcLSjYx49UHGvvEIizbuZPehJGK27WXB+j8Y++YjTHj7MZ5sWhuA+6KuY+ybjzD2zUf4pOu9VC5dguur+JRl8pKc/G5lx18JLsR7Kj4E2GSM+SrLU78AT1qPnwQmZ1n/hDXapRFwPEvXTK
7YlSS6iIg8ISLNrZ8fE5FvRaSH9ZvLr9yaST4hIZHYuPUAnDyZwubN26hsQ0JlJ4+fU3UCZ+tVmD+DOw4eo1bV8oQVCSE4yEO9ayoxd+0Oxi5Zz9PN6lIk2DuHf+nif81NM2P1VlrW9c8vLie/W9kxuVgu43bgceAeEYmzljZAX+BeEdkGNLd+BpiON5NbPDAYeDGvdbCrD/0Hq+xwEXkSKAZMAJoBDfjzt5RfZJdxvUH9Ov4MUeCqVq1C7VtvZvmKWL+XXVDHz846gbP1KsyfwRoVSvPttGUkpZyhaEgQizfu4saryrErMYnVO/bz7bRlFA0J5h/tG3PzVeUv2HZ2bDz9n21TQHvuX/66scgYs5iLsrZl0Syb1xvAL310djXotYwxt4hIMN6+oErGmDQRGQGsyWkj60pxNwAJKkluEkW7WUREOGPHDObV13uRnHyyoHfHL9xYp8KqeoXSPN2sLt2/+4WwIsFcV7kMHhHS0g0nTp1l+D8eZP3uRN78cRbTPnicjIt763YmEFokmBoVryjgGviHG+4UtatB94hIESACCAdKAkeBokCOXS5ZrxznJgWdWzPJAwQHBzNuzGCioycyadIMW2I4ffycqBM4W6/C/hns2OhGOja6EYBvpi6lfGQxdiYm0eyW6ogItaqWxyPCsZQzlC4WBsDM2Hha+am7JRDkZpRLoLLrougQYDMQB7wHjBORwUAM4PcsvW7NJA/eK/+bNsfT/+tLjpDKF6ePnxN1AmfrVdg/g0eTTwFw4Fgy89buoHXda2la62pitnkHW+xKTOJ8WjqlIkIBSE83zI6Lp1Ud9zTo/hzlUlDsyinaT0TGWI/3i8gwvBcBBhtjVvg7nlszyd/euD6Pd32Qtes2sjLG2zh88EFfZsyc59c4Th4/p+oEztarsH8GX/thJsdTzhAc5OGdB5tQIrwoHRreQK/oeTzQN5qQYA8fPdYss7tl1fb9VIgsRpUyJf1RJcDZ71Z23DAfugRqJXLT5aKU8o1bp89NPbcvp4uQPqtb8Q6f25zVBxbnO54d9E5RpZTCHWfo2qArpRSQ5oKsotqgK6UUXPYO0MJAG3SllIKAHr3iK23QlVIKPUNXSinX0DN0pch50orCzsmvt1PHsFTHLxyKBMkT33Aslj/oGbpSSrmEG2791wZdKaXQLhellHINo2foSinlDjp9rlJKuYTe+q+UUi7hhjN0VySJ1kzy+edUxvVrr72GlTGzM5cjhzfzUs9nbYv38kvPERc3j9jYuQwf/h+KFi1qSxynM9Zv27qM2NVzWBkzm2VLp/ut3CpVKjJr1mhiY+eyevUcevT4GwCdOt3H6tVzOHVqJ3Xr3pKvGCMXruWBz0fT6bPRjPjtzwRm0YvW0aFvNJ0+G02/KUsv2ObAsWRue3swP82Py1fsS0lLT/d5CVSuOEPPyCQfG7eeYsUiWLF8JnPmLmTTpm1+j5WR3b1Vm0fZu/cAy5ZOZ8rU2YU+1rBhY/nvf3/ghx++9nvZWW3dup2o+i0Ab/127VzFpMn2ZC2qVKkCPXr8jVtubcqZM2cYNWoAD3duz7DhY/0ey6njl1Xzex/iyJFjfi0zNTWNt976mDjru7R06TTmzl3Ehg1bePjhbvznP/+Xr/LjDxxhwrKNjHjlAUKCgugxaCpNbqzGwaSTLFj/B2Nf70yR4KDMhBsZvpz8O7ffcFW+Yl+OjnK5BBGpDnQCrgTSgK3AKGPMCX/HSkhIJCEhEbgwk7wdDV/W7O5AZnb3wh5r0eLlVK1axe/lXso999zBjh272L17n20xgoODCQsL5fz584SHhbH/gD1p4Qri+Nnhr9+leCpXrsDcuYv8Uv6Og0nUuqo8YUW8mSjrXVOJuet2sHHPIZ5uVpciwUEAlC4enrnNvHV/UKl08cxt7OKGPnRbulxE5CVgABAK1MebS/RKYJmI3G1HzAwFkUm+UqUKhT5WQXi4c3vGjJlkW/n79yfQr98AdmxfwZ7dsZw4cYI5cxbaFs9JxhhmTI9m+bIZPPtMF1tiVK1ahdq1b2KFH79LNSqWZvUfB0hKOcPpc+dZvGk3B5NOsutQEqt37Kdr/5955ttJrN/t/aVy6ux5fpwXywst6/ttH3KSjvF5CVR29aE/B7Q2xnyMN/XcTcaY94BWQL+cNhKRbiKyUkRWpqen5DqoZpIvPEJCQmjbtgXjf55qW4zIyJK0a9eSmtc24qqqdQmPCOexxzrZFs9JdzftSIOGrWjbrivduz/FHXc09Gv5ERHhREcP5PXXe/v1u1S9fCmeblqH7gOn0GPQNK6rfAUeEdLS0zlx6izDX+7EK+1u481hszHGMGBWDF3uuoXwovaenYP3l6SvS6Cysw89GG9XS1GgGIAxZreI5PjOGGMGAYMg9ynoNJN84dKqVVNiY9eRmHjYthjNmt3Jzp27OXz4KACTJs3gtkZRjBo1wbaYTsn4HBw6dIRJk2dQv35tFi9e7peyg4ODGT16IKNHT2Ty5Jl+KTOrjo1uoGOjGwD4ZtoyykcWY2diEs1qVUdEqFW1PB4RjqWcYd2ug/y6Zgf9pywj+fRZPCIUDQ7ikTtr+X2/Avlip6/satC/B2JEZDlwJ/AZgIiUBY7aEbAgMsnv25dA587tefwJe0afOBnLaQ8/3MHW7haAPbv30aBhXcLCQjl9+gz3NL2DVavWXH7DABceHobH4+HkyRTCw8O4t/ldfPxJjn/45trAgf9i8+Z4vvnme7+VmdXR5FOULh7OgWPJzFv3B8Ne7oRHhJj4fdSvWZldiUmcT0ujVEQoP/TsmLnddzNjCC8aYktjDu4YtmhLg26M+VpE5gA3AF8aYzZb6w8BTfwdTzPJ55+TGdfDw8No3qwJL75ob8LiFTGxTJgwjRUrZpGamsqauA0M/n6kLbGcPH7ly5dl/LghAAQFBzF69CRmz17gl7IbN65Ply4PsG7dJpYv9/6l++GHn1O0aBG++qoPZcuWZuLEH1i7diPt2j2epxiv/TiL46fOEuzx8E6nOykRVpQODa6n1+j5PPD5aEKCgvjo0XsQcXYez0DuSvGVBGolctvlogqOTp+bf04dwyBPkEOR4NjPrzoWK+y+V/J9CIuFX+3zW37y1B8B+bF3xTh0pZTKLx2HrpRSLqEJLpRSyiXSXTB9rivmclFKqfzy5zh0EWklIltEJF5E3nZg9wE9Q1dKKcB/o1xEJAj4D3AvsBfvEO5fjDEb/RLgEvQMXSml8I5q8nW5jAZAvDFmhzHmHDAaaG/LTl8kYM/QU8/ty9OwIBHpZt1xaiun4miswhXLjXVyc6ysctPmiEg3oFuWVYOy7HNlYE+W5/YC/p2bIQduPEPvdvmXFKo4GqtwxXJjndwcK0+MMYOMMVFZFsd/AWXHjQ26UkoVpH14Z5fNUMVaZztt0JVSyr9igJoicrWIFAEeAX5xInDA9qHng1N/+jj5J5bGKjyx3FgnN8fyO2NMqoj8HZgFBAFDjTEbnIgdsHO5KKWUyh3tclFKKZfQBl0ppVzCNQ26U7faishQEUkUkfV2xcgS60oRmS8iG
0Vkg4i8bGOsUBFZISJrrFi97YplxQsSkVgRsS8HnTfOThFZJyJxIrLS5liRIjJeRDaLyCYRuc2mONdZ9clYTojIKzbF+of1eVgvItEiEmpHHCvWy1acDXbVx/VyM39BoC54LzxsB6oDRYA1wI02xWoC1AXWO1CvikBd63FxYKuN9RKgmPU4BFgONLKxbq8Co4CpNh/DnUAZu98rK9ZPwLPW4yJApAMxg4AEoKoNZVcG/gDCrJ/HAk/ZVI+bgfVAON7BGnOAGk68b25a3HKG7tittsaYhdiURi+bWAeMMautx8nAJrxfMjtiGWNMRjbgEGux5Yq5iFQB7sObqtAVRKQk3l/2QwCMMeeMMUkOhG4GbDfG7LKp/GAgTESC8Ta2+22KcwOw3BhzyhiTCvwGuCOjt4Pc0qBnd6utLQ1fQRGRakAdvGfOdsUIEpE4IBH41RhjV6z+wJuAE/OVGmC2iKyybte2y9XAIeAHqyvpexGJsDFehkeAaDsKNsbsA74AdgMHgOPGmNl2xMJ7dn6niFwhIuFAGy68OUf5wC0NuquJSDHgZ+AVY8wJu+IYY9KMMbXx3tnWQERu9ncMEWkLJBpjVvm77BzcYYypC7QGeoiI33PaWoLxdsV9Z4ypA6QAtk6bat20cj8wzqbyS+H9S/dqoBIQISJd7YhljNmEN5n8bGAmEAek2RHLzdzSoBfYrbZ2E5EQvI35SGPMBCdiWl0F84FWNhR/O3C/iOzE2zV2j4iMsCEOkHmWiTEmEZiIt3vODnuBvVn+qhmPt4G3U2tgtTHmoE3lNwf+MMYcMsacByYAjW2KhTFmiDGmnjGmCXAM7zUjlQtuadAL7FZbO4k37fkQYJMx5iubY5UVkUjrcRjeuZw3+zuOMeYdY0wVY0w1vO/TPGOMLWd9IhIhIsUzHgMt8P5p73fGmARgj4hcZ61qBtg9//Wj2NTdYtkNNBKRcOuz2AzvdRxbiEg56/+r8Pafj7Irllu54tZ/4+CttiISDdwNlBGRvUAvY8wQO2LhPZt9HFhn9W0DvGuMmW5DrIrAT9bk/B5grDHG1iGFDigPTPS2RQQDo4wxM22M1xMYaZ1U7ACetiuQ9QvqXuB5u2IYY5aLyHhgNZAKxGLvbfk/i8gVwHmgh0MXlV1Fb/1XSimXcEuXi1JK/c/TBl0ppVxCG3SllHIJbdCVUsoltEFXSimX0AZd2UJE0qyZANeLyDjrdu68lvWjiDxoPf5eRG68xGvvFpFc3/xizcpYJq/7qFQg0AZd2eW0Maa2MeZm4BzwQtYnrcmecs0Y86wx5lI37NyNjXczKhXItEFXTlgE1LDOnheJyC/ARmsysH+JSIyIrBWR58F7h6yIfGvNbz8HKJdRkIgsEJEo63ErEVltzeE+15rA7AXgH9ZfB3dad8D+bMWIEZHbrW2vEJHZ1tzb3+OdPlipQs0Vd4qqwGWdibfGO+ESeOc3udkY84c1++FxY0x9ESkKLBGR2XhnlbwOuBHv3Z4bgaEXlVsWGAw0scoqbYw5KiIDgJPGmC+s140C+hljFlu3lM/CO1VrL2CxMaaPiNwHPGPrgVDKAdqgK7uEZZmuYBHeOWkaAyuMMX9Y61sAt2T0jwMlgZp45xWPNsakAftFZF425TcCFmaUZYzJaY765sCN1u3/ACWs2SubYM23bYyZJiLH8lZNpQKHNujKLqetqXgzWY1qStZVQE9jzKyLXtfGj/vhwZt56Uw2+6KUq2gfuipIs4Du1hTBiMi11qRTC4GHrT72ikDTbLZdBjQRkautbUtb65PxpuvLMBvvpFlYr6ttPVwIPGataw2U8lellCoo2qCrgvQ93v7x1eJNuj0Q71+NE4Ft1nPDgKUXb2iMOQR0AyaIyBpgjPXUFKBjxkVR4CUgyrroupE/R9v0xvsLYQPerpfdNtVRKcfobItKKeUSeoaulFIuoQ26Ukq5hDboSinlEtqgK6WUS2iDrpRSLqENulJKuYQ26Eop5RL/Dy6bD1rXodoLAAAAAElFTkSuQmCC", 427 | "text/plain": [ 428 | "
" 429 | ] 430 | }, 431 | "metadata": { 432 | "needs_background": "light" 433 | }, 434 | "output_type": "display_data" 435 | } 436 | ], 437 | "source": [ 438 | "# Confusion Matrix\n", 439 | "\n", 440 | "# Generates output predictions for the input samples.\n", 441 | "test_predictions = model.predict(x=x_test)\n", 442 | "\n", 443 | "# Returns the indices of the maximum values along an axis.\n", 444 | "test_predictions = np.argmax(test_predictions,axis=1) # the prediction outputs 10 values, we take the index number of the highest value, which is the prediction of the model\n", 445 | "\n", 446 | "# generate confusion matrix\n", 447 | "confusion_matrix = tf.math.confusion_matrix(labels=y_test,predictions=test_predictions)\n", 448 | "\n", 449 | "# plot confusion matrix\n", 450 | "import seaborn as sns\n", 451 | "import matplotlib.pyplot as plt\n", 452 | "h = sns.heatmap(confusion_matrix,annot=True,fmt='d')\n", 453 | "h.set(xlabel='Predicted', ylabel='Actual')" 454 | ] 455 | } 456 | ], 457 | "metadata": { 458 | "colab": { 459 | "collapsed_sections": [], 460 | "name": "digits-recognizer.ipynb", 461 | "provenance": [] 462 | }, 463 | "kernelspec": { 464 | "display_name": "Python 3", 465 | "language": "python", 466 | "name": "python3" 467 | }, 468 | "language_info": { 469 | "codemirror_mode": { 470 | "name": "ipython", 471 | "version": 3 472 | }, 473 | "file_extension": ".py", 474 | "mimetype": "text/x-python", 475 | "name": "python", 476 | "nbconvert_exporter": "python", 477 | "pygments_lexer": "ipython3", 478 | "version": "3.8.10" 479 | } 480 | }, 481 | "nbformat": 4, 482 | "nbformat_minor": 4 483 | } 484 | -------------------------------------------------------------------------------- /digits_recognizer_pipeline.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 8, 6 | "id": "5da13c87-aed3-4c3a-bac8-1807ded41b79", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "data": { 11 | "text/html": [ 12 | "Experiment details." 13 | ], 14 | "text/plain": [ 15 | "" 16 | ] 17 | }, 18 | "metadata": {}, 19 | "output_type": "display_data" 20 | }, 21 | { 22 | "data": { 23 | "text/html": [ 24 | "Run details." 
25 | ], 26 | "text/plain": [ 27 | "" 28 | ] 29 | }, 30 | "metadata": {}, 31 | "output_type": "display_data" 32 | } 33 | ], 34 | "source": [ 35 | "# \n", 36 | "# Creating a ML pipeline with the MNIST digits dataset\n", 37 | "# KFPv1 Example with lightweight Python components only\n", 38 | "#\n", 39 | "\n", 40 | "import kfp\n", 41 | "from kfp import dsl\n", 42 | "import kfp.components as components\n", 43 | "\n", 44 | "from typing import NamedTuple\n", 45 | "def get_data_batch() -> NamedTuple('Outputs', [('datapoints_training', float),('datapoints_test', float),('dataset_version', str)]):\n", 46 | " \"\"\"\n", 47 | " Function to get dataset and load it to minio bucket\n", 48 | " \"\"\"\n", 49 | " print(\"getting data\")\n", 50 | " from tensorflow import keras\n", 51 | " from minio import Minio\n", 52 | " import numpy as np\n", 53 | " import json\n", 54 | "\n", 55 | " minio_client = Minio(\n", 56 | " \"100.65.11.110:9000\",\n", 57 | " access_key=\"minio\",\n", 58 | " secret_key=\"minio123\",\n", 59 | " secure=False\n", 60 | " )\n", 61 | " minio_bucket = \"mlpipeline\"\n", 62 | " \n", 63 | " minio_client.fget_object(minio_bucket,\"mnist.npz\",\"/tmp/mnist.npz\")\n", 64 | " \n", 65 | " def load_data():\n", 66 | " with np.load(\"/tmp/mnist.npz\", allow_pickle=True) as f:\n", 67 | " x_train, y_train = f[\"x_train\"], f[\"y_train\"]\n", 68 | " x_test, y_test = f[\"x_test\"], f[\"y_test\"]\n", 69 | "\n", 70 | " return (x_train, y_train), (x_test, y_test)\n", 71 | " \n", 72 | " # Get MNIST data directly from library\n", 73 | " (x_train, y_train), (x_test, y_test) = load_data()\n", 74 | "\n", 75 | " # save to numpy file, store in Minio\n", 76 | " np.save(\"/tmp/x_train.npy\",x_train)\n", 77 | " minio_client.fput_object(minio_bucket,\"x_train\",\"/tmp/x_train.npy\")\n", 78 | "\n", 79 | " np.save(\"/tmp/y_train.npy\",y_train)\n", 80 | " minio_client.fput_object(minio_bucket,\"y_train\",\"/tmp/y_train.npy\")\n", 81 | "\n", 82 | " np.save(\"/tmp/x_test.npy\",x_test)\n", 83 | " minio_client.fput_object(minio_bucket,\"x_test\",\"/tmp/x_test.npy\")\n", 84 | "\n", 85 | " np.save(\"/tmp/y_test.npy\",y_test)\n", 86 | " minio_client.fput_object(minio_bucket,\"y_test\",\"/tmp/y_test.npy\")\n", 87 | " \n", 88 | " dataset_version = \"1.0\"\n", 89 | " \n", 90 | " print(f\"x_train shape: {x_train.shape}\")\n", 91 | " print(f\"y_train shape: {y_train.shape}\")\n", 92 | "\n", 93 | " print(f\"x_test shape: {x_test.shape}\")\n", 94 | " print(f\"y_test shape: {y_test.shape}\")\n", 95 | " \n", 96 | " from collections import namedtuple\n", 97 | " divmod_output = namedtuple('Outputs', ['datapoints_training', 'datapoints_test', 'dataset_version'])\n", 98 | " return [float(x_train.shape[0]),float(x_test.shape[0]),dataset_version]\n", 99 | " \n", 100 | "def get_latest_data():\n", 101 | " \"\"\"\n", 102 | " Dummy functions for showcasing\n", 103 | " \"\"\"\n", 104 | " print(\"Adding latest data\")\n", 105 | " \n", 106 | " \n", 107 | "def reshape_data():\n", 108 | " \"\"\"\n", 109 | " Reshape the data for model building\n", 110 | " \"\"\"\n", 111 | " print(\"reshaping data\")\n", 112 | " \n", 113 | " from minio import Minio\n", 114 | " import numpy as np\n", 115 | "\n", 116 | " minio_client = Minio(\n", 117 | " \"100.65.11.110:9000\",\n", 118 | " access_key=\"minio\",\n", 119 | " secret_key=\"minio123\",\n", 120 | " secure=False\n", 121 | " )\n", 122 | " minio_bucket = \"mlpipeline\"\n", 123 | " \n", 124 | " # load data from minio\n", 125 | " minio_client.fget_object(minio_bucket,\"x_train\",\"/tmp/x_train.npy\")\n", 126 | " 
x_train = np.load(\"/tmp/x_train.npy\")\n", 127 | " \n", 128 | " minio_client.fget_object(minio_bucket,\"x_test\",\"/tmp/x_test.npy\")\n", 129 | " x_test = np.load(\"/tmp/x_test.npy\")\n", 130 | " \n", 131 | " # reshaping the data\n", 132 | " # reshaping pixels in a 28x28px image with greyscale, canal = 1. This is needed for the Keras API\n", 133 | " x_train = x_train.reshape(-1,28,28,1)\n", 134 | " x_test = x_test.reshape(-1,28,28,1)\n", 135 | "\n", 136 | " # normalizing the data\n", 137 | " # each pixel has a value between 0-255. Here we divide by 255, to get values from 0-1\n", 138 | " x_train = x_train / 255\n", 139 | " x_test = x_test / 255\n", 140 | " \n", 141 | " # save data from minio\n", 142 | " np.save(\"/tmp/x_train.npy\",x_train)\n", 143 | " minio_client.fput_object(minio_bucket,\"x_train\",\"/tmp/x_train.npy\")\n", 144 | " \n", 145 | " np.save(\"/tmp/x_test.npy\",x_test)\n", 146 | " minio_client.fput_object(minio_bucket,\"x_test\",\"/tmp/x_test.npy\")\n", 147 | "\n", 148 | "def model_building(\n", 149 | " no_epochs:int = 1,\n", 150 | " optimizer: str = \"adam\"\n", 151 | ") -> NamedTuple('Output', [('mlpipeline_ui_metadata', 'UI_metadata'),('mlpipeline_metrics', 'Metrics')]):\n", 152 | " \"\"\"\n", 153 | " Build the model with Keras API\n", 154 | " Export model parameters\n", 155 | " \"\"\"\n", 156 | " from tensorflow import keras\n", 157 | " import tensorflow as tf\n", 158 | " from minio import Minio\n", 159 | " import numpy as np\n", 160 | " import pandas as pd\n", 161 | " import json\n", 162 | " \n", 163 | " minio_client = Minio(\n", 164 | " \"100.65.11.110:9000\",\n", 165 | " access_key=\"minio\",\n", 166 | " secret_key=\"minio123\",\n", 167 | " secure=False\n", 168 | " )\n", 169 | " minio_bucket = \"mlpipeline\"\n", 170 | " \n", 171 | " model = keras.models.Sequential()\n", 172 | " model.add(keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(28,28,1)))\n", 173 | " model.add(keras.layers.MaxPool2D(2, 2))\n", 174 | "\n", 175 | " model.add(keras.layers.Flatten())\n", 176 | " model.add(keras.layers.Dense(64, activation='relu'))\n", 177 | "\n", 178 | " model.add(keras.layers.Dense(32, activation='relu'))\n", 179 | "\n", 180 | " model.add(keras.layers.Dense(10, activation='softmax')) #output are 10 classes, numbers from 0-9\n", 181 | "\n", 182 | " #show model summary - how it looks\n", 183 | " stringlist = []\n", 184 | " model.summary(print_fn=lambda x: stringlist.append(x))\n", 185 | " metric_model_summary = \"\\n\".join(stringlist)\n", 186 | " \n", 187 | " #compile the model - we want to have a binary outcome\n", 188 | " model.compile(optimizer=optimizer,\n", 189 | " loss=\"sparse_categorical_crossentropy\",\n", 190 | " metrics=['accuracy'])\n", 191 | " \n", 192 | " minio_client.fget_object(minio_bucket,\"x_train\",\"/tmp/x_train.npy\")\n", 193 | " x_train = np.load(\"/tmp/x_train.npy\")\n", 194 | " \n", 195 | " minio_client.fget_object(minio_bucket,\"y_train\",\"/tmp/y_train.npy\")\n", 196 | " y_train = np.load(\"/tmp/y_train.npy\")\n", 197 | " \n", 198 | " #fit the model and return the history while training\n", 199 | " history = model.fit(\n", 200 | " x=x_train,\n", 201 | " y=y_train,\n", 202 | " epochs=no_epochs,\n", 203 | " batch_size=20,\n", 204 | " )\n", 205 | " \n", 206 | " minio_client.fget_object(minio_bucket,\"x_test\",\"/tmp/x_test.npy\")\n", 207 | " x_test = np.load(\"/tmp/x_test.npy\")\n", 208 | " \n", 209 | " minio_client.fget_object(minio_bucket,\"y_test\",\"/tmp/y_test.npy\")\n", 210 | " y_test = np.load(\"/tmp/y_test.npy\")\n", 211 | " \n", 212 
| "\n", 213 | " # Test the model against the test dataset\n", 214 | " # Returns the loss value & metrics values for the model in test mode.\n", 215 | " model_loss, model_accuracy = model.evaluate(x=x_test,y=y_test)\n", 216 | " \n", 217 | " # Confusion Matrix\n", 218 | "\n", 219 | " # Generates output predictions for the input samples.\n", 220 | " test_predictions = model.predict(x=x_test)\n", 221 | "\n", 222 | " # Returns the indices of the maximum values along an axis.\n", 223 | " test_predictions = np.argmax(test_predictions,axis=1) # the prediction outputs 10 values, we take the index number of the highest value, which is the prediction of the model\n", 224 | "\n", 225 | " # generate confusion matrix\n", 226 | " confusion_matrix = tf.math.confusion_matrix(labels=y_test,predictions=test_predictions)\n", 227 | " confusion_matrix = confusion_matrix.numpy()\n", 228 | " vocab = list(np.unique(y_test))\n", 229 | " data = []\n", 230 | " for target_index, target_row in enumerate(confusion_matrix):\n", 231 | " for predicted_index, count in enumerate(target_row):\n", 232 | " data.append((vocab[target_index], vocab[predicted_index], count))\n", 233 | "\n", 234 | " df_cm = pd.DataFrame(data, columns=['target', 'predicted', 'count'])\n", 235 | " cm_csv = df_cm.to_csv(header=False, index=False)\n", 236 | " \n", 237 | " metadata = {\n", 238 | " \"outputs\": [\n", 239 | " {\n", 240 | " \"type\": \"confusion_matrix\",\n", 241 | " \"format\": \"csv\",\n", 242 | " \"schema\": [\n", 243 | " {'name': 'target', 'type': 'CATEGORY'},\n", 244 | " {'name': 'predicted', 'type': 'CATEGORY'},\n", 245 | " {'name': 'count', 'type': 'NUMBER'},\n", 246 | " ],\n", 247 | " \"target_col\" : \"actual\",\n", 248 | " \"predicted_col\" : \"predicted\",\n", 249 | " \"source\": cm_csv,\n", 250 | " \"storage\": \"inline\",\n", 251 | " \"labels\": [0,1,2,3,4,5,6,7,8,9]\n", 252 | " },\n", 253 | " {\n", 254 | " 'storage': 'inline',\n", 255 | " 'source': '''# Model Overview\n", 256 | "## Model Summary\n", 257 | "\n", 258 | "```\n", 259 | "{}\n", 260 | "```\n", 261 | "\n", 262 | "## Model Performance\n", 263 | "\n", 264 | "**Accuracy**: {}\n", 265 | "**Loss**: {}\n", 266 | "\n", 267 | "'''.format(metric_model_summary,model_accuracy,model_loss),\n", 268 | " 'type': 'markdown',\n", 269 | " }\n", 270 | " ]\n", 271 | " }\n", 272 | " \n", 273 | " metrics = {\n", 274 | " 'metrics': [{\n", 275 | " 'name': 'model_accuracy',\n", 276 | " 'numberValue': float(model_accuracy),\n", 277 | " 'format' : \"PERCENTAGE\"\n", 278 | " },{\n", 279 | " 'name': 'model_loss',\n", 280 | " 'numberValue': float(model_loss),\n", 281 | " 'format' : \"PERCENTAGE\"\n", 282 | " }]}\n", 283 | " \n", 284 | " ### Save model to minIO\n", 285 | " \n", 286 | " keras.models.save_model(model,\"/tmp/detect-digits\")\n", 287 | " \n", 288 | " from minio import Minio\n", 289 | " import os\n", 290 | "\n", 291 | " minio_client = Minio(\n", 292 | " \"100.65.11.110:9000\",\n", 293 | " access_key=\"minio\",\n", 294 | " secret_key=\"minio123\",\n", 295 | " secure=False\n", 296 | " )\n", 297 | " minio_bucket = \"mlpipeline\"\n", 298 | "\n", 299 | "\n", 300 | " import glob\n", 301 | "\n", 302 | " def upload_local_directory_to_minio(local_path, bucket_name, minio_path):\n", 303 | " assert os.path.isdir(local_path)\n", 304 | "\n", 305 | " for local_file in glob.glob(local_path + '/**'):\n", 306 | " local_file = local_file.replace(os.sep, \"/\") # Replace \\ with / on Windows\n", 307 | " if not os.path.isfile(local_file):\n", 308 | " upload_local_directory_to_minio(\n", 309 | " 
local_file, bucket_name, minio_path + \"/\" + os.path.basename(local_file))\n", 310 | " else:\n", 311 | " remote_path = os.path.join(\n", 312 | " minio_path, local_file[1 + len(local_path):])\n", 313 | " remote_path = remote_path.replace(\n", 314 | " os.sep, \"/\") # Replace \\ with / on Windows\n", 315 | " minio_client.fput_object(bucket_name, remote_path, local_file)\n", 316 | "\n", 317 | " upload_local_directory_to_minio(\"/tmp/detect-digits\",minio_bucket,\"models/detect-digits/1/\") # 1 for version 1\n", 318 | " \n", 319 | " print(\"Saved model to minIO\")\n", 320 | " \n", 321 | " from collections import namedtuple\n", 322 | " output = namedtuple('output', ['mlpipeline_ui_metadata', 'mlpipeline_metrics'])\n", 323 | " return output(json.dumps(metadata),json.dumps(metrics))\n", 324 | "\n", 325 | "def model_serving():\n", 326 | " \"\"\"\n", 327 | " Create kserve instance\n", 328 | " \"\"\"\n", 329 | " from kubernetes import client \n", 330 | " from kserve import KServeClient\n", 331 | " from kserve import constants\n", 332 | " from kserve import utils\n", 333 | " from kserve import V1beta1InferenceService\n", 334 | " from kserve import V1beta1InferenceServiceSpec\n", 335 | " from kserve import V1beta1PredictorSpec\n", 336 | " from kserve import V1beta1TFServingSpec\n", 337 | " from datetime import datetime\n", 338 | "\n", 339 | " namespace = utils.get_default_target_namespace()\n", 340 | "\n", 341 | " now = datetime.now()\n", 342 | " v = now.strftime(\"%Y-%m-%d--%H-%M-%S\")\n", 343 | "\n", 344 | " name='digits-recognizer-{}'.format(v)\n", 345 | " kserve_version='v1beta1'\n", 346 | " api_version = constants.KSERVE_GROUP + '/' + kserve_version\n", 347 | "\n", 348 | " isvc = V1beta1InferenceService(api_version=api_version,\n", 349 | " kind=constants.KSERVE_KIND,\n", 350 | " metadata=client.V1ObjectMeta(\n", 351 | " name=name, namespace=namespace, annotations={'sidecar.istio.io/inject':'false'}),\n", 352 | " spec=V1beta1InferenceServiceSpec(\n", 353 | " predictor=V1beta1PredictorSpec(\n", 354 | " service_account_name=\"sa-minio-kserve\",\n", 355 | " tensorflow=(V1beta1TFServingSpec(\n", 356 | " storage_uri=\"s3://mlpipeline/models/detect-digits/\"))))\n", 357 | " )\n", 358 | "\n", 359 | " KServe = KServeClient()\n", 360 | " KServe.create(isvc)\n", 361 | "\n", 362 | "comp_get_data_batch = components.create_component_from_func(get_data_batch,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.5.0\")\n", 363 | "comp_get_latest_data = components.create_component_from_func(get_latest_data,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.5.0\")\n", 364 | "comp_reshape_data = components.create_component_from_func(reshape_data,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.5.0\")\n", 365 | "comp_model_building = components.create_component_from_func(model_building,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.5.0\")\n", 366 | "comp_model_serving = components.create_component_from_func(model_serving,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.5.0\",\n", 367 | " packages_to_install=['kserve==0.8.0.1'])\n", 368 | "\n", 369 | "\n", 370 | "@dsl.pipeline(\n", 371 | " name='digits-recognizer-pipeline',\n", 372 | " description='Detect digits'\n", 373 | ")\n", 374 | "def output_test(no_epochs,optimizer):\n", 375 | " step1_1 = comp_get_data_batch()\n", 376 | " step1_2 = comp_get_latest_data()\n", 
377 | " \n", 378 | " step2 = comp_reshape_data()\n", 379 | " step2.after(step1_1)\n", 380 | " step2.after(step1_2)\n", 381 | " \n", 382 | " step3 = comp_model_building(no_epochs,optimizer)\n", 383 | " step3.after(step2)\n", 384 | " \n", 385 | " step4 = comp_model_serving()\n", 386 | " step4.after(step3)\n", 387 | "\n", 388 | "\n", 389 | "if __name__ == \"__main__\":\n", 390 | " client = kfp.Client()\n", 391 | "\n", 392 | " arguments = {\n", 393 | " \"no_epochs\" : 1,\n", 394 | " \"optimizer\": \"adam\"\n", 395 | " }\n", 396 | "\n", 397 | " run_directly = 1\n", 398 | " \n", 399 | " if (run_directly == 1):\n", 400 | " client.create_run_from_pipeline_func(output_test,arguments=arguments,experiment_name=\"test\")\n", 401 | " else:\n", 402 | " kfp.compiler.Compiler().compile(pipeline_func=output_test,package_path='output_test.yaml')\n", 403 | " client.upload_pipeline_version(pipeline_package_path='output_test.yaml',pipeline_version_name=\"0.4\",pipeline_name=\"pipeline test\",description=\"just for testing\")" 404 | ] 405 | } 406 | ], 407 | "metadata": { 408 | "kernelspec": { 409 | "display_name": "Python 3", 410 | "language": "python", 411 | "name": "python3" 412 | }, 413 | "language_info": { 414 | "codemirror_mode": { 415 | "name": "ipython", 416 | "version": 3 417 | }, 418 | "file_extension": ".py", 419 | "mimetype": "text/x-python", 420 | "name": "python", 421 | "nbconvert_exporter": "python", 422 | "pygments_lexer": "ipython3", 423 | "version": "3.8.10" 424 | } 425 | }, 426 | "nbformat": 4, 427 | "nbformat_minor": 5 428 | } 429 | -------------------------------------------------------------------------------- /images/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/.DS_Store -------------------------------------------------------------------------------- /images/app-overview.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/app-overview.jpg -------------------------------------------------------------------------------- /images/kf_central_dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kf_central_dashboard.png -------------------------------------------------------------------------------- /images/kf_kfp_config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kf_kfp_config.png -------------------------------------------------------------------------------- /images/kf_notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kf_notebook.png -------------------------------------------------------------------------------- /images/kserve.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kserve.png 
-------------------------------------------------------------------------------- /images/kubeflow_workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kubeflow_workflow.png -------------------------------------------------------------------------------- /images/kubernetes_workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/kubernetes_workflow.png -------------------------------------------------------------------------------- /images/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/pipeline.png -------------------------------------------------------------------------------- /images/test-inference.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/test-inference.png -------------------------------------------------------------------------------- /images/youtube.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/images/youtube.png -------------------------------------------------------------------------------- /kfp_examples/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/kfp_examples/.DS_Store -------------------------------------------------------------------------------- /kfp_examples/kfpv1_examples.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 93, 6 | "id": "aba02d26-9508-4550-986c-a529fd3dae5e", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "data": { 11 | "text/html": [ 12 | "Experiment details." 13 | ], 14 | "text/plain": [ 15 | "" 16 | ] 17 | }, 18 | "metadata": {}, 19 | "output_type": "display_data" 20 | }, 21 | { 22 | "data": { 23 | "text/html": [ 24 | "Run details." 25 | ], 26 | "text/plain": [ 27 | "" 28 | ] 29 | }, 30 | "metadata": {}, 31 | "output_type": "display_data" 32 | } 33 | ], 34 | "source": [ 35 | "import kfp\n", 36 | "import kfp.components as components\n", 37 | "\n", 38 | "# https://www.kubeflow.org/docs/components/pipelines/sdk/pipelines-metrics/\n", 39 | "#https://elyra.readthedocs.io/en/latest/recipes/visualizing-output-in-the-kfp-ui.html\n", 40 | "\n", 41 | "def get_data_batch() -> NamedTuple('Outputs', [('mlpipeline_metrics', 'Metrics')]):\n", 42 | " print(\"getting data\")\n", 43 | " import json\n", 44 | " \n", 45 | " accuracy = 0.9\n", 46 | " metrics = {\n", 47 | " 'metrics': [{\n", 48 | " 'name': 'accuracy-score', # The name of the metric. Visualized as the column name in the runs table.\n", 49 | " 'numberValue': accuracy, # The value of the metric. Must be a numeric value.\n", 50 | " 'format': \"PERCENTAGE\", # The optional format of the metric. 
Supported values are \"RAW\" (displayed in raw format) and \"PERCENTAGE\" (displayed in percentage format).\n", 51 | " }]\n", 52 | " }\n", 53 | " return [json.dumps(metrics)]\n", 54 | "\n", 55 | "\n", 56 | "def get_latest_data() -> NamedTuple('VisualizationOutput', [('mlpipeline_ui_metadata', 'UI_metadata')]):\n", 57 | " print(\"Getting latest data\")\n", 58 | " from sklearn.metrics import confusion_matrix\n", 59 | " import json\n", 60 | " import pandas as pd\n", 61 | " \n", 62 | " matrix = [\n", 63 | " ['yummy', 'yummy', 10],\n", 64 | " ['yummy', 'not yummy', 2],\n", 65 | " ['not yummy', 'yummy', 6],\n", 66 | " ['not yummy', 'not yummy', 7]\n", 67 | " ]\n", 68 | "\n", 69 | " df = pd.DataFrame(matrix,columns=['target','predicted','count'])\n", 70 | "\n", 71 | " metadata = {\n", 72 | " \"outputs\": [\n", 73 | " {\n", 74 | " \"type\": \"confusion_matrix\",\n", 75 | " \"format\": \"csv\",\n", 76 | " \"schema\": [\n", 77 | " {\n", 78 | " \"name\": \"target\",\n", 79 | " \"type\": \"CATEGORY\"\n", 80 | " },\n", 81 | " {\n", 82 | " \"name\": \"predicted\",\n", 83 | " \"type\": \"CATEGORY\"\n", 84 | " },\n", 85 | " {\n", 86 | " \"name\": \"count\",\n", 87 | " \"type\": \"NUMBER\"\n", 88 | " }\n", 89 | " ],\n", 90 | " \"source\": df.to_csv(header=False, index=False),\n", 91 | " \"storage\": \"inline\",\n", 92 | " \"labels\": [\n", 93 | " \"yummy\",\n", 94 | " \"not yummy\"\n", 95 | " ]\n", 96 | " }\n", 97 | " ]\n", 98 | " }\n", 99 | " \n", 100 | " from collections import namedtuple\n", 101 | " visualization_output = namedtuple('VisualizationOutput', ['mlpipeline_ui_metadata'])\n", 102 | " return visualization_output(json.dumps(metadata))\n", 103 | " \n", 104 | "\n", 105 | " \n", 106 | " \n", 107 | "from typing import NamedTuple\n", 108 | "def reshape_data() -> NamedTuple('MyDivmodOutput', [('mlpipeline_ui_metadata', 'UI_metadata'), ('mlpipeline_metrics', 'Metrics')]):\n", 109 | " print(\"reshaping data\")\n", 110 | " \n", 111 | " \n", 112 | " # Exports a sample tensorboard:\n", 113 | " metadata = {\n", 114 | " 'outputs': [\n", 115 | " {\n", 116 | " # Markdown that is hardcoded inline\n", 117 | " 'storage': 'inline',\n", 118 | " 'source': '''# Inline Markdown\n", 119 | "* [Kubeflow official doc](https://www.kubeflow.org/).\n", 120 | "''',\n", 121 | " 'type': 'markdown',\n", 122 | " },\n", 123 | " {\n", 124 | " # Markdown that is read from a file\n", 125 | " 'source': 'https://raw.githubusercontent.com/kubeflow/pipelines/master/README.md',\n", 126 | " # Alternatively, use Google Cloud Storage for sample.\n", 127 | " # 'source': 'gs://jamxl-kfp-bucket/v2-compatible/markdown/markdown_example.md',\n", 128 | " 'type': 'markdown',\n", 129 | " }]\n", 130 | " }\n", 131 | "\n", 132 | " # Exports two sample metrics:\n", 133 | " metrics = {\n", 134 | " 'metrics': [{\n", 135 | " 'name': 'quotient',\n", 136 | " 'numberValue': float(2),\n", 137 | " },{\n", 138 | " 'name': 'remainder',\n", 139 | " 'numberValue': float(3),\n", 140 | " }]}\n", 141 | " \n", 142 | " from collections import namedtuple\n", 143 | " import json\n", 144 | " \n", 145 | " divmod_output = namedtuple('MyDivmodOutput', ['mlpipeline_ui_metadata', 'mlpipeline_metrics'])\n", 146 | " return divmod_output(json.dumps(metadata), json.dumps(metrics))\n", 147 | "\n", 148 | "\n", 149 | "def model_building(no_epochs:int):\n", 150 | " print(\"model building\")\n", 151 | " print(no_epochs)\n", 152 | " print(type(no_epochs))\n", 153 | " \n", 154 | " \n", 155 | "\n", 156 | "comp_get_data_batch = 
components.create_component_from_func(get_data_batch,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\")\n", 157 | "comp_get_latest_data = components.create_component_from_func(get_latest_data,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\")\n", 158 | "comp_reshape_data = components.create_component_from_func(reshape_data,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\")\n", 159 | "comp_model_building = components.create_component_from_func(model_building,base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\")\n", 160 | "\n", 161 | "\n", 162 | "@kfp.dsl.pipeline(\n", 163 | " name='output_test',\n", 164 | " description='test'\n", 165 | ")\n", 166 | "def output_test(no_epochs:int):\n", 167 | " step1_1 = comp_get_data_batch()\n", 168 | " step1_2 = comp_get_latest_data()\n", 169 | " \n", 170 | " step2 = comp_reshape_data()\n", 171 | " step2.after(step1_1)\n", 172 | " step2.after(step1_2)\n", 173 | " \n", 174 | " step3 = comp_model_building(no_epochs)\n", 175 | " step3.after(step2)\n", 176 | "\n", 177 | "\n", 178 | "if __name__ == \"__main__\":\n", 179 | " client = kfp.Client()\n", 180 | "\n", 181 | " arguments = {\n", 182 | " \"no_epochs\" : 3\n", 183 | " }\n", 184 | "\n", 185 | " run_directly = 1\n", 186 | " \n", 187 | " if (run_directly == 1):\n", 188 | " client.create_run_from_pipeline_func(output_test,arguments=arguments,experiment_name=\"test\")\n", 189 | " else:\n", 190 | " kfp.compiler.Compiler().compile(pipeline_func=output_test,package_path='output_test.yaml')\n", 191 | " client.upload_pipeline_version(pipeline_package_path='output_test.yaml',pipeline_version_name=\"0.4\",pipeline_name=\"pipeline test\",description=\"just for testing\")" 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": null, 197 | "id": "f75b4dec-c66e-4689-9384-d18521624e70", 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": null, 205 | "id": "7ce3bae5-730a-428f-afab-57ce4b67ae99", 206 | "metadata": {}, 207 | "outputs": [], 208 | "source": [ 209 | "\"\"\"\n", 210 | "create artifacts, kfpv1\n", 211 | "\n", 212 | "def get_data_batch(metadata_data_batch : kfp.components.OutputPath()):\n", 213 | " print(\"getting data\")\n", 214 | " import json\n", 215 | "\n", 216 | " metadata = {\n", 217 | " 'outputs' : [\n", 218 | " # Markdown that is hardcoded inline\n", 219 | " {\n", 220 | " 'storage': 'inline',\n", 221 | " 'source': '# Inline Markdown\\n[A link](https://www.kubeflow.org/)',\n", 222 | " 'type': 'markdown',\n", 223 | " }]\n", 224 | " }\n", 225 | " \n", 226 | " with open(metadata_data_batch, 'w') as metadata_file:\n", 227 | " json.dump(metadata, metadata_file)\n", 228 | "\"\"\"" 229 | ] 230 | } 231 | ], 232 | "metadata": { 233 | "kernelspec": { 234 | "display_name": "Python 3", 235 | "language": "python", 236 | "name": "python3" 237 | }, 238 | "language_info": { 239 | "codemirror_mode": { 240 | "name": "ipython", 241 | "version": 3 242 | }, 243 | "file_extension": ".py", 244 | "mimetype": "text/x-python", 245 | "name": "python", 246 | "nbconvert_exporter": "python", 247 | "pygments_lexer": "ipython3", 248 | "version": "3.8.10" 249 | } 250 | }, 251 | "nbformat": 4, 252 | "nbformat_minor": 5 253 | } 254 | -------------------------------------------------------------------------------- /kfp_examples/kfpv2_examples.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "id": "12066209-f6f4-43bb-b664-b905079ba06b", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "data": { 11 | "text/html": [ 12 | "Experiment details." 13 | ], 14 | "text/plain": [ 15 | "" 16 | ] 17 | }, 18 | "metadata": {}, 19 | "output_type": "display_data" 20 | }, 21 | { 22 | "data": { 23 | "text/html": [ 24 | "Run details." 25 | ], 26 | "text/plain": [ 27 | "" 28 | ] 29 | }, 30 | "metadata": {}, 31 | "output_type": "display_data" 32 | } 33 | ], 34 | "source": [ 35 | "import kfp\n", 36 | "from kfp.v2 import dsl\n", 37 | "from kfp.v2.dsl import (\n", 38 | " component,\n", 39 | " Input,\n", 40 | " Output,\n", 41 | " Artifact,\n", 42 | " Dataset,\n", 43 | " ClassificationMetrics,\n", 44 | " Metrics,\n", 45 | " HTML,\n", 46 | " Markdown\n", 47 | ")\n", 48 | "\n", 49 | "from typing import NamedTuple\n", 50 | "@component(\n", 51 | " packages_to_install=[],\n", 52 | " base_image=\"python:3.8\"\n", 53 | ")\n", 54 | "def get_data_batch() -> NamedTuple('MyFunctionOutputs', [('output_name_1', str), ('output_name_2', str)]):\n", 55 | " print(\"getting data\")\n", 56 | "\n", 57 | " # Exports a sample tensorboard:\n", 58 | " metadata = {\n", 59 | " 'outputs' : [{\n", 60 | " 'type': 'tensorboard',\n", 61 | " 'source': 'gs://ml-pipeline-dataset/tensorboard-train',\n", 62 | " }]\n", 63 | " }\n", 64 | "\n", 65 | " # Exports two sample metrics:\n", 66 | " metrics = {\n", 67 | " 'metrics': [{\n", 68 | " 'name': 'quotient',\n", 69 | " 'numberValue': float(2),\n", 70 | " },{\n", 71 | " 'name': 'remainder',\n", 72 | " 'numberValue': float(3),\n", 73 | " }]}\n", 74 | "\n", 75 | " from collections import namedtuple\n", 76 | " import json\n", 77 | " divmod_output = namedtuple('MyFunctionOutputs', ['output_name_1', 'output_name_2'])\n", 78 | " return divmod_output(\"test\", \"test2\")\n", 79 | "\n", 80 | "@component(\n", 81 | " packages_to_install=[\"sklearn\"],\n", 82 | " base_image=\"python:3.8\"\n", 83 | ")\n", 84 | "def get_latest_data(metrics: Output[ClassificationMetrics]):\n", 85 | " print(\"Getting latest data\")\n", 86 | " from sklearn.metrics import confusion_matrix\n", 87 | " \n", 88 | " train_y = [1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0]\n", 89 | " predictions = [1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0]\n", 90 | " \n", 91 | " metrics.log_confusion_matrix(\n", 92 | " ['Setosa', 'Versicolour'],\n", 93 | " confusion_matrix(train_y, predictions).tolist() # .tolist() to convert np array to list.\n", 94 | " )\n", 95 | "\n", 96 | "\n", 97 | "@component(\n", 98 | " packages_to_install=[],\n", 99 | " base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\"\n", 100 | ")\n", 101 | "def reshape_data():\n", 102 | " print(\"reshaping data\")\n", 103 | "\n", 104 | "\n", 105 | "@component(\n", 106 | " packages_to_install=[],\n", 107 | " base_image=\"public.ecr.aws/j1r0q0g6/notebooks/notebook-servers/jupyter-tensorflow-full:v1.4\"\n", 108 | ")\n", 109 | "def model_building(no_epochs:int):\n", 110 | " print(\"model building\")\n", 111 | " print(no_epochs)\n", 112 | " print(type(no_epochs))\n", 113 | " \n", 114 | "\n", 115 | "@dsl.pipeline(\n", 116 | " name='output-test',\n", 117 | " description='test outputs'\n", 118 | ")\n", 119 | "def output_test(no_epochs:int):\n", 120 | " step1_1 = get_data_batch()\n", 121 | " step1_2 = get_latest_data()\n", 122 | " \n", 123 | " step2 = reshape_data()\n", 124 | " step2.after(step1_1)\n", 125 | " 
step2.after(step1_2)\n", 126 | " \n", 127 | " step3 = model_building(no_epochs)\n", 128 | " step3.after(step2)\n", 129 | "\n", 130 | "\n", 131 | "if __name__ == \"__main__\":\n", 132 | " client = kfp.Client()\n", 133 | "\n", 134 | " arguments = {\n", 135 | " \"no_epochs\" : 3\n", 136 | " }\n", 137 | "\n", 138 | " run_directly = 1\n", 139 | " \n", 140 | " if (run_directly == 1):\n", 141 | " client.create_run_from_pipeline_func(output_test,arguments=arguments,experiment_name=\"test\",mode=kfp.dsl.PipelineExecutionMode.V2_COMPATIBLE)\n", 142 | " else:\n", 143 | " kfp.compiler.Compiler(mode=kfp.dsl.PipelineExecutionMode.V2_COMPATIBLE).compile(pipeline_func=output_test,package_path='output_test.yaml')\n", 144 | " client.upload_pipeline_version(pipeline_package_path='output_test.yaml',pipeline_version_name=\"0.3\",pipeline_name=\"pipeline test\",description=\"just for testing\")" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": null, 150 | "id": "433fe12c-6fa4-49e8-90b9-78acf05ba9b6", 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [] 154 | } 155 | ], 156 | "metadata": { 157 | "kernelspec": { 158 | "display_name": "Python 3", 159 | "language": "python", 160 | "name": "python3" 161 | }, 162 | "language_info": { 163 | "codemirror_mode": { 164 | "name": "ipython", 165 | "version": 3 166 | }, 167 | "file_extension": ".py", 168 | "mimetype": "text/x-python", 169 | "name": "python", 170 | "nbconvert_exporter": "python", 171 | "pygments_lexer": "ipython3", 172 | "version": "3.8.10" 173 | } 174 | }, 175 | "nbformat": 4, 176 | "nbformat_minor": 5 177 | } 178 | -------------------------------------------------------------------------------- /kserve_python_test.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 104, 6 | "id": "31935992-2313-491b-a014-f9ff439a609b", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | "Actual Number: 5\n", 14 | "Predicted: 5\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "\"\"\"\n", 20 | "Test Kserve model inference service for MNIST digits\n", 21 | "Input: None,28,28,1\n", 22 | "Output: Number Prediction\n", 23 | "\"\"\"\n", 24 | "\n", 25 | "from kubernetes import client \n", 26 | "from kserve import KServeClient\n", 27 | "from kserve import constants\n", 28 | "from kserve import utils\n", 29 | "from kserve import V1beta1InferenceService\n", 30 | "from kserve import V1beta1InferenceServiceSpec\n", 31 | "from kserve import V1beta1PredictorSpec\n", 32 | "from kserve import V1beta1SKLearnSpec\n", 33 | "import numpy as np\n", 34 | "import pandas as pd\n", 35 | "\n", 36 | "print(\"Actual Number: 5\")\n", 37 | "x_number_five = np.array([[[[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 38 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 39 | " 0, 0],\n", 40 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 41 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 42 | " 0, 0],\n", 43 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 44 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 45 | " 0, 0],\n", 46 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 47 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 48 | " 0, 0],\n", 49 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 50 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 51 | " 0, 0],\n", 52 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,\n", 53 | " 18, 18, 18, 126, 136, 175, 26, 166, 255, 247, 127, 0, 0,\n", 54 | " 0, 0],\n", 55 | " [ 0, 
0, 0, 0, 0, 0, 0, 0, 30, 36, 94, 154, 170,\n", 56 | " 253, 253, 253, 253, 253, 225, 172, 253, 242, 195, 64, 0, 0,\n", 57 | " 0, 0],\n", 58 | " [ 0, 0, 0, 0, 0, 0, 0, 49, 238, 253, 253, 253, 253,\n", 59 | " 253, 253, 253, 253, 251, 93, 82, 82, 56, 39, 0, 0, 0,\n", 60 | " 0, 0],\n", 61 | " [ 0, 0, 0, 0, 0, 0, 0, 18, 219, 253, 253, 253, 253,\n", 62 | " 253, 198, 182, 247, 241, 0, 0, 0, 0, 0, 0, 0, 0,\n", 63 | " 0, 0],\n", 64 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 80, 156, 107, 253, 253,\n", 65 | " 205, 11, 0, 43, 154, 0, 0, 0, 0, 0, 0, 0, 0,\n", 66 | " 0, 0],\n", 67 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 1, 154, 253,\n", 68 | " 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 69 | " 0, 0],\n", 70 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 253,\n", 71 | " 190, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 72 | " 0, 0],\n", 73 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 190,\n", 74 | " 253, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 75 | " 0, 0],\n", 76 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35,\n", 77 | " 241, 225, 160, 108, 1, 0, 0, 0, 0, 0, 0, 0, 0,\n", 78 | " 0, 0],\n", 79 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 80 | " 81, 240, 253, 253, 119, 25, 0, 0, 0, 0, 0, 0, 0,\n", 81 | " 0, 0],\n", 82 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 83 | " 0, 45, 186, 253, 253, 150, 27, 0, 0, 0, 0, 0, 0,\n", 84 | " 0, 0],\n", 85 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 86 | " 0, 0, 16, 93, 252, 253, 187, 0, 0, 0, 0, 0, 0,\n", 87 | " 0, 0],\n", 88 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 89 | " 0, 0, 0, 0, 249, 253, 249, 64, 0, 0, 0, 0, 0,\n", 90 | " 0, 0],\n", 91 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 92 | " 0, 46, 130, 183, 253, 253, 207, 2, 0, 0, 0, 0, 0,\n", 93 | " 0, 0],\n", 94 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39,\n", 95 | " 148, 229, 253, 253, 253, 250, 182, 0, 0, 0, 0, 0, 0,\n", 96 | " 0, 0],\n", 97 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 114, 221,\n", 98 | " 253, 253, 253, 253, 201, 78, 0, 0, 0, 0, 0, 0, 0,\n", 99 | " 0, 0],\n", 100 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 23, 66, 213, 253, 253,\n", 101 | " 253, 253, 198, 81, 2, 0, 0, 0, 0, 0, 0, 0, 0,\n", 102 | " 0, 0],\n", 103 | " [ 0, 0, 0, 0, 0, 0, 18, 171, 219, 253, 253, 253, 253,\n", 104 | " 195, 80, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 105 | " 0, 0],\n", 106 | " [ 0, 0, 0, 0, 55, 172, 226, 253, 253, 253, 253, 244, 133,\n", 107 | " 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 108 | " 0, 0],\n", 109 | " [ 0, 0, 0, 0, 136, 253, 253, 253, 212, 135, 132, 16, 0,\n", 110 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 111 | " 0, 0],\n", 112 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 113 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 114 | " 0, 0],\n", 115 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 116 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 117 | " 0, 0],\n", 118 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 119 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 120 | " 0, 0]]]])\n", 121 | "\n", 122 | "KServe = KServeClient()\n", 123 | "\n", 124 | "isvc_resp = KServe.get(\"digits-recognizer\", namespace=\"kubeflow-user-example-com\")\n", 125 | "isvc_url = isvc_resp['status']['address']['url']\n", 126 | "\n", 127 | "t = np.array(x_number_five)\n", 128 | "t = t.reshape(-1,28,28,1)\n", 129 | "\n", 130 | "inference_input = {\n", 131 | " 'instances': t.tolist()\n", 132 | "}\n", 133 | "\n", 134 | "response = requests.post(isvc_url, json=inference_input)\n", 135 | "r = json.loads(response.text)\n", 136 | "print(\"Predicted: {}\".format(np.argmax(r[\"predictions\"])))" 137 | ] 138 | } 139 | ], 140 | "metadata": { 141 | 
"kernelspec": { 142 | "display_name": "Python 3", 143 | "language": "python", 144 | "name": "python3" 145 | }, 146 | "language_info": { 147 | "codemirror_mode": { 148 | "name": "ipython", 149 | "version": 3 150 | }, 151 | "file_extension": ".py", 152 | "mimetype": "text/x-python", 153 | "name": "python", 154 | "nbconvert_exporter": "python", 155 | "pygments_lexer": "ipython3", 156 | "version": "3.8.10" 157 | } 158 | }, 159 | "nbformat": 4, 160 | "nbformat_minor": 5 161 | } 162 | -------------------------------------------------------------------------------- /kubeflow_configs/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/kubeflow_configs/.DS_Store -------------------------------------------------------------------------------- /kubeflow_configs/access_kfp_from_jupyter_notebook.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: kubeflow.org/v1alpha1 2 | kind: PodDefault 3 | metadata: 4 | name: access-ml-pipeline 5 | namespace: <<>> 6 | spec: 7 | desc: Allow access to Kubeflow Pipelines 8 | selector: 9 | matchLabels: 10 | access-ml-pipeline: "true" 11 | volumes: 12 | - name: volume-kf-pipeline-token 13 | projected: 14 | sources: 15 | - serviceAccountToken: 16 | path: token 17 | expirationSeconds: 7200 18 | audience: pipelines.kubeflow.org 19 | volumeMounts: 20 | - mountPath: /var/run/secrets/kubeflow/pipelines 21 | name: volume-kf-pipeline-token 22 | readOnly: true 23 | env: 24 | - name: KF_PIPELINES_SA_TOKEN_PATH 25 | value: /var/run/secrets/kubeflow/pipelines/token -------------------------------------------------------------------------------- /kubeflow_configs/create_kserve_inference.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: serving.kserve.io/v1beta1 2 | kind: "InferenceService" 3 | metadata: 4 | name: digits-recognizer 5 | namespace: kubeflow-user-example-com 6 | annotations: 7 | sidecar.istio.io/inject: "false" 8 | spec: 9 | predictor: 10 | serviceAccountName: sa-minio-kserve 11 | tensorflow: 12 | storageUri: s3://mlpipeline/models/detect-digits/ 13 | -------------------------------------------------------------------------------- /kubeflow_configs/proxy-fix-notebooks.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.istio.io/v1beta1 2 | kind: ServiceEntry 3 | metadata: 4 | name: proxy 5 | spec: 6 | hosts: 7 | - my-company-proxy.com # ignored 8 | addresses: 9 | - 64.103.36.135 10 | ports: 11 | - number: 80 12 | name: tcp 13 | protocol: TCP 14 | location: MESH_EXTERNAL 15 | -------------------------------------------------------------------------------- /kubeflow_configs/set-minio-kserve-secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: minio-kserve-secret 5 | namespace: kubeflow-user-example-com 6 | annotations: 7 | serving.kserve.io/s3-endpoint: "minio-service.kubeflow:9000" 8 | serving.kserve.io/s3-usehttps: "0" 9 | serving.kserve.io/s3-useanoncredential: "false" 10 | type: Opaque 11 | stringData: 12 | AWS_ACCESS_KEY_ID: "minio" 13 | AWS_SECRET_ACCESS_KEY: "minio123" 14 | --- 15 | apiVersion: v1 16 | kind: ServiceAccount 17 | metadata: 18 | name: sa-minio-kserve 19 | namespace: kubeflow-user-example-com 20 | secrets: 21 | - name: minio-kserve-secret 
-------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # MLOps Workflow: Recognizing Digits with Kubeflow 2 | 3 | The [MNIST database of handwritten digits](http://yann.lecun.com/exdb/mnist/) is the Hello-World of deep learning and therefore the best example to focus not on the ML model itself, but on creating the ML pipeline. The goal here is to create an automated ML pipeline for getting the data, data pre-processing, and creating and serving the ML model. You can see an overview of the digits recognizer application below. 4 | 5 | ![](images/app-overview.jpg) 6 | 7 | **You need to follow these steps**: 8 | 9 | 1. Deploy a Kubernetes Cluster and install Kubeflow 10 | 2. Access the Kubeflow Central Dashboard 11 | 3. Setup Jupyter Notebooks 12 | 4. Setup MinIO for Object Storage 13 | 5. Setting up Kserve 14 | 6. Create a ML pipeline with Kubeflow Pipelines 15 | 7. Test the model inference 16 | 17 | **Used Components**: 18 | 19 | * Kubeflow 1.5.1 - Notebook, Pipelines, Kserve 20 | * MinIO 21 | * Kubernetes 1.21 22 | * Hardware: Cisco UCS Server 23 | 24 | [![youtube](images/youtube.png)](https://youtu.be/6wWdNg0GMV4) 25 | *Check out the [Walk-through Video](https://youtu.be/6wWdNg0GMV4)!* 26 | 27 | 28 | ## 1. Deploy a Kubernetes Cluster and install Kubeflow 29 | 30 | Install Kubeflow on your Kubernetes cluster. You can find more information in the [Kubeflow docs](https://www.kubeflow.org/docs/started/installing-kubeflow/). 31 | 32 | You can check with kubectl if all pods are coming up successfully: 33 | 34 | ``` 35 | flpachin@FLPACHIN-M-MY32 ~ % kubectl get pods --all-namespaces 36 | NAMESPACE NAME READY STATUS RESTARTS AGE 37 | auth dex-5ddf47d88d-cksfj 1/1 Running 1 3h7m 38 | cert-manager cert-manager-7b8c77d4bd-m4zht 1/1 Running 0 3h7m 39 | cert-manager cert-manager-cainjector-7c744f57b5-nzfb4 1/1 Running 0 3h7m 40 | cert-manager cert-manager-webhook-fcd445bc4-7fkj4 1/1 Running 0 3h7m 41 | istio-system authservice-0 1/1 Running 0 3h7m 42 | istio-system cluster-local-gateway-64f58f66cb-ncnkd 1/1 Running 0 3h7m 43 | istio-system istio-ingressgateway-8577c57fb6-c8t9p 1/1 Running 0 3h7m 44 | istio-system istiod-6c86784695-bvgqs 1/1 Running 0 3h7m 45 | knative-eventing eventing-controller-79895f9c56-2zpmv 1/1 Running 0 3h7m 46 | knative-eventing eventing-webhook-78f897666-n5m5q 1/1 Running 0 3h7m 47 | knative-eventing imc-controller-688df5bdb4-66gvz 1/1 Running 0 3h7m 48 | knative-eventing imc-dispatcher-646978d797-2z2b2 1/1 Running 0 3h7m 49 | knative-eventing mt-broker-controller-67c977497-mgtmc 1/1 Running 0 3h7m 50 | knative-eventing mt-broker-filter-66d4d77c8b-gjrhc 1/1 Running 0 3h7m 51 | knative-eventing mt-broker-ingress-5c8dc4b5d7-tgh6l 1/1 Running 0 3h7m 52 | knative-serving activator-7476cc56d4-lwtqq 2/2 Running 2 3h7m 53 | knative-serving autoscaler-5c648f7465-wzmzl 2/2 Running 0 3h7m 54 | knative-serving controller-57c545cbfb-nnjcm 2/2 Running 0 3h6m 55 | knative-serving istio-webhook-578b6b7654-s445x 2/2 Running 0 3h7m 56 | knative-serving networking-istio-6b88f745c-887mz 2/2 Running 0 3h7m 57 | knative-serving webhook-6fffdc4d78-ml2mn 2/2 Running 0 3h7m 58 | kserve kserve-controller-manager-0 2/2 Running 0 3h7m 59 | kubeflow-user-example-com ml-pipeline-ui-artifact-d57bd98d7-s84t4 2/2 Running 0 174m 60 | kubeflow-user-example-com ml-pipeline-visualizationserver-65f5bfb4bf-bmtg8 2/2 Running 0 174m 61 | kubeflow admission-webhook-deployment-7df7558c67-d7mfm 1/1 
Running 0 3h7m 62 | kubeflow cache-deployer-deployment-6f4bcc969-zh9vx 2/2 Running 1 3h7m 63 | kubeflow cache-server-575d97c95-jc4nw 2/2 Running 0 3h7m 64 | kubeflow centraldashboard-79f489b55-cr7hn 2/2 Running 0 3h7m 65 | kubeflow jupyter-web-app-deployment-5886974887-m96wv 1/1 Running 0 3h7m 66 | kubeflow katib-controller-58ddb4b856-9zjtj 1/1 Running 0 3h7m 67 | kubeflow katib-db-manager-d77c6757f-jt9b6 1/1 Running 4 3h7m 68 | kubeflow katib-mysql-7894994f88-zzwrz 1/1 Running 0 3h7m 69 | kubeflow katib-ui-f787b9d88-cwg9l 1/1 Running 0 3h7m 70 | kubeflow kfserving-controller-manager-0 2/2 Running 0 3h6m 71 | kubeflow kfserving-models-web-app-7884f597cf-8vg4b 2/2 Running 0 3h7m 72 | kubeflow kserve-models-web-app-5c64c8d8bb-sqtzs 2/2 Running 0 3h7m 73 | kubeflow kubeflow-pipelines-profile-controller-84bcbdb899-rddgd 1/1 Running 0 3h7m 74 | kubeflow metacontroller-0 1/1 Running 0 3h6m 75 | kubeflow metadata-envoy-deployment-7b847ff6c5-cqvkw 1/1 Running 0 3h7m 76 | kubeflow metadata-grpc-deployment-f8d68f687-kqlgq 2/2 Running 4 3h7m 77 | kubeflow metadata-writer-78fc7d5bb8-t5xp7 2/2 Running 0 3h7m 78 | kubeflow minio-5b65df66c9-sx8kb 2/2 Running 0 3h7m 79 | kubeflow ml-pipeline-7bb5966955-87jbv 2/2 Running 6 3h7m 80 | kubeflow ml-pipeline-persistenceagent-87b6888c4-n9tgd 2/2 Running 0 3h7m 81 | kubeflow ml-pipeline-scheduledworkflow-665847bb9-2mpz6 2/2 Running 0 3h7m 82 | kubeflow ml-pipeline-ui-554ffbd6cd-8sswm 2/2 Running 0 3h7m 83 | kubeflow ml-pipeline-viewer-crd-68777557fb-k65lr 2/2 Running 1 3h7m 84 | kubeflow ml-pipeline-visualizationserver-66c54744c-cp2np 2/2 Running 0 3h7m 85 | kubeflow mysql-f7b9b7dd4-56gjd 2/2 Running 0 3h7m 86 | kubeflow notebook-controller-deployment-7474fbff66-26fzm 2/2 Running 1 3h7m 87 | kubeflow profiles-deployment-5cc86bc965-vjfv6 3/3 Running 1 3h7m 88 | kubeflow tensorboard-controller-controller-manager-5cbddb7fb5-cglzr 3/3 Running 1 3h7m 89 | kubeflow tensorboards-web-app-deployment-7c5db448d7-84pjw 1/1 Running 0 3h7m 90 | kubeflow training-operator-6bfc7b8d86-l59l8 1/1 Running 0 3h7m 91 | kubeflow volumes-web-app-deployment-87484c848-rl4rl 1/1 Running 0 3h7m 92 | kubeflow workflow-controller-5cb67bb9db-7bfqc 2/2 Running 2 3h7m 93 | ``` 94 | 95 | ## 2. Access the Kubeflow Central Dashboard 96 | 97 | Once you have everything deployed, you can do a port-forward with the following command: 98 | 99 | ``` 100 | kubectl port-forward svc/istio-ingressgateway -n istio-system 8080:80 101 | ``` 102 | 103 | and access the Kubeflow Central Dashboard remotely at [http://localhost:8080](http://localhost:8080). 104 | 105 | ![](images/kf_central_dashboard.png) 106 | 107 | ## 3. Setup Jupyter Notebooks 108 | 109 | ### Allow access to Kubeflow Pipelines from Jupyter Notebooks 110 | 111 | In this demo you will access the Kubeflow Pipeline via the Python SDK in a Jupyter notebook. Therefore, one additional setting is required to allow this. 112 | 113 | At first insert your Kubeflow username in this Kubernetes manifest (your Kubeflow username is also the name of a Kubernetes namespace where all your user-specific containers will be spun up): [kubeflow_config/access_kfp_from_jupyter_notebook.yaml](kubeflow_config/access_kfp_from_jupyter_notebook.yaml). You can the extract namespace name under the **Manage Contributers** menu. 
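For example, if your username maps to the namespace `kubeflow-user-example-com` (the profile name used throughout this demo — substitute your own), the metadata block of that manifest would look like this:

```
apiVersion: kubeflow.org/v1alpha1
kind: PodDefault
metadata:
  name: access-ml-pipeline
  namespace: kubeflow-user-example-com   # <- replace with your own namespace
```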
114 | 115 | Once done, apply it with this command: 116 | 117 | ``` 118 | kubectl apply -f access_kfp_from_jupyter_notebook.yaml 119 | ``` 120 | 121 | ### Spinning up a new Notebook Instance 122 | 123 | Now you need to spin up a new Jupyter notebook instance. For the container image select **jupyter-tensorflow-full:v1.5.0**. This can take several minutes depending on your download speed. 124 | 125 | ![](images/kf_notebook.png) 126 | 127 | Don't forget to enable this configuration: 128 | 129 | ![](images/kf_kfp_config.png) 130 | 131 | ### Access Jupyter Notebooks & Cloning the Code from GitHub 132 | 133 | Go to **Notebooks** and click on **CONNECT** to start the Jupyter Notebook container. 134 | 135 | With JupyterLab you have access to a terminal and Python notebooks in your web browser. This is where you and your data science team can collaborate on exploring the dataset and creating your Kubeflow pipeline. 136 | 137 | First, clone this repository so you have access to the code. You can use the terminal or do it directly in the browser. 138 | 139 | ``` 140 | git clone https://github.com/flopach/digits-recognizer-kubeflow-intersight 141 | ``` 142 | 143 | Then open `digits_recognizer_notebook.ipynb` to get a feeling for the [dataset](http://yann.lecun.com/exdb/mnist/) and its format. 144 | 145 | ### Update Python Packages 146 | 147 | Once started, double-check that recent versions of the Kubeflow Python packages are installed within the Jupyter notebook container: 148 | 149 | `pip list` should show at least these versions: 150 | 151 | ``` 152 | kfp 1.8.12 153 | kfp-pipeline-spec 0.1.13 154 | kfp-server-api 1.8.2 155 | kserve 0.8.0 156 | ``` 157 | 158 | ### Behind a Proxy Fix (optional) 159 | 160 | If you are behind a proxy, apply the [kubeflow_configs/proxy-fix-notebooks.yaml](kubeflow_configs/proxy-fix-notebooks.yaml) fix to your Kubernetes cluster. 161 | 162 | ## 4. Setup MinIO for Object Storage 163 | 164 | To provide a single source of truth where all your working data (training and testing data, saved ML models, etc.) is available to all components, object storage is the recommended approach. For our app, we will set up [MinIO](https://min.io). 165 | 166 | Since Kubeflow has already set up a MinIO tenant, we will leverage the **mlpipeline bucket**. But you can also deploy your own MinIO tenant. 167 | 168 | ### Get credentials from Kubeflow's integrated MinIO 169 | 170 | 1. Obtain the accesskey and secretkey for MinIO with these commands: 171 | 172 | ``` 173 | kubectl get secret mlpipeline-minio-artifact -n kubeflow -o jsonpath="{.data.accesskey}" | base64 --decode 174 | ``` 175 | 176 | ``` 177 | kubectl get secret mlpipeline-minio-artifact -n kubeflow -o jsonpath="{.data.secretkey}" | base64 --decode 178 | ``` 179 | 180 | 2. To get access to MinIO from outside your Kubernetes cluster and check the bucket, do a port-forward: 181 | 182 | ``` 183 | kubectl port-forward -n kubeflow svc/minio-service 9000:9000 184 | ``` 185 | 186 | 3. Then you can access the MinIO dashboard at [http://localhost:9000](http://localhost:9000) and check the bucket name or create your own bucket. Alternatively, you can use the [MinIO CLI Client](https://docs.min.io/docs/minio-client-quickstart-guide.html). 187 | 188 | **Default** values should be (already in the code, no action needed on your end): 189 | 190 | * accesskey: **minio** 191 | * secretkey: **minio123** 192 | * bucket: **mlpipeline** 193 | 194 | ## 5.
Setting up Kserve 195 | 196 | In this step we set up Kserve for model inference serving. The Kserve inference container will be created when we execute our ML pipeline, which happens in the next step. 197 | 198 | ### Set MinIO secret for Kserve 199 | 200 | We need to apply this YAML file so that Kserve can access the trained model saved in MinIO. Kserve will copy the saved model into the newly created inference container. 201 | 202 | ``` 203 | kubectl apply -f kubeflow_configs/set-minio-kserve-secret.yaml 204 | ``` 205 | 206 | ### Troubleshooting: Can't fetch Docker image 207 | 208 | If Kserve can't fetch the Docker image at container startup, you need to edit the configuration: 209 | 210 | `kubectl -n knative-serving edit configmap config-deployment` 211 | 212 | Add the key-value pair *registriesSkippingTagResolving* directly below `data` and apply: 213 | 214 | ``` 215 | apiVersion: v1 216 | data: 217 | registriesSkippingTagResolving: "index.docker.io" 218 | _example: | 219 | ################################ 220 | # # 221 | # EXAMPLE CONFIGURATION # 222 | # # 223 | ################################ 224 | ... 225 | ``` 226 | 227 | Find more troubleshooting information: [https://kserve.github.io/website/developer/debug/](https://kserve.github.io/website/developer/debug/) 228 | 229 | ## 6. Create an ML pipeline with Kubeflow Pipelines 230 | 231 | Kubeflow Pipelines (KFP) is the most widely used component of Kubeflow. It lets you wrap every step or function of your ML project in a reusable, containerized pipeline component and chain these components together into an ML pipeline. 232 | 233 | For the digits recognizer application, the pipeline is already created with the Python SDK. You can find the code in the file `digits_recognizer_pipeline.ipynb`. 234 | 235 | ![](images/pipeline.png) 236 | 237 | Here is a more detailed example of Kubeflow Pipelines: 238 | [https://github.com/StatCan/aaw-contrib-jupyter-notebooks/blob/master/kfp-basics/demo_kfp_lightweight_components.ipynb](https://github.com/StatCan/aaw-contrib-jupyter-notebooks/blob/master/kfp-basics/demo_kfp_lightweight_components.ipynb) 239 | 240 | ![](images/kserve.png) 241 | 242 | ## 7. Test the model inference 243 | 244 | Now you can test the model inference. The simplest way is to use a Python script directly in the Jupyter notebook: 245 | 246 | ![](images/test-inference.png) 247 | 248 | Alternatively, you can use the web application in the `web_app` folder. Be aware that some additional configuration is needed if you want to access the inference service from outside the cluster, as sketched below.
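A minimal sketch of such an external request, assuming the `istio-ingressgateway` port-forward from step 2 is still running and the inference service is named `digits-recognizer` (a service created by the pipeline may instead carry a timestamp suffix): pass the service's virtual host explicitly in the `Host` header, which is the same approach the web app in `web_app/js_code.js` uses.

```
import json
import numpy as np
import requests

MODEL_NAME = "digits-recognizer"   # adjust if your InferenceService name has a timestamp suffix
SERVICE_HOST = f"{MODEL_NAME}.kubeflow-user-example-com.example.com"
URL = f"http://localhost:8080/v1/models/{MODEL_NAME}:predict"

# Dummy 28x28 grayscale image in the shape the model expects: (-1, 28, 28, 1)
image = np.zeros((1, 28, 28, 1), dtype=np.float32)

response = requests.post(
    URL,
    headers={"Host": SERVICE_HOST},        # lets the Istio ingress gateway route to the right service
    json={"instances": image.tolist()},
)
predictions = json.loads(response.text)["predictions"]
print("Predicted digit:", np.argmax(predictions[0]))
```

The `Host` header carries the Knative hostname of the inference service, so the request can enter through the generic ingress gateway port-forward instead of the in-cluster service URL used in `kserve_python_test.ipynb`.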
249 | 250 | ## Versioning 251 | 252 | **1.0** - Sample ML workflow with Kubeflow 1.5.1 253 | 254 | ## Authors 255 | 256 | * **Flo Pachinger** - [flopach](https://github.com/flopach) 257 | 258 | ## License 259 | 260 | This project is licensed under the Apache License 2.0 - see the [LICENSE.md](LICENSE.md) file for details 261 | 262 | ## Further Links 263 | 264 | * [Cisco DevNet Website](https://developer.cisco.com) 265 | 266 | 267 | 268 | 269 | -------------------------------------------------------------------------------- /tf_saved_model/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/tf_saved_model/.DS_Store -------------------------------------------------------------------------------- /tf_saved_model/detect-digits/keras_metadata.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/tf_saved_model/detect-digits/keras_metadata.pb -------------------------------------------------------------------------------- /tf_saved_model/detect-digits/saved_model.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/tf_saved_model/detect-digits/saved_model.pb -------------------------------------------------------------------------------- /tf_saved_model/detect-digits/variables/variables.data-00000-of-00001: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/tf_saved_model/detect-digits/variables/variables.data-00000-of-00001 -------------------------------------------------------------------------------- /tf_saved_model/detect-digits/variables/variables.index: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flopach/digits-recognizer-kubeflow/90c62557ceb8a808b855df8aeb88d26ccbbff97a/tf_saved_model/detect-digits/variables/variables.index -------------------------------------------------------------------------------- /web_app/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Digits recognizer 5 | 6 | 7 |
8-21 | [index.html markup lost in this export — recoverable content: page heading "Digits recognizer", a drawing canvas with the fallback text "Your browser does not support the HTML5 canvas tag.", and the controls wired up in web_app/js_code.js below (canvas #number-drawing, #clear and #detect buttons, #results output).]
22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /web_app/js_code.js: -------------------------------------------------------------------------------- 1 | const canvas = document.getElementById('number-drawing'); 2 | const ctx = canvas.getContext('2d'); 3 | 4 | let isPainting = false; 5 | let lineWidth = 15; 6 | let startX; 7 | let startY; 8 | 9 | /* offset for the canvas 10 | height: 60px; 11 | margin-left:10px; 12 | */ 13 | let offsetY = 60; 14 | let offsetX = 10; 15 | 16 | addEventListener('click', e => { 17 | if (e.target.id === 'clear') { 18 | ctx.clearRect(0, 0, canvas.width, canvas.height); 19 | } 20 | 21 | if (e.target.id === 'detect') { 22 | console.log("clicked"); 23 | 24 | // Creating new canvas, scale to 28x28px image 25 | const scaled_canvas = document.createElement("canvas"); 26 | scaled_canvas.width = 28; 27 | scaled_canvas.height = 28; 28 | 29 | const img_tensor = tf.browser.fromPixels(scaled_canvas,2); 30 | const img_reshaped = img_tensor.reshape([-1,28, 28,1]); 31 | 32 | 33 | img_data = { 34 | "instances" : img_reshaped.array() 35 | } 36 | 37 | console.log(img_data) 38 | 39 | fetch('http://localhost:8080/v1/models/digits-recognizer:predict', { 40 | method: 'POST', 41 | headers: { 42 | 'Host': 'digits-recognizer.kubeflow-user-example-com.example.com' 43 | }, 44 | body: JSON.stringify(img_data) 45 | }) 46 | .then(response => response) 47 | .then(data => { 48 | console.log('Success:', data); 49 | }) 50 | .catch((error) => { 51 | console.error('Error:', error); 52 | }); 53 | 54 | document.getElementById("results").innerHTML = "Example Output"; //Output still needs to be formatted! 55 | } 56 | }); 57 | 58 | const draw = (e) => { 59 | if(!isPainting) { 60 | return; 61 | } 62 | 63 | ctx.lineWidth = lineWidth; 64 | ctx.lineCap = 'round'; 65 | 66 | ctx.lineTo(e.clientX - offsetX, e.clientY - offsetY); 67 | ctx.stroke(); 68 | } 69 | 70 | canvas.addEventListener('mousedown', (e) => { 71 | isPainting = true; 72 | startX = e.clientX; 73 | startY = e.clientY; 74 | }); 75 | 76 | canvas.addEventListener('mouseup', e => { 77 | isPainting = false; 78 | ctx.stroke(); 79 | ctx.beginPath(); 80 | }); 81 | 82 | canvas.addEventListener('mousemove', draw); 83 | -------------------------------------------------------------------------------- /web_app/stylesheet.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | padding: 0; 4 | height: 100%; 5 | overflow: hidden; 6 | color: #000000; 7 | } 8 | 9 | .container { 10 | display: flex; 11 | flex-direction: column; 12 | height: 100%; 13 | margin-left:10px; 14 | } 15 | 16 | .header{ 17 | height: 60px; 18 | } 19 | 20 | h1 { 21 | font-size:28px; 22 | font-weight:bold; 23 | font-family: Helvetica, sans-serif; 24 | vertical-align: middle; 25 | } 26 | 27 | .actions { 28 | padding:15px; 29 | } 30 | 31 | .outcome { 32 | padding:15px; 33 | } --------------------------------------------------------------------------------