├── .gitignore
├── Keras_Tensorflow
│   ├── 00_DevelopModel.ipynb
│   ├── 01_DevelopModelDriver.ipynb
│   ├── 02_BuildImage.ipynb
│   ├── 03_TestLocally.ipynb
│   ├── 04_DeployOnAKS.ipynb
│   ├── 05_TestWebApp.ipynb
│   ├── 06_SpeedTestWebApp.ipynb
│   ├── 07_TearDown.ipynb
│   ├── README.md
│   ├── environment.yml
│   ├── resnet152.py
│   └── testing_utilities.py
├── LICENSE
├── Pytorch
│   ├── 00_DevelopModel.ipynb
│   ├── 01_DevelopModelDriver.ipynb
│   ├── 02_BuildImage.ipynb
│   ├── 03_TestLocally.ipynb
│   ├── 04_DeployOnAKS.ipynb
│   ├── 05_TestWebApp.ipynb
│   ├── 06_SpeedTestWebApp.ipynb
│   ├── 07_TearDown.ipynb
│   ├── README.md
│   ├── environment.yml
│   └── testing_utilities.py
├── README.md
├── Tensorflow
│   ├── 00_DevelopModel.ipynb
│   ├── 01_DevelopModelDriver.ipynb
│   ├── 02_BuildImage.ipynb
│   ├── 03_TestLocally.ipynb
│   ├── 04_DeployOnAKS.ipynb
│   ├── 05_TestWebApp.ipynb
│   ├── 06_SpeedTestWebApp.ipynb
│   ├── 07_TearDown.ipynb
│   ├── README.md
│   ├── environment.yml
│   └── testing_utilities.py
└── static
    ├── Design.png
    └── example.png
/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | 103 | # Project files to ignore 104 | *.jpg 105 | synset.txt 106 | *.ckpt 107 | *.tar.gz 108 | flaskwebapp 109 | driver.py 110 | -------------------------------------------------------------------------------- /Keras_Tensorflow/01_DevelopModelDriver.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Develop Model Driver" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this notebook, we will develop the API that will call our model. This module initializes the model, transforms the input so that it is in the appropriate format, and defines the scoring method that will produce the predictions. The API will expect the input to be in JSON format.
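For reference, the payload the API will receive is double-encoded: the base64 image is wrapped in a JSON dictionary, and that JSON string is itself the value of an `input` key (this mirrors `img_url_to_json` in `testing_utilities.py`). A minimal sketch of how such a payload can be built (the local file name is only an illustration):

```python
import base64
import json

# "lynx.jpg" is a hypothetical local image used only for illustration.
with open("lynx.jpg", "rb") as f:
    img_b64 = base64.b64encode(f.read()).decode("utf-8")

body = json.dumps({"image": img_b64})   # inner JSON: label -> base64-encoded image
payload = json.dumps({"input": body})   # outer JSON: the request body the API expects
```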
Once a request is received, the API will convert the JSON-encoded request body into the image format. There are two main functions in the API. The first function loads the model and returns a scoring function. The second function processes the images and uses the first function to score them." 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 1, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import logging\n", 24 | "from testing_utilities import img_url_to_json" 25 | ] 26 | }, 27 | { 28 | "cell_type": "markdown", 29 | "metadata": {}, 30 | "source": [ 31 | "We use the `%%writefile` magic to write the contents of the below cell to driver.py, which includes the driver methods." 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 2, 37 | "metadata": {}, 38 | "outputs": [ 39 | { 40 | "name": "stdout", 41 | "output_type": "stream", 42 | "text": [ 43 | "Overwriting driver.py\n" 44 | ] 45 | } 46 | ], 47 | "source": [ 48 | "%%writefile driver.py\n", 49 | "\n", 50 | "import tensorflow as tf\n", 51 | "from resnet152 import ResNet152\n", 52 | "from keras.preprocessing import image\n", 53 | "from keras.applications.imagenet_utils import preprocess_input, decode_predictions\n", 54 | "\n", 55 | "import numpy as np\n", 56 | "import timeit as t\n", 57 | "import base64\n", 58 | "import json\n", 59 | "from PIL import Image, ImageOps\n", 60 | "from io import BytesIO\n", 61 | "import logging\n", 62 | "\n", 63 | "number_results = 3\n", 64 | "logger = logging.getLogger(\"model_driver\")\n", 65 | "\n", 66 | "def _base64img_to_numpy(base64_img_string):\n", 67 | "    decoded_img = base64.b64decode(base64_img_string)\n", 68 | "    img_buffer = BytesIO(decoded_img)\n", 69 | "    imageData = Image.open(img_buffer).convert(\"RGB\")\n", 70 | "    img = ImageOps.fit(imageData, (224, 224), Image.ANTIALIAS)\n", 71 | "    img = image.img_to_array(img)\n", 72 | "    return img\n", 73 | "\n", 74 | "def create_scoring_func():\n", 75 | "    \"\"\" Initialize ResNet 152 Model \n", 76 | "    \"\"\" \n", 77 | "    start = t.default_timer()\n", 78 | "    model = ResNet152(weights='imagenet')\n", 79 | "    end = t.default_timer()\n", 80 | "    \n", 81 | "    loadTimeMsg = \"Model loading time: {0} ms\".format(round((end-start)*1000, 2))\n", 82 | "    logger.info(loadTimeMsg)\n", 83 | "    \n", 84 | "    def call_model(img_array):\n", 85 | "        img_array = np.expand_dims(img_array, axis=0)\n", 86 | "        img_array = preprocess_input(img_array)\n", 87 | "        preds = model.predict(img_array)\n", 88 | "        preds = decode_predictions(preds, top=number_results)[0] \n", 89 | "        return preds\n", 90 | "    \n", 91 | "    return call_model \n", 92 | "\n", 93 | "def get_model_api():\n", 94 | "    logger = logging.getLogger(\"model_driver\")\n", 95 | "    scoring_func = create_scoring_func()\n", 96 | "    \n", 97 | "    def process_and_score(inputString):\n", 98 | "        \"\"\" Classify the input using the loaded model\n", 99 | "        \"\"\"\n", 100 | "        start = t.default_timer()\n", 101 | "\n", 102 | "        base64Dict = json.loads(inputString) \n", 103 | "        img_file_name, base64Img = next(iter(base64Dict.items()))\n", 104 | "        img_array = _base64img_to_numpy(base64Img)\n", 105 | "        preds = scoring_func(img_array)\n", 106 | "        responses = {img_file_name: preds}\n", 107 | "\n", 108 | "        end = t.default_timer()\n", 109 | "        \n", 110 | "        logger.info(\"Predictions: {0}\".format(responses))\n", 111 | "        logger.info(\"Predictions took {0} ms\".format(round((end-start)*1000, 2)))\n", 112 | "        return (responses, \"Computed in {0} ms\".format(round((end-start)*1000, 2)))\n", 113 | "    return 
process_and_score\n", 114 | "\n", 115 | "def version():\n", 116 | "    return tf.__version__" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "Let's test the module." 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": 3, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "logging.basicConfig(level=logging.DEBUG)" 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "We run the file driver.py, which will bring everything into the context of the notebook." 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": 5, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "%run driver.py" 149 | ] 150 | }, 151 | { 152 | "cell_type": "markdown", 153 | "metadata": {}, 154 | "source": [ 155 | "We will use the same Lynx image we used earlier to check that our driver works as expected." 156 | ] 157 | }, 158 | { 159 | "cell_type": "code", 160 | "execution_count": 6, 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "IMAGEURL = \"https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\"" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 7, 170 | "metadata": {}, 171 | "outputs": [ 172 | { 173 | "name": "stderr", 174 | "output_type": "stream", 175 | "text": [ 176 | "INFO:model_driver:Model loading time: 42520.51 ms\n" 177 | ] 178 | } 179 | ], 180 | "source": [ 181 | "predict_for = get_model_api()" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 8, 187 | "metadata": {}, 188 | "outputs": [ 189 | { 190 | "name": "stderr", 191 | "output_type": "stream", 192 | "text": [ 193 | "DEBUG:PIL.PngImagePlugin:STREAM b'IHDR' 16 13\n", 194 | "DEBUG:PIL.PngImagePlugin:STREAM b'iCCP' 41 292\n", 195 | "DEBUG:PIL.PngImagePlugin:iCCP profile name b'ICC Profile'\n", 196 | "DEBUG:PIL.PngImagePlugin:Compression method 0\n", 197 | "DEBUG:PIL.PngImagePlugin:STREAM b'IDAT' 345 65536\n", 198 | "INFO:model_driver:Predictions: {'image': [('n02127052', 'lynx', 0.9816483), ('n02128385', 'leopard', 0.0077441484), ('n02123159', 'tiger_cat', 0.0036861342)]}\n", 199 | "INFO:model_driver:Predictions took 4221.36 ms\n" 200 | ] 201 | } 202 | ], 203 | "source": [ 204 | "jsonimg = img_url_to_json(IMAGEURL)\n", 205 | "json_load_img = json.loads(jsonimg)\n", 206 | "body = json_load_img['input']\n", 207 | "resp = predict_for(body)" 208 | ] 209 | }, 210 | { 211 | "cell_type": "markdown", 212 | "metadata": {}, 213 | "source": [ 214 | "Next, we can move on to [building our docker image](02_BuildImage.ipynb)."
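Looking ahead, once this driver sits behind the Flask app and the AKS load balancer, scoring becomes a single HTTP POST. A sketch of such a call (the `/score` route is defined in the Flask app we containerize next; the IP placeholder must be replaced with the EXTERNAL-IP reported by `kubectl get service azure-dl` after deployment):

```python
import requests

headers = {"Content-Type": "application/json"}
# jsonimg is the payload produced by img_url_to_json above.
resp = requests.post("http://<EXTERNAL-IP>/score", data=jsonimg, headers=headers)
print(resp.json()["result"])
```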
215 | ] 216 | } 217 | ], 218 | "metadata": { 219 | "kernelspec": { 220 | "display_name": "Python [conda env:AKSDeploymentKeras]", 221 | "language": "python", 222 | "name": "conda-env-AKSDeploymentKeras-py" 223 | }, 224 | "language_info": { 225 | "codemirror_mode": { 226 | "name": "ipython", 227 | "version": 3 228 | }, 229 | "file_extension": ".py", 230 | "mimetype": "text/x-python", 231 | "name": "python", 232 | "nbconvert_exporter": "python", 233 | "pygments_lexer": "ipython3", 234 | "version": "3.5.5" 235 | } 236 | }, 237 | "nbformat": 4, 238 | "nbformat_minor": 2 239 | } 240 | -------------------------------------------------------------------------------- /Keras_Tensorflow/04_DeployOnAKS.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Deploy Web App on Azure Container Service (AKS)\n" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this notebook, we will set up an Azure Container Service (AKS) cluster, which is managed by Kubernetes. We will then take the Docker image we created earlier that contains our app and deploy it to the AKS cluster. Then, we will check that everything is working by sending an image to it and getting it scored. \n", 15 | "\n", 16 | "The process is split into the following steps:\n", 17 | "- Define our resource names\n", 18 | "- Log in to Azure\n", 19 | "- Create the resource group and the AKS cluster\n", 20 | "- Connect to AKS\n", 21 | "- Deploy our app\n", 22 | "\n", 23 | "We assume that this notebook is running on Linux and that the Azure CLI is installed before proceeding." 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import os\n", 33 | "import json\n", 34 | "from testing_utilities import write_json_to_file\n", 35 | "%load_ext dotenv" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "## Setup" 43 | ] 44 | }, 45 | { 46 | "cell_type": "markdown", 47 | "metadata": {}, 48 | "source": [ 49 | "Below are the various name definitions for the resources needed to set up AKS." 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": { 56 | "tags": [ 57 | "parameters" 58 | ] 59 | }, 60 | "outputs": [], 61 | "source": [ 62 | "%%writefile --append .env\n", 63 | "# This cell is tagged `parameters`\n", 64 | "# Please modify the values below as you see fit\n", 65 | "\n", 66 | "# If you have multiple subscriptions select the subscription you want to use \n", 67 | "selected_subscription = \"YOUR_SUBSCRIPTION\"\n", 68 | "\n", 69 | "# Resource group, name and location for AKS cluster.\n", 70 | "resource_group = \"RESOURCE_GROUP\" \n", 71 | "aks_name = \"AKS_CLUSTER_NAME\"\n", 72 | "location = \"eastus\"" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": null, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "%dotenv\n", 82 | "image_name = os.getenv('docker_login') + os.getenv('image_repo')" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "metadata": {}, 88 | "source": [ 89 | "## Azure account login" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "If you are not already logged in to an Azure account, the command below will initiate a login. This will pop up a browser where you can select your login."
97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "%%bash\n", 106 | "list=`az account list -o table`\n", 107 | "if [ \\"$list\\" == '[]' ] || [ \\"$list\\" == '' ]; then\n", 108 | "  az login -o table\n", 109 | "else\n", 110 | "  az account list -o table\n", 111 | "fi" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "!az account set --subscription \"$selected_subscription\"" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": null, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "!az account show" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "You will also need to register the container service resources on your subscription if you haven't already done so." 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [ 145 | "!az provider register -n Microsoft.ContainerService" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [ 154 | "!az provider show -n Microsoft.ContainerService" 155 | ] 156 | }, 157 | { 158 | "cell_type": "markdown", 159 | "metadata": {}, 160 | "source": [ 161 | "## Create resources and dependencies" 162 | ] 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "metadata": {}, 167 | "source": [ 168 | "### Create resource group and AKS cluster" 169 | ] 170 | }, 171 | { 172 | "cell_type": "markdown", 173 | "metadata": {}, 174 | "source": [ 175 | "Azure encourages the use of groups to organize all the Azure components you deploy. That way it is easier to find them, and we can also delete a number of resources simply by deleting the group." 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "metadata": {}, 182 | "outputs": [], 183 | "source": [ 184 | "!az group create --name $resource_group --location $location" 185 | ] 186 | }, 187 | { 188 | "cell_type": "markdown", 189 | "metadata": {}, 190 | "source": [ 191 | "Below, we create the AKS cluster in the resource group we created earlier. This could take up to 15 minutes." 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": null, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "%%time\n", 201 | "!az aks create --resource-group $resource_group --name $aks_name --node-count 1 --generate-ssh-keys -s Standard_NC6" 202 | ] 203 | }, 204 | { 205 | "cell_type": "markdown", 206 | "metadata": {}, 207 | "source": [ 208 | "### Install kubectl CLI" 209 | ] 210 | }, 211 | { 212 | "cell_type": "markdown", 213 | "metadata": {}, 214 | "source": [ 215 | "To connect to the Kubernetes cluster, we will use kubectl, the Kubernetes command-line client. 
To install, run the following:" 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": null, 221 | "metadata": {}, 222 | "outputs": [], 223 | "source": [ 224 | "!sudo env \"PATH=$PATH\" az aks install-cli" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "## Connect to AKS cluster" 232 | ] 233 | }, 234 | { 235 | "cell_type": "markdown", 236 | "metadata": {}, 237 | "source": [ 238 | "To configure kubectl to connect to the Kubernetes cluster, run the following command:" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": null, 244 | "metadata": {}, 245 | "outputs": [], 246 | "source": [ 247 | "!az aks get-credentials --resource-group $resource_group --name $aks_name" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "metadata": {}, 253 | "source": [ 254 | "Let's verify the connection by listing the nodes." 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": null, 260 | "metadata": {}, 261 | "outputs": [], 262 | "source": [ 263 | "!kubectl get nodes" 264 | ] 265 | }, 266 | { 267 | "cell_type": "markdown", 268 | "metadata": {}, 269 | "source": [ 270 | "Let's check the pods on our cluster." 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": null, 276 | "metadata": {}, 277 | "outputs": [], 278 | "source": [ 279 | "!kubectl get pods --all-namespaces" 280 | ] 281 | }, 282 | { 283 | "cell_type": "markdown", 284 | "metadata": {}, 285 | "source": [ 286 | "## Deploy application" 287 | ] 288 | }, 289 | { 290 | "cell_type": "markdown", 291 | "metadata": {}, 292 | "source": [ 293 | "Below, we define our Kubernetes manifest file for our service and load balancer. Note that we have to specify the volume mounts for the NVIDIA drivers that are located on the node."
294 | ] 295 | }, 296 | { 297 | "cell_type": "code", 298 | "execution_count": null, 299 | "metadata": {}, 300 | "outputs": [], 301 | "source": [ 302 | "app_template = {\n", 303 | " \"apiVersion\": \"apps/v1beta1\",\n", 304 | " \"kind\": \"Deployment\",\n", 305 | " \"metadata\": {\n", 306 | " \"name\": \"azure-dl\"\n", 307 | " },\n", 308 | " \"spec\":{\n", 309 | " \"replicas\":1,\n", 310 | " \"template\":{\n", 311 | " \"metadata\":{\n", 312 | " \"labels\":{\n", 313 | " \"app\":\"azure-dl\"\n", 314 | " }\n", 315 | " },\n", 316 | " \"spec\":{\n", 317 | " \"containers\":[\n", 318 | " {\n", 319 | " \"name\": \"azure-dl\",\n", 320 | " \"image\": image_name,\n", 321 | " \"env\":[\n", 322 | " {\n", 323 | " \"name\": \"LD_LIBRARY_PATH\",\n", 324 | " \"value\": \"$LD_LIBRARY_PATH:/usr/local/nvidia/lib64:/opt/conda/envs/py3.5/lib\"\n", 325 | " }\n", 326 | " ],\n", 327 | " \"ports\":[\n", 328 | " {\n", 329 | " \"containerPort\":80,\n", 330 | " \"name\":\"model\"\n", 331 | " }\n", 332 | " ],\n", 333 | " \"volumeMounts\":[\n", 334 | " {\n", 335 | " \"mountPath\":\"/usr/local/nvidia\",\n", 336 | " \"name\": \"nvidia\",\n", 337 | " }\n", 338 | " ],\n", 339 | " \"resources\":{\n", 340 | " \"requests\":{\n", 341 | " \"alpha.kubernetes.io/nvidia-gpu\": 1\n", 342 | " },\n", 343 | " \"limits\":{\n", 344 | " \"alpha.kubernetes.io/nvidia-gpu\": 1\n", 345 | " }\n", 346 | " } \n", 347 | " }\n", 348 | " ],\n", 349 | " \"volumes\":[\n", 350 | " {\n", 351 | " \"name\": \"nvidia\",\n", 352 | " \"hostPath\":{\n", 353 | " \"path\":\"/usr/local/nvidia\"\n", 354 | " },\n", 355 | " },\n", 356 | " ]\n", 357 | " }\n", 358 | " }\n", 359 | " }\n", 360 | "}\n", 361 | "\n", 362 | "service_temp = {\n", 363 | " \"apiVersion\": \"v1\",\n", 364 | " \"kind\": \"Service\",\n", 365 | " \"metadata\": {\n", 366 | " \"name\": \"azure-dl\"\n", 367 | " },\n", 368 | " \"spec\":{\n", 369 | " \"type\": \"LoadBalancer\",\n", 370 | " \"ports\":[\n", 371 | " {\n", 372 | " \"port\":80\n", 373 | " }\n", 374 | " ],\n", 375 | " \"selector\":{\n", 376 | " \"app\":\"azure-dl\"\n", 377 | " }\n", 378 | " }\n", 379 | "}" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": null, 385 | "metadata": {}, 386 | "outputs": [], 387 | "source": [ 388 | "write_json_to_file(app_template, 'az-dl.json')" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": null, 394 | "metadata": {}, 395 | "outputs": [], 396 | "source": [ 397 | "write_json_to_file(service_temp, 'az-dl.json', mode='a')" 398 | ] 399 | }, 400 | { 401 | "cell_type": "markdown", 402 | "metadata": {}, 403 | "source": [ 404 | "Let's check the manifest created." 405 | ] 406 | }, 407 | { 408 | "cell_type": "code", 409 | "execution_count": null, 410 | "metadata": {}, 411 | "outputs": [], 412 | "source": [ 413 | "!cat az-dl.json" 414 | ] 415 | }, 416 | { 417 | "cell_type": "markdown", 418 | "metadata": {}, 419 | "source": [ 420 | "Next, we will use kubectl create command to deploy our application." 421 | ] 422 | }, 423 | { 424 | "cell_type": "code", 425 | "execution_count": null, 426 | "metadata": {}, 427 | "outputs": [], 428 | "source": [ 429 | "!kubectl create -f az-dl.json" 430 | ] 431 | }, 432 | { 433 | "cell_type": "markdown", 434 | "metadata": {}, 435 | "source": [ 436 | "Let's check if the pod is deployed. It may take as many as 10 minutes for the container to be ready." 
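If you prefer not to poll manually, you can also block until the rollout completes; one convenient way to wait, using the deployment name `azure-dl` from the manifest above:

```python
!kubectl rollout status deployment/azure-dl
```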
437 | ] 438 | }, 439 | { 440 | "cell_type": "code", 441 | "execution_count": null, 442 | "metadata": {}, 443 | "outputs": [], 444 | "source": [ 445 | "!kubectl get pods --all-namespaces" 446 | ] 447 | }, 448 | { 449 | "cell_type": "markdown", 450 | "metadata": {}, 451 | "source": [ 452 | "If anything goes wrong, you can use the commands below to observe the events on the node as well as review the logs." 453 | ] 454 | }, 455 | { 456 | "cell_type": "code", 457 | "execution_count": null, 458 | "metadata": {}, 459 | "outputs": [], 460 | "source": [ 461 | "!kubectl get events" 462 | ] 463 | }, 464 | { 465 | "cell_type": "markdown", 466 | "metadata": {}, 467 | "source": [ 468 | "Check the logs for the application pod." 469 | ] 470 | }, 471 | { 472 | "cell_type": "code", 473 | "execution_count": null, 474 | "metadata": {}, 475 | "outputs": [], 476 | "source": [ 477 | "pod_json = !kubectl get pods -o json\n", 478 | "pod_dict = json.loads(''.join(pod_json))" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": null, 484 | "metadata": {}, 485 | "outputs": [], 486 | "source": [ 487 | "!kubectl logs {pod_dict['items'][0]['metadata']['name']}" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": null, 493 | "metadata": {}, 494 | "outputs": [], 495 | "source": [ 496 | "!kubectl get deployment" 497 | ] 498 | }, 499 | { 500 | "cell_type": "markdown", 501 | "metadata": {}, 502 | "source": [ 503 | "It can take a few minutes for the service to populate the EXTERNAL-IP field below. This will be the IP you use to call the service. You can also specify an IP to use; please see the AKS documentation for further details." 504 | ] 505 | }, 506 | { 507 | "cell_type": "code", 508 | "execution_count": null, 509 | "metadata": {}, 510 | "outputs": [], 511 | "source": [ 512 | "!kubectl get service azure-dl" 513 | ] 514 | }, 515 | { 516 | "cell_type": "markdown", 517 | "metadata": {}, 518 | "source": [ 519 | "Next, we will [test our web application deployed on AKS](05_TestWebApp.ipynb)." 520 | ] 521 | } 522 | ], 523 | "metadata": { 524 | "kernelspec": { 525 | "display_name": "Python [conda env:AKSDeploymentKeras]", 526 | "language": "python", 527 | "name": "conda-env-AKSDeploymentKeras-py" 528 | }, 529 | "language_info": { 530 | "codemirror_mode": { 531 | "name": "ipython", 532 | "version": 3 533 | }, 534 | "file_extension": ".py", 535 | "mimetype": "text/x-python", 536 | "name": "python", 537 | "nbconvert_exporter": "python", 538 | "pygments_lexer": "ipython3", 539 | "version": "3.5.5" 540 | } 541 | }, 542 | "nbformat": 4, 543 | "nbformat_minor": 2 544 | } 545 | -------------------------------------------------------------------------------- /Keras_Tensorflow/07_TearDown.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Tear it all down\n", 8 | "Once you are done with your cluster, you can use the following commands to destroy it all." 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "%load_ext dotenv\n", 18 | "%dotenv" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "First, delete the application."
26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stdout", 35 | "output_type": "stream", 36 | "text": [ 37 | "deployment.apps \"azure-dl\" deleted\n", 38 | "service \"azure-dl\" deleted\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "!kubectl delete -f az-dl.json" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "Next, you delete the AKS cluster. This step may take a few minutes." 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "!az aks delete -n $aks_name -g $resource_group -y" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "Finally, you should delete the resource group. This also deletes the AKS cluster and can be used instead of the above command if the resource group is only used for this purpose." 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "!az group delete --name $resource_group -y" 76 | ] 77 | } 78 | ], 79 | "metadata": { 80 | "kernelspec": { 81 | "display_name": "Python [conda env:AKSDeployment]", 82 | "language": "python", 83 | "name": "conda-env-AKSDeployment-py" 84 | }, 85 | "language_info": { 86 | "codemirror_mode": { 87 | "name": "ipython", 88 | "version": 3 89 | }, 90 | "file_extension": ".py", 91 | "mimetype": "text/x-python", 92 | "name": "python", 93 | "nbconvert_exporter": "python", 94 | "pygments_lexer": "ipython3", 95 | "version": "3.5.5" 96 | } 97 | }, 98 | "nbformat": 4, 99 | "nbformat_minor": 2 100 | } 101 | -------------------------------------------------------------------------------- /Keras_Tensorflow/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Deploy ResNet 152 model on GPU-enabled Kubernetes cluster using Keras with Tensorflow 3 | 4 | In this folder are the tutorials for deploying a Keras model (with Tensorflow backend) on a Kubernetes cluster.
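To run these notebooks locally, first create and activate the conda environment defined in [environment.yml](environment.yml) (it is named AKSDeploymentKeras):

```
conda env create -f environment.yml
source activate AKSDeploymentKeras
```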
5 | 6 | The tutorial is made up of the following notebooks: 7 | * [Model development](00_DevelopModel.ipynb) where we load the pretrained model and test it by using it to score images 8 | * [Developing the interface](01_DevelopModelDriver.ipynb) our Flask app will use to load and call the model 9 | * [Building the Docker Image](02_BuildImage.ipynb) with our Flask REST API and model 10 | * [Testing our Docker image](03_TestLocally.ipynb) before deployment 11 | * [Creating our Kubernetes cluster](04_DeployOnAKS.ipynb) and deploying our application to it 12 | * [Testing the deployed model](05_TestWebApp.ipynb) 13 | * [Testing the throughput](06_SpeedTestWebApp.ipynb) of our model 14 | * [Cleaning the resources](07_TearDown.ipynb) used 15 | 16 | -------------------------------------------------------------------------------- /Keras_Tensorflow/environment.yml: -------------------------------------------------------------------------------- 1 | name: AKSDeploymentKeras 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.5 6 | - nb_conda==2.2.0 7 | - tornado==4.5.3 8 | - pip: 9 | - papermill==0.14.1 10 | - python-dotenv==0.9.0 11 | - Pillow==5.2.0 12 | - wget==3.2 13 | - matplotlib==2.2.2 14 | - aiohttp==3.3.2 15 | - toolz==0.9.0 16 | - tqdm==4.23.4 17 | - azure-cli==2.0.41 18 | - tensorflow-gpu==1.9.0 19 | - keras==2.2.0 20 | -------------------------------------------------------------------------------- /Keras_Tensorflow/resnet152.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ResNet152 model for Keras. 3 | 4 | # Reference: 5 | 6 | - [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) 7 | 8 | Adaptation of code from flyyufelix, mvoelk, BigMoyan, fchollet at https://github.com/adamcasson/resnet152 9 | 10 | """ 11 | 12 | import numpy as np 13 | import warnings 14 | 15 | from keras.layers import Input 16 | from keras.layers import Dense 17 | from keras.layers import Activation 18 | from keras.layers import Flatten 19 | from keras.layers import Conv2D 20 | from keras.layers import MaxPooling2D 21 | from keras.layers import GlobalMaxPooling2D 22 | from keras.layers import ZeroPadding2D 23 | from keras.layers import AveragePooling2D 24 | from keras.layers import GlobalAveragePooling2D 25 | from keras.layers import BatchNormalization 26 | from keras.layers import add 27 | from keras.models import Model 28 | import keras.backend as K 29 | from keras.engine.topology import get_source_inputs 30 | from keras.utils import layer_utils 31 | from keras import initializers 32 | from keras.engine import Layer, InputSpec 33 | from keras.preprocessing import image 34 | from keras.utils.data_utils import get_file 35 | from keras.applications.imagenet_utils import decode_predictions 36 | from keras.applications.imagenet_utils import preprocess_input 37 | from keras.applications.imagenet_utils import _obtain_input_shape 38 | 39 | import sys 40 | sys.setrecursionlimit(3000) 41 | 42 | WEIGHTS_PATH = 'https://github.com/adamcasson/resnet152/releases/download/v0.1/resnet152_weights_tf.h5' 43 | WEIGHTS_PATH_NO_TOP = 'https://github.com/adamcasson/resnet152/releases/download/v0.1/resnet152_weights_tf_notop.h5' 44 | 45 | class Scale(Layer): 46 | """Custom Layer for ResNet used for BatchNormalization. 47 | 48 | Learns a set of weights and biases used for scaling the input data. 
The output consists simply of an element-wise multiplication of the input 49 | and a sum of a set of constants: 50 | 51 | out = in * gamma + beta, 52 | 53 | where 'gamma' and 'beta' are the weights and biases learned. 54 | 55 | Keyword arguments: 56 | axis -- integer, axis along which to normalize in mode 0. For instance, 57 | if your input tensor has shape (samples, channels, rows, cols), 58 | set axis to 1 to normalize per feature map (channels axis). 59 | momentum -- momentum in the computation of the exponential average 60 | of the mean and standard deviation of the data, for 61 | feature-wise normalization. 62 | weights -- Initialization weights. 63 | List of 2 Numpy arrays, with shapes: 64 | `[(input_shape,), (input_shape,)]` 65 | beta_init -- name of initialization function for shift parameter 66 | (see [initializers](../initializers.md)), or alternatively, 67 | Theano/TensorFlow function to use for weights initialization. 68 | This parameter is only relevant if you don't pass a `weights` argument. 69 | gamma_init -- name of initialization function for scale parameter (see 70 | [initializers](../initializers.md)), or alternatively, 71 | Theano/TensorFlow function to use for weights initialization. 72 | This parameter is only relevant if you don't pass a `weights` argument. 73 | 74 | """ 75 | 76 |     def __init__(self, weights=None, axis=-1, momentum = 0.9, beta_init='zero', gamma_init='one', **kwargs): 77 |         self.momentum = momentum 78 |         self.axis = axis 79 |         self.beta_init = initializers.get(beta_init) 80 |         self.gamma_init = initializers.get(gamma_init) 81 |         self.initial_weights = weights 82 |         super(Scale, self).__init__(**kwargs) 83 | 84 |     def build(self, input_shape): 85 |         self.input_spec = [InputSpec(shape=input_shape)] 86 |         shape = (int(input_shape[self.axis]),) 87 | 88 |         self.gamma = K.variable(self.gamma_init(shape), name='%s_gamma'%self.name) 89 |         self.beta = K.variable(self.beta_init(shape), name='%s_beta'%self.name) 90 |         self.trainable_weights = [self.gamma, self.beta] 91 | 92 |         if self.initial_weights is not None: 93 |             self.set_weights(self.initial_weights) 94 |             del self.initial_weights 95 | 96 |     def call(self, x, mask=None): 97 |         input_shape = self.input_spec[0].shape 98 |         broadcast_shape = [1] * len(input_shape) 99 |         broadcast_shape[self.axis] = input_shape[self.axis] 100 | 101 |         out = K.reshape(self.gamma, broadcast_shape) * x + K.reshape(self.beta, broadcast_shape) 102 |         return out 103 | 104 |     def get_config(self): 105 |         config = {"momentum": self.momentum, "axis": self.axis} 106 |         base_config = super(Scale, self).get_config() 107 |         return dict(list(base_config.items()) + list(config.items())) 108 | 109 | def identity_block(input_tensor, kernel_size, filters, stage, block): 110 |     """The identity_block is the block that has no conv layer at shortcut 111 | 112 |     Keyword arguments: 113 |     input_tensor -- input tensor 114 |     kernel_size -- default 3, the kernel size of middle conv layer at main path 115 |     filters -- list of integers, the nb_filters of the 3 conv layers at the main path 116 |     stage -- integer, current stage label, used for generating layer names 117 |     block -- 'a','b'..., current block label, used for generating layer names 118 | 119 |     """ 120 |     eps = 1.1e-5 121 | 122 |     if K.image_dim_ordering() == 'tf': 123 |         bn_axis = 3 124 |     else: 125 |         bn_axis = 1 126 | 127 |     nb_filter1, nb_filter2, nb_filter3 = filters 128 |     conv_name_base = 'res' + str(stage) + block + '_branch' 129 |     bn_name_base = 'bn' + str(stage) + block + '_branch' 130 |     scale_name_base = 'scale' + str(stage) + 
block + '_branch' 131 | 132 |     x = Conv2D(nb_filter1, (1, 1), name=conv_name_base + '2a', use_bias=False)(input_tensor) 133 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2a')(x) 134 |     x = Scale(axis=bn_axis, name=scale_name_base + '2a')(x) 135 |     x = Activation('relu', name=conv_name_base + '2a_relu')(x) 136 | 137 |     x = ZeroPadding2D((1, 1), name=conv_name_base + '2b_zeropadding')(x) 138 |     x = Conv2D(nb_filter2, (kernel_size, kernel_size), name=conv_name_base + '2b', use_bias=False)(x) 139 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2b')(x) 140 |     x = Scale(axis=bn_axis, name=scale_name_base + '2b')(x) 141 |     x = Activation('relu', name=conv_name_base + '2b_relu')(x) 142 | 143 |     x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', use_bias=False)(x) 144 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2c')(x) 145 |     x = Scale(axis=bn_axis, name=scale_name_base + '2c')(x) 146 | 147 |     x = add([x, input_tensor], name='res' + str(stage) + block) 148 |     x = Activation('relu', name='res' + str(stage) + block + '_relu')(x) 149 |     return x 150 | 151 | def conv_block(input_tensor, kernel_size, filters, stage, block, strides=(2, 2)): 152 |     """conv_block is the block that has a conv layer at shortcut 153 | 154 |     Keyword arguments: 155 |     input_tensor -- input tensor 156 |     kernel_size -- default 3, the kernel size of middle conv layer at main path 157 |     filters -- list of integers, the nb_filters of the 3 conv layers at the main path 158 |     stage -- integer, current stage label, used for generating layer names 159 |     block -- 'a','b'..., current block label, used for generating layer names 160 | 161 |     Note that from stage 3, the first conv layer at the main path has strides=(2, 2), 162 |     and the shortcut should have strides=(2, 2) as well. 163 | 164 |     """ 165 |     eps = 1.1e-5 166 | 167 |     if K.image_dim_ordering() == 'tf': 168 |         bn_axis = 3 169 |     else: 170 |         bn_axis = 1 171 | 172 |     nb_filter1, nb_filter2, nb_filter3 = filters 173 |     conv_name_base = 'res' + str(stage) + block + '_branch' 174 |     bn_name_base = 'bn' + str(stage) + block + '_branch' 175 |     scale_name_base = 'scale' + str(stage) + block + '_branch' 176 | 177 |     x = Conv2D(nb_filter1, (1, 1), strides=strides, name=conv_name_base + '2a', use_bias=False)(input_tensor) 178 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2a')(x) 179 |     x = Scale(axis=bn_axis, name=scale_name_base + '2a')(x) 180 |     x = Activation('relu', name=conv_name_base + '2a_relu')(x) 181 | 182 |     x = ZeroPadding2D((1, 1), name=conv_name_base + '2b_zeropadding')(x) 183 |     x = Conv2D(nb_filter2, (kernel_size, kernel_size), 184 |                       name=conv_name_base + '2b', use_bias=False)(x) 185 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2b')(x) 186 |     x = Scale(axis=bn_axis, name=scale_name_base + '2b')(x) 187 |     x = Activation('relu', name=conv_name_base + '2b_relu')(x) 188 | 189 |     x = Conv2D(nb_filter3, (1, 1), name=conv_name_base + '2c', use_bias=False)(x) 190 |     x = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '2c')(x) 191 |     x = Scale(axis=bn_axis, name=scale_name_base + '2c')(x) 192 | 193 |     shortcut = Conv2D(nb_filter3, (1, 1), strides=strides, 194 |                              name=conv_name_base + '1', use_bias=False)(input_tensor) 195 |     shortcut = BatchNormalization(epsilon=eps, axis=bn_axis, name=bn_name_base + '1')(shortcut) 196 |     shortcut = Scale(axis=bn_axis, name=scale_name_base + '1')(shortcut) 197 | 198 |     x = add([x, shortcut], name='res' + str(stage) + block) 199 |     x = 
Activation('relu', name='res' + str(stage) + block + '_relu')(x) 200 |     return x 201 | 202 | def ResNet152(include_top=True, weights=None, 203 |               input_tensor=None, input_shape=None, 204 |               large_input=False, pooling=None, 205 |               classes=1000): 206 |     """Instantiate the ResNet152 architecture. 207 | 208 |     Keyword arguments: 209 |     include_top -- whether to include the fully-connected layer at the 210 |         top of the network. (default True) 211 |     weights -- one of `None` (random initialization) or "imagenet" 212 |         (pre-training on ImageNet). (default None) 213 |     input_tensor -- optional Keras tensor (i.e. output of `layers.Input()`) 214 |         to use as image input for the model. (default None) 215 |     input_shape -- optional shape tuple, only to be specified if 216 |         `include_top` is False (otherwise the input shape has to be 217 |         `(224, 224, 3)` (with `channels_last` data format) or 218 |         `(3, 224, 224)` (with `channels_first` data format)). It should 219 |         have exactly 3 input channels, and width and height should be 220 |         no smaller than 197. E.g. `(200, 200, 3)` would be one valid value. 221 |         (default None) 222 |     large_input -- if True, then the input shape expected will be 223 |         `(448, 448, 3)` (with `channels_last` data format) or 224 |         `(3, 448, 448)` (with `channels_first` data format). (default False) 225 |     pooling -- Optional pooling mode for feature extraction when 226 |         `include_top` is `False`. 227 |         - `None` means that the output of the model will be the 4D 228 |             tensor output of the last convolutional layer. 229 |         - `avg` means that global average pooling will be applied to 230 |             the output of the last convolutional layer, and thus 231 |             the output of the model will be a 2D tensor. 232 |         - `max` means that global max pooling will be applied. 233 |         (default None) 234 |     classes -- optional number of classes to classify image into, only 235 |         to be specified if `include_top` is True, and if no `weights` 236 |         argument is specified. (default 1000) 237 | 238 |     Returns: 239 |     A Keras model instance. 240 | 241 |     Raises: 242 |     ValueError: in case of invalid argument for `weights`, 243 |         or invalid input shape. 
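    Example (a usage sketch; the __main__ block at the bottom of this file
    runs a fuller version of it against a local 'elephant.jpg'):

        model = ResNet152(include_top=True, weights='imagenet')
        preds = model.predict(x)  # x: a preprocessed batch of shape (N, 224, 224, 3)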
244 | """ 245 | if weights not in {'imagenet', None}: 246 | raise ValueError('The `weights` argument should be either ' 247 | '`None` (random initialization) or `imagenet` ' 248 | '(pre-training on ImageNet).') 249 | 250 | if weights == 'imagenet' and include_top and classes != 1000: 251 | raise ValueError('If using `weights` as imagenet with `include_top`' 252 | ' as true, `classes` should be 1000') 253 | 254 | eps = 1.1e-5 255 | 256 | if large_input: 257 | img_size = 448 258 | else: 259 | img_size = 224 260 | 261 | # Determine proper input shape 262 | input_shape = _obtain_input_shape(input_shape, 263 | default_size=img_size, 264 | min_size=197, 265 | data_format=K.image_data_format(), 266 | require_flatten=include_top) 267 | 268 | if input_tensor is None: 269 | img_input = Input(shape=input_shape) 270 | else: 271 | if not K.is_keras_tensor(input_tensor): 272 | img_input = Input(tensor=input_tensor, shape=input_shape) 273 | else: 274 | img_input = input_tensor 275 | 276 | # handle dimension ordering for different backends 277 | if K.image_dim_ordering() == 'tf': 278 | bn_axis = 3 279 | else: 280 | bn_axis = 1 281 | 282 | x = ZeroPadding2D((3, 3), name='conv1_zeropadding')(img_input) 283 | x = Conv2D(64, (7, 7), strides=(2, 2), name='conv1', use_bias=False)(x) 284 | x = BatchNormalization(epsilon=eps, axis=bn_axis, name='bn_conv1')(x) 285 | x = Scale(axis=bn_axis, name='scale_conv1')(x) 286 | x = Activation('relu', name='conv1_relu')(x) 287 | x = MaxPooling2D((3, 3), strides=(2, 2), name='pool1')(x) 288 | 289 | x = conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1)) 290 | x = identity_block(x, 3, [64, 64, 256], stage=2, block='b') 291 | x = identity_block(x, 3, [64, 64, 256], stage=2, block='c') 292 | 293 | x = conv_block(x, 3, [128, 128, 512], stage=3, block='a') 294 | for i in range(1,8): 295 | x = identity_block(x, 3, [128, 128, 512], stage=3, block='b'+str(i)) 296 | 297 | x = conv_block(x, 3, [256, 256, 1024], stage=4, block='a') 298 | for i in range(1,36): 299 | x = identity_block(x, 3, [256, 256, 1024], stage=4, block='b'+str(i)) 300 | 301 | x = conv_block(x, 3, [512, 512, 2048], stage=5, block='a') 302 | x = identity_block(x, 3, [512, 512, 2048], stage=5, block='b') 303 | x = identity_block(x, 3, [512, 512, 2048], stage=5, block='c') 304 | 305 | if large_input: 306 | x = AveragePooling2D((14, 14), name='avg_pool')(x) 307 | else: 308 | x = AveragePooling2D((7, 7), name='avg_pool')(x) 309 | 310 | # include classification layer by default, not included for feature extraction 311 | if include_top: 312 | x = Flatten()(x) 313 | x = Dense(classes, activation='softmax', name='fc1000')(x) 314 | else: 315 | if pooling == 'avg': 316 | x = GlobalAveragePooling2D()(x) 317 | elif pooling == 'max': 318 | x = GlobalMaxPooling2D()(x) 319 | 320 | # Ensure that the model takes into account 321 | # any potential predecessors of `input_tensor`. 322 | if input_tensor is not None: 323 | inputs = get_source_inputs(input_tensor) 324 | else: 325 | inputs = img_input 326 | # Create model. 
327 | model = Model(inputs, x, name='resnet152') 328 | 329 | # load weights 330 | if weights == 'imagenet': 331 | if include_top: 332 | weights_path = get_file('resnet152_weights_tf.h5', 333 | WEIGHTS_PATH, 334 | cache_subdir='models', 335 | md5_hash='cdb18a2158b88e392c0905d47dcef965') 336 | else: 337 | weights_path = get_file('resnet152_weights_tf_notop.h5', 338 | WEIGHTS_PATH_NO_TOP, 339 | cache_subdir='models', 340 | md5_hash='4a90dcdafacbd17d772af1fb44fc2660') 341 | model.load_weights(weights_path, by_name=True) 342 | if K.backend() == 'theano': 343 | layer_utils.convert_all_kernels_in_model(model) 344 | if include_top: 345 | maxpool = model.get_layer(name='avg_pool') 346 | shape = maxpool.output_shape[1:] 347 | dense = model.get_layer(name='fc1000') 348 | layer_utils.convert_dense_weights_data_format(dense, shape, 'channels_first') 349 | 350 | if K.image_data_format() == 'channels_first' and K.backend() == 'tensorflow': 351 | warnings.warn('You are using the TensorFlow backend, yet you ' 352 | 'are using the Theano ' 353 | 'image data format convention ' 354 | '(`image_data_format="channels_first"`). ' 355 | 'For best performance, set ' 356 | '`image_data_format="channels_last"` in ' 357 | 'your Keras config ' 358 | 'at ~/.keras/keras.json.') 359 | return model 360 | 361 | if __name__ == '__main__': 362 | model = ResNet152(include_top=True, weights='imagenet') 363 | 364 | img_path = 'elephant.jpg' 365 | img = image.load_img(img_path, target_size=(224,224)) 366 | x = image.img_to_array(img) 367 | x = np.expand_dims(x, axis=0) 368 | x = preprocess_input(x) 369 | print('Input image shape:', x.shape) 370 | 371 | preds = model.predict(x) 372 | print('Predicted:', decode_predictions(preds)) -------------------------------------------------------------------------------- /Keras_Tensorflow/testing_utilities.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import urllib 4 | from io import BytesIO 5 | 6 | import matplotlib.gridspec as gridspec 7 | import matplotlib.pyplot as plt 8 | import toolz 9 | from PIL import Image, ImageOps 10 | import random 11 | 12 | 13 | def read_image_from(url): 14 | return toolz.pipe(url, 15 | urllib.request.urlopen, 16 | lambda x: x.read(), 17 | BytesIO) 18 | 19 | 20 | def to_rgb(img_bytes): 21 | return Image.open(img_bytes).convert('RGB') 22 | 23 | 24 | @toolz.curry 25 | def resize(img_file, new_size=(100, 100)): 26 | return ImageOps.fit(img_file, new_size, Image.ANTIALIAS) 27 | 28 | 29 | def to_base64(img): 30 | imgio = BytesIO() 31 | img.save(imgio, 'PNG') 32 | imgio.seek(0) 33 | dataimg = base64.b64encode(imgio.read()) 34 | return dataimg.decode('utf-8') 35 | 36 | 37 | def to_img(img_url): 38 | return toolz.pipe(img_url, 39 | read_image_from, 40 | to_rgb, 41 | resize(new_size=(224,224))) 42 | 43 | def img_url_to_json(url, label='image'): 44 | img_data = toolz.pipe(url, 45 | to_img, 46 | to_base64) 47 | img_dict = {label: img_data} 48 | body = json.dumps(img_dict) 49 | return json.dumps({'input':'{0}'.format(body)}) 50 | 51 | 52 | def _plot_image(ax, img): 53 | ax.imshow(to_img(img)) 54 | ax.tick_params(axis='both', 55 | which='both', 56 | bottom='off', 57 | top='off', 58 | left='off', 59 | right='off', 60 | labelleft='off', 61 | labelbottom='off') 62 | return ax 63 | 64 | 65 | def _plot_prediction_bar_dict(ax, r): 66 | res_dict = eval(r.json()['result'])[0] 67 | perf = list(c[2] for c in list(res_dict.values())[0]) 68 | ax.barh(range(3, 0, -1), perf, align='center', color='#55DD55') 69 | 
ax.tick_params(axis='both', 70 | which='both', 71 | bottom='off', 72 | top='off', 73 | left='off', 74 | right='off', 75 | labelbottom='off') 76 | tick_labels = reversed(list(c[1] for c in list(res_dict.values())[0])) 77 | ax.yaxis.set_ticks([1,2,3]) 78 | ax.yaxis.set_ticklabels(tick_labels, position=(0.5,0), minor=False, horizontalalignment='center') 79 | 80 | 81 | def plot_predictions_dict(images, classification_results): 82 | if len(images)!=6: 83 | raise Exception('This method is only designed for 6 images') 84 | gs = gridspec.GridSpec(2, 3) 85 | fig = plt.figure(figsize=(12, 9)) 86 | gs.update(hspace=0.1, wspace=0.001) 87 | 88 | for gg,r, img in zip(gs, classification_results, images): 89 | gg2 = gridspec.GridSpecFromSubplotSpec(4, 10, subplot_spec=gg) 90 | ax = fig.add_subplot(gg2[0:3, :]) 91 | _plot_image(ax, img) 92 | ax = fig.add_subplot(gg2[3, 1:9]) 93 | _plot_prediction_bar_dict(ax, r) 94 | 95 | def write_json_to_file(json_dict, filename, mode='w'): 96 | with open(filename, mode) as outfile: 97 | json.dump(json_dict, outfile, indent=4, sort_keys=True) 98 | outfile.write('\n\n') 99 | 100 | def gen_variations_of_one_image(IMAGEURL, num, label='image'): 101 | out_images = [] 102 | img = to_img(IMAGEURL).convert('RGB') 103 | # Flip the colours for one-pixel 104 | # "Different Image" 105 | for i in range(num): 106 | diff_img = img.copy() 107 | rndm_pixel_x_y = (random.randint(0, diff_img.size[0]-1), 108 | random.randint(0, diff_img.size[1]-1)) 109 | current_color = diff_img.getpixel(rndm_pixel_x_y) 110 | diff_img.putpixel(rndm_pixel_x_y, current_color[::-1]) 111 | b64img = to_base64(diff_img) 112 | img_dict = {label: b64img} 113 | body = json.dumps(img_dict) 114 | out_images.append(json.dumps({'input':'{}'.format(body)})) 115 | return out_images -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Pytorch/01_DevelopModelDriver.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": "# Develop Model Driver" 7 | }, 8 | { 9 | "cell_type": "markdown", 10 | "metadata": {}, 11 | "source": "In this notebook, we will develop the API that will call our model. This module initializes the model, transforms the input so that it is in the appropriate format, and defines the scoring method that will produce the predictions. The API will expect the input to be in JSON format. Once a request is received, the API will convert the JSON-encoded request body into the image format. There are two main functions in the API. The first function loads the model and returns a scoring function. The second function processes the images and uses the first function to score them." 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": "import logging\nfrom testing_utilities import img_url_to_json\nfrom pprint import pprint" 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": "logging.basicConfig(level=logging.DEBUG)" 26 | }, 27 | { 28 | "cell_type": "markdown", 29 | "metadata": {}, 30 | "source": "We use the `%%writefile` magic to write the contents of the below cell to driver.py, which includes the driver methods."
31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": 3, 35 | "metadata": { 36 | "lines_to_end_of_cell_marker": 2 37 | }, 38 | "outputs": [ 39 | { 40 | "name": "stdout", 41 | "output_type": "stream", 42 | "text": "Overwriting driver.py\n" 43 | } 44 | ], 45 | "source": "%%writefile driver.py \nimport base64\nimport json\nimport logging\nimport os\nimport timeit as t\nfrom io import BytesIO\n\nimport PIL\nimport numpy as np\nimport torch\nimport torch.nn as nn\nimport torchvision\nfrom PIL import Image\nfrom torchvision import models, transforms\n\n\n\n_LABEL_FILE = os.getenv(\"LABEL_FILE\", \"synset.txt\")\n_NUMBER_RESULTS = 3\n\n\ndef _create_label_lookup(label_path):\n with open(label_path, \"r\") as f:\n label_list = [l.rstrip() for l in f]\n\n def _label_lookup(*label_locks):\n return [label_list[l] for l in label_locks]\n\n return _label_lookup\n\n\ndef _load_model():\n # Load the model\n model = models.resnet152(pretrained=True)\n model = model.cuda()\n softmax = nn.Softmax(dim=1).cuda()\n model = model.eval()\n\n preprocess_input = transforms.Compose(\n [\n torchvision.transforms.Resize((224, 224), interpolation=PIL.Image.BICUBIC),\n transforms.ToTensor(),\n transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),\n ]\n )\n\n def predict_for(image):\n image = preprocess_input(image)\n with torch.no_grad():\n image = image.unsqueeze(0)\n image_gpu = image.type(torch.float).cuda()\n outputs = model(image_gpu)\n pred_proba = softmax(outputs)\n return pred_proba.cpu().numpy().squeeze()\n\n return predict_for\n\n\ndef _base64img_to_pil_image(base64_img_string):\n if base64_img_string.startswith(\"b'\"):\n base64_img_string = base64_img_string[2:-1]\n base64Img = base64_img_string.encode(\"utf-8\")\n\n # Preprocess the input data\n decoded_img = base64.b64decode(base64Img)\n img_buffer = BytesIO(decoded_img)\n\n # Load image with PIL (RGB)\n pil_img = Image.open(img_buffer).convert(\"RGB\")\n return pil_img\n\n\ndef create_scoring_func(label_path=_LABEL_FILE):\n logger = logging.getLogger(\"model_driver\")\n\n start = t.default_timer()\n labels_for = _create_label_lookup(label_path)\n predict_for = _load_model()\n end = t.default_timer()\n\n loadTimeMsg = \"Model loading time: {0} ms\".format(round((end - start) * 1000, 2))\n logger.info(loadTimeMsg)\n\n def call_model(image, number_results=_NUMBER_RESULTS):\n pred_proba = predict_for(image).squeeze()\n selected_results = np.flip(np.argsort(pred_proba), 0)[:number_results]\n labels = labels_for(*selected_results)\n return list(zip(labels, pred_proba[selected_results].astype(np.float64)))\n\n return call_model\n\n\ndef get_model_api():\n logger = logging.getLogger(\"model_driver\")\n scoring_func = create_scoring_func()\n\n def process_and_score(images_dict, number_results=_NUMBER_RESULTS):\n start = t.default_timer()\n\n results = {}\n for key, base64_img_string in images_dict.items():\n rgb_image = _base64img_to_pil_image(base64_img_string)\n results[key] = scoring_func(rgb_image, number_results=number_results)\n\n end = t.default_timer()\n\n logger.info(\"Predictions: {0}\".format(results))\n logger.info(\"Predictions took {0} ms\".format(round((end - start) * 1000, 2)))\n return (results, \"Computed in {0} ms\".format(round((end - start) * 1000, 2)))\n\n return process_and_score\n\n\ndef version():\n return torch.__version__" 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": "Let's test the module." 
51 | }, 52 | { 53 | "cell_type": "markdown", 54 | "metadata": {}, 55 | "source": "We run the file driver.py, which will bring everything into the context of the notebook." 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 4, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": "%run driver.py" 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": "We will use the same Lynx image we used earlier to check that our driver works as expected." 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 5, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": "IMAGEURL = \"https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\"" 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 6, 79 | "metadata": {}, 80 | "outputs": [ 81 | { 82 | "name": "stderr", 83 | "output_type": "stream", 84 | "text": "INFO:model_driver:Model loading time: 3972.62 ms\n" 85 | } 86 | ], 87 | "source": "predict_for = get_model_api()" 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 7, 92 | "metadata": {}, 93 | "outputs": [ 94 | { 95 | "name": "stderr", 96 | "output_type": "stream", 97 | "text": "DEBUG:PIL.PngImagePlugin:STREAM b'IHDR' 16 13\nDEBUG:PIL.PngImagePlugin:STREAM b'iCCP' 41 292\nDEBUG:PIL.PngImagePlugin:iCCP profile name b'ICC Profile'\nDEBUG:PIL.PngImagePlugin:Compression method 0\nDEBUG:PIL.PngImagePlugin:STREAM b'IDAT' 345 65536\nINFO:model_driver:Predictions: {'image': [('n02127052 lynx, catamount', 0.9965722560882568), ('n02128757 snow leopard, ounce, Panthera uncia', 0.0013256857637315989), ('n02128385 leopard, Panthera pardus', 0.0009192737634293735)]}\nINFO:model_driver:Predictions took 84.51 ms\n" 98 | } 99 | ], 100 | "source": "jsonimg = img_url_to_json(IMAGEURL)\njson_load_img = json.loads(jsonimg)\nbody = json_load_img[\"input\"]\nresp = predict_for(body)" 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 8, 105 | "metadata": {}, 106 | "outputs": [ 107 | { 108 | "name": "stdout", 109 | "output_type": "stream", 110 | "text": "{'image': [('n02127052 lynx, catamount', 0.9965722560882568),\n  ('n02128757 snow leopard, ounce, Panthera uncia',\n   0.0013256857637315989),\n  ('n02128385 leopard, Panthera pardus', 0.0009192737634293735)]}\n" 111 | } 112 | ], 113 | "source": "pprint(resp[0])" 114 | }, 115 | { 116 | "cell_type": "markdown", 117 | "metadata": {}, 118 | "source": "Next, we can move on to [building our docker image](02_BuildImage.ipynb)."
119 | } 120 | ], 121 | "metadata": { 122 | "jupytext_format_version": "1.3", 123 | "jupytext_formats": "py:light", 124 | "kernelspec": { 125 | "display_name": "Python 3", 126 | "language": "python", 127 | "name": "python3" 128 | }, 129 | "language_info": { 130 | "codemirror_mode": { 131 | "name": "ipython", 132 | "version": 3 133 | }, 134 | "file_extension": ".py", 135 | "mimetype": "text/x-python", 136 | "name": "python", 137 | "nbconvert_exporter": "python", 138 | "pygments_lexer": "ipython3", 139 | "version": "3.6.6" 140 | } 141 | }, 142 | "nbformat": 4, 143 | "nbformat_minor": 2 144 | } 145 | -------------------------------------------------------------------------------- /Pytorch/02_BuildImage.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": "# Build Docker image " 7 | }, 8 | { 9 | "cell_type": "markdown", 10 | "metadata": {}, 11 | "source": "In this notebook, we will build the docker container that contains the ResNet152 model, Flask web application, model driver and all dependencies.\nMake sure you have logged in using docker login." 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 7, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": "import os\nfrom os import path\nimport json\nimport shutil\nfrom dotenv import set_key, get_key, find_dotenv\nfrom pathlib import Path" 19 | }, 20 | { 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": "We are going to use a .env file to store all our information" 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 8, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": "env_path = find_dotenv()\nif env_path=='':\n Path('.env').touch()\n env_path = find_dotenv()" 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "metadata": {}, 35 | "source": "We will be using the following Docker information to push the image to docker hub. 
" 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 9, 40 | "metadata": {}, 41 | "outputs": [ 42 | { 43 | "data": { 44 | "text/plain": "(True, 'image_repo', 'pytorch-gpu')" 45 | }, 46 | "execution_count": 9, 47 | "metadata": {}, 48 | "output_type": "execute_result" 49 | } 50 | ], 51 | "source": "set_key(env_path, \"docker_login\", \"\") # Replace YOUR_DOCKER_LOGIN with your dockerhub login\nset_key(env_path, \"image_repo\", \"pytorch-gpu\")" 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 10, 56 | "metadata": { 57 | "tags": [ 58 | "stripout" 59 | ] 60 | }, 61 | "outputs": [], 62 | "source": "!cat .env" 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 11, 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": "os.makedirs(\"flaskwebapp\", exist_ok=True)\nos.makedirs(os.path.join(\"flaskwebapp\", \"nginx\"), exist_ok=True)\nos.makedirs(os.path.join(\"flaskwebapp\", \"etc\"), exist_ok=True)" 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 12, 74 | "metadata": {}, 75 | "outputs": [ 76 | { 77 | "data": { 78 | "text/plain": "['synset.txt',\n 'etc',\n 'driver.py',\n 'dockerfile',\n 'kill_supervisor.py',\n 'requirements.txt',\n 'app.py',\n 'gunicorn_logging.conf',\n 'wsgi.py',\n 'nginx']" 79 | }, 80 | "execution_count": 12, 81 | "metadata": {}, 82 | "output_type": "execute_result" 83 | } 84 | ], 85 | "source": "shutil.copy(\"synset.txt\", \"flaskwebapp\")\nshutil.copy(\"driver.py\", \"flaskwebapp\")\nos.listdir(\"flaskwebapp\")" 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "metadata": {}, 90 | "source": "Below, we create the module for the Flask web application." 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 13, 95 | "metadata": {}, 96 | "outputs": [ 97 | { 98 | "name": "stdout", 99 | "output_type": "stream", 100 | "text": "Overwriting flaskwebapp/app.py\n" 101 | } 102 | ], 103 | "source": "%%writefile flaskwebapp/app.py\n\nfrom flask import Flask, request, Response\nimport logging\nimport json\nimport driver\n\napp = Flask(__name__)\npredict_for = driver.get_model_api()\n \n@app.route(\"/score\", methods = ['POST'])\ndef scoreRRS():\n \"\"\" Endpoint for scoring\n \"\"\"\n if request.headers['Content-Type'] != 'application/json':\n return Response(json.dumps({}), status= 415, mimetype ='application/json')\n request_input = request.json['input']\n response = predict_for(request_input)\n return json.dumps({'result': response})\n\n\n@app.route(\"/\")\ndef healthy():\n return \"Healthy\"\n\n# PyTorch Version\n@app.route('/version', methods = ['GET'])\ndef version_request():\n return driver.version()\n\nif __name__ == \"__main__\":\n app.run(host='0.0.0.0', port=5000)" 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 14, 108 | "metadata": {}, 109 | "outputs": [ 110 | { 111 | "name": "stdout", 112 | "output_type": "stream", 113 | "text": "Overwriting flaskwebapp/wsgi.py\n" 114 | } 115 | ], 116 | "source": "%%writefile flaskwebapp/wsgi.py\nfrom app import app as application\n\ndef create():\n print(\"Initialising\")\n application.run(host='127.0.0.1', port=5000)" 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": "Here, we write the configuration for the Nginx which creates a proxy between ports **80** and **5000**." 
122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 15, 126 | "metadata": {}, 127 | "outputs": [ 128 | { 129 | "name": "stdout", 130 | "output_type": "stream", 131 | "text": "Overwriting flaskwebapp/nginx/app\n" 132 | } 133 | ], 134 | "source": "%%writefile flaskwebapp/nginx/app\nserver {\n listen 80;\n server_name _;\n \n location / {\n include proxy_params;\n proxy_pass http://127.0.0.1:5000;\n proxy_connect_timeout 5000s;\n proxy_read_timeout 5000s;\n }\n}" 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 16, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stdout", 143 | "output_type": "stream", 144 | "text": "Overwriting flaskwebapp/gunicorn_logging.conf\n" 145 | } 146 | ], 147 | "source": "%%writefile flaskwebapp/gunicorn_logging.conf\n\n[loggers]\nkeys=root, gunicorn.error\n\n[handlers]\nkeys=console\n\n[formatters]\nkeys=json\n\n[logger_root]\nlevel=INFO\nhandlers=console\n\n[logger_gunicorn.error]\nlevel=ERROR\nhandlers=console\npropagate=0\nqualname=gunicorn.error\n\n[handler_console]\nclass=StreamHandler\nformatter=json\nargs=(sys.stdout, )\n\n[formatter_json]\nclass=jsonlogging.JSONFormatter" 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 17, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "name": "stdout", 156 | "output_type": "stream", 157 | "text": "Overwriting flaskwebapp/kill_supervisor.py\n" 158 | } 159 | ], 160 | "source": "%%writefile flaskwebapp/kill_supervisor.py\nimport sys\nimport os\nimport signal\n\ndef write_stdout(s):\n sys.stdout.write(s)\n sys.stdout.flush()\n\n# this function is modified from the code and knowledge found here: http://supervisord.org/events.html#example-event-listener-implementation\ndef main():\n while 1:\n write_stdout('READY\\n')\n # wait for the event on stdin that supervisord will send\n line = sys.stdin.readline()\n write_stdout('Killing supervisor with this event: ' + line)\n try:\n # supervisord writes its pid to its file from which we read it here, see supervisord.conf\n pidfile = open('/tmp/supervisord.pid','r')\n pid = int(pidfile.readline())\n os.kill(pid, signal.SIGQUIT)\n except Exception as e:\n write_stdout('Could not kill supervisor: ' + str(e) + '\\n')\n write_stdout('RESULT 2\\nOK')\n\nmain()" 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": 18, 165 | "metadata": {}, 166 | "outputs": [ 167 | { 168 | "name": "stdout", 169 | "output_type": "stream", 170 | "text": "Overwriting flaskwebapp/etc/supervisord.conf\n" 171 | } 172 | ], 173 | "source": "%%writefile flaskwebapp/etc/supervisord.conf \n[supervisord]\nlogfile=/tmp/supervisord.log ; (main log file;default $CWD/supervisord.log)\nlogfile_maxbytes=50MB ; (max main logfile bytes b4 rotation;default 50MB)\nlogfile_backups=10 ; (num of main logfile rotation backups;default 10)\nloglevel=info ; (log level;default info; others: debug,warn,trace)\npidfile=/tmp/supervisord.pid ; (supervisord pidfile;default supervisord.pid)\nnodaemon=true ; (start in foreground if true;default false)\nminfds=1024 ; (min. avail startup file descriptors;default 1024)\nminprocs=200 ; (min. avail process descriptors;default 200)\n\n[program:gunicorn]\ncommand=bash -c \"gunicorn --workers 1 -m 007 --timeout 100000 --capture-output --error-logfile - --log-level debug --log-config gunicorn_logging.conf \\\"wsgi:create()\\\"\"\ndirectory=/code\nredirect_stderr=true\nstdout_logfile=/dev/stdout\nstdout_logfile_maxbytes=0\nstartretries=2\nstartsecs=20\n\n[program:nginx]\ncommand=/usr/sbin/nginx -g \"daemon off;\"\nstartretries=2\nstartsecs=5\npriority=3\n\n[eventlistener:program_exit]\ncommand=python kill_supervisor.py\ndirectory=/code\nevents=PROCESS_STATE_FATAL\npriority=2" 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": "We create a custom image based on the CUDA 9 image from NVIDIA and install only the necessary dependencies, in order to keep the size of the image as small as possible." 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": 19, 183 | "metadata": {}, 184 | "outputs": [ 185 | { 186 | "name": "stdout", 187 | "output_type": "stream", 188 | "text": "Overwriting flaskwebapp/requirements.txt\n" 189 | } 190 | ], 191 | "source": "%%writefile flaskwebapp/requirements.txt\nPillow==5.0.0\nclick==6.7\nconfigparser==3.5.0\nFlask==0.12.2\ngunicorn==19.6.0\njson-logging-py==0.2\nMarkupSafe==1.0\nolefile==0.44\nrequests==2.12.3" 192 | }, 193 | { 194 | "cell_type": "code", 195 | "execution_count": 52, 196 | "metadata": {}, 197 | "outputs": [ 198 | { 199 | "name": "stdout", 200 | "output_type": "stream", 201 | "text": "Overwriting flaskwebapp/dockerfile\n" 202 | } 203 | ], 204 | "source": "%%writefile flaskwebapp/dockerfile\n\nFROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04\n\nRUN echo \"deb http://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1604/x86_64 /\" > /etc/apt/sources.list.d/nvidia-ml.list\n\nRUN mkdir /code\nWORKDIR /code\nADD . /code/\nADD etc /etc\n\nRUN apt-get update && apt-get install -y --no-install-recommends \\\n build-essential \\\n ca-certificates \\\n cmake \\\n curl \\\n git \\\n nginx \\\n supervisor \\\n wget && \\\n rm -rf /var/lib/apt/lists/*\n\n\nENV PYTHON_VERSION=3.6\nRUN curl -o ~/miniconda.sh -O https://repo.anaconda.com/miniconda/Miniconda3-4.5.11-Linux-x86_64.sh && \\\n chmod +x ~/miniconda.sh && \\\n ~/miniconda.sh -b -p /opt/conda && \\\n rm ~/miniconda.sh && \\\n /opt/conda/bin/conda create -y --name py$PYTHON_VERSION python=$PYTHON_VERSION numpy scipy pandas scikit-learn && \\\n /opt/conda/bin/conda clean -ya && \\\n ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \\\n echo \". /opt/conda/etc/profile.d/conda.sh\" >> ~/.bashrc && \\\n echo \"conda activate py$PYTHON_VERSION\" >> ~/.bashrc\n\nENV PATH /opt/conda/envs/py$PYTHON_VERSION/bin:$PATH\nENV LD_LIBRARY_PATH /opt/conda/envs/py$PYTHON_VERSION/lib:/usr/local/cuda/lib64/:$LD_LIBRARY_PATH\nENV PYTHONPATH /code/:$PYTHONPATH\n\nRUN rm /etc/nginx/sites-enabled/default && \\\n cp /code/nginx/app /etc/nginx/sites-available/ && \\\n ln -s /etc/nginx/sites-available/app /etc/nginx/sites-enabled/ && \\\n /opt/conda/bin/conda install --name py$PYTHON_VERSION -c pytorch pytorch==0.4.1 && \\\n pip install --upgrade pip && \\\n pip install torchvision==0.2.1 && \\\n pip install -r /code/requirements.txt && \\\n /opt/conda/bin/conda clean -yt\n\nEXPOSE 80\nCMD [\"supervisord\", \"-c\", \"/code/etc/supervisord.conf\"]" 205 | }, 206 | { 207 | "cell_type": "markdown", 208 | "metadata": {}, 209 | "source": "The image name below refers to our dockerhub account. 
If you wish to push the image to your account, make sure you change the docker login." 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": 53, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": "image_name = get_key(env_path, 'docker_login') + '/' + get_key(env_path, 'image_repo') \napplication_path = 'flaskwebapp'\ndocker_file_location = path.join(application_path, 'dockerfile')" 217 | }, 218 | { 219 | "cell_type": "markdown", 220 | "metadata": {}, 221 | "source": "Next, we build our docker image. The output of this cell is cleared from this notebook as it is quite long due to all the installations required to build the image. However, you should make sure you see 'Successfully built' and 'Successfully tagged' messages in the last line of the output when you run the cell. " 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 54, 226 | "metadata": { 227 | "scrolled": false, 228 | "tags": [ 229 | "stripout" 230 | ] 231 | }, 232 | "outputs": [], 233 | "source": "!docker build -t $image_name -f $docker_file_location $application_path" 234 | }, 235 | { 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": "Below, we push the image we created to our dockerhub registry. Make sure you have already logged in to the appropriate dockerhub account using the docker login command; if you haven't, the push will fail with an error." 239 | }, 240 | { 241 | "cell_type": "code", 242 | "execution_count": 50, 243 | "metadata": { 244 | "tags": [ 245 | "stripout" 246 | ] 247 | }, 248 | "outputs": [], 249 | "source": "!docker push $image_name" 250 | }, 251 | { 252 | "cell_type": "code", 253 | "execution_count": 51, 254 | "metadata": { 255 | "tags": [ 256 | "stripout" 257 | ] 258 | }, 259 | "outputs": [], 260 | "source": "print('Docker image name {}'.format(image_name))" 261 | }, 262 | { 263 | "cell_type": "markdown", 264 | "metadata": {}, 265 | "source": "We can now [test our image locally](03_TestLocally.ipynb)." 266 | } 267 | ], 268 | "metadata": { 269 | "celltoolbar": "Tags", 270 | "jupytext_format_version": "1.3", 271 | "jupytext_formats": "py:light", 272 | "kernelspec": { 273 | "display_name": "Python 3", 274 | "language": "python", 275 | "name": "python3" 276 | }, 277 | "language_info": { 278 | "codemirror_mode": { 279 | "name": "ipython", 280 | "version": 3 281 | }, 282 | "file_extension": ".py", 283 | "mimetype": "text/x-python", 284 | "name": "python", 285 | "nbconvert_exporter": "python", 286 | "pygments_lexer": "ipython3", 287 | "version": "3.6.6" 288 | } 289 | }, 290 | "nbformat": 4, 291 | "nbformat_minor": 2 292 | } 293 | -------------------------------------------------------------------------------- /Pytorch/07_TearDown.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": "# Tear it all down\nOnce you are done with your cluster, you can use the following commands to destroy it all." 
7 | }, 8 | { 9 | "cell_type": "code", 10 | "execution_count": 4, 11 | "metadata": {}, 12 | "outputs": [], 13 | "source": "from dotenv import get_key, find_dotenv" 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 5, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": "env_path = find_dotenv(raise_error_if_not_found=True)" 21 | }, 22 | { 23 | "cell_type": "markdown", 24 | "metadata": {}, 25 | "source": "First, delete the application." 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 6, 30 | "metadata": {}, 31 | "outputs": [ 32 | { 33 | "name": "stdout", 34 | "output_type": "stream", 35 | "text": "deployment.apps \"azure-dl\" deleted\nservice \"azure-dl\" deleted\n" 36 | } 37 | ], 38 | "source": "!kubectl delete -f az-dl.json" 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": "Next, you delete the AKS cluster. This step may take a few minutes." 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 7, 48 | "metadata": {}, 49 | "outputs": [ 50 | { 51 | "name": "stdout", 52 | "output_type": "stream", 53 | "text": "Finished .." 54 | } 55 | ], 56 | "source": "!az aks delete -n {get_key(env_path, 'aks_name')} \\\n -g {get_key(env_path, 'resource_group')} \\\n -y" 57 | }, 58 | { 59 | "cell_type": "markdown", 60 | "metadata": {}, 61 | "source": "Finally, you should delete the resource group. This also deletes the AKS cluster and can be used instead of the above command if the resource group is only used for this purpose." 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 8, 66 | "metadata": {}, 67 | "outputs": [ 68 | { 69 | "name": "stdout", 70 | "output_type": "stream", 71 | "text": "Finished .." 72 | } 73 | ], 74 | "source": "!az group delete --name {get_key(env_path, 'resource_group')} -y" 75 | } 76 | ], 77 | "metadata": { 78 | "jupytext_format_version": "1.3", 79 | "jupytext_formats": "py:light", 80 | "kernelspec": { 81 | "display_name": "Python 3", 82 | "language": "python", 83 | "name": "python3" 84 | }, 85 | "language_info": { 86 | "codemirror_mode": { 87 | "name": "ipython", 88 | "version": 3 89 | }, 90 | "file_extension": ".py", 91 | "mimetype": "text/x-python", 92 | "name": "python", 93 | "nbconvert_exporter": "python", 94 | "pygments_lexer": "ipython3", 95 | "version": "3.6.6" 96 | } 97 | }, 98 | "nbformat": 4, 99 | "nbformat_minor": 2 100 | } 101 | -------------------------------------------------------------------------------- /Pytorch/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Deploy ResNet 152 model on GPU enabled Kubernetes cluster using PyTorch 3 | 4 | In this folder are the tutorials for deploying a PyTorch model on a Kubernetes cluster. 
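Note that the model driver in these tutorials loads ResNet152 onto the GPU with `model.cuda()`, so a CUDA-capable device is needed wherever the container runs. A quick local sanity check, as a minimal sketch (assuming the conda environment from `environment.yml` is active):

```python
import torch

# The driver calls model.cuda(), so this should print True on the target machine.
print(torch.__version__)          # same value the web app's /version endpoint reports
print(torch.cuda.is_available())
```
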
5 | 6 | The tutorial is made up of the following notebooks: 7 | * [Model development](00_DevelopModel.ipynb) where we load the pretrained model and test it by using it to score images 8 | * [Developing the interface](01_DevelopModelDriver.ipynb) our Flask app will use to load and call the model 9 | * [Building the Docker Image](02_BuildImage.ipynb) with our Flask REST API and model 10 | * [Testing our Docker image](03_TestLocally.ipynb) before deployment 11 | * [Creating our Kubernetes cluster](04_DeployOnAKS.ipynb) and deploying our application to it 12 | * [Testing the deployed model](05_TestWebApp.ipynb) 13 | * [Testing the throughput](06_SpeedTestWebApp.ipynb) of our model 14 | * [Cleaning the resources](07_TearDown.ipynb) used 15 | 16 | -------------------------------------------------------------------------------- /Pytorch/environment.yml: -------------------------------------------------------------------------------- 1 | name: AKSDeploymentPytorch 2 | channels: 3 | - conda-forge 4 | - pytorch 5 | dependencies: 6 | - python=3.6 7 | - nb_conda==2.2.0 8 | - tornado==4.5.3 9 | - pytorch 10 | - pip: 11 | - papermill==0.14.1 12 | - https://github.com/theskumar/python-dotenv/archive/master.zip 13 | - Pillow==5.2.0 14 | - wget==3.2 15 | - matplotlib==2.2.2 16 | - aiohttp==3.3.2 17 | - toolz==0.9.0 18 | - tqdm==4.23.4 19 | - azure-cli==2.0.41 20 | - torchvision 21 | -------------------------------------------------------------------------------- /Pytorch/testing_utilities.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import urllib 4 | from io import BytesIO 5 | 6 | import matplotlib.gridspec as gridspec 7 | import matplotlib.pyplot as plt 8 | import toolz 9 | from PIL import Image, ImageOps 10 | import random 11 | 12 | def read_image_from(url): 13 | return toolz.pipe(url, 14 | urllib.request.urlopen, 15 | lambda x: x.read(), 16 | BytesIO) 17 | 18 | 19 | def to_rgb(img_bytes): 20 | return Image.open(img_bytes).convert('RGB') 21 | 22 | 23 | @toolz.curry 24 | def resize(img_file, new_size=(100, 100)): 25 | return ImageOps.fit(img_file, new_size, Image.ANTIALIAS) 26 | 27 | 28 | def to_base64(img): 29 | imgio = BytesIO() 30 | img.save(imgio, 'PNG') 31 | imgio.seek(0) 32 | dataimg = base64.b64encode(imgio.read()) 33 | return dataimg.decode('utf-8') 34 | 35 | 36 | def to_img(img_url): 37 | return toolz.pipe(img_url, 38 | read_image_from, 39 | to_rgb, 40 | resize(new_size=(224,224))) 41 | 42 | 43 | def img_url_to_json(url, label='image'): 44 | img_data = toolz.pipe(url, 45 | to_img, 46 | to_base64) 47 | return json.dumps({'input':{label:'\"{0}\"'.format(img_data)}}) 48 | 49 | 50 | def _plot_image(ax, img): 51 | ax.imshow(to_img(img)) 52 | ax.tick_params(axis='both', 53 | which='both', 54 | bottom=False, 55 | top=False, 56 | left=False, 57 | right=False, 58 | labelleft=False, 59 | labelbottom=False) 60 | return ax 61 | 62 | 63 | def _plot_prediction_bar(ax, r): 64 | perf = list(c[1] for c in r.json()['result'][0]['image']) 65 | ax.barh(range(3, 0, -1), perf, align='center', color='#55DD55') 66 | ax.tick_params(axis='both', 67 | which='both', 68 | bottom=False, 69 | top=False, 70 | left=False, 71 | right=False, 72 | labelbottom=False) 73 | tick_labels = reversed(list(' '.join(c[0].split()[1:]).split(',')[0] for c in r.json()['result'][0]['image'])) 74 | ax.yaxis.set_ticks([1,2,3]) 75 | ax.yaxis.set_ticklabels(tick_labels, position=(0.5,0), minor=False, horizontalalignment='center') 76 | 77 | 78 | def 
plot_predictions(images, classification_results): 79 | if len(images)!=6: 80 | raise Exception('This method is only designed for 6 images') 81 | gs = gridspec.GridSpec(2, 3) 82 | fig = plt.figure(figsize=(12, 9)) 83 | gs.update(hspace=0.1, wspace=0.001) 84 | 85 | for gg,r, img in zip(gs, classification_results, images): 86 | gg2 = gridspec.GridSpecFromSubplotSpec(4, 10, subplot_spec=gg) 87 | ax = fig.add_subplot(gg2[0:3, :]) 88 | _plot_image(ax, img) 89 | ax = fig.add_subplot(gg2[3, 1:9]) 90 | _plot_prediction_bar(ax, r) 91 | 92 | def write_json_to_file(json_dict, filename, mode='w'): 93 | with open(filename, mode) as outfile: 94 | json.dump(json_dict, outfile, indent=4,sort_keys=True) 95 | outfile.write('\n\n') 96 | 97 | def gen_variations_of_one_image(IMAGEURL, num, label='image'): 98 | out_images = [] 99 | img = to_img(IMAGEURL).convert('RGB') 100 | # Flip the colours for one-pixel 101 | # "Different Image" 102 | for i in range(num): 103 | diff_img = img.copy() 104 | rndm_pixel_x_y = (random.randint(0, diff_img.size[0]-1), 105 | random.randint(0, diff_img.size[1]-1)) 106 | current_color = diff_img.getpixel(rndm_pixel_x_y) 107 | diff_img.putpixel(rndm_pixel_x_y, current_color[::-1]) 108 | b64img = to_base64(diff_img) 109 | out_images.append(json.dumps({'input':{label:'\"{0}\"'.format(b64img)}})) 110 | return out_images -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # This repo is no longer actively maintained; please see the newer version using Azure Machine Learning [here](https://github.com/Microsoft/AKSDeploymentTutorial_AML). 2 | 3 | ### Authors: Mathew Salvaris and Fidan Boylu Uz 4 | 5 | # Deploy Deep Learning CNN on Kubernetes Cluster with GPUs 6 | ## Overview 7 | In this repository there are a number of tutorials in Jupyter notebooks that have step-by-step instructions on how to deploy a pretrained deep learning model on a GPU enabled Kubernetes cluster. The tutorials cover how to deploy models from the following deep learning frameworks: 8 | * [TensorFlow](Tensorflow) 9 | * [Keras (TensorFlow backend)](Keras_Tensorflow) 10 | * [Pytorch](Pytorch) 11 | 12 | ![alt text](static/example.png "Example Classification") 13 | 14 | For each framework, we go through the following steps: 15 | * Model development where we load the pretrained model and test it by using it to score images 16 | * Developing the interface our Flask app will use to load and call the model 17 | * Building the Docker Image with our Flask REST API and model 18 | * Testing our Docker image before deployment 19 | * Creating our Kubernetes cluster and deploying our application to it 20 | * Testing the deployed model 21 | * Testing the throughput of our model 22 | * Cleaning up resources 23 | 24 | ## Design 25 | ![alt text](static/Design.png "Design") 26 | 27 | The application we will develop is a simple image classification service, where we will submit an image and get back what class the image belongs to. The application flow for the deep learning model is as follows: 28 | 1) The client sends an HTTP POST request with the encoded image data. 29 | 2) The Flask app extracts the image from the request. 30 | 3) The image is then appropriately preprocessed and sent to the model for scoring. 31 | 4) The scoring result is then piped into a JSON object and returned to the client (a minimal sketch of this exchange is shown below). 32 | 33 | If you already have a Docker image that you would like to deploy you can skip the first four notebooks. 
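To make the request/response flow above concrete, here is a minimal sketch of the client side. It mirrors the JSON layout produced by `img_url_to_json` in `testing_utilities.py` and the `/score` route defined in the Flask app; the service address and image filename are placeholders, not values from this repo.

```python
import base64
import json

import requests

SCORING_URL = "http://<your-service-ip>/score"  # placeholder: external IP of the deployed service

# Base64-encode an image and wrap it in the JSON layout the model driver expects,
# matching what img_url_to_json() in testing_utilities.py produces.
with open("lynx.jpg", "rb") as f:  # placeholder image file
    b64_img = base64.b64encode(f.read()).decode("utf-8")
payload = {"input": {"image": '"{0}"'.format(b64_img)}}

# The Flask app returns HTTP 415 unless Content-Type is exactly application/json.
headers = {"Content-Type": "application/json"}
response = requests.post(SCORING_URL, data=json.dumps(payload), headers=headers)

predictions, timing = response.json()["result"]
print(predictions["image"])  # top-3 (label, probability) pairs
print(timing)                # e.g. "Computed in 84.51 ms"
```

This is essentially the exchange that the test-web-app notebooks perform against the deployed service.
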
34 | 35 | **NOTE**: The tutorial walks step by step through how to deploy a deep learning model on Azure; it **does not** include enterprise best practices such as securing the endpoints and setting up remote logging, etc. 36 | 37 | **Deploying with GPUs:** For a detailed comparison of the deployments of various deep learning models, see the blog post [here](https://azure.microsoft.com/en-us/blog/gpus-vs-cpus-for-deployment-of-deep-learning-models/), which provides evidence that, at least in the scenarios tested, GPUs provide better throughput and stability at a lower cost. 38 | 39 | ## Prerequisites 40 | * Linux (Ubuntu). The tutorial was developed on an Azure Linux DSVM. 41 | * [Docker installed](https://docs.docker.com/v17.12/install/linux/docker-ee/ubuntu/). NOTE: Even with docker installed you may need to set it up so that you don't require sudo to execute docker commands; see ["Manage Docker as a non-root user"](https://docs.docker.com/install/linux/linux-postinstall/) 42 | * [Dockerhub account](https://hub.docker.com/) 43 | * Port 9999 open: Jupyter notebook will use port 9999, so please ensure that it is open. For instructions on how to do that on Azure, see [here](https://blogs.msdn.microsoft.com/pkirchner/2016/02/02/allow-incoming-web-traffic-to-web-server-in-azure-vm/) 44 | 45 | ## Setup 46 | 1. Clone the repo: 47 | ```bash 48 | git clone <repository URL> 49 | ``` 50 | 2. Log in to Docker with your username and password. 51 | ```bash 52 | docker login 53 | ``` 54 | 3. Go to the framework folder you would like to run the notebooks for. 55 | 4. Create a conda environment: 56 | ```bash 57 | conda env create -f environment.yml 58 | ``` 59 | 5. Activate the environment: 60 | ```bash 61 | source activate <environment name> 62 | ``` 63 | 6. Run: 64 | ```bash 65 | jupyter notebook 66 | ``` 67 | 7. Start the first notebook and make sure the kernel corresponding to the above environment is selected. 68 | 69 | ## Steps 70 | After following the setup instructions above, run the Jupyter notebooks in order. The same basic steps are followed for each deep learning framework. 71 | 72 | ## Cleaning up 73 | To remove the conda environment you created, see [here](https://conda.io/docs/commands/env/conda-env-remove.html). The last Jupyter notebook within each folder also gives details on deleting Azure resources associated with this repo. 74 | 75 | # Contributing 76 | 77 | This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. 78 | 79 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA. 80 | 81 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 82 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or 83 | contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
84 | 85 | -------------------------------------------------------------------------------- /Tensorflow/00_DevelopModel.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Develop Model\n", 8 | "In this notebook we are going to go through the steps to develop the code around our model (ResNet152). We will look at how to load the model, process the data into an appropriate format and call it." 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 2, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "import numpy as np\n", 18 | "import tensorflow as tf\n", 19 | "from IPython.display import Image as show_image\n", 20 | "from PIL import Image\n", 21 | "from tensorflow.contrib.slim.nets import resnet_v1" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 3, 27 | "metadata": {}, 28 | "outputs": [ 29 | { 30 | "name": "stdout", 31 | "output_type": "stream", 32 | "text": [ 33 | "1.9.0\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "print(tf.__version__)" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "We download the model checkpoint." 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 3, 51 | "metadata": { 52 | "scrolled": true 53 | }, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "--2018-08-03 15:15:13-- http://download.tensorflow.org/models/resnet_v1_152_2016_08_28.tar.gz\n", 60 | "Resolving download.tensorflow.org... 216.58.218.176, 2607:f8b0:4000:80a::2010\n", 61 | "Connecting to download.tensorflow.org|216.58.218.176|:80... connected.\n", 62 | "HTTP request sent, awaiting response... 200 OK\n", 63 | "Length: 224342140 (214M) [application/x-tar]\n", 64 | "Saving to: ‘resnet_v1_152_2016_08_28.tar.gz’\n", 65 | "\n", 66 | "resnet_v1_152_2016_ 100%[===================>] 213.95M 90.4MB/s in 2.4s \n", 67 | "\n", 68 | "2018-08-03 15:15:16 (90.4 MB/s) - ‘resnet_v1_152_2016_08_28.tar.gz’ saved [224342140/224342140]\n", 69 | "\n" 70 | ] 71 | } 72 | ], 73 | "source": [ 74 | "!wget http://download.tensorflow.org/models/resnet_v1_152_2016_08_28.tar.gz" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 4, 80 | "metadata": {}, 81 | "outputs": [ 82 | { 83 | "name": "stdout", 84 | "output_type": "stream", 85 | "text": [ 86 | "resnet_v1_152.ckpt\r\n" 87 | ] 88 | } 89 | ], 90 | "source": [ 91 | "!tar xvf resnet_v1_152_2016_08_28.tar.gz" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "We download the synset for the model. This translates the output of the model to a specific label." 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 5, 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "--2018-08-03 15:15:44-- http://data.dmlc.ml/mxnet/models/imagenet/synset.txt\n", 111 | "Resolving data.dmlc.ml... 54.208.175.7\n", 112 | "Connecting to data.dmlc.ml|54.208.175.7|:80... connected.\n", 113 | "HTTP request sent, awaiting response... 
200 OK\n", 114 | "Length: 31675 (31K) [text/plain]\n", 115 | "Saving to: ‘synset.txt’\n", 116 | "\n", 117 | "synset.txt 100%[===================>] 30.93K --.-KB/s in 0.03s \n", 118 | "\n", 119 | "2018-08-03 15:15:45 (894 KB/s) - ‘synset.txt’ saved [31675/31675]\n", 120 | "\n" 121 | ] 122 | } 123 | ], 124 | "source": [ 125 | "!wget \"http://data.dmlc.ml/mxnet/models/imagenet/synset.txt\"" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "metadata": {}, 131 | "source": [ 132 | "This is how we will call our model. We first create the input placeholder for our input image. The dimensions of the image are 224 by 224 and has three color channels." 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 6, 138 | "metadata": {}, 139 | "outputs": [], 140 | "source": [ 141 | "# Placeholders\n", 142 | "input_tensor = tf.placeholder(tf.float32, shape=(None,224,224,3), name='input_image')" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 7, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "# Load the model\n", 152 | "sess = tf.Session()\n", 153 | "arg_scope = resnet_v1.resnet_arg_scope()\n", 154 | "with tf.contrib.slim.arg_scope(arg_scope):\n", 155 | " logits, _ = resnet_v1.resnet_v1_152(input_tensor, num_classes=1000, is_training=False)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "To get probabilities we add the softmax layer." 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 8, 168 | "metadata": {}, 169 | "outputs": [], 170 | "source": [ 171 | "probabilities = tf.nn.softmax(logits)" 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "metadata": {}, 177 | "source": [ 178 | "Here we load the pretrained weights we downloaded earlier into the model." 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 9, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "name": "stdout", 188 | "output_type": "stream", 189 | "text": [ 190 | "INFO:tensorflow:Restoring parameters from resnet_v1_152.ckpt\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "checkpoint_file = 'resnet_v1_152.ckpt'\n", 196 | "saver = tf.train.Saver()\n", 197 | "saver.restore(sess, checkpoint_file)" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "Let's test our model with an image of a Lynx" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 10, 210 | "metadata": {}, 211 | "outputs": [ 212 | { 213 | "name": "stdout", 214 | "output_type": "stream", 215 | "text": [ 216 | "--2018-08-03 15:18:18-- https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\n", 217 | "Resolving upload.wikimedia.org... 208.80.153.240, 2620:0:860:ed1a::2:b\n", 218 | "Connecting to upload.wikimedia.org|208.80.153.240|:443... connected.\n", 219 | "HTTP request sent, awaiting response... 
200 OK\n", 220 | "Length: 27183 (27K) [image/jpeg]\n", 221 | "Saving to: ‘220px-Lynx_lynx_poing.jpg’\n", 222 | "\n", 223 | "220px-Lynx_lynx_poi 100%[===================>] 26.55K --.-KB/s in 0.01s \n", 224 | "\n", 225 | "2018-08-03 15:18:18 (2.49 MB/s) - ‘220px-Lynx_lynx_poing.jpg’ saved [27183/27183]\n", 226 | "\n" 227 | ] 228 | } 229 | ], 230 | "source": [ 231 | "!wget https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 11, 237 | "metadata": {}, 238 | "outputs": [ 239 | { 240 | "data": { 241 | "image/jpeg": "/9j/4QB0RXhpZgAATU0AKgAAAAgABQEaAAUAAAABAAAASgEbAAUAAAABAAAAUgEoAAMAAAABAAIAAAE7AAIAAAARAAAAWgITAAMAAAABAAEAAAAAAAAAAABIAAAAAQAAAEgAAAABQmVybmFyZCBMYW5kZ3JhZgAA/+ICQElDQ19QUk9GSUxFAAEBAAACMEFEQkUCEAAAbW50clJHQiBYWVogB88ABgADAAAAAAAAYWNzcEFQUEwAAAAAbm9uZQAAAAAAAAAAAAAAAAAAAAAAAPbWAAEAAAAA0y1BREJFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKY3BydAAAAPwAAAAyZGVzYwAAATAAAABrd3RwdAAAAZwAAAAUYmtwdAAAAbAAAAAUclRSQwAAAcQAAAAOZ1RSQwAAAdQAAAAOYlRSQwAAAeQAAAAOclhZWgAAAfQAAAAUZ1hZWgAAAggAAAAUYlhZWgAAAhwAAAAUdGV4dAAAAABDb3B5cmlnaHQgMTk5OSBBZG9iZSBTeXN0ZW1zIEluY29ycG9yYXRlZAAAAGRlc2MAAAAAAAAAEUFkb2JlIFJHQiAoMTk5OCkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFhZWiAAAAAAAADzUQABAAAAARbMWFlaIAAAAAAAAAAAAAAAAAAAAABjdXJ2AAAAAAAAAAECMwAAY3VydgAAAAAAAAABAjMAAGN1cnYAAAAAAAAAAQIzAABYWVogAAAAAAAAnBgAAE+lAAAE/FhZWiAAAAAAAAA0jQAAoCwAAA+VWFlaIAAAAAAAACYxAAAQLwAAvpz/2wBDAAQDAwQDAwQEAwQFBAQFBgoHBgYGBg0JCggKDw0QEA8NDw4RExgUERIXEg4PFRwVFxkZGxsbEBQdHx0aHxgaGxr/2wBDAQQFBQYFBgwHBwwaEQ8RGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhoaGhr/wAARCAFKANwDASIAAhEBAxEB/8QAHAAAAgMBAQEBAAAAAAAAAAAABQYDBAcCCAEA/8QAQBAAAgECBQIEBAQEBAYCAgMAAQIDBBEABRIhMQZBEyJRYRQycYEHI0KRFVKhscHR4fAkM2JygvEWQwg0F0SS/8QAGgEAAwEBAQEAAAAAAAAAAAAAAQIDBAAFBv/EACwRAAICAgICAgIBAgcBAAAAAAABAhEDIRIxBEETIlFhMoHwBRQjcZGh0UL/2gAMAwEAAhEDEQA/APKHiaWNtsWKWt0SXJA98VamF0UtYm/pihHPaQc2PfGZMrpmlZNVpIyE8ntjTcrrKZqVA+kFVt9MYVldW0TAamH0OHOlzd1jGl+eTjx/PwWrM8otM0OreCrsqMNN9zxf0xLSZAXGqFgSN9LHn/LCplmYfnxmW5DMN/TGk5SIywKPuRyOMfKeRklif1DFWUIenBWMysdDqCSR6bEYiPS8cgZSjhhwWPJwyMrUmbwqxJhqI3RLbecb2/YH9vcYuN4RfUrE7XsuMy82T0M40Z4cnVCyFNO2zY+RZazkINIJPI3w6VFILEst3O598VqahVJC+jciyi2NMfJbQqcokNDSmmp/Dhtbvcc4YYK6OOn0kaXtpCHkn2xXSEwoxKhfp22xVakFS4MihmPB9BjBzlztl45pQ6BXUMUlRE+pGVn4Btc++M9GRvHMWVS44N8apPQRlLygi21icDaimhQgRi5Ivvj0sWdxVInlySybYlxZW17eD6X0jjFsZSsbKyKLjkWw0RRRxoWH2GIJLMSwIW3rhJ+VkTpEkitRv8PYFQBb/ZwQ/iEewbnscUdYcG6jbv64pSuUcuWsO2N+LLKUdh+aUdB41Cug0ERleTfn3x9ourpMnlKFy8TbNbe3uB3wurmDDYMf88diJqhgSFNz5cUlrsWOeado0ak6wiqomkFUhU8L3/bBPL6x5YtWsAW5tvjOqalaNgVTTp3NxhnpK4xKgAJ+hxiyTbVRK/NOW2EK+Z0Ny5Bt3wNkzBgP+cbex3x8zLMPE8r3Nx2/tgHLKW1aRY9sVwTl/wDQryyCBzNiW1sWA4IOPsEnxJ1aiF4FvTApAzG5O18EKdTGLatO4xon+USUnZPUNJHfST9CcD2nqCTYk/fFyaUs1hfjc4kWMMoJOFhka7KqcvRgByymkuEsW4IIxQqemkdi0W3a1sSwUdb45aQugB/bDflVE7gCdQ4OxIx6/LgrciduPTM8koJqaTSI2YdiF4PsMWaNpVlA8NtiL7b41GTIoFS7IALb7b4GDLYRVDw1AtuAe+H/AM3GUKY3y6pnzpyF2qoxPE4iJve2NayGmjgGlGBjJutzhUyuCOBAHYC2+4wagzWOG4NtR9MfKeZGWWX1HjOKLfVlWlFSrWqR/wABKKtR3AjBZ/3jEv7fTFukrIZMyrKaFgxhKm4Nxpa9iD6G22FrOZanMMunNHG0tTB+dHH3mA3Mf1Ivb3PvjPPw06s+L6ypaJ5CRLQLA5b9ZhYLGw92jdDb/pwmDwXPDKXuI7aZus15Dote2+/bFUIIXuBx/TE8kiPECSBYYXa/MhFINLHnewxjxRk5UBrVhl5jfRFdkbnfjHPinSLAWHv2wHirzILltv744lzXw7qGsSLW52xoeFz0iV/kI1dYirYvpC+vbC1WV9pSFa5tjuoqjKS76jfc6thgY7LJIQbJtY7YtixOPZzVlyGtlJ3sFOJDK0huWsDsb4r0tKNJPr2vx+2I6lzGDbbf15OL8Rf0gsI1cc6bDvihUUoaQLrub/TEEN
VM0Z0qW/piCordJGq4333w/GS6FoIxZeNJXQGA/ri9HEIXW/Fv974pU2YK8e7fTFStzS5IVijN6DCyWSWgpIPLUlp1jW1mIF7YMQ00gib4dWZR8x7YRKSSaacNrPt74eKbOEoqGY1j2jijMhktsoHJOB8ck6HTXTKMtQA+lwS17W7374pT1HhhhENcigXH6V92PYYVW6lkzmqZcvXVLUswuDpKJcED2Y7sT2G3JwzUdEtLEdVpJSdWogm378nvc49NYFijc+znUeyxSrKdLTP5SLkIbA+mCquqKAosMBFqRrsW974IQhWAsb34xOX2FWySdlCm/fa+Kni6BYsR9GxJUoQv04N8VFhLC9z+5wix0FMVM3y1KRbhQWF7C/bFeknkiQWQ2HG2L0s3xEpDeYdj6YsQQQjcEcffGWE5KXKUrQrTB9RWzygKCVGwN8RxU8jEM7HVff3xcrKa92VQMc08ZsgNzc49b5MXD6klGVkqtUsPIzMo23OKL1NRCxIFwL3IwVkkWBbod+5vipCpqX8q3vzbe2PLyRXrspxR+pM7mgdWGpXXcH3xltPD/Afxry8UoWOOrzFJolBtp8YE6bf917fUY2SmyhGsZL3724GMU/FSgqelur6PP6X8yVa1ZfMTYSIVZbHspUKP3x6H+GxlKU4S9xaKY9t0ei2qn0BVY8WHviqKLxJizhjf3xVy7Osu6jy+DN8jn8WiqBcA/NE53aJx2Zb2I+4uCDiz8Y0W+rVtvvjyYQeNtNbDKZ3NTJHHps2rtvgJJTSCS9j9m74L/H+MoDhTviIyIpuQb9gP7YpjnGLoTkRQ0JAUkMftilVUMqNqBAHoF3ODMeYRjZidu3tiGqq45EADKO9r4upfYZW9gyOUqoVb/Um2Iqm8oHhpewve+JCqiS6jVxfbBWCnjMZ2C35OGdexuO7KNBGvykEsO/bBB6GCcANbcE3O1jjlo40J07WsBbHLy2J3sV3ttikZSrQssaeytNkbQsHiuAAbEfTC7XwS+KDuFBNwcOv8VUQgOANtu2BVRJBLdgAxA2uf64OacscOaVkISrTAtNIYHWw3J2viLrzO6pOk5o6WGd4n81VUJHqRYwPl1cai1vsPfF7MaugyRY5Mxi+KrJVD02XBSpddrSSt+iP0HLewwoVua1VfW/EZrJ40oJ0okeiJAeVWMbAc78nFfBhky1knGjdHBX2nr9f+jJ0plMeSZeq1LBswmUPUOR8txfQPpfnub4YPEZ72Yge2BtNTLNGHhNlZBtfgEcf1xdghcMAwAA9MLPJKTamtmeqb5HxxJrURg+/tgvTsyxAMBffviKJUDfTm+Lkjoqdr997Y70FNEZLMB+/9PXH5YgosSTb3x9DqSSGGni18SCZbC4U+5UnGXK3fQXS6EeWM+IxiG55ucRxeKsgsCu33xYjkUPbV5ufrgg0kEMP5xH745QjHQspMqCcoSGAIHe2IppkFjDa1sCswzZBIRE2oDsMDpa6XTcAffFVjFTDD1AkU99+5wRoJ0W221vvheo52kuAyqRglHIqsDcAj0xJ/yuzmrDv8U8JuAFHc4AdZZHT9Y5RUI6BmVAA991f9DevO1vQjsMSykyiy2Yenphl6HSBszlosxTXT1aeG1mt/vtvjR49/MndD40oTTPLPSnU2Y9F54XjaeExSeHW04NhKFNirKdr+h5GPTi002Z5HBnuTN/FMpkTW8sMZ8Sm2uRNHyo/6hddjxjJfxn/DqpyfO2zKCKVBr8GsIUW1g2jlG3DC3ruDvg9+DPW2Z9LvoLSpCSiGWDzFTcncXFwR5e3b3x9Hk8fB5cbmqf59r/0tkxbpDlT1bMgZSHB3Ug3viZ5WZTbDV1DJ09ntPNmsRpchzLQZZSWCU8/AIdbfluCd3G3God8ImXZvQ5wjvldVBVrExSQRvuhBsbjn78Y+Y8jwMnjZOXa/P99EOLXZJLHK5IW5udxe+P0ayn19z6YKQqildQU+u+LBeIHUi7evOIwbu2BzrRDTwFUubk8i+O2ke+leP74imqTGPRT6YryZkkKF0O59Th34qztOLEUpLYVhSNPO72xBUzREkR7k9wO2F1q955NmNr+uJI5iXFm2vjRmksceGMXbdstZhIQnlOn74ho5PAo2zGri8anVylLE1rVEg5JHJiQkav5jZR3x3Bl0mb1LU7ymnpoE8Som2/LTmwvy7WIUfU8A4miiHUNXF8HHDFRUKBIYpGPhx6Tst/1dr2NySSdzbDePheR0+jf4+PXyS/p/f6F+XJ6+eSpzPM0nnq6yTxJJXXw0DH1kPLbfKt7W5FsAM2pzRuUKCJkW5R1dTv39wfW2NTzFIKSFqiGmKzuoH/Dt4Aa3ZYyNrfub74yrqPNf4hURiBUmS7aCIiro97MpU/Kw9tm+vHvwXFFpfZjv0jUtVZZE0molQVN+PUD+p/fB/wAQKbHY+lsLHSqpluW6GJUs1xf6e+LlXWHUbEE8bHtjxvIjeZ0Ysn8wzGolfyubA+2LbyJGrWFsLdJmDIOQB6euLsM7TSXJt677Yl6oiWZagKbXtq7YjSt2+cj7HFaqvtYAmw53OKJY3PI9sGMbQ9aPzxKh8u5wFzt5mjYQvZvUYYKuBtekEnbjFcUKsWEgDe2M+qv0FJMTMmyqZ5mepLtvsT2w1S0UPhopNn98EVjihTSqjUduOcUapSDqYnb0OFXJu10FpVZSjywqxKBk37dsW6fLxI17WYep5Hrjg1irFbUAR64hhrpFkupsRhpY09i6C0FMAwvuTzbBvLwlFXUsodlVHBNr354thHmzhopNR8o98G8nzKWsKhtl5HB/vgRu7/Bz0a91XlND1BQieaNKuKaAw1CqTpkiPqNjsfTGAVnTEPSOYGnkeoCAtJTSkHTUQXtf6rdRv3XHoPIqlpclMU2WTiJYmYOiHQ5t2Ww3/fGZ9eNVOiUsuX0+Z5VJI8ksNU0kMkTlR54JFF0fy9wVN7MpGPo4ZNKfp9noY3zhX4M6zzNpKuglpIbspIWZZiDe57Dm5v8AQ/THHSXTU0cqzZa8rSRqLsjcKQC1gO3cg+hxJV9IGqKPklbHW0rFTKtWFhlUbgA2JVmHcra4Kmw3GGDLq2syZCKeinSuXymNEbVe9vLpvubnbe30wZZbVLaLLEn2N+b5bNlq0U1PAa6GRTJPoITwFsPU+axO5HtgRV5tT5UjHM6CokvPZfBnCgr5rg3B3FufbE/WnUaZlkUUEUeYUsAhZ6yWSnYFNNn8NSB8psLnbZsD82zGspMnnq+oKGDLXSETBfE1yuGIGk7BdTKFIBN7W3GMnw4elBA+DGvRVXNVzmqy+ryZ42yeSF2qIZ20SQEKTdm+x224xWyqvpc5zpYHWjWmkqZRFdnQyBFGgC+9pD/ntgdUZXLk/R1BW2ikgzashklSSk8qQyaQY/V2YDe/8p2x9paiolz+lqM1eWSSSol0SP5DDEFI1qBcKIxYknjykm2NkYY4ppJUJ8ceqCFFPS1uarHFTSSUqEpIsUxGhhcEl2HI8pIANtwcEfAqqe3wEFG+shXM0/ieENBJOoWFx
7c3G2IKfKJJs0zfJZ0nhzGElS8tgaqJx4kcupLqxIIBHJvvuDivBkUkaVcdHMlJU+AryRuliyAMoZLG21rG498ZuOGEr4L/AIHWCPpB/qOV6Hp3LspRgaioQVlUyILmRxsL/pspA9gzeuLPTlTLllCi5eFzCVfNKIJfDWH/AKvFY7duBue9sJM1Bns08orYGnD+VpYmsHN7BN+L6DvbYXP1q5dU5rHIyutZHpkYFaeNtJF9tJUH9+eMNgiuIcunS6HbqLPIaqIQLLUNrWzUzZnrfm1vDcXsb3Dc9iO+Me8SRauRyzS2bSmtgT7A+hA2PfbDlnfVdTlaSxVEVXLHCR4yTM1zqP6gbm+3O2wwvwxydST6fiKSOrBIiTw7aiFvbUN2O2wPNudsba0ZrG+NDHRRIW1SR7SBbbOQCf6EY5iMkpAfn3POKXRJasyeu+IaSSrTM5hUvJcN4hVbbHjZTt7YYEo7ONPJ2vbtjzJ8Y5JJkW0VivhC9gT6jFmkmYjfb72xdmoS0I0XIOIYKQhCq8/3xkm0l9SNqghTqs0YZmAG49f/AHis0Dam37/zWxCzGlLAE6b9hiRKoSLfXbttYYaN0CKZTarZX4shwRB8ZFA2H9cLfxIkcXbvfHb5o0F1BvsR9MefkgpavQiTCUreGdXp6YqvUF1sCDcb7dsC2zGSU/Kz82/0xVkrTG1mUhr32OKQg0tsZN9FmpID6bbg8j/e2IS0hS47XscfIQZXu92J/TbfBsZYzRjygDT+2GepI7oXzEZH0yAMvFwL2+2DVBIKGJSNmAuLC2+AlfKaI31eXV+rH2DMlmOknf0vi0k70g1rY39NdSz0+azJ4kki1FNIqoTsSBq9x2O+LM8iVkKVQrqdJJQSYvELAW+ZQoJAYA2IuObgdsKWQtUnqfJkpfzHap4JIDLYlht7Xw6/w2SeNkpZIhDNIXjKRBbA8A2a1+eFB9+cb/HXDHT6N2DcSCLpSJmWrzBvDoyQAsZAea5Pe2ym5G29/TjDNldVDSSTxZbH4QlXRrlAQxkszWPBN9wGFr3H2E01E0jReAfDp6dSipKwIZm8puo3Nx3He18cSUjSMTVgeLAQgmWNiqAWYEjm+ocW59BhGlekejG62yKWaWvzKlllmU02WLG1RJOp0s1jpJC3A4F13NwL3GI/xGostfpqSqrYJJBVM0tFC48Z6iQnbTCCDcksTfy73NgADa/D/Pcz6izavyzJ+nIqLJstUtXGokHiuHL/ADCQggudhsbHbYNvoGTZBTdYZjU/xXUuXJBCsT0YI0RvIRIEbcgAJpvfcKT64E5PG1Y8Y/J0JscFb/8AGKMV9PS/xhqZ5cylZ0nNLIiszbINMYHhqAdzuFXjA/pCipqGhklzCjFQKSBJj4dzKAC0ZlBX5bMyhyB8twy74Y8hp6Cip/xCjyNJv4pVwVdLFQCKJ0eOkgkcVCrySwcFd+Co5OPmQNRt+H+eZtXZ/LllOjxw0U9GFDLM7lxIo/VpYQlvUgahsMSea1aLfDTpme1+UiTqvL3mlgrqCrjZI6iKnEclHqi1+CzQk6w2u4V0UsVOn3lzKjqqytgh6WQPPJpgSA6d1DONYJ3KnSzsAdwLk7HGt9P9OeF0DPXvTQNnAWapnaGjMMlWT5NaLcabRsjkfzOTfbYPX5RW5t0pV0VA+W12bRwRxHMZZHPhwhdMi2W4M7RqqBQdyXOqzal6GZ5WqBPFwTMafPY2jT4VpBQweWmkkVlEj8PMfQyHcA30qAPUmaoqoc3pxouKyMaopWne7k+UAC9iNweMMJyU5nls2ZZFk82Um6rVw1MsctTrA0iZXLERjghIhcDa7DAYZfmE5VamSeeUqrhpnvquRax5B3sfpjfB09Hnzi32BK2GOekqZKlWkLfM8g0oBa23+J7E+2BnTEtE1XEYtTiPTdlGkOyjsewA/thymp2YfD1sT+H8peNlAANxyQQ309fvhTocpfLc6kiqiyJJ+XGz2ADHgf27Y0crX7M/GmO2RmMvVVFrPX1clZNvyzGw/oP6nBido0YaPTb2wIr4ky/OKumhtGIHAAVr/pHfvj4k7C/m252x87PJN55Kf5f/AEYG7k2HY51eO8huR/THLSKFurAk7ADFF3aKm1OR98DGzEgEm4ufti0YxbsSmwjUfmoxIBI9O2BhLoSFJt7tbEprfEjubEW2GKLVhJ2CnFU3+B1aAkNSQ91G1+L84syT6QNdrHf3xYy/p9UlXeR7Ddmbv7YvZtlMdNBsfPa5Y+mMVwjJKT2w/wCxQp6inkjJIAHvi1Hk4qJbxatbcXHOF2eXwnDIxup/fBLJ+pGpwUmA1/oYjGvBFStSOUUxghyz4ODXI+gjY3O598Q1WdfDBxBZ9ucAM36pkqZPh4FEo/UeLYlpYmkp/Ec2Zhc2FwcdlwwjtD0gfPX/AB4bUFAVt1Bx9o6VROzXvq30ngY7OWFjIQNtiLC+K0YeFj5rk9/THJpaQKtD90JlOXydTZdLX6FjiLOoZiNT2sBccc3v7Yb4stQ1EsFEY1jSR2YCVt1OwAttuTx7Xt3wufh/TSUTvm1QNbRwP4KyFXS5HJHII53xxVVeaZnHPUZfNHl0UjNIaiYWaS5IVrbWRbMRe9zbGmN8KN/ix4xtmmLT0lAVkzKBIY5GFPG51P5/YE2GkA+Yn74J5xlrvkEcOXV8+TLVjyVkPhhgOSza7jWw/lubelr4ybo/Man8UOqaDofLc8XJsiymNX+ISm8WorXVvO2ttgLkb7aj67DANsjqT+KeedKfiXn3UJoKNk+EaSwmZZADG7C62B2B323Hfa3CUYt9UaeStG1dD9Gr062c5pSdSx9QTS5fLTziqji+JSMnya3UEOQTufe9rb4ufhxnVPQfh5mU2dU1bBmBlqp6qnjBa08A1GNBtbWFVgCd9T9r486/h8/XVf1lnFB0zUvlk9M7QQQ1NFrpyePBkuoYggX1KTa3FiMe1+gVy/8AFb8G6LO6Siiy3NamA/GLE9jDXU+qCUG3spse4sfTGDycWSVqLTa3/wBG3x8sItck6ZjfUWXxT0VM9LWx5Z42ZR5dXTAhXlDIpkQG2w8RpSdO2hUO9tgn4d9JxZzkuU+KYYMvizColpGdlUwSP+YWRSN2eM7DjyJqGws45nTfGZHG9dVpR1EsEnxdN+kKsbwGdtiblC9yTa4ZjYrbAuL4ZenBqhigiSspmeMNcqIx5YiRY2utjvcArv5TjxlkvFx9HrOFZOTPld+JmV5IXhr6eNctbK5GNDTSgJNK7tqEWxKIy6tSnvf0sUL8Is1zHN8u6kp4ZIslyyVJKejJuRChSxI3vquSdQ33AGFzpvKafqL8Sc1qbxVlHSgvFLFsJ499JI28wZ3BFhxgN1P19m9D1JmlH0PVjJaLJZAi1FFAssrzAAtpJ4tdhexJx9H4vj8IWeB5Pkc50auvQf8A/GSU6dN57l0kE7F3XM8vMUknHlEqk6jpvsd/MDtpLYFZxUOlV+UA+ZVsQKrJOLyX2DEjaw2Xa9sZdlXVHVXUOWSVFb1Z1CGtIV+IrZBGFA1a
vD2UAHbgcE9xg50r0XmXUn4fVvW3U2dZlRxrJJNlrhlZo4wfPICwLbkW/r7404sctqTujPPJF1xQxZ0/w60sOdDwKqbbVGqhWcetuPX+3pipksEVXWQpUS6yWUFmbcm9w1t78Wvft9sZtD1rmENOkXVUVRm1GGTTUTRkvGfmVr2Aa4PI5w05fmUTTRzZdVq1LI+uMuxFtQ8yj6el784rwcdkuSloa+uaKVOra9hG+p9BItz5bX/pgLEsiygAaLbkMN8OfVMgnpMszVpnYSweEY2bxDcdw1t15sMJ81Qb7b+9rY8zPBfM3L3v/k86UadFiV2l0gm6gf0xWlpIp2UAhh635xy1S0kYUDdtjimI5dYZDYDbY84jw39dHJUiappBCdMZtba1rm2IRAy7KWAHvixK00cB13ck7dsVxFrF2UseCd8aopnDfHLT1DgpOHsLWXYDC71VG8ZCn5ANiDgblte1C5IZtOrzADF2vrhXo6Pzba4t9sfP5MM35McgYqtCb46A6ZrHfti0KOSa5i45GOKrJXlmVoVFiefXDJllMaOEGRAxXcnnHsuUIK7GpIG0PTpkYMQ0YBvxhoOXJFBpI1iwsT9MVo81CAmNtAN9uMWqatFZMFZtRP8Ab6YjklbQitnEOXHTpVAW5Ptim2WRPIdXe4v6YZY3Hh6VHPJxVjp9VUG/KK9lckKP23P2xkalnlUXX7O2iCtjrcs6ckhjog0MwEKoulR3Opr79x729MI9ZleY9a9VU/TERqIcmy1A1fPKvgrK43LEC3l30gdh9car1nX0mTZdTwRpEKwIJLLAGKbg3IubD6nbFTpXM8p6Jo6TMuts0+C+PlM3iqHYzPubkL8wsTvxj38UZY48VtnpY19FyIsxp6aSlyHN/wAOpaXJs2yGZo8uqIqbTTVirsYnGnzarWIO/cHjDlm+Y9Gf/kOaKatq2/Dn8WMsjEBStUNFOvPhFiQsyHkC+oYy7qrrzpfrGvqKjJMyzZ8saIR11EKVgsyk2Ghb3NvmLrpK27i+I8vmi+Liy+dsuarpPKUijZhNGflN5Y2sd7ahe54OOySniReEY5Ga7B+Gv46Zbl9VQ5NTdMo8kBiTOqeofxYkOx8JHHzW43Nub3thy/8AxBnfozp/rnpev8VKfJVgqy8q6bPKjq3k3P8A9Y+tr98CeiPxAzbKJKWhieOGiVQq0kVUZrJ9jcXI/qAPXFSrzbMupvxAr83WogpZZ08Co8NPD0RIrDSd7hgvJ5BNgb2A8uflxgriqZ6WPxXPTehyGWscrrGloUr6dmNVLE0d0lPygGW4a2olyd1AO4wrdVpXZP0tX5dDVR0tWtOyy1cJD3AYAM1/Vna78gE8Xw45nPV5dkwShZUhjiZIw0Y0MLLpDC19AFxbYk2udxhF6v6Xqs2p56ikCeLCgBo3F2cabsVIILm5F1J5Dem3lYodI9PLJ7Zjv4e1aZZmHVQqXpxPVQmSGPUAGlBOpRxYk3O3r2xmHQPT3WPUFfXZh07l0WZCaVpKmGeQosjlibg8cn+mDnUGW1lFr8Cd9Gg+JHYFlW/6jsb7fYWwY6H6xq8l6ZmoqFljaKRmWWPUCVNr7gEX9CfU74+xg0sdM+UnG52Mf/wGeOMTfi1mWWdMZESGnoKKZ5KmtsP+WWt5RtYhQb8Y56064Xrqmp8ty9KnJehaWDStMXhheYpYqsw82lBYeUMDa5bC1mGbyZnNrqGNRU8LLLIzWW1rBrkj1tbf1wu1EdTG6UtS2XqynxZ4IVKxhbizSsb7ex784CbeloDSW2O+by5XnVItDJX09XFNHdY4FdlBUbbgaQB69u2EzLMiqKKnkoZVV1pp7rKCLgN/MD2uOb++CWR9V5Dk9RNR1t6gTysHLU5aKMEcLYhdN7kWvivm8zUecLXs5NNITAzjU0MiA3AFwCNj3ta2+2K00qJ2ns0jpyqjzTL/AOF5iDS1cKawsnl1jk22AP1uO2AzExzNFNGYyrFbEcf79eMBcizyOOUZfXhlEbEQVA8skW/yi3It24O+2C2ZRFKhfDeJl07GJAg2tyoAtz98YfLwqcFL2iGVbs5qVRGFuSNxftis9Uqnvf8AV/livPLKj/mMD/Lb1x8RGVA99Nxz6DGXEpJfYk6ReSpWdQt9yRc+mJ1aKMaVcKBtgNKdHygqAdz6f64lLsd7N/4gkDGqwktO0FOfzQQ3p3OLEdbTNIRYg7ndbXx+zSiWorhPFeNLEg4qtURIjLstvbnGHhJ7T0BwfovxVNHrAayrfzAfXBeqementCFU22Cm5I+uAUYo/hldioPJv647FXTBF0nSSdnU2uPTGOUZLJGHdixdvYWFDTTRG1irLx3GIqSlTLn1t5lJ2JF7YGZhUeE4mp3sxtq3tcf4YoT5t46WLtGDsBj1vhi47NvxJobhWwvq3G3Ze+GToDJZ88rXqo3qENM3ljp49L/UyMLKP+06vS2M6oYBKlnYIDYEi/r2tj0l0n0vV9PdFyQJM/xE41pG0fgGJTwSFZySeblgbW2GE8bxHLLyb0iHGmYj+Kk9Pl1TDSR3aZ5kMrF7+JGP0AHYXvudyeO+FOfxupvxFL53UGUUcaNHA5+RiLebbleO/wBAMcdU/D0nUyu7xZhX/EGT4mWQhYwCRdbXt6Cx2sTzaw2POM6+OzTPBlk9NldBRv8ADyGE6Hk2Fyx4BPf02x6TXaRrTWrNbzapeipo4F6fpp6RI71A8TTLEBvdUVNTbC91IPthKrKijp56nLspeozOGVlklyyQjSuvcPaa5DnuVsrWsQDuZPwuymr6moJ8/OZ11JmlQdQeQGaAb942sLG9tiLY1WmyVMyrsulq6WjrYo5goZvniZfmCkkllJ5B3W4tccZOLVoupJ7RX6Vyiny7KqRlqZ3mkhbwkiXS0YFxeQ2UhRb5VB3XlsOPS/wlNnVHRrVIa6sRpTC72kKjZ5ACAbAnYkG2ock4LZP4edZxVLQ1TSKj+EyeIrOoDA2PDFR3vbYg88rnUXTU2Sfiv011PQxrVU9TE2TZlHoaSQQSEMsib/pdbHYmxO5x89lhyk7Po8UlFIc83qJpIUA0ayLFxGGHoO/v62N9sK/VFf4QMtS0cj+H5Vmv+Xte9huG1BOCOTx2vda9RZH0bCYc9zamo65v+VEfO4sf5FJttfnGcz/iB0t8BVy5hnlNQySK4hgaVgS+hbXOkqL3PO2w2xXBiyOrQmbLj3TMo6jzCiCBp4hFRCUkqzKojJAAI323P29N8LMcBoWPit5QgDPGoup3sw42xx13TzZlPRUEL+ImZMZXmU6rxcr9QfUe+GCTJqehyZDrVKeKIDU5LFO1rG19x64+l41E+cbtipWtDHOjZlSCWQhVMBQJIQRcEuD5fpubHHFJl8lV4FXWVFN8ASdEBiZRM30a9zbvv74a8mjyvqCjrKeBkqZVkRQkasrC17kk8g232298MUXRk1MPiFaaJHA8Z1pzKBGosFUcKN/vfB2lonqxLp3oa+llp6ZoqgNeFotCgJ2tY/39sDslWshy2uyipp5KyGOzU7RyaW2N9Gk7N32
/94aOpPw6q2FRmORZbP8AFEWdHiILC/cHg3tvgBkNP1NDUyw55keY0wkXSZWjAXYb3NwLcG43xXHF1onNqw90U1DnMBoMzUHNKJvytT+HJKluNRYXI4seR3xoWZdOSV+VibLJHnqaRQPDMIjlVQPlaNiLiw2IJItxjL6vKs8p66F8xiendyWiq9Is57C5tqG/oT+5xs/RHUddQ0giz3KvEhK6DKsDaf8A/O4IPrdePtjR8cZxal7IyZjuZ18kcrJKmmSPyurKVKn0INiMdU1QaqLjfDN1xQZPX5uZslP5Jcjw1c3BB8ysrElSD6W9xgStMlInkQEA7W/tjyJwWJ8bsWlRXWmYsDNcA8Abf7OLgiFtrAe98cTVCk+Kbsx4UY/RZmqJaYB3vucJJS7SC1QN/iVRJFHCsEg5vttiqax12qANTbL5bc4sx1FRSM1Q7Q6L3CaT2xOufUGZH4eZFjlI2IGOSUVTVCRy1qgepeRwrDUP7nFevFQrAwFw4G+22D0NEIiNJUsTcC+JhUJCSJ1KnbbTtg48cemXjFNCBWZjXxkeJ4nlO9gd8RUubVLS2mUlDv5ha/vh4rKammBc+b2P98VZMto1pw1lLruTfYY1OkVpsYOhp4mzbL3TMKXLiJAfEqadqjTbfUq2tf3JFubjnHqKvqxVZC8cEs0tKQVaWaQRCT1bc2/a4+vOPJvTeZ0eVnVE8Lzrshmj12PsLc43TN6vMR0sk1ZWL/EfBDhIgpKG3zszXCnjsdI4W+HwVBNCyxuxNzrJOlOnpxX5vJSiebzQQVVToR72AYofPJbayqON/fDH0fFlPVcLTS5hT11OrECjoaW8QI2BMpG7E8Iw2HJx5zy3Io+qOvViqZjnEksh8d0MirKbeZdZUs+2xYkbX7Y9Y9Kx0ENDT0uWiCaCNAqR0IEFNCP+kAkWHdiSWJvxbF3BMHJoYIsloNC00MLLDHfVGClgB632J+gtgdTVeRUvUHwjaamnecSVq0bGOxta7yJxfgnsMMs8RqaQ0tAGRJEszQKsahfYsCT9bHEMeT0+W5c1LCViU31PMXqnJ9gxt9rAe2JOH4GUl7M/zan6NyrqMZrW9OdeZHmGWS//ACSWOlhaoGplEZpyxvqS0amxsCD73xovSmU9Kdezda+D1hWdQR5jJTg0c7shyoMqyIkSOLh9fn2JtpA7HCzk/wCIec5HVT5XFnq5dNHBItPB1BReNQ1LkkxyxTxHXGv6NLFgOBuMOOVZ3X5uaPNqrpfprPJKWSGejq8uzVI3WUApK+lxZSmo2FztYEgnHkTj9vsv7/oetGX10zz7+MPS3UHSfVUGTZnRZFmVHm0swy+piExr5bKZHlqGfyFmI3Ck7sLA3Ax59zvqmKHLqSjyugjmq6l9D+IAEQ3245Y/449vfjQX63p6amqcsqenMwyvxnos3hrIppI/EXQUZEuGRwqllaxAAIOoY8ndGfhhJFlk+eZ0lPWVdHmr00OUQzuhEkfnM7ORpZTc6VJHGN2BwS/SMmZzf+7L/SPQ0OUUYzLqiSKKseDw6emZwqQKTqIQsbAk3sfUmwwGp3yDMqamyyqiznPaRqiSr+LP5Ypin6QSLt6AenGHXOKx80padsyyCCcxjxIxVVsYWOUN5QCpN/8AD64XM46tq4lcSZnS0EUUJRqfLoA0sbte5DvYD7Y18tmShh6Qr1OX+BT9MJktNmDiWkSPXLV+IWu7+WxEdgTpvycOnw8lZVxQT5XnLU4AIlfLYVjVhxfUCxHfGU9KUtTmsUlSoaCORVRZTUu0kxA2Lt+/ltYXPONe6YgqKWiEEsj61AIaOQMSPpZcOm5MVqiSWlo6oPERWJOv6BT07yEDnSD6f7vjM89zegymU1VNUwTwAkGOfpmEMLEagZICChU3B1i4vuOManmFfMkH5cyTwL/zY6i5VeLc29eb7HnGPdd11NWMI8yp4DPPcLUvEGEgXZVcsNLcje77Ws4GNKjSItsasr6g6d6tpBFVUlKVC6UqKFjLTS24ujjxI3sLEMFba4BGHGid+lKCOroJI806clkAZ0lVnpmbazBra1PHOrYAgEDHlzJ46nLsxc01O+X1VK4MqqxYBePMrknSwsNmZTttxjY8szGqgiary+oWD4hCJoiDpkVuxF9/T24ws8igco2fepYKWfMZKrKEmp1dd6eU6vDA7I/JT0B3F7b4D65CnmIUA7Ei2o4vsJkUGMKocgFENgp9B7f64qyU85YN4OthcIL2AHqfvjypJzlyaL8Y0RxRfMLqATfc458GEXAYD6rfHcdFVsQfB0gNvdhgrFRT6BqcKfQH/PAoCGnLvw+yctKrv8adNgHkAGOD+G3TsEviT0jRyudmje6qPT+mNHyToxKXxKmSITk31xotgQe/1vhoh6VgkphT06FITsSeQDviXCZ6XHEvSMXj6AyqQSGnqWieNfIQ9wdsXaP8KEqnZaqts5UNGBsSO+NKi6Vy3IJpHrnPgMvleM7Mbb7YnpMxyMywRxFRKVsxfYD6Y5RldUBwxd0Iy/hFl8TLHVRVEsfJlR9iPX2x+qvwmyqayZfCzR/qBJ4G3PrfGp5zSrV0S0uX1j0zSAgPT7+/3xnFfT9QdM1JlpJq6tluS8kpQKE91xo4TJ8oCnnXQuVdHvFmmXRVctX4q6YjpsrexIJJ9LAnAPMOsZs3o3yx45DNJdnDS2UW5BYlS1trgW+oxoeUdS13UdNWpUmVKqjjO9LHoZS2wUubhL9zzbgY85dZUVZkmb1LywGQzE62UGU+w1yAE2PuAPttaEVHVkZu3pEOV1VPHnj0qAv4lllmLgKU3uoAO6Cx8i8nm+2PSfSOZwy0ST1CmloYipijmCoikbAkDa/pzuTYWtjzd09l9F8S1TqDzSR7x6y767j2HoB9z250yClrAiCSQosYPhqVLEOQBqt6249L8DFJZVEisdm8L1nl0dHJUNUKsCfql8lyOSQdx98Ldb1zHVQyyQ2eIjve1jwbcm/YHnm1rYzJcglr2j+LZvhwRqRzrvvsD6/QfuMNFDTfEOkIW8SNe22/Nyfc/sBsOMZ5Z3IqsSQu9T5zTdSTxGulqaeVSz09VTEiaHSCFZDe19W+9wNO29hh/wDwwyXNMty5YouoqPM6isZxLJKhLQiQfKFJ8m4BPbnjA6DpKJZpZnQPPMLXP6Rt27D0Ha3vgrHl1NlMCGniCPEdfG+q1ht3IH2ucSyRU0UhJwFzqf8ADPrLqWqFZJ1HDWZOkToY4qR6cNKjaSRHsSpFyLm1wCPfO678Oc9o4a+TpeOQT080jSxaz+aNZRxzbVbtz5e1sescu6qpMzyeFKpo6eqp0B0u3cDt2vxvjKurc4EM3jqvwqxSSGNFa5AkNmuf1AkNcHvvhIOUdDySlsxCPLcynphGmZJKJGksrRi4KkeRh3IIYE99jvhSXpGlp8xvmlV8ZMl5Fp9yoU2O+/YgnDnnVQ1ZPMgvEqupR1BXQff9ht9R3OFPLqB8sDPO7GZTbfkMOL/c2xoUtEHEfspqRSCNg2lFJSxNwO2knsPT7Ya48yi8PXTs7s
gLssdmdRbche9rW09/vjJK3MTFCtTBIVEutzcXFhybf+LfUXxRHUdUJI5BKUnp5PKVN7A+nO1r7cEH7YrCdISUbNazPM0qo4XjZHqnBeCWBgI6xCLCxvsxG1u52IBGMa6krBnCzw0szaZNLPC8elTpuNLoQNLr/ML2sb2uRj9VdQTNUSUbi8VVJdksRoYsbuBvuDb3te99sR5oLVZmYsZfDYuTck25JPN+9/S18W+SxOAGytqiGRVSUoV8ilzqC9iBfcKe68dx6YNUmaV9NK0aSsgBIK2uF9RibLcqTMHnkBK+G9tHN73v9d/74LJFT5eszVEuuc22C3t2ufU4jkkn2FRopQ9S18Ru4BS+liRa+/P9sWYOs5yNBgLsOQmJIqWKvQ2F1UW22N/8MX6LptTGTpOtzdyBvbE6iBkC9XyRi3w1mI7uP/eJB1bM+5hBPu2LsHS9M+o7NIuzNqBtiNsoplOl5FuNvlvgvGhVTPWuZ1MlFFNFSOsREhYawdva/pghktVUTRR+PoGoAB0Nwwt3GLmZDLzUOtWoTT5gwFxbEkWZ5fEFhgaJjGATvYaexB4tjVwsvzoq9Tq9PkMy0FHDXVkf/wCpStII/HfnQDvyL4QZOlIpKePMMyp5ctaYB3gkIYQkjcah/TGqx1FHVprp5EaVOFve3uMBc4zrKoy0eYBzfa2m629Ld8c4R7YFN9IzeLqsZHWfBZTqzNwQwAP6ewNvlv7Y+9TdQieJcvWmU5nULrqIwdQpoj+pyRyTso5OGk5hQCjkfpykp55V3V/B0hD6bDGKZz+JFLSdRVGT5QslbJK3/G1ciWaeTiwNvkU8D0XkXviU/qvqOnb2MfTSRZJSPH4AVWl8qyS62lO93bTa5NrAb8/U4q9QdL0WeIzykB5pW1Mo8RmOwNhuOx33t24vgwlMsatWyxEO1g/iRhnYGw9hHc7Aeg974uMskqsHjSJPCOtVOlQuw3bsLXvxf6YyW7K0qswvOOnKrpyOWbL5PCj0KxjvpU3J0ljwW+YgbgXJ3OCHS/VdKJaWjq2mkkZSRdGYyAHkA/KCbhb8i7NbDf1BWQBWFUqMkS6tKsAWc/Mbn+VAR30iw7m+ddWdPyUsmuiiMbzFHmCjzSA7ovNwnPJ3sLg3sKwdrZKSp6NgyySLMqCKoo5UEAVlDDuf+k8EWv5hf0vhkoKGGlRFQAfygglvUlifc3t/ljLukc0zCOnlbMnipoo7Q00CtqEcSnSt24uefbfa++Gmk6meqrXipXDxQqNTAaULMLrqJ3CgG+nliQTa2EqKYyujQHMdNTuxJ02uWIuz+9sAJKvxagU0YBcG7A28ptxf+bi/pjnL2krtdfVyMVey06N3Ci+th2ubn6AepwhZrn9ZVZzHlWSQOBVMSZ02KoPmIPZj69gxP0VuzlEcq6VSFu4YHYKovvfTx+/7WwiZ3WhJJFcl9MpAW53NgbX9OLn/AFxaquq6MZj8BRyKI4JDGmlyTMUTzt66QCN+TwNzhB62gzaokU5YjR6hpQAkXCAlr/uPTk4nx+2yl6A2Z9R0MXjjx9VxqjKm+slSdRPobWH1wvVnUDy19PDGrKGltUNr3JJIA/cLc+rYNUH4fzMqCpAMjyaSSdggHp7dvpi+/RUFNRtJEELi+ockbhQb/wDcP93xdOMeiLTYgfxGpKxK4YyMpUKObWJAHodf9zjiCCpmqYmplZ/MFL3sbcgkdjc29MaHXdMRQV9Q4UNHsRtfzjzc9jt97YH/AAwy6YeGAXZVY27AjzW+9v64opoXgDnoki8KUamZAWFz5rEC/wBwAfrjioq0bw9Olw0b6eD+orYnsbH+2I2cVGsMWU67KGsdSkWv+39vXFPL6IwIiatc4LlWa+4G9gfcX/xwyAXaWWppgFkEsKVGoKw2ZgN9P9/298Hcup6adB4km9zYHm2KeX10b5fC1UIgqsPFW+6lfLqH1FsFWehqpB8K9pG2sPU++A0md1sKZVTXqFip47g+W4HH1wRzaA0cbeFISdPINtR9sWemKyloIn8UrINPzk2DH29sJ/WGdT11c0dLbQTsf9MMoqtiy2LWY5tmFLKTTVOmMEhVU7c7k4IUWdSz06yTVDlm3+2AjZHV1cxNmA7knBilyKeOBFUXA7lrYbVCVs97Z5k9W8N6V0BUEpYlf6Yy2XJq2bNxJ4gprGzaedzwRexHvtje3ii1EbiwwFzLLIquMpKqkMCtjt/hjY1sEZCtly0mS0rrV1boQLyApwRyQRxjjMOpsmjJSGpgnLDYarFhzsT398QVnQqVTgQ19Qg1AGKQg3+jbH/1hazOnpaWumyylPx1RAqmpjdVCRqdxq5sxtwPrhXpWxlt6OKvqCfPnNBkUb0eUQqPGn8WzSv3BB4A9eDfvhQg/CfKqXNxmE/xM1TyjTi4Q83UHn2P02GGiKtrpGWFhCkVNtGkctmJvwFtv9ScdeBOH0yLHObEaGZmtfe1z+/fHnZJ8nZqjGlQKzXNocopwVif4eO406FZQ38xudvrbA3LepIM00sCkKK4XeXUNu+gDk3FgTuSTwMFOosspc6oXEkC07gWJaTxSLepPvuNsYpF0vX5Pn0kFIIhRM58WWWIhY1JBd7XN3bi54BNgb3CRSvYW36DnU9LXZlnc1fl0SpC6WilKgkoSLaFNxuTqvxdVPmtbDZkNGGysjMAA8r+NZ9wCb+a7XLE25Pv72N0FIuaKszo6xPJH82zFQLlt97kEAf9x4sMc9QSzwZdUTZYumaUFI5AN1JUW0/Tf/Ywrm6r0ckrsDZ3lrrlhSgjip5piATMRdBcDjkN3sdwB74+9M9PrlcB8SoeSRmPiSOLbn5jv35txfbe22CfT1PPNlYarEcJiYjUwJAUmy3J9APvbe/GLFVTT1M1OtMtR8KGLtIRZpGPyqq9thex7G5tfAYQrOJJSsFGpjaRFi1EXEatYu3rcgWUdzbtijmOTx0NLPXRgLMisivbdi2wH0A0n3Fr4ZKOOGlRXkKmWecFELkgPuAtzu319QT3GPjJ8TBqd1KJ5kYi+tybubD2AA9hbtg8PbBy/BjnSnSHwNQ9fXqRVSmRo0LX0XNlFvqdz3a3phxqaeGVUWoVGaI2Yrwzhb2v7nnAXrXNa3LKlly6LxJxGUd330sRqBt3sWG3qBxY4gpKaqOSQrWgS1WmNpAzHcsCrEkcmzHb37YDSDs/ThKeNEkbztI4QAWIFrk78ENc/RhgfXGKldqZUXwIkCXHyqgksW35/mPs2EqHPcwkq6FqthG0cWhiwPm8MuQbnkkMb39Rg9SZzDmc9NSxpHIxlYSASG/mS6gjsDYi/v74ZwraFUvTK1bNJGsjOuhIgHZbg6V0kH62J+tt8LGZAJWDXIzxpez6rBibWF+1rgW+p4OGuSjMkXjPJpCSBkK2uNiQGv2vccb3F7Wwg5rmsUOYS0pVl8RtK6QFBHYH3HBt6jDRTfQJNewSax484mobWQKI42fYjYXHpz3xZs8vgSq20SOCo+W24+2xOLwymKpzKEiQSoLkOQdWxtv+1v8A3inNEaKhYxytvO1iO
(remainder of base64-encoded JPEG image data elided)\n", 242 | "text/plain": [ 243 | "" 244 | ] 245 | }, 246 | "execution_count": 11, 247 | "metadata": {}, 248 | "output_type": "execute_result" 249 | } 250 | ], 251 | "source": [ 252 | "show_image(\"220px-Lynx_lynx_poing.jpg\")" 253 | ] 254 | }, 255 | { 256 | "cell_type": "markdown", 257 | "metadata": {}, 258 | "source": [ 259 | "We load the image, resize it, and transform it into an array." 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": 12, 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "im = Image.open(\"220px-Lynx_lynx_poing.jpg\").resize((224,224))\n", 269 | "im = np.array(im)\n", 270 | "im = np.expand_dims(im, 0)" 271 | ] 272 | }, 273 | { 274 | "cell_type": "markdown", 275 | "metadata": {}, 276 | "source": [ 277 | "We run it against the model and get the predictions." 278 | ] 279 | }, 280 | { 281 | "cell_type": "code", 282 | "execution_count": 13, 283 | "metadata": {}, 284 | "outputs": [], 285 | "source": [ 286 | "pred, pred_proba = sess.run([logits, probabilities], feed_dict={input_tensor: im})" 287 | ] 288 | }, 289 | { 290 | "cell_type": "markdown", 291 | "metadata": {}, 292 | "source": [ 293 | "We take the top three predictions and get the appropriate labels for them."
294 | ] 295 | }, 296 | { 297 | "cell_type": "code", 298 | "execution_count": 14, 299 | "metadata": {}, 300 | "outputs": [], 301 | "source": [ 302 | "def create_label_lookup():\n", 303 | "    with open('synset.txt', 'r') as f:\n", 304 | "        label_list = [l.rstrip() for l in f]\n", 305 | "    def _label_lookup(*label_locks):\n", 306 | "        return [label_list[l] for l in label_locks]\n", 307 | "    return _label_lookup" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": 15, 313 | "metadata": {}, 314 | "outputs": [], 315 | "source": [ 316 | "label_lookup = create_label_lookup()" 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": 16, 322 | "metadata": {}, 323 | "outputs": [], 324 | "source": [ 325 | "top_results = np.flip(np.sort(pred_proba.squeeze()), 0)[:3]" 326 | ] 327 | }, 328 | { 329 | "cell_type": "code", 330 | "execution_count": 17, 331 | "metadata": {}, 332 | "outputs": [], 333 | "source": [ 334 | "labels = label_lookup(*np.flip(np.argsort(pred_proba.squeeze()), 0)[:3])" 335 | ] 336 | }, 337 | { 338 | "cell_type": "markdown", 339 | "metadata": {}, 340 | "source": [ 341 | "The top guess is Lynx, with a probability of 99.8%." 342 | ] 343 | }, 344 | { 345 | "cell_type": "code", 346 | "execution_count": 18, 347 | "metadata": {}, 348 | "outputs": [ 349 | { 350 | "data": { 351 | "text/plain": [ 352 | "{'n02123159 tiger cat': 0.0007903022,\n", 353 | " 'n02124075 Egyptian cat': 0.0008608192,\n", 354 | " 'n02127052 lynx, catamount': 0.9979286}" 355 | ] 356 | }, 357 | "execution_count": 18, 358 | "metadata": {}, 359 | "output_type": "execute_result" 360 | } 361 | ], 362 | "source": [ 363 | "dict(zip(labels, top_results))" 364 | ] 365 | }, 366 | { 367 | "cell_type": "markdown", 368 | "metadata": {}, 369 | "source": [ 370 | "We can move on to [developing the API for our model](01_DevelopModelDriver.ipynb)." 371 | ] 372 | } 373 | ], 374 | "metadata": { 375 | "kernelspec": { 376 | "display_name": "Python [conda env:AKSDeployment]", 377 | "language": "python", 378 | "name": "conda-env-AKSDeployment-py" 379 | }, 380 | "language_info": { 381 | "codemirror_mode": { 382 | "name": "ipython", 383 | "version": 3 384 | }, 385 | "file_extension": ".py", 386 | "mimetype": "text/x-python", 387 | "name": "python", 388 | "nbconvert_exporter": "python", 389 | "pygments_lexer": "ipython3", 390 | "version": "3.5.5" 391 | } 392 | }, 393 | "nbformat": 4, 394 | "nbformat_minor": 2 395 | } 396 | -------------------------------------------------------------------------------- /Tensorflow/01_DevelopModelDriver.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Develop Model Driver\n", 8 | "In this notebook, we will develop the API that will call our model. We need it to initialize the model and transform the input from the Flask app so that it is in the appropriate format for the model. We expect the input to be JSON with the image encoded as a base64 string. 
The code below uses the writefile magic to write the contents of the cell to the file driver.py" 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "import logging\n", 18 | "from testing_utilities import img_url_to_json" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 2, 24 | "metadata": {}, 25 | "outputs": [ 26 | { 27 | "name": "stdout", 28 | "output_type": "stream", 29 | "text": [ 30 | "Overwriting driver.py\n" 31 | ] 32 | } 33 | ], 34 | "source": [ 35 | "%%writefile driver.py\n", 36 | "import base64\n", 37 | "import json\n", 38 | "import logging\n", 39 | "import os\n", 40 | "import timeit as t\n", 41 | "from io import BytesIO\n", 42 | "\n", 43 | "import numpy as np\n", 44 | "import tensorflow as tf\n", 45 | "from PIL import Image, ImageOps\n", 46 | "from tensorflow.contrib.slim.nets import resnet_v1\n", 47 | "\n", 48 | "_MODEL_FILE = os.getenv('MODEL_FILE', \"resnet_v1_152.ckpt\")\n", 49 | "_LABEL_FILE = os.getenv('LABEL_FILE', \"synset.txt\")\n", 50 | "_NUMBER_RESULTS = 3\n", 51 | "\n", 52 | "\n", 53 | "def _create_label_lookup(label_path):\n", 54 | " with open(label_path, 'r') as f:\n", 55 | " label_list = [l.rstrip() for l in f]\n", 56 | " \n", 57 | " def _label_lookup(*label_locks):\n", 58 | " return [label_list[l] for l in label_locks]\n", 59 | " \n", 60 | " return _label_lookup\n", 61 | "\n", 62 | "\n", 63 | "def _load_tf_model(checkpoint_file):\n", 64 | " # Placeholder\n", 65 | " input_tensor = tf.placeholder(tf.float32, shape=(None,224,224,3), name='input_image')\n", 66 | " \n", 67 | " # Load the model\n", 68 | " sess = tf.Session()\n", 69 | " arg_scope = resnet_v1.resnet_arg_scope()\n", 70 | " with tf.contrib.slim.arg_scope(arg_scope):\n", 71 | " logits, _ = resnet_v1.resnet_v1_152(input_tensor, num_classes=1000, is_training=False, reuse=tf.AUTO_REUSE)\n", 72 | " probabilities = tf.nn.softmax(logits)\n", 73 | " \n", 74 | " saver = tf.train.Saver()\n", 75 | " saver.restore(sess, checkpoint_file)\n", 76 | " \n", 77 | " def predict_for(image):\n", 78 | " pred, pred_proba = sess.run([logits,probabilities], feed_dict={input_tensor: image})\n", 79 | " return pred_proba\n", 80 | " \n", 81 | " return predict_for\n", 82 | "\n", 83 | "\n", 84 | "def _base64img_to_numpy(base64_img_string):\n", 85 | " if base64_img_string.startswith('b\\''):\n", 86 | " base64_img_string = base64_img_string[2:-1]\n", 87 | " base64Img = base64_img_string.encode('utf-8')\n", 88 | "\n", 89 | " # Preprocess the input data \n", 90 | " startPreprocess = t.default_timer()\n", 91 | " decoded_img = base64.b64decode(base64Img)\n", 92 | " img_buffer = BytesIO(decoded_img)\n", 93 | "\n", 94 | " # Load image with PIL (RGB)\n", 95 | " pil_img = Image.open(img_buffer).convert('RGB')\n", 96 | " pil_img = ImageOps.fit(pil_img, (224, 224), Image.ANTIALIAS)\n", 97 | " return np.array(pil_img, dtype=np.float32)\n", 98 | "\n", 99 | "\n", 100 | "def create_scoring_func(model_path=_MODEL_FILE, label_path=_LABEL_FILE):\n", 101 | " logger = logging.getLogger(\"model_driver\")\n", 102 | " \n", 103 | " start = t.default_timer()\n", 104 | " labels_for = _create_label_lookup(label_path)\n", 105 | " predict_for = _load_tf_model(model_path)\n", 106 | " end = t.default_timer()\n", 107 | "\n", 108 | " loadTimeMsg = \"Model loading time: {0} ms\".format(round((end-start)*1000, 2))\n", 109 | " logger.info(loadTimeMsg)\n", 110 | " \n", 111 | " def call_model(image_array, number_results=_NUMBER_RESULTS):\n", 112 | " pred_proba = 
predict_for(image_array).squeeze()\n", 113 | "        selected_results = np.flip(np.argsort(pred_proba), 0)[:number_results]\n", 114 | "        labels = labels_for(*selected_results)\n", 115 | "        return list(zip(labels, pred_proba[selected_results].astype(np.float64)))\n", 116 | "    return call_model\n", 117 | "\n", 118 | "\n", 119 | "def get_model_api():\n", 120 | "    logger = logging.getLogger(\"model_driver\")\n", 121 | "    scoring_func = create_scoring_func()\n", 122 | "    \n", 123 | "    def process_and_score(images_dict, number_results=_NUMBER_RESULTS):\n", 124 | "        start = t.default_timer()\n", 125 | "\n", 126 | "        results = {}\n", 127 | "        for key, base64_img_string in images_dict.items():\n", 128 | "            rgb_image = _base64img_to_numpy(base64_img_string)\n", 129 | "            batch_image = np.expand_dims(rgb_image, 0)\n", 130 | "            results[key] = scoring_func(batch_image, number_results=number_results)\n", 131 | "        \n", 132 | "        end = t.default_timer()\n", 133 | "\n", 134 | "        logger.info(\"Predictions: {0}\".format(results))\n", 135 | "        logger.info(\"Predictions took {0} ms\".format(round((end-start)*1000, 2)))\n", 136 | "        return (results, 'Computed in {0} ms'.format(round((end-start)*1000, 2)))\n", 137 | "    return process_and_score\n", 138 | "\n", 139 | "def version():\n", 140 | "    return tf.__version__\n", 141 | "    " 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": 3, 147 | "metadata": {}, 148 | "outputs": [], 149 | "source": [ 150 | "logging.basicConfig(level=logging.DEBUG)" 151 | ] 152 | }, 153 | { 154 | "cell_type": "markdown", 155 | "metadata": {}, 156 | "source": [ 157 | "We run the file driver.py, which will load everything into the context of the notebook." 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": 4, 163 | "metadata": {}, 164 | "outputs": [], 165 | "source": [ 166 | "%run driver.py" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "We will use the same Lynx image we used earlier to check that our driver works as expected." 174 | ] 175 | },
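{ "cell_type": "markdown", "metadata": {}, "source": [ "As a minimal sketch of the payload shape (the real encoding is handled by img_url_to_json in testing_utilities.py, so the helper below is illustrative rather than the project's API), the driver consumes JSON of the form {\"input\": {<image name>: <base64 string>}}:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import base64\n", "\n", "# Illustrative sketch only: base64-encode a local image file into the\n", "# {'input': {<name>: <base64 string>}} shape that process_and_score expects.\n", "def make_payload(image_path, name='image'):\n", "    with open(image_path, 'rb') as f:\n", "        encoded = base64.b64encode(f.read()).decode('utf-8')\n", "    return {'input': {name: encoded}}" ] },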
176 | { 177 | "cell_type": "code", 178 | "execution_count": 5, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "IMAGEURL = \"https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg\"" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 6, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "jsonimg = img_url_to_json(IMAGEURL)" 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": 7, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "json_lod = json.loads(jsonimg)" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": 8, 206 | "metadata": {}, 207 | "outputs": [ 208 | { 209 | "name": "stderr", 210 | "output_type": "stream", 211 | "text": [ 212 | "INFO:tensorflow:Restoring parameters from resnet_v1_152.ckpt\n", 213 | "INFO:model_driver:Model loading time: 17208.69 ms\n" 214 | ] 215 | } 216 | ], 217 | "source": [ 218 | "predict_for = get_model_api()" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": 9, 224 | "metadata": {}, 225 | "outputs": [ 226 | { 227 | "name": "stderr", 228 | "output_type": "stream", 229 | "text": [ 230 | "DEBUG:PIL.PngImagePlugin:STREAM b'IHDR' 16 13\n", 231 | "DEBUG:PIL.PngImagePlugin:STREAM b'iCCP' 41 292\n", 232 | "DEBUG:PIL.PngImagePlugin:iCCP profile name b'ICC Profile'\n", 233 | "DEBUG:PIL.PngImagePlugin:Compression method 0\n", 234 | "DEBUG:PIL.PngImagePlugin:STREAM b'IDAT' 345 65536\n", 235 | "INFO:model_driver:Predictions: {'image': [('n02127052 lynx, catamount', 0.9974517226219177), ('n02128385 leopard, Panthera pardus', 0.0015077503630891442), ('n02128757 snow leopard, ounce, Panthera uncia', 0.0005164773901924491)]}\n", 236 | "INFO:model_driver:Predictions took 1916.85 ms\n" 237 | ] 238 | } 239 | ], 240 | "source": [ 241 | "output = predict_for(json_lod['input'])" 242 | ] 243 | }, 244 | { 245 | "cell_type": "markdown", 246 | "metadata": {}, 247 | "source": [ 248 | "The output of our prediction function is JSON-serializable and will be returned to our Flask app. It looks like our model predicted Lynx with over 99% probability." 249 | ] 250 | },
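{ "cell_type": "markdown", "metadata": {}, "source": [ "As a quick sanity check of the shape of that output (a tuple of a results dictionary and a timing message, mirroring what process_and_score returns above), we can pull out the top prediction:" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# output is (results, timing message); results maps each image name to a\n", "# list of (label, probability) pairs sorted by descending probability.\n", "results, timing = output\n", "top_label, top_probability = results['image'][0]\n", "print(top_label, top_probability, timing)" ] },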
251 | { 252 | "cell_type": "code", 253 | "execution_count": 10, 254 | "metadata": {}, 255 | "outputs": [ 256 | { 257 | "data": { 258 | "text/plain": [ 259 | "'[{\"image\": [[\"n02127052 lynx, catamount\", 0.9974517226219177], [\"n02128385 leopard, Panthera pardus\", 0.0015077503630891442], [\"n02128757 snow leopard, ounce, Panthera uncia\", 0.0005164773901924491]]}, \"Computed in 1916.85 ms\"]'" 260 | ] 261 | }, 262 | "execution_count": 10, 263 | "metadata": {}, 264 | "output_type": "execute_result" 265 | } 266 | ], 267 | "source": [ 268 | "json.dumps(output)" 269 | ] 270 | }, 271 | { 272 | "cell_type": "markdown", 273 | "metadata": {}, 274 | "source": [ 275 | "We can move on to [building our Docker image](02_BuildImage.ipynb)." 276 | ] 277 | } 278 | ], 279 | "metadata": { 280 | "kernelspec": { 281 | "display_name": "Python [conda env:AKSDeployment]", 282 | "language": "python", 283 | "name": "conda-env-AKSDeployment-py" 284 | }, 285 | "language_info": { 286 | "codemirror_mode": { 287 | "name": "ipython", 288 | "version": 3 289 | }, 290 | "file_extension": ".py", 291 | "mimetype": "text/x-python", 292 | "name": "python", 293 | "nbconvert_exporter": "python", 294 | "pygments_lexer": "ipython3", 295 | "version": "3.5.5" 296 | } 297 | }, 298 | "nbformat": 4, 299 | "nbformat_minor": 2 300 | } 301 | -------------------------------------------------------------------------------- /Tensorflow/02_BuildImage.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Build Docker Image\n", 8 | "In this notebook, we will build the Docker image that contains the ResNet 152 model, the Flask web application, the model driver, and all dependencies." 9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "import os\n", 18 | "from os import path\n", 19 | "import json\n", 20 | "import shutil\n", 21 | "%load_ext dotenv" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "We will be using the following Docker information to push the image to Docker Hub." 
29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 2, 34 | "metadata": { 35 | "tags": [ 36 | "parameters" 37 | ] 38 | }, 39 | "outputs": [ 40 | { 41 | "name": "stdout", 42 | "output_type": "stream", 43 | "text": [ 44 | "Overwriting .env\n" 45 | ] 46 | } 47 | ], 48 | "source": [ 49 | "%%writefile .env\n", 50 | "# This cell is tagged `parameters`\n", 51 | "# Please modify the values below as you see fit\n", 52 | "\n", 53 | "# Your Docker login and image repository name\n", 54 | "docker_login = \"YOUR_DOCKER_LOGIN\"\n", 55 | "image_repo = \"/tfresnet-gpu\"" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 5, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "os.makedirs('flaskwebapp', exist_ok=True)\n", 65 | "os.makedirs(os.path.join('flaskwebapp', 'nginx'), exist_ok=True)\n", 66 | "os.makedirs(os.path.join('flaskwebapp', 'etc'), exist_ok=True)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 6, 72 | "metadata": {}, 73 | "outputs": [ 74 | { 75 | "data": { 76 | "text/plain": [ 77 | "['etc', 'driver.py', 'synset.txt', 'nginx', 'resnet_v1_152.ckpt']" 78 | ] 79 | }, 80 | "execution_count": 6, 81 | "metadata": {}, 82 | "output_type": "execute_result" 83 | } 84 | ], 85 | "source": [ 86 | "shutil.copy('resnet_v1_152.ckpt', 'flaskwebapp')\n", 87 | "shutil.copy('synset.txt', 'flaskwebapp')\n", 88 | "shutil.copy('driver.py', 'flaskwebapp')\n", 89 | "os.listdir('flaskwebapp')" 90 | ] 91 | }, 92 | { 93 | "cell_type": "markdown", 94 | "metadata": {}, 95 | "source": [ 96 | "Below is the module for the Flask web application." 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": 7, 102 | "metadata": {}, 103 | "outputs": [ 104 | { 105 | "name": "stdout", 106 | "output_type": "stream", 107 | "text": [ 108 | "Writing flaskwebapp/app.py\n" 109 | ] 110 | } 111 | ], 112 | "source": [ 113 | "%%writefile flaskwebapp/app.py\n", 114 | "from flask import Flask, request, Response\n", 115 | "import time\n", 116 | "import logging\n", 117 | "import json\n", 118 | "import driver\n", 119 | "\n", 120 | "app = Flask(__name__)\n", 121 | "predict_for = driver.get_model_api()\n", 122 | "\n", 123 | "\n", 124 | "@app.route('/score', methods=['POST'])\n", 125 | "def scoreRRS():\n", 126 | "    \"\"\" Endpoint for scoring\n", 127 | "    \"\"\"\n", 128 | "    if request.headers['Content-Type'] != 'application/json':\n", 129 | "        return Response(json.dumps({}), status=415, mimetype='application/json')\n", 130 | "    request_input = request.json['input']\n", 131 | "    predictions = predict_for(request_input)\n", 132 | "    return json.dumps({'result': predictions})\n", 133 | "\n", 134 | "\n", 135 | "@app.route(\"/\")\n", 136 | "def healthy():\n", 137 | "    return \"Healthy\"\n", 138 | "\n", 139 | "\n", 140 | "@app.route('/version', methods=['GET'])\n", 141 | "def version_request():\n", 142 | "    return driver.version()\n", 143 | "\n", 144 | "\n", 145 | "if __name__ == \"__main__\":\n", 146 | "    app.run(host='0.0.0.0')  # Development server only; gunicorn serves the app in the container" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 8, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "name": "stdout", 156 | "output_type": "stream", 157 | "text": [ 158 | "Writing flaskwebapp/wsgi.py\n" 159 | ] 160 | } 161 | ], 162 | "source": [ 163 | "%%writefile flaskwebapp/wsgi.py\n", 164 | "import sys\n", 165 | "from app import app as application\n", 166 | "\n", 167 | "def create():\n", 168 | "    print(\"Initialising\")\n", 169 | "    application.run(host='127.0.0.1', port=5000)" 170 | ] 171 | },
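{ "cell_type": "markdown", "metadata": {}, "source": [ "As a hedged sketch of how the /score endpoint can be exercised once the app is running (it assumes the Flask development server has been started locally on port 5000, e.g. with python flaskwebapp/app.py; proper testing of the container itself is covered in [03_TestLocally.ipynb](03_TestLocally.ipynb)):" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import requests\n", "from testing_utilities import img_url_to_json\n", "\n", "# Assumes the Flask dev server is listening on 127.0.0.1:5000 (see app.py above).\n", "IMAGEURL = 'https://upload.wikimedia.org/wikipedia/commons/thumb/6/68/Lynx_lynx_poing.jpg/220px-Lynx_lynx_poing.jpg'\n", "jsonimg = img_url_to_json(IMAGEURL)\n", "r = requests.post('http://127.0.0.1:5000/score',\n", "                  data=jsonimg,\n", "                  headers={'Content-Type': 'application/json'})\n", "print(r.json()['result'])" ] },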
172 | { 173 | "cell_type": "markdown", 174 | "metadata": {}, 175 | "source": [ 176 | "The configuration for Nginx. Note that it creates a proxy between ports **80** and **5000**." 177 | ] 178 | }, 179 | { 180 | "cell_type": "code", 181 | "execution_count": 9, 182 | "metadata": {}, 183 | "outputs": [ 184 | { 185 | "name": "stdout", 186 | "output_type": "stream", 187 | "text": [ 188 | "Writing flaskwebapp/nginx/app\n" 189 | ] 190 | } 191 | ], 192 | "source": [ 193 | "%%writefile flaskwebapp/nginx/app\n", 194 | "server {\n", 195 | "    listen 80;\n", 196 | "    server_name _;\n", 197 | " \n", 198 | "    location / {\n", 199 | "    include proxy_params;\n", 200 | "    proxy_pass http://127.0.0.1:5000;\n", 201 | "    proxy_connect_timeout 5000s;\n", 202 | "    proxy_read_timeout 5000s;\n", 203 | "  }\n", 204 | "}" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 10, 210 | "metadata": {}, 211 | "outputs": [ 212 | { 213 | "name": "stdout", 214 | "output_type": "stream", 215 | "text": [ 216 | "Writing flaskwebapp/gunicorn_logging.conf\n" 217 | ] 218 | } 219 | ], 220 | "source": [ 221 | "%%writefile flaskwebapp/gunicorn_logging.conf\n", 222 | "\n", 223 | "[loggers]\n", 224 | "keys=root, gunicorn.error\n", 225 | "\n", 226 | "[handlers]\n", 227 | "keys=console\n", 228 | "\n", 229 | "[formatters]\n", 230 | "keys=json\n", 231 | "\n", 232 | "[logger_root]\n", 233 | "level=INFO\n", 234 | "handlers=console\n", 235 | "\n", 236 | "[logger_gunicorn.error]\n", 237 | "level=ERROR\n", 238 | "handlers=console\n", 239 | "propagate=0\n", 240 | "qualname=gunicorn.error\n", 241 | "\n", 242 | "[handler_console]\n", 243 | "class=StreamHandler\n", 244 | "formatter=json\n", 245 | "args=(sys.stdout, )\n", 246 | "\n", 247 | "[formatter_json]\n", 248 | "class=jsonlogging.JSONFormatter" 249 | ] 250 | }, 251 | { 252 | "cell_type": "code", 253 | "execution_count": 11, 254 | "metadata": {}, 255 | "outputs": [ 256 | { 257 | "name": "stdout", 258 | "output_type": "stream", 259 | "text": [ 260 | "Writing flaskwebapp/kill_supervisor.py\n" 261 | ] 262 | } 263 | ], 264 | "source": [ 265 | "%%writefile flaskwebapp/kill_supervisor.py\n", 266 | "import sys\n", 267 | "import os\n", 268 | "import signal\n", 269 | "\n", 270 | "\n", 271 | "def write_stdout(s):\n", 272 | "    sys.stdout.write(s)\n", 273 | "    sys.stdout.flush()\n", 274 | "\n", 275 | "# This function is adapted from the example event listener at http://supervisord.org/events.html#example-event-listener-implementation\n", 276 | "def main():\n", 277 | "    while True:\n", 278 | "        write_stdout('READY\\n')\n", 279 | "        # wait for the event on stdin that supervisord will send\n", 280 | "        line = sys.stdin.readline()\n", 281 | "        write_stdout('Killing supervisor with this event: ' + line)\n", 282 | "        try:\n", 283 | "            # supervisord writes its pid to its pidfile, from which we read it here; see supervisord.conf\n", 284 | "            pidfile = open('/tmp/supervisord.pid', 'r')\n", 285 | "            pid = int(pidfile.readline())\n", 286 | "            os.kill(pid, signal.SIGQUIT)\n", 287 | "        except Exception as e:\n", 288 | "            write_stdout('Could not kill supervisor: ' + e.strerror + '\\n')\n", 289 | "        write_stdout('RESULT 2\\nOK')\n", 290 | "\n", 291 | "main()\n" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": 12, 297 | "metadata": {}, 298 | "outputs": [ 299 | { 300 | "name": "stdout", 301 | "output_type": "stream", 302 | "text": [ 303 | "Writing flaskwebapp/etc/supervisord.conf\n" 304 | ] 305 | } 306 | ], 307 | "source": [
\n", 309 | "[supervisord]\n", 310 | "logfile=/tmp/supervisord.log ; (main log file;default $CWD/supervisord.log)\n", 311 | "logfile_maxbytes=50MB ; (max main logfile bytes b4 rotation;default 50MB)\n", 312 | "logfile_backups=10 ; (num of main logfile rotation backups;default 10)\n", 313 | "loglevel=info ; (log level;default info; others: debug,warn,trace)\n", 314 | "pidfile=/tmp/supervisord.pid ; (supervisord pidfile;default supervisord.pid)\n", 315 | "nodaemon=true ; (start in foreground if true;default false)\n", 316 | "minfds=1024 ; (min. avail startup file descriptors;default 1024)\n", 317 | "minprocs=200 ; (min. avail process descriptors;default 200)\n", 318 | "\n", 319 | "[program:gunicorn]\n", 320 | "command=bash -c \"gunicorn --workers 1 -m 007 --timeout 100000 --capture-output --error-logfile - --log-config gunicorn_logging.conf \\\"wsgi:create()\\\"\"\n", 321 | "directory=/code\n", 322 | "redirect_stderr=true\n", 323 | "stdout_logfile =/dev/stdout\n", 324 | "stdout_logfile_maxbytes=0\n", 325 | "startretries=2\n", 326 | "startsecs=20\n", 327 | "\n", 328 | "[program:nginx]\n", 329 | "command=/usr/sbin/nginx -g \"daemon off;\"\n", 330 | "startretries=2\n", 331 | "startsecs=5\n", 332 | "priority=3\n", 333 | "\n", 334 | "[eventlistener:program_exit]\n", 335 | "command=python kill_supervisor.py\n", 336 | "directory=/code\n", 337 | "events=PROCESS_STATE_FATAL\n", 338 | "priority=2" 339 | ] 340 | }, 341 | { 342 | "cell_type": "markdown", 343 | "metadata": {}, 344 | "source": [ 345 | "We create a custom image based on the CUDA 9 image from NVIDIA and install all the necessary dependencies. This is in order to try and keep the size of the image as small as possible." 346 | ] 347 | }, 348 | { 349 | "cell_type": "code", 350 | "execution_count": 13, 351 | "metadata": {}, 352 | "outputs": [ 353 | { 354 | "name": "stdout", 355 | "output_type": "stream", 356 | "text": [ 357 | "Writing flaskwebapp/requirements.txt\n" 358 | ] 359 | } 360 | ], 361 | "source": [ 362 | "%%writefile flaskwebapp/requirements.txt\n", 363 | "pillow\n", 364 | "click==6.7\n", 365 | "configparser==3.5.0\n", 366 | "Flask==0.11.1\n", 367 | "gunicorn==19.6.0\n", 368 | "json-logging-py==0.2\n", 369 | "MarkupSafe==1.0\n", 370 | "olefile==0.44\n", 371 | "requests==2.12.3" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": 14, 377 | "metadata": {}, 378 | "outputs": [ 379 | { 380 | "name": "stdout", 381 | "output_type": "stream", 382 | "text": [ 383 | "Writing flaskwebapp/dockerfile\n" 384 | ] 385 | } 386 | ], 387 | "source": [ 388 | "%%writefile flaskwebapp/dockerfile\n", 389 | "\n", 390 | "FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04\n", 391 | "MAINTAINER Mathew Salvaris \n", 392 | "\n", 393 | "RUN echo \"deb http://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1604/x86_64 /\" > /etc/apt/sources.list.d/nvidia-ml.list\n", 394 | "\n", 395 | "RUN mkdir /code\n", 396 | "WORKDIR /code\n", 397 | "ADD . 
397 | "ADD . /code/\n", 398 | "ADD etc /etc\n", 399 | "\n", 400 | "RUN apt-get update && apt-get install -y --no-install-recommends \\\n", 401 | "    build-essential \\\n", 402 | "    ca-certificates \\\n", 403 | "    cmake \\\n", 404 | "    curl \\\n", 405 | "    git \\\n", 406 | "    nginx \\\n", 407 | "    supervisor \\\n", 408 | "    wget && \\\n", 409 | "    rm -rf /var/lib/apt/lists/*\n", 410 | "\n", 411 | "ENV PYTHON_VERSION=3.5\n", 412 | "RUN curl -o ~/miniconda.sh -O https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh && \\\n", 413 | "    chmod +x ~/miniconda.sh && \\\n", 414 | "    ~/miniconda.sh -b -p /opt/conda && \\\n", 415 | "    rm ~/miniconda.sh && \\\n", 416 | "    /opt/conda/bin/conda create -y --name py$PYTHON_VERSION python=$PYTHON_VERSION numpy scipy pandas scikit-learn && \\\n", 417 | "    /opt/conda/bin/conda clean -ya\n", 418 | "ENV PATH /opt/conda/envs/py$PYTHON_VERSION/bin:$PATH\n", 419 | "ENV LD_LIBRARY_PATH /opt/conda/envs/py$PYTHON_VERSION/lib:/usr/local/cuda/lib64/:$LD_LIBRARY_PATH\n", 420 | "ENV PYTHONPATH /code/:$PYTHONPATH\n", 421 | "\n", 422 | "RUN rm /etc/nginx/sites-enabled/default && \\\n", 423 | "    cp /code/nginx/app /etc/nginx/sites-available/ && \\\n", 424 | "    ln -s /etc/nginx/sites-available/app /etc/nginx/sites-enabled/ && \\\n", 425 | "    pip install tensorflow-gpu==1.9.0 && \\\n", 426 | "    pip install -r /code/requirements.txt\n", 427 | "\n", 428 | "EXPOSE 80\n", 429 | "CMD [\"supervisord\", \"-c\", \"/etc/supervisord.conf\"]" 430 | ] 431 | }, 432 | { 433 | "cell_type": "markdown", 434 | "metadata": {}, 435 | "source": [ 436 | "The image name below refers to our Docker Hub account. If you wish to push the image to your own account, make sure you change the Docker login." 437 | ] 438 | }, 439 | { 440 | "cell_type": "code", 441 | "execution_count": 15, 442 | "metadata": {}, 443 | "outputs": [], 444 | "source": [ 445 | "%dotenv\n", 446 | "image_name = os.getenv('docker_login') + os.getenv('image_repo')\n", 447 | "application_path = 'flaskwebapp'\n", 448 | "docker_file_location = path.join(application_path, 'dockerfile')" 449 | ] 450 | }, 451 | { 452 | "cell_type": "markdown", 453 | "metadata": {}, 454 | "source": [ 455 | "Next, we build our Docker image. The output of this cell is cleared from this notebook, as it is quite long due to all the installations required to build the image. However, you should make sure you see *Successfully built* and *Successfully tagged* messages in the last lines of the output when you run the cell. " 456 | ] 457 | }, 458 | { 459 | "cell_type": "code", 460 | "execution_count": null, 461 | "metadata": { 462 | "scrolled": true 463 | }, 464 | "outputs": [], 465 | "source": [ 466 | "!docker build -t $image_name -f $docker_file_location $application_path" 467 | ] 468 | },
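{ "cell_type": "markdown", "metadata": {}, "source": [ "Optionally, we can confirm the tagged image now exists locally before pushing it (this just lists the image; running and testing the container is covered in [03_TestLocally.ipynb](03_TestLocally.ipynb)):" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "!docker images $image_name" ] },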
469 | { 470 | "cell_type": "markdown", 471 | "metadata": {}, 472 | "source": [ 473 | "Below we will push the image we created to our Docker Hub registry. Make sure you have already logged in to the appropriate Docker Hub account using the docker login command; if you haven't logged in to the appropriate account, you will get an error." 474 | ] 475 | }, 476 | { 477 | "cell_type": "code", 478 | "execution_count": 17, 479 | "metadata": {}, 480 | "outputs": [ 481 | { 482 | "name": "stdout", 483 | "output_type": "stream", 484 | "text": [ 485 | "The push refers to repository [docker.io/caia/tfresnet-gpu]\n", 486 | "\n", 487 | "972d3bd5: Preparing \n", 488 | "7660082a: Preparing \n", 489 | "cdacb173: Preparing \n", 490 | "d7bb955d: Preparing \n", 491 | "9d4cefeb: Preparing \n", 492 | "f7f1d023: Preparing \n", 493 | "006e8a0c: Preparing \n", 494 | "a273db2b: Preparing \n", 495 | "35faacf3: Preparing \n", 496 | "6f924f59: Preparing \n", 497 | "5cb8493b: Preparing \n", 498 | "62ee5fac: Preparing \n", 499 | "28798916: Preparing \n", 500 | "dd00b1a4: Preparing \n", 501 | "c3c04cbd: Preparing \n", 502 | "daf493f1: Preparing \n", 503 | "88d0e278: Preparing \n", 504 | "660082a: Pushing 1.024GB/1.267GB ... (remaining push progress output, consisting almost entirely of terminal control sequences, elided) 
[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[18A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[K\u001b[17A\u001b[1K\u001b[Klatest: digest: sha256:fee37694784bc8e8b40911ce0eed46cb38ea8d32254406a603588aab304ce148 size: 4096\n" 505 | ] 506 | } 507 | ], 508 | "source": [ 509 | "!docker push $image_name" 510 | ] 511 | }, 512 | { 513 | "cell_type": "code", 514 | "execution_count": 18, 515 | "metadata": {}, 516 | "outputs": [ 517 | { 518 | "name": "stdout", 519 | "output_type": "stream", 520 | "text": [ 521 | "Docker image name caia/tfresnet-gpu\n" 522 | ] 523 | } 524 | ], 525 | "source": [ 526 | "print('Docker image name {}'.format(image_name)) " 527 | ] 528 | }, 529 | { 530 | "cell_type": "markdown", 531 | "metadata": { 532 | "collapsed": true 533 | }, 534 | "source": [ 535 | "### Test locally\n", 536 | "Go to the [Test Locally notebook](03_TestLocally.ipynb) to test your Docker image" 537 | ] 538 | } 539 | ], 540 | "metadata": { 541 | "anaconda-cloud": {}, 542 | "kernelspec": { 543 | "display_name": "Python [conda env:AKSDeployment]", 544 | "language": "python", 545 | "name": "conda-env-AKSDeployment-py" 546 | }, 547 | "language_info": { 548 | "codemirror_mode": { 549 | "name": "ipython", 550 | "version": 3 551 | }, 552 | "file_extension": ".py", 553 | "mimetype": "text/x-python", 554 | "name": "python", 555 | "nbconvert_exporter": "python", 556 | "pygments_lexer": "ipython3", 557 | "version": "3.5.5" 558 | } 559 | }, 560 | "nbformat": 4, 561 | "nbformat_minor": 1 562 | } 563 | -------------------------------------------------------------------------------- /Tensorflow/04_DeployOnAKS.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "collapsed": true 7 | }, 8 | "source": [ 9 | "### Deploy Web App on Azure Container Services (AKS)\n", 10 | "In this notebook, we will set up an Azure Container Service which will be managed by Kubernetes. We will then take the Docker image we created earlier that contains our app and deploy it to the AKS cluster. 
Then, we will check everything is working by sending an image to it and getting it scored.\n", 11 | " \n", 12 | "The process is split into the following steps:\n", 13 | "* [Define our resource names](#section1)\n", 14 | "* [Login to Azure](#section2)\n", 15 | "* [Create resource group and create AKS](#section3)\n", 16 | "* [Connect to AKS](#section4)\n", 17 | "* [Deploy our app](#section5)\n", 18 | "\n", 19 | "This guide is designed to be run on Linux and requires that the Azure CLI is installed." 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 1, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "import os\n", 29 | "import json\n", 30 | "from testing_utilities import write_json_to_file\n", 31 | "%load_ext dotenv" 32 | ] 33 | }, 34 | { 35 | "cell_type": "markdown", 36 | "metadata": {}, 37 | "source": [ 38 | "\n", 39 | "## Setup\n", 40 | "Below are the various name definitions for the resources needed to set up AKS." 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 6, 46 | "metadata": { 47 | "tags": [ 48 | "parameters" 49 | ] 50 | }, 51 | "outputs": [ 52 | { 53 | "name": "stdout", 54 | "output_type": "stream", 55 | "text": [ 56 | "Appending to .env\n" 57 | ] 58 | } 59 | ], 60 | "source": [ 61 | "%%writefile --append .env\n", 62 | "# This cell is tagged `parameters`\n", 63 | "# Please modify the values below as you see fit\n", 64 | "\n", 65 | "# If you have multiple subscriptions select the subscription you want to use \n", 66 | "selected_subscription = \"Team Danielle Internal\"\n", 67 | "\n", 68 | "# Resource group, name and location for AKS cluster.\n", 69 | "resource_group = \"mabouaks\" \n", 70 | "aks_name = \"mabouaks\"\n", 71 | "location = \"eastus\"" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 10, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "%dotenv -o\n", 81 | "image_name = os.getenv('docker_login') + os.getenv('image_repo')" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "\n", 89 | "## Azure account login\n", 90 | "If you are not already logged in to an Azure account, the command below will initiate a login. It will pop up a browser where you can select an Azure account." 
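For reference, the `image_name` assembled above is just the concatenation of two values that the earlier notebooks appended to `.env`. A minimal sketch of the same lookup in plain Python — how the value splits between `docker_login` and `image_repo` is an assumption here, not taken from the notebooks:

```python
# Sketch: reproduce what `%dotenv -o` plus the image_name cell above do.
# `docker_login` and `image_repo` come from the .env file written by the
# earlier notebooks; e.g. docker_login=caia and image_repo=/tfresnet-gpu
# (a hypothetical split) would yield "caia/tfresnet-gpu" as printed earlier.
import os
from dotenv import load_dotenv

load_dotenv(override=True)  # same effect as the `%dotenv -o` magic
image_name = os.getenv('docker_login') + os.getenv('image_repo')
print(image_name)
```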
91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 11, 96 | "metadata": { 97 | "scrolled": false 98 | }, 99 | "outputs": [ 100 | { 101 | "name": "stdout", 102 | "output_type": "stream", 103 | "text": [ 104 | "Name CloudName SubscriptionId State IsDefault\n", 105 | "---------------------------------------------- ----------- ------------------------------------ ------- -----------\n", 106 | "Boston DS Dev AzureCloud 0ca618d2-22a8-413a-96d0-0f1b531129c3 Enabled False\n", 107 | "Solution Template Testing AzureCloud 3bcfa59c-82a0-44f9-ac08-b3479370bace Enabled False\n", 108 | "DEMO - how RepDemo are you AzureCloud fe4d94f0-dc5b-4c09-9b85-863413b0192b Enabled False\n", 109 | "Microsoft Azure Internal - Demos AzureCloud b3a823e7-7472-4111-becc-9a211a558a05 Enabled False\n", 110 | "Azure Stack Diagnostics CI and Production VaaS AzureCloud a8183b2d-7a4c-45e9-8736-dac11b84ff14 Enabled False\n", 111 | "Azure Cat E2E AzureCloud fc4ea3c9-1d30-4f18-b33b-7404e7da0123 Enabled False\n", 112 | "Core-ES-BLD AzureCloud 54e18c35-3863-4a17-8e52-b5aa1e65847e Enabled False\n", 113 | "Cosmos_WDG_Core_BnB_100348 AzureCloud dae41bd3-9db4-4b9b-943e-832b57cac828 Enabled False\n", 114 | "CFPlatformBuild AzureCloud e326c6f8-2c46-450f-9706-a03b90f06a0f Enabled False\n", 115 | "Data Wrangling Preview AzureCloud 215613ac-9dfb-488c-be46-c387e999b127 Enabled False\n", 116 | "Microsoft Azure Internal - Mohamed AzureCloud cc53b927-25cb-43c2-a741-a50b97c46532 Enabled False\n", 117 | "Boston-DS-Brandon-Dev AzureCloud e984a9db-1a27-4f54-98fc-282cf0dcda04 Enabled False\n", 118 | "Team Danielle Internal AzureCloud edf507a2-6235-46c5-b560-fd463ba2e771 Enabled True\n", 119 | "Boston Engineering AzureCloud bc4170f0-cc6e-49d2-ba65-bc00a7a4df6b Enabled False\n", 120 | "Team TJ AzureCloud 0eccc365-be66-4b08-a242-3c6fdb53cb61 Enabled False\n", 121 | "Azure Internal - TATK AzureCloud 872ff0da-188e-4461-8cf7-26e1c3e28ebb Enabled False\n" 122 | ] 123 | } 124 | ], 125 | "source": [ 126 | "%%bash\n", 127 | "list=`az account list -o table`\n", 128 | "if [ \"$list\" == '[]' ] || [ \"$list\" == '' ]; then \n", 129 | " az login -o table\n", 130 | "else\n", 131 | " az account list -o table \n", 132 | "fi" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 12, 138 | "metadata": {}, 139 | "outputs": [ 140 | { 141 | "name": "stdout", 142 | "output_type": "stream", 143 | "text": [ 144 | "\u001b[0m" 145 | ] 146 | } 147 | ], 148 | "source": [ 149 | "!az account set --subscription \"$selected_subscription\"" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 13, 155 | "metadata": {}, 156 | "outputs": [ 157 | { 158 | "name": "stdout", 159 | "output_type": "stream", 160 | "text": [ 161 | "{\r\n", 162 | " \"environmentName\": \"AzureCloud\",\r\n", 163 | " \"id\": \"edf507a2-6235-46c5-b560-fd463ba2e771\",\r\n", 164 | " \"isDefault\": true,\r\n", 165 | " \"name\": \"Team Danielle Internal\",\r\n", 166 | " \"state\": \"Enabled\",\r\n", 167 | " \"tenantId\": \"72f988bf-86f1-41af-91ab-2d7cd011db47\",\r\n", 168 | " \"user\": {\r\n", 169 | " \"name\": \"mabou@microsoft.com\",\r\n", 170 | " \"type\": \"user\"\r\n", 171 | " }\r\n", 172 | "}\r\n", 173 | "\u001b[0m" 174 | ] 175 | } 176 | ], 177 | "source": [ 178 | "!az account show" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 14, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "name": "stdout", 188 | "output_type": "stream", 189 | "text": [ 190 | "\u001b[33mRegistering is still on-going. 
You can monitor using 'az provider show -n Microsoft.ContainerService'\u001b[0m\r\n", 191 | "\u001b[0m" 192 | ] 193 | } 194 | ], 195 | "source": [ 196 | "!az provider register -n Microsoft.ContainerService" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": 15, 202 | "metadata": {}, 203 | "outputs": [ 204 | { 205 | "name": "stdout", 206 | "output_type": "stream", 207 | "text": [ 208 | "{\r\n", 209 | " \"authorization\": {\r\n", 210 | " \"applicationId\": \"7319c514-987d-4e9b-ac3d-d38c4f427f4c\",\r\n", 211 | " \"managedByRoleDefinitionId\": \"8e3af657-a8ff-443c-a75c-2fe8c4bcb635\",\r\n", 212 | " \"roleDefinitionId\": \"1b4a0c7f-2217-416f-acfa-cf73452fdc1c\"\r\n", 213 | " },\r\n", 214 | " \"id\": \"/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/providers/Microsoft.ContainerService\",\r\n", 215 | " \"namespace\": \"Microsoft.ContainerService\",\r\n", 216 | " \"registrationState\": \"Registered\",\r\n", 217 | " \"resourceTypes\": [\r\n", 218 | " {\r\n", 219 | " \"aliases\": null,\r\n", 220 | " \"apiVersions\": [\r\n", 221 | " \"2017-07-01\",\r\n", 222 | " \"2017-01-31\",\r\n", 223 | " \"2016-09-30\",\r\n", 224 | " \"2016-03-30\"\r\n", 225 | " ],\r\n", 226 | " \"capabilities\": \"None\",\r\n", 227 | " \"locations\": [\r\n", 228 | " \"Japan East\",\r\n", 229 | " \"Central US\",\r\n", 230 | " \"East US 2\",\r\n", 231 | " \"Japan West\",\r\n", 232 | " \"East Asia\",\r\n", 233 | " \"South Central US\",\r\n", 234 | " \"Australia East\",\r\n", 235 | " \"Australia Southeast\",\r\n", 236 | " \"Brazil South\",\r\n", 237 | " \"Southeast Asia\",\r\n", 238 | " \"West US\",\r\n", 239 | " \"North Central US\",\r\n", 240 | " \"West Europe\",\r\n", 241 | " \"North Europe\",\r\n", 242 | " \"East US\",\r\n", 243 | " \"UK West\",\r\n", 244 | " \"UK South\",\r\n", 245 | " \"West Central US\",\r\n", 246 | " \"West US 2\",\r\n", 247 | " \"South India\",\r\n", 248 | " \"Central India\",\r\n", 249 | " \"West India\",\r\n", 250 | " \"Canada East\",\r\n", 251 | " \"Canada Central\",\r\n", 252 | " \"Korea South\",\r\n", 253 | " \"Korea Central\"\r\n", 254 | " ],\r\n", 255 | " \"properties\": null,\r\n", 256 | " \"resourceType\": \"containerServices\"\r\n", 257 | " },\r\n", 258 | " {\r\n", 259 | " \"aliases\": null,\r\n", 260 | " \"apiVersions\": [\r\n", 261 | " \"2018-03-31\",\r\n", 262 | " \"2017-08-31\"\r\n", 263 | " ],\r\n", 264 | " \"capabilities\": \"None\",\r\n", 265 | " \"locations\": [\r\n", 266 | " \"East US\",\r\n", 267 | " \"West Europe\",\r\n", 268 | " \"Central US\",\r\n", 269 | " \"Canada Central\",\r\n", 270 | " \"Canada East\",\r\n", 271 | " \"UK South\",\r\n", 272 | " \"West US\",\r\n", 273 | " \"West US 2\",\r\n", 274 | " \"Australia East\",\r\n", 275 | " \"North Europe\",\r\n", 276 | " \"Japan East\",\r\n", 277 | " \"East US 2\",\r\n", 278 | " \"Southeast Asia\"\r\n", 279 | " ],\r\n", 280 | " \"properties\": null,\r\n", 281 | " \"resourceType\": \"managedClusters\"\r\n", 282 | " },\r\n", 283 | " {\r\n", 284 | " \"aliases\": null,\r\n", 285 | " \"apiVersions\": [\r\n", 286 | " \"2017-08-31\",\r\n", 287 | " \"2017-01-31\",\r\n", 288 | " \"2016-09-30\",\r\n", 289 | " \"2016-03-30\",\r\n", 290 | " \"2015-11-01-preview\"\r\n", 291 | " ],\r\n", 292 | " \"locations\": [],\r\n", 293 | " \"properties\": null,\r\n", 294 | " \"resourceType\": \"locations\"\r\n", 295 | " },\r\n", 296 | " {\r\n", 297 | " \"aliases\": null,\r\n", 298 | " \"apiVersions\": [\r\n", 299 | " \"2017-08-31\",\r\n", 300 | " \"2016-03-30\"\r\n", 301 | " ],\r\n", 302 | " \"locations\": [\r\n", 303 | " \"East 
US\",\r\n", 304 | " \"West Europe\",\r\n", 305 | " \"Central US\",\r\n", 306 | " \"UK West\",\r\n", 307 | " \"West Central US\",\r\n", 308 | " \"West US 2\",\r\n", 309 | " \"South India\",\r\n", 310 | " \"Central India\",\r\n", 311 | " \"West India\",\r\n", 312 | " \"Canada East\",\r\n", 313 | " \"Canada Central\",\r\n", 314 | " \"Korea South\",\r\n", 315 | " \"Korea Central\",\r\n", 316 | " \"UK South\",\r\n", 317 | " \"Australia East\",\r\n", 318 | " \"North Europe\",\r\n", 319 | " \"Japan East\",\r\n", 320 | " \"East US 2\",\r\n", 321 | " \"Southeast Asia\"\r\n", 322 | " ],\r\n", 323 | " \"properties\": null,\r\n", 324 | " \"resourceType\": \"locations/operationresults\"\r\n", 325 | " },\r\n", 326 | " {\r\n", 327 | " \"aliases\": null,\r\n", 328 | " \"apiVersions\": [\r\n", 329 | " \"2017-07-01\",\r\n", 330 | " \"2017-01-31\",\r\n", 331 | " \"2016-09-30\",\r\n", 332 | " \"2016-03-30\"\r\n", 333 | " ],\r\n", 334 | " \"locations\": [\r\n", 335 | " \"Japan East\",\r\n", 336 | " \"Central US\",\r\n", 337 | " \"East US 2\",\r\n", 338 | " \"Japan West\",\r\n", 339 | " \"East Asia\",\r\n", 340 | " \"South Central US\",\r\n", 341 | " \"Australia East\",\r\n", 342 | " \"Australia Southeast\",\r\n", 343 | " \"Brazil South\",\r\n", 344 | " \"Southeast Asia\",\r\n", 345 | " \"West US\",\r\n", 346 | " \"North Central US\",\r\n", 347 | " \"West Europe\",\r\n", 348 | " \"North Europe\",\r\n", 349 | " \"East US\",\r\n", 350 | " \"UK West\",\r\n", 351 | " \"UK South\",\r\n", 352 | " \"West Central US\",\r\n", 353 | " \"West US 2\",\r\n", 354 | " \"South India\",\r\n", 355 | " \"Central India\",\r\n", 356 | " \"West India\",\r\n", 357 | " \"Canada East\",\r\n", 358 | " \"Canada Central\",\r\n", 359 | " \"Korea South\",\r\n", 360 | " \"Korea Central\"\r\n", 361 | " ],\r\n", 362 | " \"properties\": null,\r\n", 363 | " \"resourceType\": \"locations/operations\"\r\n", 364 | " },\r\n", 365 | " {\r\n", 366 | " \"aliases\": null,\r\n", 367 | " \"apiVersions\": [\r\n", 368 | " \"2018-03-31\",\r\n", 369 | " \"2017-08-31\",\r\n", 370 | " \"2017-07-01\",\r\n", 371 | " \"2017-01-31\",\r\n", 372 | " \"2016-09-30\",\r\n", 373 | " \"2016-03-30\",\r\n", 374 | " \"2015-11-01-preview\"\r\n", 375 | " ],\r\n", 376 | " \"locations\": [],\r\n", 377 | " \"properties\": null,\r\n", 378 | " \"resourceType\": \"operations\"\r\n", 379 | " },\r\n", 380 | " {\r\n", 381 | " \"aliases\": null,\r\n", 382 | " \"apiVersions\": [\r\n", 383 | " \"2017-09-30\"\r\n", 384 | " ],\r\n", 385 | " \"locations\": [\r\n", 386 | " \"East US\",\r\n", 387 | " \"West Europe\",\r\n", 388 | " \"Central US\",\r\n", 389 | " \"Canada East\",\r\n", 390 | " \"Canada Central\",\r\n", 391 | " \"UK South\",\r\n", 392 | " \"West US\",\r\n", 393 | " \"West US 2\",\r\n", 394 | " \"Australia East\",\r\n", 395 | " \"North Europe\",\r\n", 396 | " \"Japan East\",\r\n", 397 | " \"East US 2\",\r\n", 398 | " \"Southeast Asia\"\r\n", 399 | " ],\r\n", 400 | " \"properties\": null,\r\n", 401 | " \"resourceType\": \"locations/orchestrators\"\r\n", 402 | " }\r\n", 403 | " ]\r\n", 404 | "}\r\n", 405 | "\u001b[0m" 406 | ] 407 | } 408 | ], 409 | "source": [ 410 | "!az provider show -n Microsoft.ContainerService" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "metadata": {}, 416 | "source": [ 417 | "\n", 418 | "## Create resource group and create AKS" 419 | ] 420 | }, 421 | { 422 | "cell_type": "markdown", 423 | "metadata": {}, 424 | "source": [ 425 | "### Create resource group\n", 426 | "Azure encourages the use of groups to organise all the Azure components you 
deploy. That way it is easier to find them, and we can also delete a number of resources simply by deleting the group." 427 | ] 428 | }, 429 | { 430 | "cell_type": "code", 431 | "execution_count": 16, 432 | "metadata": {}, 433 | "outputs": [ 434 | { 435 | "name": "stdout", 436 | "output_type": "stream", 437 | "text": [ 438 | "{\r\n", 439 | " \"id\": \"/subscriptions/edf507a2-6235-46c5-b560-fd463ba2e771/resourceGroups/mabouaks\",\r\n", 440 | " \"location\": \"eastus\",\r\n", 441 | " \"managedBy\": null,\r\n", 442 | " \"name\": \"mabouaks\",\r\n", 443 | " \"properties\": {\r\n", 444 | " \"provisioningState\": \"Succeeded\"\r\n", 445 | " },\r\n", 446 | " \"tags\": null\r\n", 447 | "}\r\n", 448 | "\u001b[0m" 449 | ] 450 | } 451 | ], 452 | "source": [ 453 | " !az group create --name $resource_group --location $location" 454 | ] 455 | }, 456 | { 457 | "cell_type": "markdown", 458 | "metadata": {}, 459 | "source": [ 460 | "Below, we create the AKS cluster in the resource group we created earlier. This can take up to 15 minutes." 461 | ] 462 | }, 463 | { 464 | "cell_type": "code", 465 | "execution_count": null, 466 | "metadata": {}, 467 | "outputs": [ 468 | { 469 | "name": "stdout", 470 | "output_type": "stream", 471 | "text": [ 472 | "\u001b[33mSSH key files '/home/mabou/.ssh/id_rsa' and '/home/mabou/.ssh/id_rsa.pub' have been generated under ~/.ssh to allow SSH access to the VM. If using machines without permanent storage like Azure Cloud Shell without an attached file share, back up your keys to a safe location\u001b[0m\n", 473 | "\u001b[K - Running ...principal creation[##################################] 100.0000%\r" 474 | ] 475 | } 476 | ], 477 | "source": [ 478 | "!az aks create --resource-group $resource_group --name $aks_name --node-count 1 --generate-ssh-keys -s Standard_NC6" 479 | ] 480 | }, 481 | { 482 | "cell_type": "markdown", 483 | "metadata": {}, 484 | "source": [ 485 | "### Install kubectl CLI\n", 486 | "\n", 487 | "To connect to the Kubernetes cluster, we will use kubectl, the Kubernetes command-line client. To install, run the following:" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": null, 493 | "metadata": {}, 494 | "outputs": [], 495 | "source": [ 496 | "!sudo az aks install-cli" 497 | ] 498 | }, 499 | { 500 | "cell_type": "markdown", 501 | "metadata": {}, 502 | "source": [ 503 | "\n", 504 | "## Connect to AKS cluster\n", 505 | "\n", 506 | "To configure kubectl to connect to the Kubernetes cluster, run the following command:" 507 | ] 508 | }, 509 | { 510 | "cell_type": "code", 511 | "execution_count": null, 512 | "metadata": {}, 513 | "outputs": [], 514 | "source": [ 515 | "!az aks get-credentials --resource-group $resource_group --name $aks_name" 516 | ] 517 | }, 518 | { 519 | "cell_type": "markdown", 520 | "metadata": {}, 521 | "source": [ 522 | "Let's verify the connection by listing the nodes." 523 | ] 524 | }, 525 | { 526 | "cell_type": "code", 527 | "execution_count": null, 528 | "metadata": {}, 529 | "outputs": [], 530 | "source": [ 531 | "!kubectl get nodes" 532 | ] 533 | }, 534 | { 535 | "cell_type": "markdown", 536 | "metadata": {}, 537 | "source": [ 538 | "Let's check the pods on our cluster." 
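Provisioning can return before the GPU node is actually schedulable, so it may be worth polling until every node reports `Ready` before deploying anything. A small helper along these lines — not part of the original notebooks, just a sketch that shells out to the same `kubectl get nodes` call used above:

```python
# Hypothetical helper: block until every node in the cluster reports Ready.
import subprocess
import time

def wait_for_nodes_ready(timeout_s=600, poll_s=15):
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        out = subprocess.check_output(
            ["kubectl", "get", "nodes", "--no-headers"]).decode("utf-8")
        # The second column of `kubectl get nodes` output is the STATUS field.
        statuses = [line.split()[1] for line in out.splitlines() if line.strip()]
        if statuses and all(s == "Ready" for s in statuses):
            return True
        time.sleep(poll_s)
    return False

wait_for_nodes_ready()
```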
539 | ] 540 | }, 541 | { 542 | "cell_type": "code", 543 | "execution_count": null, 544 | "metadata": {}, 545 | "outputs": [], 546 | "source": [ 547 | "!kubectl get pods --all-namespaces" 548 | ] 549 | }, 550 | { 551 | "cell_type": "markdown", 552 | "metadata": {}, 553 | "source": [ 554 | "\n", 555 | "## Deploy application\n", 556 | "\n", 557 | "Below we define our Kubernetes manifest file for our service and load balancer. Note that we have to specify volume mounts for the NVIDIA drivers located on the node.\n" 558 | ] 559 | }, 560 | { 561 | "cell_type": "code", 562 | "execution_count": null, 563 | "metadata": {}, 564 | "outputs": [], 565 | "source": [ 566 | "app_template = {\n", 567 | " \"apiVersion\": \"apps/v1beta1\",\n", 568 | " \"kind\": \"Deployment\",\n", 569 | " \"metadata\": {\n", 570 | " \"name\": \"azure-dl\"\n", 571 | " },\n", 572 | " \"spec\":{\n", 573 | " \"replicas\":1,\n", 574 | " \"template\":{\n", 575 | " \"metadata\":{\n", 576 | " \"labels\":{\n", 577 | " \"app\":\"azure-dl\"\n", 578 | " }\n", 579 | " },\n", 580 | " \"spec\":{\n", 581 | " \"containers\":[\n", 582 | " {\n", 583 | " \"name\": \"azure-dl\",\n", 584 | " \"image\": image_name,\n", 585 | " \"env\":[\n", 586 | " {\n", 587 | " \"name\": \"LD_LIBRARY_PATH\",\n", 588 | " \"value\": \"$LD_LIBRARY_PATH:/usr/local/nvidia/lib64:/opt/conda/envs/py3.6/lib\"\n", 589 | " }\n", 590 | " ],\n", 591 | " \"ports\":[\n", 592 | " {\n", 593 | " \"containerPort\":80,\n", 594 | " \"name\":\"model\"\n", 595 | " }\n", 596 | " ],\n", 597 | " \"volumeMounts\":[\n", 598 | " {\n", 599 | " \"mountPath\": \"/usr/local/nvidia\",\n", 600 | " \"name\": \"nvidia\"\n", 601 | " }\n", 602 | " ],\n", 603 | " \"resources\":{\n", 604 | " \"requests\":{\n", 605 | " \"alpha.kubernetes.io/nvidia-gpu\": 1\n", 606 | " },\n", 607 | " \"limits\":{\n", 608 | " \"alpha.kubernetes.io/nvidia-gpu\": 1\n", 609 | " }\n", 610 | " } \n", 611 | " }\n", 612 | " ],\n", 613 | " \"volumes\":[\n", 614 | " {\n", 615 | " \"name\": \"nvidia\",\n", 616 | " \"hostPath\":{\n", 617 | " \"path\":\"/usr/local/nvidia\"\n", 618 | " },\n", 619 | " },\n", 620 | " ]\n", 621 | " }\n", 622 | " }\n", 623 | " }\n", 624 | "}\n", 625 | "\n", 626 | "service_temp = {\n", 627 | " \"apiVersion\": \"v1\",\n", 628 | " \"kind\": \"Service\",\n", 629 | " \"metadata\": {\n", 630 | " \"name\": \"azure-dl\"\n", 631 | " },\n", 632 | " \"spec\":{\n", 633 | " \"type\": \"LoadBalancer\",\n", 634 | " \"ports\":[\n", 635 | " {\n", 636 | " \"port\":80\n", 637 | " }\n", 638 | " ],\n", 639 | " \"selector\":{\n", 640 | " \"app\":\"azure-dl\"\n", 641 | " }\n", 642 | " }\n", 643 | "}" 644 | ] 645 | }, 646 | { 647 | "cell_type": "code", 648 | "execution_count": null, 649 | "metadata": {}, 650 | "outputs": [], 651 | "source": [ 652 | "write_json_to_file(app_template, 'az-dl.json') # We write the deployment template to the json file" 653 | ] 654 | }, 655 | { 656 | "cell_type": "code", 657 | "execution_count": null, 658 | "metadata": {}, 659 | "outputs": [], 660 | "source": [ 661 | "write_json_to_file(service_temp, 'az-dl.json', mode='a') # We add the load balancer template to the json file" 662 | ] 663 | }, 664 | { 665 | "cell_type": "markdown", 666 | "metadata": {}, 667 | "source": [ 668 | "Let's check the manifest we created." 
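Because `write_json_to_file` dumps each template followed by a blank line, `az-dl.json` ends up holding two JSON documents back to back (kubectl accepts that). A quick, purely illustrative way to parse the file back and confirm both documents made it in:

```python
# Sanity-check sketch (not in the original notebooks): read the two JSON
# documents out of az-dl.json with json.JSONDecoder.raw_decode.
import json

def read_json_documents(path):
    decoder = json.JSONDecoder()
    with open(path) as f:
        text = f.read()
    docs, idx = [], 0
    while idx < len(text):
        while idx < len(text) and text[idx].isspace():
            idx += 1  # skip the blank-line separators between documents
        if idx >= len(text):
            break
        doc, idx = decoder.raw_decode(text, idx)
        docs.append(doc)
    return docs

print([d["kind"] for d in read_json_documents("az-dl.json")])
# Expected: ['Deployment', 'Service']
```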
669 | ] 670 | }, 671 | { 672 | "cell_type": "code", 673 | "execution_count": null, 674 | "metadata": {}, 675 | "outputs": [], 676 | "source": [ 677 | "!cat az-dl.json" 678 | ] 679 | }, 680 | { 681 | "cell_type": "markdown", 682 | "metadata": {}, 683 | "source": [ 684 | "Next, we will use the kubectl create command to deploy our application." 685 | ] 686 | }, 687 | { 688 | "cell_type": "code", 689 | "execution_count": null, 690 | "metadata": {}, 691 | "outputs": [], 692 | "source": [ 693 | "!kubectl create -f az-dl.json" 694 | ] 695 | }, 696 | { 697 | "cell_type": "markdown", 698 | "metadata": {}, 699 | "source": [ 700 | "Let's check if the pod is deployed." 701 | ] 702 | }, 703 | { 704 | "cell_type": "code", 705 | "execution_count": null, 706 | "metadata": {}, 707 | "outputs": [], 708 | "source": [ 709 | "!kubectl get pods --all-namespaces" 710 | ] 711 | }, 712 | { 713 | "cell_type": "markdown", 714 | "metadata": {}, 715 | "source": [ 716 | "If anything goes wrong, you can use the commands below to observe the events on the node as well as review the logs." 717 | ] 718 | }, 719 | { 720 | "cell_type": "code", 721 | "execution_count": null, 722 | "metadata": {}, 723 | "outputs": [], 724 | "source": [ 725 | "!kubectl get events" 726 | ] 727 | }, 728 | { 729 | "cell_type": "code", 730 | "execution_count": null, 731 | "metadata": {}, 732 | "outputs": [], 733 | "source": [ 734 | "pod_json = !kubectl get pods -o json\n", 735 | "pod_dict = json.loads(''.join(pod_json))\n", 736 | "!kubectl logs {pod_dict['items'][0]['metadata']['name']}" 737 | ] 738 | }, 739 | { 740 | "cell_type": "markdown", 741 | "metadata": {}, 742 | "source": [ 743 | "It can take a few minutes for the service to populate the EXTERNAL-IP field. This will be the IP you use to call the service. You can also specify an IP to use; please see the AKS documentation for further details." 744 | ] 745 | }, 746 | { 747 | "cell_type": "code", 748 | "execution_count": null, 749 | "metadata": {}, 750 | "outputs": [], 751 | "source": [ 752 | "!kubectl get service azure-dl" 753 | ] 754 | }, 755 | { 756 | "cell_type": "markdown", 757 | "metadata": {}, 758 | "source": [ 759 | "Next, we will [test our web application](05_TestWebApp.ipynb) deployed on AKS. " 760 | ] 761 | } 762 | ], 763 | "metadata": { 764 | "anaconda-cloud": {}, 765 | "kernelspec": { 766 | "display_name": "Python [conda env:AKSDeployment]", 767 | "language": "python", 768 | "name": "conda-env-AKSDeployment-py" 769 | }, 770 | "language_info": { 771 | "codemirror_mode": { 772 | "name": "ipython", 773 | "version": 3 774 | }, 775 | "file_extension": ".py", 776 | "mimetype": "text/x-python", 777 | "name": "python", 778 | "nbconvert_exporter": "python", 779 | "pygments_lexer": "ipython3", 780 | "version": "3.5.5" 781 | } 782 | }, 783 | "nbformat": 4, 784 | "nbformat_minor": 1 785 | } 786 | -------------------------------------------------------------------------------- /Tensorflow/07_TearDown.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Tear it all down\n", 8 | "Once you are done with your cluster, you can use the following commands to destroy it all." 
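Before running the teardown commands, it can be worth one last smoke test that the deployment from the previous notebooks still answers. A sketch under stated assumptions — the `/score` route and the sample image URL are assumptions here, and `img_url_to_json` comes from `testing_utilities.py`:

```python
# Hypothetical smoke test: look up the service's external IP and send one
# scoring request before destroying the cluster.
import json
import subprocess
import urllib.request
from testing_utilities import img_url_to_json

svc = json.loads(subprocess.check_output(
    ["kubectl", "get", "service", "azure-dl", "-o", "json"]).decode("utf-8"))
ip = svc["status"]["loadBalancer"]["ingress"][0]["ip"]  # empty until provisioned

payload = img_url_to_json("https://example.com/cat.jpg")  # any reachable image URL
req = urllib.request.Request(
    "http://{}/score".format(ip),  # assumed route; see 05_TestWebApp.ipynb
    data=payload.encode("utf-8"),
    headers={"Content-Type": "application/json"})
print(urllib.request.urlopen(req).read().decode("utf-8"))
```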
9 | ] 10 | }, 11 | { 12 | "cell_type": "code", 13 | "execution_count": 1, 14 | "metadata": {}, 15 | "outputs": [], 16 | "source": [ 17 | "%load_ext dotenv\n", 18 | "%dotenv" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "First, delete the application." 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 1, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stdout", 35 | "output_type": "stream", 36 | "text": [ 37 | "deployment.apps \"azure-dl\" deleted\n", 38 | "service \"azure-dl\" deleted\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "!kubectl delete -f az-dl.json" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "Next, you delete the AKS cluster. This step may take a few minutes." 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "!az aks delete -n $aks_name -g $resource_group -y" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "Finally, you should delete the resource group. This also deletes the AKS cluster and can be used instead of the above command if the resource group is only used for this purpose." 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "!az group delete --name $resource_group -y" 76 | ] 77 | } 78 | ], 79 | "metadata": { 80 | "kernelspec": { 81 | "display_name": "Python [conda env:AKSDeployment]", 82 | "language": "python", 83 | "name": "conda-env-AKSDeployment-py" 84 | }, 85 | "language_info": { 86 | "codemirror_mode": { 87 | "name": "ipython", 88 | "version": 3 89 | }, 90 | "file_extension": ".py", 91 | "mimetype": "text/x-python", 92 | "name": "python", 93 | "nbconvert_exporter": "python", 94 | "pygments_lexer": "ipython3", 95 | "version": "3.5.5" 96 | } 97 | }, 98 | "nbformat": 4, 99 | "nbformat_minor": 2 100 | } 101 | -------------------------------------------------------------------------------- /Tensorflow/README.md: -------------------------------------------------------------------------------- 1 | # Deploy ResNet 152 Tensorflow model on GPU-enabled Kubernetes cluster 2 | In this folder are the tutorials for deploying a Tensorflow model on a Kubernetes cluster. 
3 | The tutorial is made up of the following notebooks: 4 | * [Model development](00_DevelopModel.ipynb) where we load the pretrained model and test it by using it to score images 5 | * [Developing the interface](01_DevelopModelDriver.ipynb) our Flask app will use to load and call the model 6 | * [Building the Docker Image](02_BuildImage.ipynb) with our Flask REST API and model 7 | * [Testing our Docker image](03_TestLocally.ipynb) before deployment 8 | * [Creating our Kubernetes cluster](04_DeployOnAKS.ipynb) and deploying our application to it 9 | * [Testing the deployed model](05_TestWebApp.ipynb) 10 | * [Testing the throughput](06_SpeedTestWebApp.ipynb) of our model 11 | * [Cleaning the resources](07_TearDown.ipynb) used 12 | -------------------------------------------------------------------------------- /Tensorflow/environment.yml: -------------------------------------------------------------------------------- 1 | name: AKSDeployment 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.5 6 | - nb_conda==2.2.0 7 | - tornado==4.5.3 8 | - pip: 9 | - papermill==0.14.1 10 | - python-dotenv==0.9.0 11 | - Pillow==5.2.0 12 | - wget==3.2 13 | - matplotlib==2.2.2 14 | - aiohttp==3.3.2 15 | - toolz==0.9.0 16 | - tqdm==4.23.4 17 | - azure-cli==2.0.41 18 | - tensorflow-gpu==1.9.0 19 | -------------------------------------------------------------------------------- /Tensorflow/testing_utilities.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import json 3 | import urllib 4 | from io import BytesIO 5 | 6 | import matplotlib.gridspec as gridspec 7 | import matplotlib.pyplot as plt 8 | import toolz 9 | from PIL import Image, ImageOps 10 | import random 11 | 12 | def read_image_from(url): 13 | return toolz.pipe(url, 14 | urllib.request.urlopen, 15 | lambda x: x.read(), 16 | BytesIO) 17 | 18 | 19 | def to_rgb(img_bytes): 20 | return Image.open(img_bytes).convert('RGB') 21 | 22 | 23 | @toolz.curry 24 | def resize(img_file, new_size=(100, 100)): 25 | return ImageOps.fit(img_file, new_size, Image.ANTIALIAS) 26 | 27 | 28 | def to_base64(img): 29 | imgio = BytesIO() 30 | img.save(imgio, 'PNG') 31 | imgio.seek(0) 32 | dataimg = base64.b64encode(imgio.read()) 33 | return dataimg.decode('utf-8') 34 | 35 | 36 | def to_img(img_url): 37 | return toolz.pipe(img_url, 38 | read_image_from, 39 | to_rgb, 40 | resize(new_size=(224,224))) 41 | 42 | 43 | def img_url_to_json(url, label='image'): 44 | img_data = toolz.pipe(url, 45 | to_img, 46 | to_base64) 47 | return json.dumps({'input':{label:'\"{0}\"'.format(img_data)}}) 48 | 49 | 50 | def _plot_image(ax, img): 51 | ax.imshow(to_img(img)) 52 | ax.tick_params(axis='both', 53 | which='both', 54 | bottom='off', 55 | top='off', 56 | left='off', 57 | right='off', 58 | labelleft='off', 59 | labelbottom='off') 60 | return ax 61 | 62 | 63 | def _plot_prediction_bar(ax, r): 64 | perf = list(c[1] for c in r.json()['result'][0]['image']) 65 | ax.barh(range(3, 0, -1), perf, align='center', color='#55DD55') 66 | ax.tick_params(axis='both', 67 | which='both', 68 | bottom='off', 69 | top='off', 70 | left='off', 71 | right='off', 72 | labelbottom='off') 73 | tick_labels = reversed(list(' '.join(c[0].split()[1:]).split(',')[0] for c in r.json()['result'][0]['image'])) 74 | ax.yaxis.set_ticks([1,2,3]) 75 | ax.yaxis.set_ticklabels(tick_labels, position=(0.5,0), minor=False, horizontalalignment='center') 76 | 77 | 78 | def plot_predictions(images, classification_results): 79 | if len(images)!=6: 80 | raise 
Exception('This method is only designed for 6 images') 81 | gs = gridspec.GridSpec(2, 3) 82 | fig = plt.figure(figsize=(12, 9)) 83 | gs.update(hspace=0.1, wspace=0.001) 84 | 85 | for gg,r, img in zip(gs, classification_results, images): 86 | gg2 = gridspec.GridSpecFromSubplotSpec(4, 10, subplot_spec=gg) 87 | ax = fig.add_subplot(gg2[0:3, :]) 88 | _plot_image(ax, img) 89 | ax = fig.add_subplot(gg2[3, 1:9]) 90 | _plot_prediction_bar(ax, r) 91 | 92 | def write_json_to_file(json_dict, filename, mode='w'): 93 | with open(filename, mode) as outfile: 94 | json.dump(json_dict, outfile, indent=4,sort_keys=True) 95 | outfile.write('\n\n') 96 | 97 | def gen_variations_of_one_image(IMAGEURL, num, label='image'): 98 | out_images = [] 99 | img = to_img(IMAGEURL).convert('RGB') 100 | # Flip the colours for one-pixel 101 | # "Different Image" 102 | for i in range(num): 103 | diff_img = img.copy() 104 | rndm_pixel_x_y = (random.randint(0, diff_img.size[0]-1), 105 | random.randint(0, diff_img.size[1]-1)) 106 | current_color = diff_img.getpixel(rndm_pixel_x_y) 107 | diff_img.putpixel(rndm_pixel_x_y, current_color[::-1]) 108 | b64img = to_base64(diff_img) 109 | out_images.append(json.dumps({'input':{label:'\"{0}\"'.format(b64img)}})) 110 | return out_images -------------------------------------------------------------------------------- /static/Design.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/AKSDeploymentTutorial/aea3e5f83cd5cee196d725204f6ae2257edbf36d/static/Design.png -------------------------------------------------------------------------------- /static/example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/AKSDeploymentTutorial/aea3e5f83cd5cee196d725204f6ae2257edbf36d/static/example.png --------------------------------------------------------------------------------
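For completeness, the helpers in `testing_utilities.py` above are what the local test, web app test, and speed test notebooks lean on. A compact, illustrative usage — the image URL is a stand-in, not taken from the repository:

```python
# Illustrative usage of testing_utilities.py (hypothetical image URL).
from testing_utilities import img_url_to_json, gen_variations_of_one_image

IMAGE_URL = "https://example.com/some_image.jpg"  # placeholder

# Wrap one image as the JSON payload the model driver expects.
payload = img_url_to_json(IMAGE_URL)

# Build 10 near-duplicate payloads (one random pixel altered in each),
# useful in the speed test so every request carries a slightly different image.
variations = gen_variations_of_one_image(IMAGE_URL, 10)
print(len(payload), len(variations))
```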