├── .ipynb_checkpoints └── Introduction to Numpy and Image Basics-checkpoint.ipynb ├── 1_Image Basics with Numpy ├── .ipynb_checkpoints │ ├── Introduction to Numpy and Image Basics-checkpoint.ipynb │ └── Numpy-and-Image-Assessment-checkpoint.ipynb ├── Introduction to Numpy and Image Basics.ipynb └── Numpy-and-Image-Assessment.ipynb ├── 2_Image Basics with OpenCV ├── .ipynb_checkpoints │ ├── 04-Image-Basics-Assessment-checkpoint.ipynb │ ├── Image Basics with OpenCV-checkpoint.ipynb │ └── puppy-checkpoint.py ├── 04-Image-Basics-Assessment.ipynb ├── Image Basics with OpenCV.ipynb └── puppy.py ├── 3_Image Processing ├── .ipynb_checkpoints │ ├── Image Processing-checkpoint.ipynb │ └── Image-Processing-Assessment-checkpoint.ipynb ├── Image Processing.ipynb └── Image-Processing-Assessment.ipynb ├── 4_Video Basics ├── .ipynb_checkpoints │ ├── Video Basics-checkpoint.ipynb │ └── Video-Basics-Assessment-checkpoint.ipynb ├── Video Basics.ipynb └── Video-Basics-Assessment.ipynb ├── 5_Object Detection ├── .ipynb_checkpoints │ ├── 09-Detection-Assessment-checkpoint.ipynb │ └── Object Detection-checkpoint.ipynb ├── 09-Detection-Assessment.ipynb └── Object Detection.ipynb ├── 6_Object Tracking ├── .ipynb_checkpoints │ └── Object Tracking-checkpoint.ipynb └── Object Tracking.ipynb ├── 7_Deep Learning for Computer Vision ├── .ipynb_checkpoints │ ├── DL-CV-Assessment-checkpoint.ipynb │ ├── Keras-Basics-checkpoint.ipynb │ ├── Keras-CNN-CIFAR-10-checkpoint.ipynb │ └── Keras-CNN-MNIST-checkpoint.ipynb ├── DL-CV-Assessment.ipynb ├── Keras-Basics.ipynb ├── Keras-CNN-CIFAR-10.ipynb ├── Keras-CNN-MNIST.ipynb └── myfirstmodel.h5 ├── 8_Capston Project ├── .ipynb_checkpoints │ └── Capstone Project-checkpoint.ipynb └── Capstone Project.ipynb ├── Data ├── 00-puppy.jpg ├── Denis_Mukwege.jpg ├── Nadia_Murad.jpg ├── bank_note_data.txt ├── bricks.jpg ├── car_plate.jpg ├── crossword.jpg ├── dog_backpack.jpg ├── dog_backpack.png ├── dot_grid.png ├── flat_chessboard.png ├── giraffes.jpg ├── gorilla.jpg ├── haarcascades │ ├── haarcascade_eye.xml │ ├── haarcascade_frontalface_default.xml │ └── haarcascade_russian_plate_number.xml ├── horse.jpg ├── internal_external.png ├── many_cereals.jpg ├── pennies.jpg ├── rainbow.jpg ├── real_chessboard.jpg ├── reeses_puffs.png ├── road_image.jpg ├── sammy.jpg ├── sammy_face.jpg ├── sammy_noise.jpg ├── separate_coins.jpg ├── solvay_conference.jpg ├── sudoku.jpg └── watermark_no_copy.png └── README.md -------------------------------------------------------------------------------- /2_Image Basics with OpenCV/puppy.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | img = cv2.imread('../Data/00-puppy.jpg') 3 | 4 | while True: 5 | cv2.imshow('Puppy',img) 6 | # if we have waited for 1 ms AND we have pressed the Esc key 7 | if cv2.waitKey(1) & 0xFF == 27: 8 | break 9 | 10 | cv2.destroyAllWindows() 11 |
-------------------------------------------------------------------------------- /4_Video Basics/Video Basics.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Video Basics" 8 |
] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "- Goals\n", 15 | " - Connect OpenCV to WebCam\n", 16 | " - Use OpenCV to open a video file\n", 17 | " - Draw shapes on video\n", 18 | " - Interact with Video" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "#### Class 1 - Connecting to Camera" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 1, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import cv2\n", 35 | "\n", 36 | "cap = cv2.VideoCapture(0)\n", 37 | "\n", 38 | "width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))\n", 39 | "height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))\n", 40 | "\n", 41 | "#WINDOWS ---> #'DIVX'\n", 42 | "# ubuntu/ Mac ---> #'XVID'\n", 43 | "writer = cv2.VideoWriter('video.mp4',cv2.VideoWriter_fourcc(*'DIVX'),\n", 44 | " 20,(width,height))\n", 45 | "\n", 46 | "while True:\n", 47 | " ret,frame = cap.read()\n", 48 | " \n", 49 | " writer.write(frame)\n", 50 | " \n", 51 | "# gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)\n", 52 | " \n", 53 | " cv2.imshow('frame',frame)\n", 54 | " \n", 55 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 56 | " break\n", 57 | "\n", 58 | "cap.release()\n", 59 | "writer.release()\n", 60 | "cv2.destroyAllWindows()" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "#### Class 2 - Open Video" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "import cv2\n", 77 | "import time\n", 78 | "\n", 79 | "cap = cv2.VideoCapture('video.mp4')\n", 80 | "\n", 81 | "if cap.isOpened() == False:\n", 82 | " print(\"Error: file not found or wrong codec used\")\n", 83 | " \n", 84 | "while cap.isOpened():\n", 85 | " ret,frame = cap.read()\n", 86 | " if ret == True:\n", 87 | " \n", 88 | " time.sleep(1/20)\n", 89 | " cv2.imshow('frame',frame)\n", 90 | " \n", 91 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 92 | " break\n", 93 | " else:\n", 94 | " break\n", 95 | "cap.release()\n", 96 | "cv2.destroyAllWindows()" 97 | ] 98 | }, 99 | { 100 | "cell_type": "markdown", 101 | "metadata": {}, 102 | "source": [ 103 | "#### Class 3 - Drawing on Live Camera" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 4, 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "import cv2\n", 113 | "\n", 114 | "cap = cv2.VideoCapture(0)\n", 115 | "\n", 116 | "width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))\n", 117 | "height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))\n", 118 | "\n", 119 | "# TOP LEFT CORNER\n", 120 | "x = width // 2 \n", 121 | "y = height // 2\n", 122 | "\n", 123 | "# width and height of the rectangle\n", 124 | "w = x // 4\n", 125 | "h = y // 4\n", 126 | "\n", 127 | "#BOTTOM RIGHT CORNER x+w, y+h\n", 128 | "\n", 129 | "\n", 130 | "while True:\n", 131 | " ret,frame = cap.read()\n", 132 | " \n", 133 | " cv2.rectangle(frame,(x,y),(x+w,y+h),color=(255,0,0),thickness = 5)\n", 134 | " \n", 135 | " cv2.imshow('frame',frame)\n", 136 | " \n", 137 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 138 | " break\n", 139 | "cap.release()\n", 140 | "cv2.destroyAllWindows()" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 5, 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "import cv2\n", 150 | "\n", 151 | "\n", 152 | "\n", 153 | "# Callback function to draw a rectangle\n", 154 | "def draw_rect(event,x,y,flags,param):\n", 155 | " global
pt1,pt2,topleft_clicked,bottomright_clicked\n", 156 | " \n", 157 | " if event == cv2.EVENT_LBUTTONDOWN:\n", 158 | " #Reset\n", 159 | " if bottomright_clicked == True and topleft_clicked == True:\n", 160 | " pt1 = (0,0)\n", 161 | " pt2 = (0,0)\n", 162 | " topleft_clicked = False\n", 163 | " bottomright_clicked = False\n", 164 | " if topleft_clicked == False:\n", 165 | " pt1 = (x,y)\n", 166 | " topleft_clicked = True\n", 167 | " elif bottomright_clicked == False:\n", 168 | " pt2 = (x,y)\n", 169 | " bottomright_clicked = True\n", 170 | " pass\n", 171 | "\n", 172 | "# Global variable\n", 173 | "pt1 = (0,0)\n", 174 | "pt2 = (0,0)\n", 175 | "topleft_clicked = False\n", 176 | "bottomright_clicked = False\n", 177 | "\n", 178 | "# connect to the callback\n", 179 | "cap = cv2.VideoCapture(0)\n", 180 | "\n", 181 | "cv2.namedWindow('t')\n", 182 | "cv2.setMouseCallback('t',draw_rect)\n", 183 | "\n", 184 | "while True:\n", 185 | " ret,frame = cap.read()\n", 186 | " \n", 187 | " # draw on the frame based off the global variable\n", 188 | " if topleft_clicked:\n", 189 | " cv2.circle(frame,center=pt1,radius = 5,color=(0,0,255),\n", 190 | " thickness = -1)\n", 191 | " if topleft_clicked and bottomright_clicked:\n", 192 | " cv2.rectangle(frame,pt1,pt2,(0,0,255),5)\n", 193 | " \n", 194 | " cv2.imshow('t',frame)\n", 195 | " \n", 196 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 197 | " break\n", 198 | "cap.release()\n", 199 | "cv2.destroyAllWindows()" 200 | ] 201 | } 202 | ], 203 | "metadata": { 204 | "kernelspec": { 205 | "display_name": "Python 3", 206 | "language": "python", 207 | "name": "python3" 208 | }, 209 | "language_info": { 210 | "codemirror_mode": { 211 | "name": "ipython", 212 | "version": 3 213 | }, 214 | "file_extension": ".py", 215 | "mimetype": "text/x-python", 216 | "name": "python", 217 | "nbconvert_exporter": "python", 218 | "pygments_lexer": "ipython3", 219 | "version": "3.6.5" 220 | } 221 | }, 222 | "nbformat": 4, 223 | "nbformat_minor": 2 224 | } 225 | -------------------------------------------------------------------------------- /4_Video Basics/Video-Basics-Assessment.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Video Basics Assessment \n", 8 | "\n", 9 | "* **Note: This assessment is quite hard! Feel free to treat it as a code along and jump to the solutions** *\n", 10 | "\n", 11 | "## Project Task\n", 12 | "\n", 13 | "**You only have one task here. Create a program that reads in a live stream from a camera on your computer (or if you don't have a camera, just open up a video file). Then whenever you click the left mouse button down, create a blue circle around where you've clicked. 
Check out the video for an example of what the final project should look like**" 14 | ] 15 | }, 16 | { 17 | "cell_type": "markdown", 18 | "metadata": {}, 19 | "source": [ 20 | "**Guide**\n", 21 | "\n", 22 | "* Create a draw_circle function for the callback function\n", 23 | "* Use two events cv2.EVENT_LBUTTONDOWN and cv2.EVENT_LBUTTONUP\n", 24 | "* Use a boolean variable to keep track of whether the mouse has been clicked down and released based on the events above\n", 25 | "* Use a tuple to keep track of the x and y where the mouse was clicked.\n", 26 | "* You should be able to then draw a circle on the frame based on the x,y coordinates from the event \n", 27 | "\n", 28 | "Check out the skeleton guide below:" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 1, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "# Create a function based on a CV2 Event (Left button click)\n", 38 | "import cv2\n", 39 | "# mouse callback function\n", 40 | "def draw_circle(event,x,y,flags,param):\n", 41 | "\n", 42 | " global center,clicked\n", 43 | " \n", 44 | " # get mouse click on down and track center\n", 45 | " if event == cv2.EVENT_LBUTTONDOWN:\n", 46 | " center = (x,y)\n", 47 | " clicked = False\n", 48 | " \n", 49 | " # Use boolean variable to track if the mouse has been released\n", 50 | " if event == cv2.EVENT_LBUTTONUP:\n", 51 | " clicked = True\n", 52 | "\n", 53 | " \n", 54 | "# Haven't drawn anything yet!\n", 55 | "center = (0,0)\n", 56 | "clicked = False\n", 57 | "\n", 58 | "\n", 59 | "# Capture Video\n", 60 | "cap = cv2.VideoCapture(0)\n", 61 | "\n", 62 | "# Create a named window for connections\n", 63 | "cv2.namedWindow('t')\n", 64 | "\n", 65 | "# Bind the draw_circle function to mouse clicks\n", 66 | "cv2.setMouseCallback('t',draw_circle)\n", 67 | "\n", 68 | "\n", 69 | "while True:\n", 70 | " \n", 71 | " # Capture frame-by-frame\n", 72 | " ret,frame = cap.read()\n", 73 | "\n", 74 | " # Use if statement to see if clicked is true\n", 75 | " \n", 76 | " # Draw circle on frame\n", 77 | " if clicked:\n", 78 | " cv2.circle(frame,center,radius = 5,color = (255,0,0),thickness = 5)\n", 79 | " \n", 80 | " \n", 81 | " # Display the resulting frame\n", 82 | " cv2.imshow('t',frame)\n", 83 | " \n", 84 | "\n", 85 | " # This command lets us quit with the \"q\" button on a keyboard.\n", 86 | " # Simply pressing X on the window won't work!\n", 87 | " if cv2.waitKey(1) & 0xFF == ord('q'):\n", 88 | " break\n", 89 | "cap.release()\n", 90 | "cv2.destroyAllWindows()\n", 91 | "\n", 92 | "# When everything is done, release the capture\n" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [] 101 | } 102 | ], 103 | "metadata": { 104 | "kernelspec": { 105 | "display_name": "Python 3", 106 | "language": "python", 107 | "name": "python3" 108 | }, 109 | "language_info": { 110 | "codemirror_mode": { 111 | "name": "ipython", 112 | "version": 3 113 | }, 114 | "file_extension": ".py", 115 | "mimetype": "text/x-python", 116 | "name": "python", 117 | "nbconvert_exporter": "python", 118 | "pygments_lexer": "ipython3", 119 | "version": "3.6.5" 120 | } 121 | }, 122 | "nbformat": 4, 123 | "nbformat_minor": 2 124 | } 125 | -------------------------------------------------------------------------------- /6_Object Tracking/Object Tracking.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 |
"source": [ 7 | "# Object Tracking" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "- Goals\n", 15 | " - Object Tracking techniques\n", 16 | " - Optical Flow\n", 17 | " - MeanShift and CamShift\n", 18 | " - Understand more advanced tracking\n", 19 | " - Review built-in tracking APIs" 20 | ] 21 | }, 22 | { 23 | "cell_type": "markdown", 24 | "metadata": {}, 25 | "source": [ 26 | "#### Class 1 Optical Flow" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "- Optical flow is the pattern of apparent motion of image objects between two consecutive frames caused by the movement of object or camera.\n", 34 | "- Assumptions:\n", 35 | " - The pixel intensities of an object do not change between consecutive frames\n", 36 | " - Neighbouring pixels have similar motion.\n", 37 | "- The optical flow methods in OpenCV will first take in a given set of points and a frame.\n", 38 | "- then it will attempt to find those point sin the next frame.\n", 39 | "- It is up to the user to supply the points to track.\n", 40 | "- The Lucas-Kanade computes optical flow for a sparse feature set\n", 41 | " - Meaning only the points it was told to track\n", 42 | "- We can use Gunner Farneback's Algorithm to calculate dense optical flow\n", 43 | "- This dense optical flow will calculate flow for all points in an image.\n", 44 | "- It will color them black if no flow is detected.\n" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 1, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "import cv2\n", 54 | "import matplotlib.pyplot as plt\n", 55 | "import numpy as np" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 2, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "#corner detection parameters\n", 65 | "corner_track_params = dict(maxCorners = 10,\n", 66 | " qualityLevel = 0.3,\n", 67 | " minDistance = 7,\n", 68 | " blockSize = 7)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 3, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "#parameters for luca-kannade optical flow function\n", 78 | "lk_params = dict(winSize = (200,200),\n", 79 | " maxLevel=2,\n", 80 | " criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,10,0.03))" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 4, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "cap = cv2.VideoCapture(0)\n", 90 | "\n", 91 | "ret,prev_frame = cap.read()\n", 92 | "prev_gray = cv2.cvtColor(prev_frame,cv2.COLOR_BGR2GRAY)\n", 93 | "\n", 94 | "#points to track\n", 95 | "prevPts = cv2.goodFeaturesToTrack(prev_gray,mask=None,\n", 96 | " **corner_track_params)\n", 97 | "\n", 98 | "mask = np.zeros_like(prev_frame)\n", 99 | "\n", 100 | "while True:\n", 101 | " ret,frame = cap.read()\n", 102 | " \n", 103 | "\n", 104 | " frame_gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)\n", 105 | " \n", 106 | " nextPts,status,err = cv2.calcOpticalFlowPyrLK(prev_gray,frame_gray,\n", 107 | " prevPts,None,**lk_params)\n", 108 | " \n", 109 | " good_new = nextPts[status == 1]\n", 110 | " good_prev = prevPts[status == 1]\n", 111 | " \n", 112 | " for i,(new,prev) in enumerate(zip(good_new,good_prev)):\n", 113 | " x_new,y_new = new.ravel()\n", 114 | " x_prev,y_prev = prev.ravel()\n", 115 | " \n", 116 | " mask = cv2.line(mask,(x_new,y_new),(x_prev,y_prev),\n", 117 | " (255,0,0),3)\n", 118 | " \n", 119 | " frame = cv2.circle(frame,(x_new,y_new),6,(0,255,0),-1)\n", 120 | " 
\n", 121 | " img = cv2.add(frame,mask)\n", 122 | " cv2.imshow('track',img)\n", 123 | " \n", 124 | " k = cv2.waitKey(30) & 0xFF\n", 125 | " \n", 126 | " if k == 27:\n", 127 | " break\n", 128 | " \n", 129 | " prev_gray = frame_gray.copy()\n", 130 | " prevPts = good_new.reshape(-1,1,2)\n", 131 | "\n", 132 | "cv2.destroyAllWindows()\n", 133 | "cap.release()" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 5, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "cap = cv2.VideoCapture(0)\n", 143 | "\n", 144 | "ret,frame1 = cap.read()\n", 145 | "\n", 146 | "prev_img = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY)\n", 147 | "\n", 148 | "hsv_mask = np.zeros_like(frame1)\n", 149 | "hsv_mask[:,:,1] = 255\n", 150 | "\n", 151 | "while True:\n", 152 | " ret,frame2 = cap.read()\n", 153 | " \n", 154 | " nextImg = cv2.cvtColor(frame2,cv2.COLOR_BGR2GRAY)\n", 155 | " flow = cv2.calcOpticalFlowFarneback(prev_img,nextImg,\n", 156 | " None,0.5,3,15,3,5,1.2,0)\n", 157 | " \n", 158 | " mag,ang = cv2.cartToPolar(flow[:,:,0],flow[:,:,1],\n", 159 | " angleInDegrees = True)\n", 160 | " \n", 161 | " hsv_mask[:,:,0] = ang/2\n", 162 | " \n", 163 | " hsv_mask[:,:,2] = cv2.normalize(mag,None,0,255,cv2.NORM_MINMAX)\n", 164 | " \n", 165 | " bgr = cv2.cvtColor(hsv_mask,cv2.COLOR_HSV2BGR)\n", 166 | " \n", 167 | " cv2.imshow('frame',bgr)\n", 168 | " k = cv2.waitKey(30) & 0xFF\n", 169 | " \n", 170 | " if k == 27:\n", 171 | " break\n", 172 | " \n", 173 | " prev_img = nextImg\n", 174 | " \n", 175 | "cv2.destroyAllWindows()\n", 176 | "cap.release()\n", 177 | " " 178 | ] 179 | }, 180 | { 181 | "cell_type": "markdown", 182 | "metadata": {}, 183 | "source": [ 184 | "#### Class 2 MeanShift and CAMShift" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 6, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "cap = cv2.VideoCapture(0)\n", 194 | "\n", 195 | "ret,frame = cap.read()\n", 196 | "\n", 197 | "face_cascade = cv2.CascadeClassifier('../Data/haarcascades/haarcascade_frontalface_default.xml')\n", 198 | "\n", 199 | "face_rects = face_cascade.detectMultiScale(frame)\n", 200 | "\n", 201 | "(x,y,w,h) = tuple(face_rects[0])\n", 202 | "\n", 203 | "track_window = (x,y,w,h)\n", 204 | "\n", 205 | "roi = frame[y:y+h,x:x+w]\n", 206 | "\n", 207 | "hsv_roi = cv2.cvtColor(roi,cv2.COLOR_BGR2HSV)\n", 208 | "\n", 209 | "roi_hist = cv2.calcHist([hsv_roi],[0],None,[180],[0,180])\n", 210 | "\n", 211 | "cv2.normalize(roi_hist,roi_hist,0,255,cv2.NORM_MINMAX)\n", 212 | "\n", 213 | "#termination criteria\n", 214 | "term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,10,1)\n", 215 | "\n", 216 | "while True:\n", 217 | " ret,frame = cap.read()\n", 218 | " if ret == True:\n", 219 | " hsv = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)\n", 220 | " \n", 221 | " dst = cv2.calcBackProject([hsv],[0],roi_hist,[0,180],1)\n", 222 | " \n", 223 | " #trail 1\n", 224 | " ret,track_window = cv2.meanShift(dst,track_window,term_crit)\n", 225 | " \n", 226 | " x,y,w,h = track_window\n", 227 | " img2 = cv2.rectangle(frame,(x,y),(x+w,y+h),(255,0,0),5)\n", 228 | "\n", 229 | " #trail 2\n", 230 | "# ret,track_window = cv2.CamShift(dst,track_window,term_crit)\n", 231 | " \n", 232 | "# pts = cv2.boxPoints(ret)\n", 233 | "# pts = np.int0(pts)\n", 234 | "# img2 = cv2.polylines(frame,[pts],True,(255,0,0),4)\n", 235 | " \n", 236 | " cv2.imshow('img',img2)\n", 237 | " \n", 238 | " k = cv2.waitKey(30) & 0xFF\n", 239 | " \n", 240 | " if k == 27:\n", 241 | " break\n", 242 | " else:\n", 243 | " break\n", 244 | " 
\n", 245 | "cv2.destroyAllWindows()\n", 246 | "cap.release()" 247 | ] 248 | }, 249 | { 250 | "cell_type": "markdown", 251 | "metadata": {}, 252 | "source": [ 253 | "#### Class 3 Tracking API's" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": 7, 259 | "metadata": {}, 260 | "outputs": [], 261 | "source": [ 262 | "def ask_for_tracker():\n", 263 | " print('Welcome! What Tracker API would you like to use?')\n", 264 | " print('Enter 0 for Boosting: ')\n", 265 | " print('Enter 1 for MIL: ')\n", 266 | " print('Enter 2 for KCF: ')\n", 267 | " print('Enter 3 for TLD: ')\n", 268 | " print('Enter 4 for MedianFlow: ')\n", 269 | " choice = input(\"Please select your tracker: \")\n", 270 | " \n", 271 | " if choice == '0':\n", 272 | " tracker = cv2.TrackerBoosting_create()\n", 273 | " if choice == '1':\n", 274 | " tracker = cv2.TrackerMIL_create()\n", 275 | " if choice == '2':\n", 276 | " tracker = cv2.TrackerKCF_create()\n", 277 | " if choice == '3':\n", 278 | " tracker = cv2.TrackerTLD_create()\n", 279 | " if choice == '4':\n", 280 | " tracker = cv2.TrackerMedianFlow_create()\n", 281 | " \n", 282 | " return tracker" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "metadata": {}, 289 | "outputs": [], 290 | "source": [ 291 | "tracker = ask_for_tracker()\n", 292 | "tracker_name = str(tracker).split()[0][1:]\n", 293 | "\n", 294 | "#read video\n", 295 | "cap = cv2.VideoCapture(0)\n", 296 | "\n", 297 | "#read first frame\n", 298 | "ret,frame = cap.read()\n", 299 | "\n", 300 | "#special function allows us to draw on the very first frame our desired ROI\n", 301 | "roi = cv2.selectROI(frame,False)\n", 302 | "\n", 303 | "# Initialize tracker with first frame and bounding box\n", 304 | "ret = tracker.init(frame,roi)\n", 305 | "\n", 306 | "while True:\n", 307 | " #read a new frame\n", 308 | " ret,frame = cap.read()\n", 309 | " \n", 310 | " #update tracker\n", 311 | " success,roi = tracker.update(frame)\n", 312 | " \n", 313 | " #roi variable is a tuple of 4 floats\n", 314 | " #we need each value and we them as integers\n", 315 | " (x,y,w,h) = tuple(map(int,roi))\n", 316 | " \n", 317 | " #Draw rectangle as tracker moves\n", 318 | " if success:\n", 319 | " # tracking success\n", 320 | " p1= (x,y)\n", 321 | " p2 = (x+w,y+h)\n", 322 | " cv2.rectangle(frame,p1,p2,(0,255,0),3)\n", 323 | " else:\n", 324 | " #tracking failure\n", 325 | " cv2.putText(frame,'Failure to Detect tracking!!',\n", 326 | " (100,200),cv2.FONT_HERSHEY_SIMPLEX,1,(255,0,0),3)\n", 327 | " \n", 328 | " #display tracker type on frame\n", 329 | " cv2.putText(frame,tracker_name,\n", 330 | " (20,400),cv2.FONT_HERSHEY_SIMPLEX,1,(255,0,0),3)\n", 331 | " \n", 332 | " #display result\n", 333 | " cv2.imshow(tracker_name,frame)\n", 334 | " \n", 335 | " k = cv2.waitKey(1) & 0xFF\n", 336 | " \n", 337 | " if k == 27:\n", 338 | " break\n", 339 | " \n", 340 | "cv2.destroyAllWindows()\n", 341 | "cap.release()" 342 | ] 343 | }, 344 | { 345 | "cell_type": "code", 346 | "execution_count": null, 347 | "metadata": {}, 348 | "outputs": [], 349 | "source": [] 350 | } 351 | ], 352 | "metadata": { 353 | "kernelspec": { 354 | "display_name": "Python 3", 355 | "language": "python", 356 | "name": "python3" 357 | }, 358 | "language_info": { 359 | "codemirror_mode": { 360 | "name": "ipython", 361 | "version": 3 362 | }, 363 | "file_extension": ".py", 364 | "mimetype": "text/x-python", 365 | "name": "python", 366 | "nbconvert_exporter": "python", 367 | "pygments_lexer": "ipython3", 368 | "version": "3.6.5" 369 | } 
370 | }, 371 | "nbformat": 4, 372 | "nbformat_minor": 2 373 | } 374 | -------------------------------------------------------------------------------- /6_Object Tracking/Object Tracking.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Object Tracking" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "- Goals\n", 15 | " - Object Tracking techniques\n", 16 | " - Optical Flow\n", 17 | " - MeanShift and CamShift\n", 18 | " - Understand more advanced tracking\n", 19 | " - Review built-in tracking APIs" 20 | ] 21 | }, 22 | { 23 | "cell_type": "markdown", 24 | "metadata": {}, 25 | "source": [ 26 | "#### Class 1 Optical Flow" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "- Optical flow is the pattern of apparent motion of image objects between two consecutive frames caused by the movement of object or camera.\n", 34 | "- Assumptions:\n", 35 | " - The pixel intensities of an object do not change between consecutive frames\n", 36 | " - Neighbouring pixels have similar motion.\n", 37 | "- The optical flow methods in OpenCV will first take in a given set of points and a frame.\n", 38 | "- then it will attempt to find those point sin the next frame.\n", 39 | "- It is up to the user to supply the points to track.\n", 40 | "- The Lucas-Kanade computes optical flow for a sparse feature set\n", 41 | " - Meaning only the points it was told to track\n", 42 | "- We can use Gunner Farneback's Algorithm to calculate dense optical flow\n", 43 | "- This dense optical flow will calculate flow for all points in an image.\n", 44 | "- It will color them black if no flow is detected.\n" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 1, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "import cv2\n", 54 | "import matplotlib.pyplot as plt\n", 55 | "import numpy as np" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 2, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "#corner detection parameters\n", 65 | "corner_track_params = dict(maxCorners = 10,\n", 66 | " qualityLevel = 0.3,\n", 67 | " minDistance = 7,\n", 68 | " blockSize = 7)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 3, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "#parameters for luca-kannade optical flow function\n", 78 | "lk_params = dict(winSize = (200,200),\n", 79 | " maxLevel=2,\n", 80 | " criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,10,0.03))" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 4, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "cap = cv2.VideoCapture(0)\n", 90 | "\n", 91 | "ret,prev_frame = cap.read()\n", 92 | "prev_gray = cv2.cvtColor(prev_frame,cv2.COLOR_BGR2GRAY)\n", 93 | "\n", 94 | "#points to track\n", 95 | "prevPts = cv2.goodFeaturesToTrack(prev_gray,mask=None,\n", 96 | " **corner_track_params)\n", 97 | "\n", 98 | "mask = np.zeros_like(prev_frame)\n", 99 | "\n", 100 | "while True:\n", 101 | " ret,frame = cap.read()\n", 102 | " \n", 103 | "\n", 104 | " frame_gray = cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)\n", 105 | " \n", 106 | " nextPts,status,err = cv2.calcOpticalFlowPyrLK(prev_gray,frame_gray,\n", 107 | " prevPts,None,**lk_params)\n", 108 | " \n", 109 | " good_new = nextPts[status == 1]\n", 110 | " good_prev = prevPts[status == 
1]\n", 111 | " \n", 112 | " for i,(new,prev) in enumerate(zip(good_new,good_prev)):\n", 113 | " x_new,y_new = new.ravel()\n", 114 | " x_prev,y_prev = prev.ravel()\n", 115 | " \n", 116 | " mask = cv2.line(mask,(x_new,y_new),(x_prev,y_prev),\n", 117 | " (255,0,0),3)\n", 118 | " \n", 119 | " frame = cv2.circle(frame,(x_new,y_new),6,(0,255,0),-1)\n", 120 | " \n", 121 | " img = cv2.add(frame,mask)\n", 122 | " cv2.imshow('track',img)\n", 123 | " \n", 124 | " k = cv2.waitKey(30) & 0xFF\n", 125 | " \n", 126 | " if k == 27:\n", 127 | " break\n", 128 | " \n", 129 | " prev_gray = frame_gray.copy()\n", 130 | " prevPts = good_new.reshape(-1,1,2)\n", 131 | "\n", 132 | "cv2.destroyAllWindows()\n", 133 | "cap.release()" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 5, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "cap = cv2.VideoCapture(0)\n", 143 | "\n", 144 | "ret,frame1 = cap.read()\n", 145 | "\n", 146 | "prev_img = cv2.cvtColor(frame1,cv2.COLOR_BGR2GRAY)\n", 147 | "\n", 148 | "hsv_mask = np.zeros_like(frame1)\n", 149 | "hsv_mask[:,:,1] = 255\n", 150 | "\n", 151 | "while True:\n", 152 | " ret,frame2 = cap.read()\n", 153 | " \n", 154 | " nextImg = cv2.cvtColor(frame2,cv2.COLOR_BGR2GRAY)\n", 155 | " flow = cv2.calcOpticalFlowFarneback(prev_img,nextImg,\n", 156 | " None,0.5,3,15,3,5,1.2,0)\n", 157 | " \n", 158 | " mag,ang = cv2.cartToPolar(flow[:,:,0],flow[:,:,1],\n", 159 | " angleInDegrees = True)\n", 160 | " \n", 161 | " hsv_mask[:,:,0] = ang/2\n", 162 | " \n", 163 | " hsv_mask[:,:,2] = cv2.normalize(mag,None,0,255,cv2.NORM_MINMAX)\n", 164 | " \n", 165 | " bgr = cv2.cvtColor(hsv_mask,cv2.COLOR_HSV2BGR)\n", 166 | " \n", 167 | " cv2.imshow('frame',bgr)\n", 168 | " k = cv2.waitKey(30) & 0xFF\n", 169 | " \n", 170 | " if k == 27:\n", 171 | " break\n", 172 | " \n", 173 | " prev_img = nextImg\n", 174 | " \n", 175 | "cv2.destroyAllWindows()\n", 176 | "cap.release()\n", 177 | " " 178 | ] 179 | }, 180 | { 181 | "cell_type": "markdown", 182 | "metadata": {}, 183 | "source": [ 184 | "#### Class 2 MeanShift and CAMShift" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 6, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "cap = cv2.VideoCapture(0)\n", 194 | "\n", 195 | "ret,frame = cap.read()\n", 196 | "\n", 197 | "face_cascade = cv2.CascadeClassifier('../Data/haarcascades/haarcascade_frontalface_default.xml')\n", 198 | "\n", 199 | "face_rects = face_cascade.detectMultiScale(frame)\n", 200 | "\n", 201 | "(x,y,w,h) = tuple(face_rects[0])\n", 202 | "\n", 203 | "track_window = (x,y,w,h)\n", 204 | "\n", 205 | "roi = frame[y:y+h,x:x+w]\n", 206 | "\n", 207 | "hsv_roi = cv2.cvtColor(roi,cv2.COLOR_BGR2HSV)\n", 208 | "\n", 209 | "roi_hist = cv2.calcHist([hsv_roi],[0],None,[180],[0,180])\n", 210 | "\n", 211 | "cv2.normalize(roi_hist,roi_hist,0,255,cv2.NORM_MINMAX)\n", 212 | "\n", 213 | "#termination criteria\n", 214 | "term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,10,1)\n", 215 | "\n", 216 | "while True:\n", 217 | " ret,frame = cap.read()\n", 218 | " if ret == True:\n", 219 | " hsv = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)\n", 220 | " \n", 221 | " dst = cv2.calcBackProject([hsv],[0],roi_hist,[0,180],1)\n", 222 | " \n", 223 | " #trail 1\n", 224 | " ret,track_window = cv2.meanShift(dst,track_window,term_crit)\n", 225 | " \n", 226 | " x,y,w,h = track_window\n", 227 | " img2 = cv2.rectangle(frame,(x,y),(x+w,y+h),(255,0,0),5)\n", 228 | "\n", 229 | " #trail 2\n", 230 | "# ret,track_window = 
cv2.CamShift(dst,track_window,term_crit)\n", 231 | " \n", 232 | "# pts = cv2.boxPoints(ret)\n", 233 | "# pts = np.int0(pts)\n", 234 | "# img2 = cv2.polylines(frame,[pts],True,(255,0,0),4)\n", 235 | " \n", 236 | " cv2.imshow('img',img2)\n", 237 | " \n", 238 | " k = cv2.waitKey(30) & 0xFF\n", 239 | " \n", 240 | " if k == 27:\n", 241 | " break\n", 242 | " else:\n", 243 | " break\n", 244 | " \n", 245 | "cv2.destroyAllWindows()\n", 246 | "cap.release()" 247 | ] 248 | }, 249 | { 250 | "cell_type": "markdown", 251 | "metadata": {}, 252 | "source": [ 253 | "#### Class 3 Tracking APIs" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": 7, 259 | "metadata": {}, 260 | "outputs": [], 261 | "source": [ 262 | "def ask_for_tracker():\n", 263 | " print('Welcome! What Tracker API would you like to use?')\n", 264 | " print('Enter 0 for Boosting: ')\n", 265 | " print('Enter 1 for MIL: ')\n", 266 | " print('Enter 2 for KCF: ')\n", 267 | " print('Enter 3 for TLD: ')\n", 268 | " print('Enter 4 for MedianFlow: ')\n", 269 | " choice = input(\"Please select your tracker: \")\n", 270 | " \n", 271 | " if choice == '0':\n", 272 | " tracker = cv2.TrackerBoosting_create()\n", 273 | " if choice == '1':\n", 274 | " tracker = cv2.TrackerMIL_create()\n", 275 | " if choice == '2':\n", 276 | " tracker = cv2.TrackerKCF_create()\n", 277 | " if choice == '3':\n", 278 | " tracker = cv2.TrackerTLD_create()\n", 279 | " if choice == '4':\n", 280 | " tracker = cv2.TrackerMedianFlow_create()\n", 281 | " \n", 282 | " return tracker" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "metadata": {}, 289 | "outputs": [], 290 | "source": [ 291 | "tracker = ask_for_tracker()\n", 292 | "tracker_name = str(tracker).split()[0][1:]\n", 293 | "\n", 294 | "#read video\n", 295 | "cap = cv2.VideoCapture(0)\n", 296 | "\n", 297 | "#read first frame\n", 298 | "ret,frame = cap.read()\n", 299 | "\n", 300 | "#special function that lets us draw our desired ROI on the very first frame\n", 301 | "roi = cv2.selectROI(frame,False)\n", 302 | "\n", 303 | "# Initialize tracker with first frame and bounding box\n", 304 | "ret = tracker.init(frame,roi)\n", 305 | "\n", 306 | "while True:\n", 307 | " #read a new frame\n", 308 | " ret,frame = cap.read()\n", 309 | " \n", 310 | " #update tracker\n", 311 | " success,roi = tracker.update(frame)\n", 312 | " \n", 313 | " #roi variable is a tuple of 4 floats\n", 314 | " #we need each value and we need them as integers\n", 315 | " (x,y,w,h) = tuple(map(int,roi))\n", 316 | " \n", 317 | " #Draw rectangle as tracker moves\n", 318 | " if success:\n", 319 | " # tracking success\n", 320 | " p1 = (x,y)\n", 321 | " p2 = (x+w,y+h)\n", 322 | " cv2.rectangle(frame,p1,p2,(0,255,0),3)\n", 323 | " else:\n", 324 | " #tracking failure\n", 325 | " cv2.putText(frame,'Tracking failure detected!!',\n", 326 | " (100,200),cv2.FONT_HERSHEY_SIMPLEX,1,(255,0,0),3)\n", 327 | " \n", 328 | " #display tracker type on frame\n", 329 | " cv2.putText(frame,tracker_name,\n", 330 | " (20,400),cv2.FONT_HERSHEY_SIMPLEX,1,(255,0,0),3)\n", 331 | " \n", 332 | " #display result\n", 333 | " cv2.imshow(tracker_name,frame)\n", 334 | " \n", 335 | " k = cv2.waitKey(1) & 0xFF\n", 336 | " \n", 337 | " if k == 27:\n", 338 | " break\n", 339 | " \n", 340 | "cv2.destroyAllWindows()\n", 341 | "cap.release()" 342 | ] 343 | }, 344 | { 345 | "cell_type": "code", 346 | "execution_count": null, 347 | "metadata": {}, 348 | "outputs": [], 349 | "source": [] 350 | } 351 | ], 352 | "metadata": { 353 | "kernelspec": { 354 |
"display_name": "Python 3", 355 | "language": "python", 356 | "name": "python3" 357 | }, 358 | "language_info": { 359 | "codemirror_mode": { 360 | "name": "ipython", 361 | "version": 3 362 | }, 363 | "file_extension": ".py", 364 | "mimetype": "text/x-python", 365 | "name": "python", 366 | "nbconvert_exporter": "python", 367 | "pygments_lexer": "ipython3", 368 | "version": "3.6.5" 369 | } 370 | }, 371 | "nbformat": 4, 372 | "nbformat_minor": 2 373 | } 374 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/.ipynb_checkpoints/DL-CV-Assessment-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Deep Learning for Image Classification Assessment\n", 8 | "\n", 9 | "\n", 10 | "Welcome to your assessment! Follow the instructions in bold below to complete the assessment.\n", 11 | "\n", 12 | "If you get stuck, check out the solutions video and notebook. (Make sure to run the solutions notebook before posting a question to the QA forum please, thanks!)\n", 13 | "\n", 14 | "------------\n", 15 | "\n", 16 | "## The Challenge\n", 17 | "\n", 18 | "**Your task is to build an image classifier with Keras and Convolutional Neural Networks for the Fashion MNIST dataset. This data set includes 10 labels of different clothing types with 28 by 28 *grayscale* images. There is a training set of 60,000 images and 10,000 test images.**\n", 19 | "\n", 20 | " Label\tDescription\n", 21 | " 0\t T-shirt/top\n", 22 | " 1\t Trouser\n", 23 | " 2\t Pullover\n", 24 | " 3\t Dress\n", 25 | " 4\t Coat\n", 26 | " 5\t Sandal\n", 27 | " 6\t Shirt\n", 28 | " 7\t Sneaker\n", 29 | " 8\t Bag\n", 30 | " 9\t Ankle boot\n", 31 | " \n", 32 | " " 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## The Data\n", 40 | "\n", 41 | "**TASK 1: Run the code below to download the dataset using Keras.**" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 17, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "from keras.datasets import fashion_mnist\n", 51 | "\n", 52 | "(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "metadata": {}, 58 | "source": [ 59 | "## Visualizing the Data\n", 60 | "\n", 61 | "**TASK 2: Use matplotlib to view an image from the data set. 
It can be any image from the data set.**" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 4, 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "import matplotlib.pyplot as plt" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 5, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "" 82 | ] 83 | }, 84 | "execution_count": 5, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | }, 88 | { 89 | "data": { 90 | "image/png": "[base64-encoded PNG output omitted: matplotlib rendering of x_train[0]]\n", 91 | "text/plain": [ 92 | "
" 93 | ] 94 | }, 95 | "metadata": {}, 96 | "output_type": "display_data" 97 | } 98 | ], 99 | "source": [ 100 | "plt.imshow(x_train[0])" 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "metadata": {}, 106 | "source": [ 107 | "## Preprocessing the Data\n", 108 | "\n", 109 | "**TASK 3: Normalize the X train and X test data by dividing by the max value of the image arrays.**" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 6, 115 | "metadata": {}, 116 | "outputs": [ 117 | { 118 | "data": { 119 | "text/plain": [ 120 | "255" 121 | ] 122 | }, 123 | "execution_count": 6, 124 | "metadata": {}, 125 | "output_type": "execute_result" 126 | } 127 | ], 128 | "source": [ 129 | "x_train.max()" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": 18, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "x_train = x_train / x_train.max()\n", 139 | "x_test = x_test / x_test.max()" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "**Task 4: Reshape the X arrays to include a 4 dimension of the single channel. Similar to what we did for the numbers MNIST data set.**" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 8, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "data": { 156 | "text/plain": [ 157 | "(60000, 28, 28)" 158 | ] 159 | }, 160 | "execution_count": 8, 161 | "metadata": {}, 162 | "output_type": "execute_result" 163 | } 164 | ], 165 | "source": [ 166 | "x_train.shape" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 19, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "x_train = x_train.reshape(60000,28,28,1)\n", 176 | "x_test = x_test.reshape(10000,28,28,1)" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "metadata": {}, 182 | "source": [ 183 | "**TASK 5: Convert the y_train and y_test values to be one-hot encoded for categorical analysis by Keras.**" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 20, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "from keras.utils import to_categorical" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 21, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "y_train_cat = to_categorical(y_train,10)\n", 202 | "y_test_cat = to_categorical(y_test,10)" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": 22, 208 | "metadata": {}, 209 | "outputs": [ 210 | { 211 | "data": { 212 | "text/plain": [ 213 | "(60000, 10)" 214 | ] 215 | }, 216 | "execution_count": 22, 217 | "metadata": {}, 218 | "output_type": "execute_result" 219 | } 220 | ], 221 | "source": [ 222 | "y_train_cat.shape" 223 | ] 224 | }, 225 | { 226 | "cell_type": "markdown", 227 | "metadata": {}, 228 | "source": [ 229 | "## Building the Model\n", 230 | "\n", 231 | "**TASK 5: Use Keras to create a model consisting of at least the following layers (but feel free to experiment):**\n", 232 | "\n", 233 | "* 2D Convolutional Layer, filters=32 and kernel_size=(4,4)\n", 234 | "* Pooling Layer where pool_size = (2,2)\n", 235 | "\n", 236 | "* Flatten Layer\n", 237 | "* Dense Layer (128 Neurons, but feel free to play around with this value), RELU activation\n", 238 | "\n", 239 | "* Final Dense Layer of 10 Neurons with a softmax activation\n", 240 | "\n", 241 | "**Then compile the model with these parameters: loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy']**" 
242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": 23, 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "from keras.models import Sequential\n", 251 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": 24, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "model = Sequential()\n", 261 | "\n", 262 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(28, 28, 1), activation='relu',))\n", 263 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 264 | "\n", 265 | "model.add(Flatten())\n", 266 | "\n", 267 | "model.add(Dense(128, activation='relu'))\n", 268 | "\n", 269 | "model.add(Dense(10, activation='softmax'))\n", 270 | "\n", 271 | "\n", 272 | "model.compile(loss='categorical_crossentropy',\n", 273 | " optimizer='rmsprop',\n", 274 | " metrics=['accuracy'])" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 25, 280 | "metadata": {}, 281 | "outputs": [ 282 | { 283 | "name": "stdout", 284 | "output_type": "stream", 285 | "text": [ 286 | "_________________________________________________________________\n", 287 | "Layer (type) Output Shape Param # \n", 288 | "=================================================================\n", 289 | "conv2d_2 (Conv2D) (None, 25, 25, 32) 544 \n", 290 | "_________________________________________________________________\n", 291 | "max_pooling2d_2 (MaxPooling2 (None, 12, 12, 32) 0 \n", 292 | "_________________________________________________________________\n", 293 | "flatten_2 (Flatten) (None, 4608) 0 \n", 294 | "_________________________________________________________________\n", 295 | "dense_3 (Dense) (None, 128) 589952 \n", 296 | "_________________________________________________________________\n", 297 | "dense_4 (Dense) (None, 10) 1290 \n", 298 | "=================================================================\n", 299 | "Total params: 591,786\n", 300 | "Trainable params: 591,786\n", 301 | "Non-trainable params: 0\n", 302 | "_________________________________________________________________\n" 303 | ] 304 | } 305 | ], 306 | "source": [ 307 | "model.summary()" 308 | ] 309 | }, 310 | { 311 | "cell_type": "markdown", 312 | "metadata": {}, 313 | "source": [ 314 | "### Training the Model\n", 315 | "**TASK 7: Train/Fit the model to the x_train set. The number of epochs is up to you.**" 316 | ] 317 | }, 318 | { 319 | "cell_type": "code", 320 | "execution_count": 27, 321 | "metadata": {}, 322 | "outputs": [ 323 | { 324 | "name": "stdout", 325 | "output_type": "stream", 326 | "text": [ 327 | "Epoch 1/1\n", 328 | "60000/60000 [==============================] - 26s 437us/step - loss: 0.3219 - acc: 0.8856\n" 329 | ] 330 | }, 331 | { 332 | "data": { 333 | "text/plain": [ 334 | "" 335 | ] 336 | }, 337 | "execution_count": 27, 338 | "metadata": {}, 339 | "output_type": "execute_result" 340 | } 341 | ], 342 | "source": [ 343 | "model.fit(x_train,y_train_cat,epochs=1)" 344 | ] 345 | }, 346 | { 347 | "cell_type": "markdown", 348 | "metadata": {}, 349 | "source": [ 350 | "### Evaluating the Model\n", 351 | "\n", 352 | "**TASK 8: Show the accuracy, precision, recall, and f1-score the model achieved on the x_test data set.
Keep in mind, there are quite a few ways to do this, but we recommend following the same procedure we showed in the MNIST lecture.**" 353 | ] 354 | }, 355 | { 356 | "cell_type": "code", 357 | "execution_count": 28, 358 | "metadata": {}, 359 | "outputs": [ 360 | { 361 | "data": { 362 | "text/plain": [ 363 | "['loss', 'acc']" 364 | ] 365 | }, 366 | "execution_count": 28, 367 | "metadata": {}, 368 | "output_type": "execute_result" 369 | } 370 | ], 371 | "source": [ 372 | "model.metrics_names" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": 30, 378 | "metadata": {}, 379 | "outputs": [ 380 | { 381 | "name": "stdout", 382 | "output_type": "stream", 383 | "text": [ 384 | "10000/10000 [==============================] - 1s 138us/step\n" 385 | ] 386 | }, 387 | { 388 | "data": { 389 | "text/plain": [ 390 | "[0.3002570129275322, 0.8916]" 391 | ] 392 | }, 393 | "execution_count": 30, 394 | "metadata": {}, 395 | "output_type": "execute_result" 396 | } 397 | ], 398 | "source": [ 399 | "model.evaluate(x_test,y_test_cat)" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": 31, 405 | "metadata": {}, 406 | "outputs": [], 407 | "source": [ 408 | "from sklearn.metrics import classification_report" 409 | ] 410 | }, 411 | { 412 | "cell_type": "code", 413 | "execution_count": 32, 414 | "metadata": {}, 415 | "outputs": [], 416 | "source": [ 417 | "predictions = model.predict_classes(x_test)" 418 | ] 419 | }, 420 | { 421 | "cell_type": "code", 422 | "execution_count": 34, 423 | "metadata": {}, 424 | "outputs": [ 425 | { 426 | "name": "stdout", 427 | "output_type": "stream", 428 | "text": [ 429 | " precision recall f1-score support\n", 430 | "\n", 431 | " 0 0.86 0.81 0.84 1000\n", 432 | " 1 0.99 0.97 0.98 1000\n", 433 | " 2 0.80 0.85 0.83 1000\n", 434 | " 3 0.86 0.93 0.89 1000\n", 435 | " 4 0.84 0.81 0.83 1000\n", 436 | " 5 0.96 0.98 0.97 1000\n", 437 | " 6 0.73 0.69 0.71 1000\n", 438 | " 7 0.97 0.91 0.94 1000\n", 439 | " 8 0.98 0.98 0.98 1000\n", 440 | " 9 0.93 0.98 0.95 1000\n", 441 | "\n", 442 | "avg / total 0.89 0.89 0.89 10000\n", 443 | "\n" 444 | ] 445 | } 446 | ], 447 | "source": [ 448 | "print(classification_report(y_test,predictions))" 449 | ] 450 | }, 451 | { 452 | "cell_type": "markdown", 453 | "metadata": {}, 454 | "source": [ 455 | "# Great Job!" 
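
One portability caveat for the evaluation code above: `predict_classes` only exists on `Sequential` models in older Keras releases and was removed in later versions of tf.keras. If this notebook is rerun on a newer stack, the equivalent is a sketch like:

import numpy as np

# model.predict returns one softmax probability vector per test image;
# taking the argmax over the class axis recovers the integer predictions.
predictions = np.argmax(model.predict(x_test), axis=-1)
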
456 | ] 457 | } 458 | ], 459 | "metadata": { 460 | "kernelspec": { 461 | "display_name": "Python 3", 462 | "language": "python", 463 | "name": "python3" 464 | }, 465 | "language_info": { 466 | "codemirror_mode": { 467 | "name": "ipython", 468 | "version": 3 469 | }, 470 | "file_extension": ".py", 471 | "mimetype": "text/x-python", 472 | "name": "python", 473 | "nbconvert_exporter": "python", 474 | "pygments_lexer": "ipython3", 475 | "version": "3.6.5" 476 | } 477 | }, 478 | "nbformat": 4, 479 | "nbformat_minor": 2 480 | } 481 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/.ipynb_checkpoints/Keras-CNN-MNIST-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Convolutional Neural Networks for Image Classification" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [ 15 | { 16 | "name": "stderr", 17 | "output_type": "stream", 18 | "text": [ 19 | "Using TensorFlow backend.\n" 20 | ] 21 | } 22 | ], 23 | "source": [ 24 | "from keras.datasets import mnist\n", 25 | "\n", 26 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "## Visualizing the Image Data" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 2, 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [ 42 | "import matplotlib.pyplot as plt\n", 43 | "%matplotlib inline" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "metadata": {}, 50 | "outputs": [ 51 | { 52 | "data": { 53 | "text/plain": [ 54 | "(60000, 28, 28)" 55 | ] 56 | }, 57 | "execution_count": 3, 58 | "metadata": {}, 59 | "output_type": "execute_result" 60 | } 61 | ], 62 | "source": [ 63 | "x_train.shape" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 4, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "single_image = x_train[0]" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 5, 78 | "metadata": {}, 79 | "outputs": [ 80 | { 81 | "data": { 82 | "text/plain": [ 83 | "array([[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 84 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 85 | " 0, 0],\n", 86 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 87 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 88 | " 0, 0],\n", 89 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 90 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 91 | " 0, 0],\n", 92 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 93 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 94 | " 0, 0],\n", 95 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 96 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 97 | " 0, 0],\n", 98 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,\n", 99 | " 18, 18, 18, 126, 136, 175, 26, 166, 255, 247, 127, 0, 0,\n", 100 | " 0, 0],\n", 101 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 30, 36, 94, 154, 170,\n", 102 | " 253, 253, 253, 253, 253, 225, 172, 253, 242, 195, 64, 0, 0,\n", 103 | " 0, 0],\n", 104 | " [ 0, 0, 0, 0, 0, 0, 0, 49, 238, 253, 253, 253, 253,\n", 105 | " 253, 253, 253, 253, 251, 93, 82, 82, 56, 39, 0, 0, 0,\n", 106 | " 0, 0],\n", 107 | " [ 0, 0, 0, 0, 0, 0, 0, 18, 219, 253, 253, 253, 253,\n", 108 | " 253, 198, 182, 247, 241, 0, 0, 0, 0, 0, 0, 0, 0,\n", 109 | " 0, 0],\n", 110 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 80, 156, 107, 253, 253,\n", 
111 | " 205, 11, 0, 43, 154, 0, 0, 0, 0, 0, 0, 0, 0,\n", 112 | " 0, 0],\n", 113 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 1, 154, 253,\n", 114 | " 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 115 | " 0, 0],\n", 116 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 253,\n", 117 | " 190, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 118 | " 0, 0],\n", 119 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 190,\n", 120 | " 253, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 121 | " 0, 0],\n", 122 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35,\n", 123 | " 241, 225, 160, 108, 1, 0, 0, 0, 0, 0, 0, 0, 0,\n", 124 | " 0, 0],\n", 125 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 126 | " 81, 240, 253, 253, 119, 25, 0, 0, 0, 0, 0, 0, 0,\n", 127 | " 0, 0],\n", 128 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 129 | " 0, 45, 186, 253, 253, 150, 27, 0, 0, 0, 0, 0, 0,\n", 130 | " 0, 0],\n", 131 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 132 | " 0, 0, 16, 93, 252, 253, 187, 0, 0, 0, 0, 0, 0,\n", 133 | " 0, 0],\n", 134 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 135 | " 0, 0, 0, 0, 249, 253, 249, 64, 0, 0, 0, 0, 0,\n", 136 | " 0, 0],\n", 137 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 138 | " 0, 46, 130, 183, 253, 253, 207, 2, 0, 0, 0, 0, 0,\n", 139 | " 0, 0],\n", 140 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39,\n", 141 | " 148, 229, 253, 253, 253, 250, 182, 0, 0, 0, 0, 0, 0,\n", 142 | " 0, 0],\n", 143 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 114, 221,\n", 144 | " 253, 253, 253, 253, 201, 78, 0, 0, 0, 0, 0, 0, 0,\n", 145 | " 0, 0],\n", 146 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 23, 66, 213, 253, 253,\n", 147 | " 253, 253, 198, 81, 2, 0, 0, 0, 0, 0, 0, 0, 0,\n", 148 | " 0, 0],\n", 149 | " [ 0, 0, 0, 0, 0, 0, 18, 171, 219, 253, 253, 253, 253,\n", 150 | " 195, 80, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 151 | " 0, 0],\n", 152 | " [ 0, 0, 0, 0, 55, 172, 226, 253, 253, 253, 253, 244, 133,\n", 153 | " 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 154 | " 0, 0],\n", 155 | " [ 0, 0, 0, 0, 136, 253, 253, 253, 212, 135, 132, 16, 0,\n", 156 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 157 | " 0, 0],\n", 158 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 159 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 160 | " 0, 0],\n", 161 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 162 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 163 | " 0, 0],\n", 164 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 165 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 166 | " 0, 0]], dtype=uint8)" 167 | ] 168 | }, 169 | "execution_count": 5, 170 | "metadata": {}, 171 | "output_type": "execute_result" 172 | } 173 | ], 174 | "source": [ 175 | "single_image" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": 6, 181 | "metadata": {}, 182 | "outputs": [ 183 | { 184 | "data": { 185 | "text/plain": [ 186 | "(28, 28)" 187 | ] 188 | }, 189 | "execution_count": 6, 190 | "metadata": {}, 191 | "output_type": "execute_result" 192 | } 193 | ], 194 | "source": [ 195 | "single_image.shape" 196 | ] 197 | }, 198 | { 199 | "cell_type": "code", 200 | "execution_count": 7, 201 | "metadata": {}, 202 | "outputs": [ 203 | { 204 | "data": { 205 | "text/plain": [ 206 | "" 207 | ] 208 | }, 209 | "execution_count": 7, 210 | "metadata": {}, 211 | "output_type": "execute_result" 212 | }, 213 | { 214 | "data": { 215 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAADolJREFUeJzt3X2MXOV1x/HfyXq9jo1JvHVsHOJgxzgBYhqTjgzICFwhXKdCMqgCYkWRQ5M4LzgprStBraq4FancKiF1CUVamq1tifcEiv+gSZAVAVFhy+IQXuLwErMli7e7mA3YEOKX3dM/9m60MTvPrGfuzJ3d8/1I1szcc+/co4Hf3pl55t7H3F0A4nlP0Q0AKAbhB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1LRG7my6tfkMzWrkLoFQfqu3dcQP20TWrSn8ZrZG0jZJLZL+3d23ptafoVk61y6uZZcAErp894TXrfptv5m1SLpF0qcknSVpnZmdVe3zAWisWj7zr5D0krvvc/cjku6StDaftgDUWy3hP1XSr8Y87s2W/R4z22Bm3WbWfVSHa9gdgDzVEv7xvlR41/nB7t7h7iV3L7WqrYbdAchTLeHvlbRwzOMPSdpfWzsAGqWW8D8haamZLTaz6ZI+LWlXPm0BqLeqh/rc/ZiZbZT0Q40M9XW6+3O5dQagrmoa53f3ByU9mFMvABqIn/cCQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QVE2z9JpZj6RDkoYkHXP3Uh5NIT82Lf2fuOUDc+u6/+f/elHZ2tDM4eS2py0ZSNZnftWS9f+7aXrZ2p7S3cltDwy9nayfe++mZP30v3o8WW8GNYU/88fufiCH5wHQQLztB4KqNfwu6Udm9qSZbcijIQCNUevb/pXuvt/M5kl6yMx+4e6PjF0h+6OwQZJmaGaNuwOQl5qO/O6+P7sdkHS/pBXjrNPh7iV3L7WqrZbdAchR1eE3s1lmNnv0vqTVkp7NqzEA9VXL2/75ku43s9HnucPdf5BLVwDqrurwu/s+SZ/IsZcpq+XMpcm6t7Um6/sven+y/s555cek29+XHq9+9BPp8e4i/ddvZifr//SdNcl619l3lK29fPSd5LZb+y9J1j/4qCfrkwFDfUBQhB8IivADQRF+ICjCDwRF+IGg8jirL7yhVZ9M1m/afkuy/tHW8qeeTmVHfShZ/7ubP5esT3s7Pdx2/r0by9Zmv3osuW3bgfRQ4MzurmR9MuDIDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBMc6fg7bn9yfrT/52YbL+0db+PNvJ1aa+85L1fW+lL/29fcn3ytbeHE6P08//1/9O1utp8p+wWxlHfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IytwbN6J5srX7uXZxw/bXLAavPj9ZP7gmfXntlqdPStZ/9tWbT7inUTce+MNk/YmL0uP4Q2+8maz7+eWv7t7z9eSmWrzuZ+kV8C5dvlsHfTA9d3mGIz8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBFVxnN/MOiVdKmnA3Zdly9ol3S1pkaQeSVe6+68r7SzqOH8lLXP/IFkfen0wWX/5jvJj9c9d2JncdsU/fi1Zn3dLcefU48TlPc6/XdLxE6FfL2m3uy+VtDt7DGASqRh+d39E0vGHnrWSdmT3d0i6LOe+ANRZtZ/557t7nyRlt/PyawlAI9T9Gn5mtkHSBkmaoZn13h2ACar2yN9vZgskKbsdKLeiu3e4e8ndS61qq3J3APJWbfh3SVqf3V8v6YF82gHQKBXDb2Z3SnpM0sfMrNfMPi9pq6RLzOxFSZdkjwFMIhU/87v7ujIlBuxzMnTg9Zq2P3pwetXbfvwzP0/WX7u1Jf0Ew0NV7xvF4hd+QFCEHwiK8ANBEX4gKMIPBEX4gaCYonsKOPO6F8rWrj47PSL7H6ftTtYvuuKaZH323Y8n62heHPmBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjG+aeA1DTZr3/lzOS2r+x6J1m//sadyfrfXHl5su4/fV/Z2sJvPJbcVg2cPj4ijvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EFTFKbrzxBTdzWfwz89P1m+/4ZvJ+uJpM6re98d3bkzWl97Wl6wf29dT9b6nqryn6AYwBRF+ICjCDwRF+IGgCD8QFOEHgiL8QFAVx/nNrFPSpZIG3H1ZtmyLpC9Kei1bbbO7P1hpZ4zzTz6+cnmyfvLW3mT9zo/8sOp9n/HjLyTrH/v78tcxkKShF/dVve/JKu9x/u2S1oyz/Nvuvjz7VzH4AJpLxfC7+yOSBhvQC4AGquUz/0Yze9rMOs1sTm4dAWiIasN/q6QlkpZL6pP0rXIrmtkGM+s2s+6jOlzl7gDkrarwu3u/uw+5+7Ck2yStSKzb4e4ldy+1qq3aPgHkrKrwm9mCMQ8vl/RsPu0AaJSKl+42szslrZI018x6Jd0gaZWZLZfkknokfamOPQKoA87nR01a5s9L1vdfdXrZWtd125LbvqfCG9PPvLw6WX/zgteT9amI8/kBVET4gaAIPxAU4QeCIvxAUIQfCIqhPhTmnt70FN0zbXqy/hs/kqxf+rVryz/3/V3JbScrhvoAVET4gaAIPxAU4QeCIvxAUIQfCIrwA0FVPJ8fsQ1fkL509y+vSE/RvWx5T9lapXH8Sm4ePCdZn/lAd03PP9Vx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoBjnn+KstCxZf+Hr6bH221buSNYvnJE+p74Wh/1osv744OL0Ewz35djN1MORHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCqjjOb2YLJe2UdIqkYUkd7r7NzNol3S1pkaQeSVe6+6/r12pc0xaflqz/8uoPlq1tuequ5LZ/dtKBqnrKw+b+UrL+8LbzkvU5O9LX/UfaRI78xyRtcvczJZ0n6RozO0vS9ZJ2u/tSSbuzxwAmiYrhd/c+d9+T3T8kaa+kUyWtlTT6868dki6rV5MA8ndCn/nNbJGkcyR1SZrv7n3SyB8ISfPybg5A/Uw4/GZ2kqTvS7rW3Q+ewHYbzKzbzLqP6nA1PQKogwmF38xaNRL82939vmxxv5ktyOoLJA2Mt627d7h7yd1LrWrLo2cAOagYfjMzSd+VtNfdbxpT2iVpfXZ/vaQH8m8PQL1M5JTelZI+K+kZM3sqW7ZZ0lZJ95jZ5yW9IumK+rQ4+U1b9OFk/c0/WpCsX/UPP0jWv/z++5L1etrUlx6Oe+zfyg/ntW//n+S2c4YZyquniuF3959IKjff98X5tgOgUfiFHxAU4QeCIvxAUIQfCIrwA0ERfiAoLt09QdMWnFK2Ntg5K7ntVxY/nKyvm91fVU952PjqBcn6nlvTU3TP/d6zyXr7IcbqmxVHfiAowg8ERfiBoA
g/EBThB4Ii/EBQhB8IKsw4/5E/SV8m+shfDibrm09/sGxt9XvfrqqnvPQPvVO2duGuTcltz/jbXyTr7W+kx+mHk1U0M478QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUmHH+nsvSf+deOPveuu37ljeWJOvbHl6drNtQuSunjzjjxpfL1pb2dyW3HUpWMZVx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoMzd0yuYLZS0U9IpGjl9u8Pdt5nZFklflPRatupmdy9/0rukk63dzzVm9Qbqpct366APpn8YkpnIj3yOSdrk7nvMbLakJ83soaz2bXf/ZrWNAihOxfC7e5+kvuz+ITPbK+nUejcGoL5O6DO/mS2SdI6k0d+MbjSzp82s08zmlNlmg5l1m1n3UR2uqVkA+Zlw+M3sJEnfl3Stux+UdKukJZKWa+SdwbfG287dO9y95O6lVrXl0DKAPEwo/GbWqpHg3+7u90mSu/e7+5C7D0u6TdKK+rUJIG8Vw29mJum7kva6+01jli8Ys9rlktLTtQJoKhP5tn+lpM9KesbMnsqWbZa0zsyWS3JJPZK+VJcOAdTFRL7t/4mk8cYNk2P6AJobv/ADgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EVfHS3bnuzOw1Sf87ZtFcSQca1sCJadbemrUvid6qlWdvp7n7ByayYkPD/66dm3W7e6mwBhKatbdm7Uuit2oV1Rtv+4GgCD8QVNHh7yh4/ynN2luz9iXRW7UK6a3Qz/wAilP0kR9AQQoJv5mtMbPnzewlM7u+iB7KMbMeM3vGzJ4ys+6Ce+k0swEze3bMsnYze8jMXsxux50mraDetpjZq9lr95SZ/WlBvS00sx+b2V4ze87M/iJbXuhrl+irkNet4W/7zaxF0guSLpHUK+kJSevc/ecNbaQMM+uRVHL3wseEzexCSW9J2unuy7Jl/yxp0N23Zn8457j7dU3S2xZJbxU9c3M2ocyCsTNLS7pM0udU4GuX6OtKFfC6FXHkXyHpJXff5+5HJN0laW0BfTQ9d39E0uBxi9dK2pHd36GR/3karkxvTcHd+9x9T3b/kKTRmaULfe0SfRWiiPCfKulXYx73qrmm/HZJPzKzJ81sQ9HNjGN+Nm366PTp8wru53gVZ25upONmlm6a166aGa/zVkT4x5v9p5mGHFa6+yclfUrSNdnbW0zMhGZubpRxZpZuCtXOeJ23IsLfK2nhmMcfkrS/gD7G5e77s9sBSfer+WYf7h+dJDW7HSi4n99pppmbx5tZWk3w2jXTjNdFhP8JSUvNbLGZTZf0aUm7CujjXcxsVvZFjMxslqTVar7Zh3dJWp/dXy/pgQJ7+T3NMnNzuZmlVfBr12wzXhfyI59sKONfJLVI6nT3bzS8iXGY2Uc0crSXRiYxvaPI3szsTkmrNHLWV7+kGyT9p6R7JH1Y0iuSrnD3hn/xVqa3VRp56/q7mZtHP2M3uLcLJD0q6RlJw9nizRr5fF3Ya5foa50KeN34hR8QFL/wA4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1P8D6+E2hIAP97kAAAAASUVORK5CYII=\n", 216 | "text/plain": [ 217 | "
" 218 | ] 219 | }, 220 | "metadata": { 221 | "needs_background": "light" 222 | }, 223 | "output_type": "display_data" 224 | } 225 | ], 226 | "source": [ 227 | "plt.imshow(single_image)" 228 | ] 229 | }, 230 | { 231 | "cell_type": "markdown", 232 | "metadata": {}, 233 | "source": [ 234 | "# PreProcessing Data\n", 235 | "\n", 236 | "We first need to make sure the labels will be understandable by our CNN." 237 | ] 238 | }, 239 | { 240 | "cell_type": "markdown", 241 | "metadata": {}, 242 | "source": [ 243 | "## Labels" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 8, 249 | "metadata": {}, 250 | "outputs": [ 251 | { 252 | "data": { 253 | "text/plain": [ 254 | "array([5, 0, 4, ..., 5, 6, 8], dtype=uint8)" 255 | ] 256 | }, 257 | "execution_count": 8, 258 | "metadata": {}, 259 | "output_type": "execute_result" 260 | } 261 | ], 262 | "source": [ 263 | "y_train" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 9, 269 | "metadata": {}, 270 | "outputs": [ 271 | { 272 | "data": { 273 | "text/plain": [ 274 | "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)" 275 | ] 276 | }, 277 | "execution_count": 9, 278 | "metadata": {}, 279 | "output_type": "execute_result" 280 | } 281 | ], 282 | "source": [ 283 | "y_test" 284 | ] 285 | }, 286 | { 287 | "cell_type": "markdown", 288 | "metadata": {}, 289 | "source": [ 290 | "Hmmm, looks like our labels are literally categories of numbers. We need to translate this to be \"one hot encoded\" so our CNN can understand, otherwise it will think this is some sort of regression problem on a continuous axis. Luckily , Keras has an easy to use function for this:" 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": 10, 296 | "metadata": {}, 297 | "outputs": [], 298 | "source": [ 299 | "from keras.utils.np_utils import to_categorical" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": 11, 305 | "metadata": {}, 306 | "outputs": [ 307 | { 308 | "data": { 309 | "text/plain": [ 310 | "(60000,)" 311 | ] 312 | }, 313 | "execution_count": 11, 314 | "metadata": {}, 315 | "output_type": "execute_result" 316 | } 317 | ], 318 | "source": [ 319 | "y_train.shape" 320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": 12, 325 | "metadata": {}, 326 | "outputs": [], 327 | "source": [ 328 | "y_example = to_categorical(y_train)" 329 | ] 330 | }, 331 | { 332 | "cell_type": "code", 333 | "execution_count": 13, 334 | "metadata": {}, 335 | "outputs": [ 336 | { 337 | "data": { 338 | "text/plain": [ 339 | "array([[0., 0., 0., ..., 0., 0., 0.],\n", 340 | " [1., 0., 0., ..., 0., 0., 0.],\n", 341 | " [0., 0., 0., ..., 0., 0., 0.],\n", 342 | " ...,\n", 343 | " [0., 0., 0., ..., 0., 0., 0.],\n", 344 | " [0., 0., 0., ..., 0., 0., 0.],\n", 345 | " [0., 0., 0., ..., 0., 1., 0.]], dtype=float32)" 346 | ] 347 | }, 348 | "execution_count": 13, 349 | "metadata": {}, 350 | "output_type": "execute_result" 351 | } 352 | ], 353 | "source": [ 354 | "y_example" 355 | ] 356 | }, 357 | { 358 | "cell_type": "code", 359 | "execution_count": 14, 360 | "metadata": {}, 361 | "outputs": [ 362 | { 363 | "data": { 364 | "text/plain": [ 365 | "(60000, 10)" 366 | ] 367 | }, 368 | "execution_count": 14, 369 | "metadata": {}, 370 | "output_type": "execute_result" 371 | } 372 | ], 373 | "source": [ 374 | "y_example.shape" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": 15, 380 | "metadata": {}, 381 | "outputs": [ 382 | { 383 | "data": { 384 | "text/plain": [ 385 | "array([0., 0., 
0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32)" 386 | ] 387 | }, 388 | "execution_count": 15, 389 | "metadata": {}, 390 | "output_type": "execute_result" 391 | } 392 | ], 393 | "source": [ 394 | "y_example[0]" 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": 16, 400 | "metadata": {}, 401 | "outputs": [], 402 | "source": [ 403 | "y_cat_test = to_categorical(y_test,10)" 404 | ] 405 | }, 406 | { 407 | "cell_type": "code", 408 | "execution_count": 17, 409 | "metadata": {}, 410 | "outputs": [], 411 | "source": [ 412 | "y_cat_train = to_categorical(y_train,10)" 413 | ] 414 | }, 415 | { 416 | "cell_type": "code", 417 | "execution_count": null, 418 | "metadata": {}, 419 | "outputs": [], 420 | "source": [] 421 | }, 422 | { 423 | "cell_type": "markdown", 424 | "metadata": {}, 425 | "source": [ 426 | "### Processing X Data\n", 427 | "\n", 428 | "We should normalize the X data" 429 | ] 430 | }, 431 | { 432 | "cell_type": "code", 433 | "execution_count": 18, 434 | "metadata": {}, 435 | "outputs": [ 436 | { 437 | "data": { 438 | "text/plain": [ 439 | "255" 440 | ] 441 | }, 442 | "execution_count": 18, 443 | "metadata": {}, 444 | "output_type": "execute_result" 445 | } 446 | ], 447 | "source": [ 448 | "single_image.max()" 449 | ] 450 | }, 451 | { 452 | "cell_type": "code", 453 | "execution_count": 19, 454 | "metadata": {}, 455 | "outputs": [ 456 | { 457 | "data": { 458 | "text/plain": [ 459 | "0" 460 | ] 461 | }, 462 | "execution_count": 19, 463 | "metadata": {}, 464 | "output_type": "execute_result" 465 | } 466 | ], 467 | "source": [ 468 | "single_image.min()" 469 | ] 470 | }, 471 | { 472 | "cell_type": "code", 473 | "execution_count": 20, 474 | "metadata": {}, 475 | "outputs": [], 476 | "source": [ 477 | "x_train = x_train/255\n", 478 | "x_test = x_test/255" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": 21, 484 | "metadata": {}, 485 | "outputs": [], 486 | "source": [ 487 | "scaled_single = x_train[0]" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": 22, 493 | "metadata": {}, 494 | "outputs": [ 495 | { 496 | "data": { 497 | "text/plain": [ 498 | "1.0" 499 | ] 500 | }, 501 | "execution_count": 22, 502 | "metadata": {}, 503 | "output_type": "execute_result" 504 | } 505 | ], 506 | "source": [ 507 | "scaled_single.max()" 508 | ] 509 | }, 510 | { 511 | "cell_type": "code", 512 | "execution_count": 23, 513 | "metadata": {}, 514 | "outputs": [ 515 | { 516 | "data": { 517 | "text/plain": [ 518 | "" 519 | ] 520 | }, 521 | "execution_count": 23, 522 | "metadata": {}, 523 | "output_type": "execute_result" 524 | }, 525 | { 526 | "data": { 527 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAADolJREFUeJzt3X2MXOV1x/HfyXq9jo1JvHVsHOJgxzgBYhqTjgzICFwhXKdCMqgCYkWRQ5M4LzgprStBraq4FancKiF1CUVamq1tifcEiv+gSZAVAVFhy+IQXuLwErMli7e7mA3YEOKX3dM/9m60MTvPrGfuzJ3d8/1I1szcc+/co4Hf3pl55t7H3F0A4nlP0Q0AKAbhB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1LRG7my6tfkMzWrkLoFQfqu3dcQP20TWrSn8ZrZG0jZJLZL+3d23ptafoVk61y6uZZcAErp894TXrfptv5m1SLpF0qcknSVpnZmdVe3zAWisWj7zr5D0krvvc/cjku6StDaftgDUWy3hP1XSr8Y87s2W/R4z22Bm3WbWfVSHa9gdgDzVEv7xvlR41/nB7t7h7iV3L7WqrYbdAchTLeHvlbRwzOMPSdpfWzsAGqWW8D8haamZLTaz6ZI+LWlXPm0BqLeqh/rc/ZiZbZT0Q40M9XW6+3O5dQagrmoa53f3ByU9mFMvABqIn/cCQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QVE2z9JpZj6RDkoYkHXP3Uh5NIT82Lf2fuOUDc+u6/+f/elHZ2tDM4eS2py0ZSNZnftWS9f+7aXrZ2p7S3cltDwy9nayfe++mZP30v3o8WW8GNYU/88fufiCH5wHQQLztB4KqNfwu6Udm9qSZbcijIQCNUevb/pXuvt/M5kl6yMx+4e6PjF0h+6OwQZJmaGaNuwOQl5qO/O6+P7sdkHS/pBXjrNPh7iV3L7WqrZbdAchR1eE3s1lmNnv0vqTVkp7NqzEA9VXL2/75ku43s9HnucPdf5BLVwDqrurwu/s+SZ/IsZcpq+XMpcm6t7Um6/sven+y/s555cek29+XHq9+9BPp8e4i/ddvZifr//SdNcl619l3lK29fPSd5LZb+y9J1j/4qCfrkwFDfUBQhB8IivADQRF+ICjCDwRF+IGg8jirL7yhVZ9M1m/afkuy/tHW8qeeTmVHfShZ/7ubP5esT3s7Pdx2/r0by9Zmv3osuW3bgfRQ4MzurmR9MuDIDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBMc6fg7bn9yfrT/52YbL+0db+PNvJ1aa+85L1fW+lL/29fcn3ytbeHE6P08//1/9O1utp8p+wWxlHfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IytwbN6J5srX7uXZxw/bXLAavPj9ZP7gmfXntlqdPStZ/9tWbT7inUTce+MNk/YmL0uP4Q2+8maz7+eWv7t7z9eSmWrzuZ+kV8C5dvlsHfTA9d3mGIz8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBFVxnN/MOiVdKmnA3Zdly9ol3S1pkaQeSVe6+68r7SzqOH8lLXP/IFkfen0wWX/5jvJj9c9d2JncdsU/fi1Zn3dLcefU48TlPc6/XdLxE6FfL2m3uy+VtDt7DGASqRh+d39E0vGHnrWSdmT3d0i6LOe+ANRZtZ/557t7nyRlt/PyawlAI9T9Gn5mtkHSBkmaoZn13h2ACar2yN9vZgskKbsdKLeiu3e4e8ndS61qq3J3APJWbfh3SVqf3V8v6YF82gHQKBXDb2Z3SnpM0sfMrNfMPi9pq6RLzOxFSZdkjwFMIhU/87v7ujIlBuxzMnTg9Zq2P3pwetXbfvwzP0/WX7u1Jf0Ew0NV7xvF4hd+QFCEHwiK8ANBEX4gKMIPBEX4gaCYonsKOPO6F8rWrj47PSL7H6ftTtYvuuKaZH323Y8n62heHPmBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjG+aeA1DTZr3/lzOS2r+x6J1m//sadyfrfXHl5su4/fV/Z2sJvPJbcVg2cPj4ijvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EFTFKbrzxBTdzWfwz89P1m+/4ZvJ+uJpM6re98d3bkzWl97Wl6wf29dT9b6nqryn6AYwBRF+ICjCDwRF+IGgCD8QFOEHgiL8QFAVx/nNrFPSpZIG3H1ZtmyLpC9Kei1bbbO7P1hpZ4zzTz6+cnmyfvLW3mT9zo/8sOp9n/HjLyTrH/v78tcxkKShF/dVve/JKu9x/u2S1oyz/Nvuvjz7VzH4AJpLxfC7+yOSBhvQC4AGquUz/0Yze9rMOs1sTm4dAWiIasN/q6QlkpZL6pP0rXIrmtkGM+s2s+6jOlzl7gDkrarwu3u/uw+5+7Ck2yStSKzb4e4ldy+1qq3aPgHkrKrwm9mCMQ8vl/RsPu0AaJSKl+42szslrZI018x6Jd0gaZWZLZfkknokfamOPQKoA87nR01a5s9L1vdfdXrZWtd125LbvqfCG9PPvLw6WX/zgteT9amI8/kBVET4gaAIPxAU4QeCIvxAUIQfCIqhPhTmnt70FN0zbXqy/hs/kqxf+rVryz/3/V3JbScrhvoAVET4gaAIPxAU4QeCIvxAUIQfCIrwA0FVPJ8fsQ1fkL509y+vSE/RvWx5T9lapXH8Sm4ePCdZn/lAd03PP9Vx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoBjnn+KstCxZf+Hr6bH221buSNYvnJE+p74Wh/1osv744OL0Ewz35djN1MORHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCqjjOb2YLJe2UdIqkYUkd7r7NzNol3S1pkaQeSVe6+6/r12pc0xaflqz/8uoPlq1tuequ5LZ/dtKBqnrKw+b+UrL+8LbzkvU5O9LX/UfaRI78xyRtcvczJZ0n6RozO0vS9ZJ2u/tSSbuzxwAmiYrhd/c+d9+T3T8kaa+kUyWtlTT6868dki6rV5MA8ndCn/nNbJGkcyR1SZrv7n3SyB8ISfPybg5A/Uw4/GZ2kqTvS7rW3Q+ewHYbzKzbzLqP6nA1PQKogwmF38xaNRL82939vmxxv5ktyOoLJA2Mt627d7h7yd1LrWrLo2cAOagYfjMzSd+VtNfdbxpT2iVpfXZ/vaQH8m8PQL1M5JTelZI+K+kZM3sqW7ZZ0lZJ95jZ5yW9IumK+rQ4+U1b9OFk/c0/WpCsX/UPP0jWv/z++5L1etrUlx6Oe+zfyg/ntW//n+S2c4YZyquniuF3959IKjff98X5tgOgUfiFHxAU4QeCIvxAUIQfCIrwA0ERfiAoLt09QdMWnFK2Ntg5K7ntVxY/nKyvm91fVU952PjqBcn6nlvTU3TP/d6zyXr7IcbqmxVHfiAowg8ERfiBoA
g/EBThB4Ii/EBQhB8IKsw4/5E/SV8m+shfDibrm09/sGxt9XvfrqqnvPQPvVO2duGuTcltz/jbXyTr7W+kx+mHk1U0M478QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUmHH+nsvSf+deOPveuu37ljeWJOvbHl6drNtQuSunjzjjxpfL1pb2dyW3HUpWMZVx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoMzd0yuYLZS0U9IpGjl9u8Pdt5nZFklflPRatupmdy9/0rukk63dzzVm9Qbqpct366APpn8YkpnIj3yOSdrk7nvMbLakJ83soaz2bXf/ZrWNAihOxfC7e5+kvuz+ITPbK+nUejcGoL5O6DO/mS2SdI6k0d+MbjSzp82s08zmlNlmg5l1m1n3UR2uqVkA+Zlw+M3sJEnfl3Stux+UdKukJZKWa+SdwbfG287dO9y95O6lVrXl0DKAPEwo/GbWqpHg3+7u90mSu/e7+5C7D0u6TdKK+rUJIG8Vw29mJum7kva6+01jli8Ys9rlktLTtQJoKhP5tn+lpM9KesbMnsqWbZa0zsyWS3JJPZK+VJcOAdTFRL7t/4mk8cYNk2P6AJobv/ADgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EVfHS3bnuzOw1Sf87ZtFcSQca1sCJadbemrUvid6qlWdvp7n7ByayYkPD/66dm3W7e6mwBhKatbdm7Uuit2oV1Rtv+4GgCD8QVNHh7yh4/ynN2luz9iXRW7UK6a3Qz/wAilP0kR9AQQoJv5mtMbPnzewlM7u+iB7KMbMeM3vGzJ4ys+6Ce+k0swEze3bMsnYze8jMXsxux50mraDetpjZq9lr95SZ/WlBvS00sx+b2V4ze87M/iJbXuhrl+irkNet4W/7zaxF0guSLpHUK+kJSevc/ecNbaQMM+uRVHL3wseEzexCSW9J2unuy7Jl/yxp0N23Zn8457j7dU3S2xZJbxU9c3M2ocyCsTNLS7pM0udU4GuX6OtKFfC6FXHkXyHpJXff5+5HJN0laW0BfTQ9d39E0uBxi9dK2pHd36GR/3karkxvTcHd+9x9T3b/kKTRmaULfe0SfRWiiPCfKulXYx73qrmm/HZJPzKzJ81sQ9HNjGN+Nm366PTp8wru53gVZ25upONmlm6a166aGa/zVkT4x5v9p5mGHFa6+yclfUrSNdnbW0zMhGZubpRxZpZuCtXOeJ23IsLfK2nhmMcfkrS/gD7G5e77s9sBSfer+WYf7h+dJDW7HSi4n99pppmbx5tZWk3w2jXTjNdFhP8JSUvNbLGZTZf0aUm7CujjXcxsVvZFjMxslqTVar7Zh3dJWp/dXy/pgQJ7+T3NMnNzuZmlVfBr12wzXhfyI59sKONfJLVI6nT3bzS8iXGY2Uc0crSXRiYxvaPI3szsTkmrNHLWV7+kGyT9p6R7JH1Y0iuSrnD3hn/xVqa3VRp56/q7mZtHP2M3uLcLJD0q6RlJw9nizRr5fF3Ya5foa50KeN34hR8QFL/wA4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1P8D6+E2hIAP97kAAAAASUVORK5CYII=\n", 528 | "text/plain": [ 529 | "
" 530 | ] 531 | }, 532 | "metadata": { 533 | "needs_background": "light" 534 | }, 535 | "output_type": "display_data" 536 | } 537 | ], 538 | "source": [ 539 | "plt.imshow(scaled_single)" 540 | ] 541 | }, 542 | { 543 | "cell_type": "markdown", 544 | "metadata": {}, 545 | "source": [ 546 | "## Reshaping the Data\n", 547 | "\n", 548 | "Right now our data is 60,000 images stored in 28 by 28 pixel array formation. \n", 549 | "\n", 550 | "This is correct for a CNN, but we need to add one more dimension to show we're dealing with 1 RGB channel (since technically the images are in black and white, only showing values from 0-255 on a single channel), an color image would have 3 dimensions." 551 | ] 552 | }, 553 | { 554 | "cell_type": "code", 555 | "execution_count": 24, 556 | "metadata": {}, 557 | "outputs": [ 558 | { 559 | "data": { 560 | "text/plain": [ 561 | "(60000, 28, 28)" 562 | ] 563 | }, 564 | "execution_count": 24, 565 | "metadata": {}, 566 | "output_type": "execute_result" 567 | } 568 | ], 569 | "source": [ 570 | "x_train.shape" 571 | ] 572 | }, 573 | { 574 | "cell_type": "code", 575 | "execution_count": 25, 576 | "metadata": {}, 577 | "outputs": [ 578 | { 579 | "data": { 580 | "text/plain": [ 581 | "(10000, 28, 28)" 582 | ] 583 | }, 584 | "execution_count": 25, 585 | "metadata": {}, 586 | "output_type": "execute_result" 587 | } 588 | ], 589 | "source": [ 590 | "x_test.shape" 591 | ] 592 | }, 593 | { 594 | "cell_type": "markdown", 595 | "metadata": {}, 596 | "source": [ 597 | "Reshape to include channel dimension (in this case, 1 channel)" 598 | ] 599 | }, 600 | { 601 | "cell_type": "code", 602 | "execution_count": 26, 603 | "metadata": {}, 604 | "outputs": [], 605 | "source": [ 606 | "x_train = x_train.reshape(60000, 28, 28, 1)" 607 | ] 608 | }, 609 | { 610 | "cell_type": "code", 611 | "execution_count": 27, 612 | "metadata": {}, 613 | "outputs": [ 614 | { 615 | "data": { 616 | "text/plain": [ 617 | "(60000, 28, 28, 1)" 618 | ] 619 | }, 620 | "execution_count": 27, 621 | "metadata": {}, 622 | "output_type": "execute_result" 623 | } 624 | ], 625 | "source": [ 626 | "x_train.shape" 627 | ] 628 | }, 629 | { 630 | "cell_type": "code", 631 | "execution_count": 28, 632 | "metadata": {}, 633 | "outputs": [], 634 | "source": [ 635 | "x_test = x_test.reshape(10000,28,28,1)" 636 | ] 637 | }, 638 | { 639 | "cell_type": "code", 640 | "execution_count": 29, 641 | "metadata": {}, 642 | "outputs": [ 643 | { 644 | "data": { 645 | "text/plain": [ 646 | "(10000, 28, 28, 1)" 647 | ] 648 | }, 649 | "execution_count": 29, 650 | "metadata": {}, 651 | "output_type": "execute_result" 652 | } 653 | ], 654 | "source": [ 655 | "x_test.shape" 656 | ] 657 | }, 658 | { 659 | "cell_type": "markdown", 660 | "metadata": {}, 661 | "source": [ 662 | "# Training the Model" 663 | ] 664 | }, 665 | { 666 | "cell_type": "code", 667 | "execution_count": 30, 668 | "metadata": {}, 669 | "outputs": [], 670 | "source": [ 671 | "from keras.models import Sequential\n", 672 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten" 673 | ] 674 | }, 675 | { 676 | "cell_type": "code", 677 | "execution_count": 31, 678 | "metadata": {}, 679 | "outputs": [], 680 | "source": [ 681 | "model = Sequential()\n", 682 | "\n", 683 | "# CONVOLUTIONAL LAYER\n", 684 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(28, 28, 1), activation='relu',))\n", 685 | "# POOLING LAYER\n", 686 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 687 | "\n", 688 | "# FLATTEN IMAGES FROM 28 by 28 to 764 BEFORE FINAL LAYER\n", 689 | 
"model.add(Flatten())\n", 690 | "\n", 691 | "# 128 NEURONS IN DENSE HIDDEN LAYER (YOU CAN CHANGE THIS NUMBER OF NEURONS)\n", 692 | "model.add(Dense(128, activation='relu'))\n", 693 | "\n", 694 | "# LAST LAYER IS THE CLASSIFIER, THUS 10 POSSIBLE CLASSES\n", 695 | "model.add(Dense(10, activation='softmax'))\n", 696 | "\n", 697 | "\n", 698 | "model.compile(loss='categorical_crossentropy',\n", 699 | " optimizer='rmsprop',\n", 700 | " metrics=['accuracy'])" 701 | ] 702 | }, 703 | { 704 | "cell_type": "code", 705 | "execution_count": 32, 706 | "metadata": {}, 707 | "outputs": [ 708 | { 709 | "name": "stdout", 710 | "output_type": "stream", 711 | "text": [ 712 | "_________________________________________________________________\n", 713 | "Layer (type) Output Shape Param # \n", 714 | "=================================================================\n", 715 | "conv2d_1 (Conv2D) (None, 25, 25, 32) 544 \n", 716 | "_________________________________________________________________\n", 717 | "max_pooling2d_1 (MaxPooling2 (None, 12, 12, 32) 0 \n", 718 | "_________________________________________________________________\n", 719 | "flatten_1 (Flatten) (None, 4608) 0 \n", 720 | "_________________________________________________________________\n", 721 | "dense_1 (Dense) (None, 128) 589952 \n", 722 | "_________________________________________________________________\n", 723 | "dense_2 (Dense) (None, 10) 1290 \n", 724 | "=================================================================\n", 725 | "Total params: 591,786\n", 726 | "Trainable params: 591,786\n", 727 | "Non-trainable params: 0\n", 728 | "_________________________________________________________________\n" 729 | ] 730 | } 731 | ], 732 | "source": [ 733 | "model.summary()" 734 | ] 735 | }, 736 | { 737 | "cell_type": "markdown", 738 | "metadata": {}, 739 | "source": [ 740 | "## Train the Model" 741 | ] 742 | }, 743 | { 744 | "cell_type": "code", 745 | "execution_count": 33, 746 | "metadata": {}, 747 | "outputs": [ 748 | { 749 | "name": "stdout", 750 | "output_type": "stream", 751 | "text": [ 752 | "Epoch 1/2\n", 753 | "60000/60000 [==============================] - 7s 124us/step - loss: 0.1344 - acc: 0.9593\n", 754 | "Epoch 2/2\n", 755 | "60000/60000 [==============================] - 6s 92us/step - loss: 0.0488 - acc: 0.9858\n" 756 | ] 757 | }, 758 | { 759 | "data": { 760 | "text/plain": [ 761 | "" 762 | ] 763 | }, 764 | "execution_count": 33, 765 | "metadata": {}, 766 | "output_type": "execute_result" 767 | } 768 | ], 769 | "source": [ 770 | "# THIS WILL TAKE AWHILE ON MOST COMPUTERS!!!\n", 771 | "# CHANGE NUMBER OF EPOCHS IF NECESSARY\n", 772 | "# YOUR ACCURACY MAY ALSO BE LOWER THAN WHAT IS SHOWN HERE SINCE THIS WAS TRAINED ON GPU\n", 773 | "model.fit(x_train,y_cat_train,epochs=2)" 774 | ] 775 | }, 776 | { 777 | "cell_type": "markdown", 778 | "metadata": {}, 779 | "source": [ 780 | "## Evaluate the Model" 781 | ] 782 | }, 783 | { 784 | "cell_type": "code", 785 | "execution_count": 34, 786 | "metadata": {}, 787 | "outputs": [ 788 | { 789 | "data": { 790 | "text/plain": [ 791 | "['loss', 'acc']" 792 | ] 793 | }, 794 | "execution_count": 34, 795 | "metadata": {}, 796 | "output_type": "execute_result" 797 | } 798 | ], 799 | "source": [ 800 | "model.metrics_names" 801 | ] 802 | }, 803 | { 804 | "cell_type": "code", 805 | "execution_count": 35, 806 | "metadata": {}, 807 | "outputs": [ 808 | { 809 | "name": "stdout", 810 | "output_type": "stream", 811 | "text": [ 812 | "10000/10000 [==============================] - 0s 39us/step\n" 813 | ] 814 | }, 815 
| { 816 | "data": { 817 | "text/plain": [ 818 | "[0.043689755835279356, 0.9855]" 819 | ] 820 | }, 821 | "execution_count": 35, 822 | "metadata": {}, 823 | "output_type": "execute_result" 824 | } 825 | ], 826 | "source": [ 827 | "model.evaluate(x_test,y_cat_test)" 828 | ] 829 | }, 830 | { 831 | "cell_type": "code", 832 | "execution_count": 36, 833 | "metadata": {}, 834 | "outputs": [], 835 | "source": [ 836 | "from sklearn.metrics import classification_report" 837 | ] 838 | }, 839 | { 840 | "cell_type": "code", 841 | "execution_count": 37, 842 | "metadata": {}, 843 | "outputs": [], 844 | "source": [ 845 | "predictions = model.predict_classes(x_test)" 846 | ] 847 | }, 848 | { 849 | "cell_type": "code", 850 | "execution_count": 38, 851 | "metadata": {}, 852 | "outputs": [ 853 | { 854 | "data": { 855 | "text/plain": [ 856 | "(10000, 10)" 857 | ] 858 | }, 859 | "execution_count": 38, 860 | "metadata": {}, 861 | "output_type": "execute_result" 862 | } 863 | ], 864 | "source": [ 865 | "y_cat_test.shape" 866 | ] 867 | }, 868 | { 869 | "cell_type": "code", 870 | "execution_count": 39, 871 | "metadata": {}, 872 | "outputs": [ 873 | { 874 | "data": { 875 | "text/plain": [ 876 | "array([0., 0., 0., 0., 0., 0., 0., 1., 0., 0.], dtype=float32)" 877 | ] 878 | }, 879 | "execution_count": 39, 880 | "metadata": {}, 881 | "output_type": "execute_result" 882 | } 883 | ], 884 | "source": [ 885 | "y_cat_test[0]" 886 | ] 887 | }, 888 | { 889 | "cell_type": "code", 890 | "execution_count": 40, 891 | "metadata": {}, 892 | "outputs": [ 893 | { 894 | "data": { 895 | "text/plain": [ 896 | "7" 897 | ] 898 | }, 899 | "execution_count": 40, 900 | "metadata": {}, 901 | "output_type": "execute_result" 902 | } 903 | ], 904 | "source": [ 905 | "predictions[0]" 906 | ] 907 | }, 908 | { 909 | "cell_type": "code", 910 | "execution_count": 41, 911 | "metadata": {}, 912 | "outputs": [ 913 | { 914 | "data": { 915 | "text/plain": [ 916 | "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)" 917 | ] 918 | }, 919 | "execution_count": 41, 920 | "metadata": {}, 921 | "output_type": "execute_result" 922 | } 923 | ], 924 | "source": [ 925 | "y_test" 926 | ] 927 | }, 928 | { 929 | "cell_type": "code", 930 | "execution_count": 42, 931 | "metadata": { 932 | "scrolled": true 933 | }, 934 | "outputs": [ 935 | { 936 | "name": "stdout", 937 | "output_type": "stream", 938 | "text": [ 939 | " precision recall f1-score support\n", 940 | "\n", 941 | " 0 0.98 1.00 0.99 980\n", 942 | " 1 1.00 1.00 1.00 1135\n", 943 | " 2 0.97 0.99 0.98 1032\n", 944 | " 3 0.98 0.99 0.99 1010\n", 945 | " 4 0.99 0.99 0.99 982\n", 946 | " 5 0.98 0.98 0.98 892\n", 947 | " 6 1.00 0.98 0.99 958\n", 948 | " 7 0.99 0.98 0.98 1028\n", 949 | " 8 0.99 0.97 0.98 974\n", 950 | " 9 0.98 0.98 0.98 1009\n", 951 | "\n", 952 | "avg / total 0.99 0.99 0.99 10000\n", 953 | "\n" 954 | ] 955 | } 956 | ], 957 | "source": [ 958 | "print(classification_report(y_test,predictions))" 959 | ] 960 | }, 961 | { 962 | "cell_type": "markdown", 963 | "metadata": {}, 964 | "source": [ 965 | "Looks like the CNN performed quite well!" 
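
A confusion matrix is a natural companion to the classification report above; a minimal sketch using the same scikit-learn tooling, reusing the y_test and predictions arrays already defined in this notebook:

from sklearn.metrics import confusion_matrix

# Rows are the true digits 0-9, columns the predicted digits; large
# off-diagonal entries reveal which digit pairs the CNN confuses.
print(confusion_matrix(y_test, predictions))
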
966 | ] 967 | } 968 | ], 969 | "metadata": { 970 | "kernelspec": { 971 | "display_name": "Python 3", 972 | "language": "python", 973 | "name": "python3" 974 | }, 975 | "language_info": { 976 | "codemirror_mode": { 977 | "name": "ipython", 978 | "version": 3 979 | }, 980 | "file_extension": ".py", 981 | "mimetype": "text/x-python", 982 | "name": "python", 983 | "nbconvert_exporter": "python", 984 | "pygments_lexer": "ipython3", 985 | "version": "3.6.6" 986 | } 987 | }, 988 | "nbformat": 4, 989 | "nbformat_minor": 2 990 | } 991 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/DL-CV-Assessment.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Deep Learning for Image Classification Assessment\n", 8 | "\n", 9 | "\n", 10 | "Welcome to your assessment! Follow the instructions in bold below to complete the assessment.\n", 11 | "\n", 12 | "If you get stuck, check out the solutions video and notebook. (Make sure to run the solutions notebook before posting a question to the QA forum please, thanks!)\n", 13 | "\n", 14 | "------------\n", 15 | "\n", 16 | "## The Challenge\n", 17 | "\n", 18 | "**Your task is to build an image classifier with Keras and Convolutional Neural Networks for the Fashion MNIST dataset. This data set includes 10 labels of different clothing types with 28 by 28 *grayscale* images. There is a training set of 60,000 images and 10,000 test images.**\n", 19 | "\n", 20 | " Label\tDescription\n", 21 | " 0\t T-shirt/top\n", 22 | " 1\t Trouser\n", 23 | " 2\t Pullover\n", 24 | " 3\t Dress\n", 25 | " 4\t Coat\n", 26 | " 5\t Sandal\n", 27 | " 6\t Shirt\n", 28 | " 7\t Sneaker\n", 29 | " 8\t Bag\n", 30 | " 9\t Ankle boot\n", 31 | " \n", 32 | " " 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## The Data\n", 40 | "\n", 41 | "**TASK 1: Run the code below to download the dataset using Keras.**" 42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 17, 47 | "metadata": {}, 48 | "outputs": [], 49 | "source": [ 50 | "from keras.datasets import fashion_mnist\n", 51 | "\n", 52 | "(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "metadata": {}, 58 | "source": [ 59 | "## Visualizing the Data\n", 60 | "\n", 61 | "**TASK 2: Use matplotlib to view an image from the data set. 
It can be any image from the data set.**" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 4, 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "import matplotlib.pyplot as plt" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 5, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "" 82 | ] 83 | }, 84 | "execution_count": 5, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | }, 88 | { 89 | "data": { 90 | "image/png": "<base64 PNG data omitted: matplotlib rendering of the first Fashion MNIST training image>\n", 91 | "text/plain": [ 92 | "
" 93 | ] 94 | }, 95 | "metadata": {}, 96 | "output_type": "display_data" 97 | } 98 | ], 99 | "source": [ 100 | "plt.imshow(x_train[0])" 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "metadata": {}, 106 | "source": [ 107 | "## Preprocessing the Data\n", 108 | "\n", 109 | "**TASK 3: Normalize the X train and X test data by dividing by the max value of the image arrays.**" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 6, 115 | "metadata": {}, 116 | "outputs": [ 117 | { 118 | "data": { 119 | "text/plain": [ 120 | "255" 121 | ] 122 | }, 123 | "execution_count": 6, 124 | "metadata": {}, 125 | "output_type": "execute_result" 126 | } 127 | ], 128 | "source": [ 129 | "x_train.max()" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": 18, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "x_train = x_train / x_train.max()\n", 139 | "x_test = x_test / x_test.max()" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "**Task 4: Reshape the X arrays to include a 4 dimension of the single channel. Similar to what we did for the numbers MNIST data set.**" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 8, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "data": { 156 | "text/plain": [ 157 | "(60000, 28, 28)" 158 | ] 159 | }, 160 | "execution_count": 8, 161 | "metadata": {}, 162 | "output_type": "execute_result" 163 | } 164 | ], 165 | "source": [ 166 | "x_train.shape" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 19, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "x_train = x_train.reshape(60000,28,28,1)\n", 176 | "x_test = x_test.reshape(10000,28,28,1)" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "metadata": {}, 182 | "source": [ 183 | "**TASK 5: Convert the y_train and y_test values to be one-hot encoded for categorical analysis by Keras.**" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 20, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "from keras.utils import to_categorical" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 21, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "y_train_cat = to_categorical(y_train,10)\n", 202 | "y_test_cat = to_categorical(y_test,10)" 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": 22, 208 | "metadata": {}, 209 | "outputs": [ 210 | { 211 | "data": { 212 | "text/plain": [ 213 | "(60000, 10)" 214 | ] 215 | }, 216 | "execution_count": 22, 217 | "metadata": {}, 218 | "output_type": "execute_result" 219 | } 220 | ], 221 | "source": [ 222 | "y_train_cat.shape" 223 | ] 224 | }, 225 | { 226 | "cell_type": "markdown", 227 | "metadata": {}, 228 | "source": [ 229 | "## Building the Model\n", 230 | "\n", 231 | "**TASK 5: Use Keras to create a model consisting of at least the following layers (but feel free to experiment):**\n", 232 | "\n", 233 | "* 2D Convolutional Layer, filters=32 and kernel_size=(4,4)\n", 234 | "* Pooling Layer where pool_size = (2,2)\n", 235 | "\n", 236 | "* Flatten Layer\n", 237 | "* Dense Layer (128 Neurons, but feel free to play around with this value), RELU activation\n", 238 | "\n", 239 | "* Final Dense Layer of 10 Neurons with a softmax activation\n", 240 | "\n", 241 | "**Then compile the model with these parameters: loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy']**" 
242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": 23, 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "from keras.models import Sequential\n", 251 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": 24, 257 | "metadata": {}, 258 | "outputs": [], 259 | "source": [ 260 | "model = Sequential()\n", 261 | "\n", 262 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(28, 28, 1), activation='relu',))\n", 263 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 264 | "\n", 265 | "model.add(Flatten())\n", 266 | "\n", 267 | "model.add(Dense(128, activation='relu'))\n", 268 | "\n", 269 | "model.add(Dense(10, activation='softmax'))\n", 270 | "\n", 271 | "\n", 272 | "model.compile(loss='categorical_crossentropy',\n", 273 | " optimizer='rmsprop',\n", 274 | " metrics=['accuracy'])" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 25, 280 | "metadata": {}, 281 | "outputs": [ 282 | { 283 | "name": "stdout", 284 | "output_type": "stream", 285 | "text": [ 286 | "_________________________________________________________________\n", 287 | "Layer (type) Output Shape Param # \n", 288 | "=================================================================\n", 289 | "conv2d_2 (Conv2D) (None, 25, 25, 32) 544 \n", 290 | "_________________________________________________________________\n", 291 | "max_pooling2d_2 (MaxPooling2 (None, 12, 12, 32) 0 \n", 292 | "_________________________________________________________________\n", 293 | "flatten_2 (Flatten) (None, 4608) 0 \n", 294 | "_________________________________________________________________\n", 295 | "dense_3 (Dense) (None, 128) 589952 \n", 296 | "_________________________________________________________________\n", 297 | "dense_4 (Dense) (None, 10) 1290 \n", 298 | "=================================================================\n", 299 | "Total params: 591,786\n", 300 | "Trainable params: 591,786\n", 301 | "Non-trainable params: 0\n", 302 | "_________________________________________________________________\n" 303 | ] 304 | } 305 | ], 306 | "source": [ 307 | "model.summary()" 308 | ] 309 | }, 310 | { 311 | "cell_type": "markdown", 312 | "metadata": {}, 313 | "source": [ 314 | "### Training the Model\n", 315 | "**TASK 7: Train/Fit the model to the x_train set. The number of epochs is up to you.**" 316 | ] 317 | }, 318 | { 319 | "cell_type": "code", 320 | "execution_count": 27, 321 | "metadata": {}, 322 | "outputs": [ 323 | { 324 | "name": "stdout", 325 | "output_type": "stream", 326 | "text": [ 327 | "Epoch 1/1\n", 328 | "60000/60000 [==============================] - 26s 437us/step - loss: 0.3219 - acc: 0.8856\n" 329 | ] 330 | }, 331 | { 332 | "data": { 333 | "text/plain": [ 334 | "" 335 | ] 336 | }, 337 | "execution_count": 27, 338 | "metadata": {}, 339 | "output_type": "execute_result" 340 | } 341 | ], 342 | "source": [ 343 | "model.fit(x_train,y_train_cat,epochs=1)" 344 | ] 345 | }, 346 | { 347 | "cell_type": "markdown", 348 | "metadata": {}, 349 | "source": [ 350 | "### Evaluating the Model\n", 351 | "\n", 352 | "**TASK 8: Show the accuracy, precision, recall, and f1-score the model achieved on the x_test data set.
Keep in mind, there are quite a few ways to do this, but we recommend following the same procedure we showed in the MNIST lecture.**" 353 | ] 354 | }, 355 | { 356 | "cell_type": "code", 357 | "execution_count": 28, 358 | "metadata": {}, 359 | "outputs": [ 360 | { 361 | "data": { 362 | "text/plain": [ 363 | "['loss', 'acc']" 364 | ] 365 | }, 366 | "execution_count": 28, 367 | "metadata": {}, 368 | "output_type": "execute_result" 369 | } 370 | ], 371 | "source": [ 372 | "model.metrics_names" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": 30, 378 | "metadata": {}, 379 | "outputs": [ 380 | { 381 | "name": "stdout", 382 | "output_type": "stream", 383 | "text": [ 384 | "10000/10000 [==============================] - 1s 138us/step\n" 385 | ] 386 | }, 387 | { 388 | "data": { 389 | "text/plain": [ 390 | "[0.3002570129275322, 0.8916]" 391 | ] 392 | }, 393 | "execution_count": 30, 394 | "metadata": {}, 395 | "output_type": "execute_result" 396 | } 397 | ], 398 | "source": [ 399 | "model.evaluate(x_test,y_test_cat)" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": 31, 405 | "metadata": {}, 406 | "outputs": [], 407 | "source": [ 408 | "from sklearn.metrics import classification_report" 409 | ] 410 | }, 411 | { 412 | "cell_type": "code", 413 | "execution_count": 32, 414 | "metadata": {}, 415 | "outputs": [], 416 | "source": [ 417 | "predictions = model.predict_classes(x_test)" 418 | ] 419 | }, 420 | { 421 | "cell_type": "code", 422 | "execution_count": 34, 423 | "metadata": {}, 424 | "outputs": [ 425 | { 426 | "name": "stdout", 427 | "output_type": "stream", 428 | "text": [ 429 | " precision recall f1-score support\n", 430 | "\n", 431 | " 0 0.86 0.81 0.84 1000\n", 432 | " 1 0.99 0.97 0.98 1000\n", 433 | " 2 0.80 0.85 0.83 1000\n", 434 | " 3 0.86 0.93 0.89 1000\n", 435 | " 4 0.84 0.81 0.83 1000\n", 436 | " 5 0.96 0.98 0.97 1000\n", 437 | " 6 0.73 0.69 0.71 1000\n", 438 | " 7 0.97 0.91 0.94 1000\n", 439 | " 8 0.98 0.98 0.98 1000\n", 440 | " 9 0.93 0.98 0.95 1000\n", 441 | "\n", 442 | "avg / total 0.89 0.89 0.89 10000\n", 443 | "\n" 444 | ] 445 | } 446 | ], 447 | "source": [ 448 | "print(classification_report(y_test,predictions))" 449 | ] 450 | }, 451 | { 452 | "cell_type": "markdown", 453 | "metadata": {}, 454 | "source": [ 455 | "# Great Job!" 456 | ] 457 | } 458 | ], 459 | "metadata": { 460 | "kernelspec": { 461 | "display_name": "Python 3", 462 | "language": "python", 463 | "name": "python3" 464 | }, 465 | "language_info": { 466 | "codemirror_mode": { 467 | "name": "ipython", 468 | "version": 3 469 | }, 470 | "file_extension": ".py", 471 | "mimetype": "text/x-python", 472 | "name": "python", 473 | "nbconvert_exporter": "python", 474 | "pygments_lexer": "ipython3", 475 | "version": "3.6.5" 476 | } 477 | }, 478 | "nbformat": 4, 479 | "nbformat_minor": 2 480 | } 481 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/Keras-CNN-CIFAR-10.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# CIFAR-10 Multiple Classes\n", 8 | "\n", 9 | "Let's go over another example of using Keras and building out CNNs. This time we will use another famous data set: the CIFAR-10 dataset, which consists of 10 different image types."
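
For reference, the integer labels returned by `cifar10.load_data()` index into the ten standard CIFAR-10 category names:

# Standard CIFAR-10 class names, indexed by the integer labels 0-9.
cifar10_classes = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                   'dog', 'frog', 'horse', 'ship', 'truck']
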
10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "-----" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "# The Data\n", 24 | "\n", 25 | "CIFAR-10 is a dataset of 50,000 32x32 color training images, labeled over 10 categories, and 10,000 test images." 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 1, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stderr", 35 | "output_type": "stream", 36 | "text": [ 37 | "Using TensorFlow backend.\n" 38 | ] 39 | } 40 | ], 41 | "source": [ 42 | "from keras.datasets import cifar10\n", 43 | "\n", 44 | "(x_train, y_train), (x_test, y_test) = cifar10.load_data()" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": {}, 51 | "outputs": [ 52 | { 53 | "data": { 54 | "text/plain": [ 55 | "(50000, 32, 32, 3)" 56 | ] 57 | }, 58 | "execution_count": 2, 59 | "metadata": {}, 60 | "output_type": "execute_result" 61 | } 62 | ], 63 | "source": [ 64 | "x_train.shape" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 3, 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "data": { 74 | "text/plain": [ 75 | "(32, 32, 3)" 76 | ] 77 | }, 78 | "execution_count": 3, 79 | "metadata": {}, 80 | "output_type": "execute_result" 81 | } 82 | ], 83 | "source": [ 84 | "x_train[0].shape" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 4, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "import matplotlib.pyplot as plt" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 5, 99 | "metadata": {}, 100 | "outputs": [ 101 | { 102 | "data": { 103 | "text/plain": [ 104 | "" 105 | ] 106 | }, 107 | "execution_count": 5, 108 | "metadata": {}, 109 | "output_type": "execute_result" 110 | }, 111 | { 112 | "data": { 113 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAH3FJREFUeJztnVuMXNd1pv9Vt67qezf7QrJJiRJ1GcmxRMmMIEiZjB3PBIoRRDaQZOwHQw9GGAQxEAPJg+AAYw8wD/ZgbMMPAw/okRJl4PFlfImFQJjEEWwIiQNFlCXrHomiKLHJVrPJ7mZ3dVXXdc1DlyZUa/+bJTZZTWn/H0B0ca/a56zaddY5VeevtZa5O4QQ6ZHZbgeEENuDgl+IRFHwC5EoCn4hEkXBL0SiKPiFSBQFvxCJouAXIlEU/EIkSm4rk83sHgBfB5AF8D/d/Uux5+fzee8rFoO2VqtF52UQ/hVi1vi+Cjl+XstHbLlsltrMwjs0i5xDIz42m/w1x353mY35SH6x2fY231eb780ykRcQod0Ov7aY79HtRfy3yCIzWybiRzbD3092DABAO/JrWY8dCGxOdHthFpdXUa6sd7Wziw5+M8sC+O8A/gOAWQBPmNnD7v4Cm9NXLOLA7R8K2paXF+m++jLhN368wBfnqh391DY5PkBtE6OD1FbI5oPjub4SnYMsX+LFpWVqqzf5axsbHaG2TKsRHK/VanTO+vo6tRVL4ZM1ALTAT16Vajk4PjI6TOfA+fbqtTq1ZRF+XwB+shka5O/zwAA/PvJ5vh7ViI8eu0BkwsdI7DU3PRzfX37gB3w/m3fb9TPfyR0Ajrr7MXevA/gOgHu3sD0hRA/ZSvDPADhx3v9nO2NCiPcAW/nOH/rc8Y7PqmZ2CMAhAOjr69vC7oQQl5KtXPlnAew97/97AJza/CR3P+zuB939YC7Pv5sJIXrLVoL/CQDXm9k1ZlYA8EkAD18at4QQl5uL/tjv7k0z+yyAv8WG1Peguz8fm7O+vo7nXwg/ZfnMGTpvnNxgtR38zutEa4jarDRFbWttrjqUW+E78G4FOqeyzu/YVqr8DnyjxaWtMxGNs5gL+9hs8u1lyd1mIP5VrbK+Rm3Ndvh12/oOOicTUQEbEbWilOPHQZncMV9sNemc/n5+t98y/NOrETUIABCRDyvrYYWm2QiPA0A2F35fGutV7sMmtqTzu/sjAB7ZyjaEENuDfuEnRKIo+IVIFAW/EImi4BciURT8QiTKlu72v1syAEo5IlNFfvx3NZH09k3zBJepyXFqK8WknEjWVrUWToBZb3AZyiPbK5QiCUGRxB5v8/2NjIcTmpoNvr1CnvsRSbZEtsDftFo9vFaNJl+P/sj2cgPcx2JkXtPCcmQmkiXYjGTgxTJJBwd4Mll5rUJtjWZY0oslVK6unAuOt2Nv2Obtd/1MIcT7CgW/EImi4BciURT8QiSKgl+IROnp3X4zR9HCCRVDQ9yVG2bGguM7SjwTJN/mpanKizzZptXm58NqJex7huf1YDhSFiwXuUu9fG6Vz4u8a+ND4TvOqys8CaceSdCpkqQTIF6XbpCUwmrUeeJJpsVfWD6SYNQipcsAIEduz9dqfE4hz9/QTJsnBNXKS9QGkhQGAH3kMG62uSJxbi2s+LQi9Rg3oyu/EImi4BciURT8QiSKgl+IRFHwC5EoCn4hEqWnUl/ODGN94V2WIlLOCEnqmBzmNdNapF0UgEifGSCbixSSI3XYau2I1BTR5XKR5JJWjUtinuXn7NOnw12AWg3+qlcrPOmk0uKy6GAp0n2nRtp1gb/mjHGZKtsX6ZSzxmXd/nzYx1ykFdZ6pO5itcGlvnakydpymfu4XAkfP2UiLQPAeiN8DNQjtRo3oyu/EImi4BciURT8QiSKgl+IRFHwC5EoCn4hEmVLUp+ZHQewig31rOnuB6M7yxomR8OSzVCeS2zFYtiWyXJppRSpj9doctmrHclUcw9LQPVIvb1WncuAbY9kzEUkNs/xrLPVejhDr9Xi61uJtAZrRmyra9z/k4thP/IZvr3hMl/7xpu8nVv1HJcqr5q4Ljg+NbWHzrGhcH08AKgtnaW2cplnR55b5VLfmXNhWff4Ce5HKxsO3Vqdy4ObuRQ6/0fcnb8zQogrEn3sFyJRthr8DuDvzOxJMzt0KRwSQvSGrX7sv9vdT5nZFICfmNlL7v7Y+U/onBQOAUAx8r1eCNFbtnTld/dTnb+nAfwIwB2B5xx294PufrCQ07cMIa4ULjoazWzAzIbeegzgNwE8d6kcE0JcXrbysX8awI867a1yAP63u//f2IR8Lovdk+HCjsMFLlEM9oelLYtIZYhkWFkkm65W5bJRhsiAO4Z427CBAZ6NtnKOiyQjwzxjbjVSVPP1k+Ftlmv8K1chkgg20x/JSszzzMPjZ8PZhTWPFF2NZPWNDA9R2103c4V5ZS4s63olsq8Jni1aq/D1KJf5tbQvz7e5d2f4tU1NTdM58yth6fDsy2/SOZu56OB392MAbr3Y+UKI7UVfwoVIFAW/EImi4BciURT8QiSKgl+IROltAc+sYXwonG2Xq4elIQDoy4fd7O8L96UDgFqVy2GNSL+10dFwX0AAcFL0sd7i59BGI1JccpD38Tu1EO7FBgCvvs6zvRZWw68tUgsSV0d6Hn783x6gtj27uP/ff/JYcPyfjnIpqtnmmYy5DJfmVpcXqK1SDq/j0BCX3tDi2YXFIp9XINmnANBvfF6zFX5zrtq7m84ZWgz3cnzmNb4Wm9GVX4hEUfALkSgKfiESRcEvRKIo+IVIlN7e7c/lMDW+I2irLvK74hkLu1kmbY4AoBqpZZazSD27SFsrdqasNvhd6tExnqBTb/E72MdmT1Hb4gr3kdX3y0ZafA0X+famcuG7ygBQXOSKxPXDO4Pjc+Pcj/nl09RWq/A1furll6ktQ9pXNQYircZGeEINMjxkRka4+jTUjrQHI3Uevb5C5+wjCXJ9+e6v57ryC5EoCn4hEkXBL0SiKPiFSBQFvxCJouAXIlF6LPXlMTYxGbSNDfL2WplMOClieWWJzmmslfn2WrF2XbygnZMEo8FBXqevAW578RiXqNZqvPVTsdjHbYWwj6UBLkONZbks+uTReWpr1vnhUxsJS32TY3w9DFx+azS5FFyp81qCa6RWX73JX7NFpNtINzfkM5FWb5lI7cJceB2bNS6lOpGJSe5ZEF35hUgUBb8QiaLgFyJRFPxCJIqCX4hEUfALkSgXlPrM7EEAvw3gtLv/SmdsHMB3AewDcBzA77s7193+dWsAke0s0s6I0Repp9aPcNYTAOQi57xMJlKPj8iAfSXeruvMmzwrrnKGL9m141wSq3HVC0Ui6d24f4bOyUQ22MzyNV6JSK25bLjO4FCBvy87xvZT2/7rr6K21954gtpeevlkcLyQi8hozmXiZpOHTIZkVAJAvsDXsd0OH1ftiK5oFj5OI0rkO+jmyv+XAO7ZNHY/gEfd/XoAj3b+L4R4D3HB4Hf3xwAsbhq+F8BDnccPAfj4Jf
ZLCHGZudjv/NPuPgcAnb9Tl84lIUQvuOw3/MzskJkdMbMjq5XIl1UhRE+52OCfN7NdAND5S+svufthdz/o7geH+vlNLCFEb7nY4H8YwH2dx/cB+PGlcUcI0Su6kfq+DeDDACbMbBbAFwB8CcD3zOwzAN4A8Hvd7Kztjup6uFihNXhmFhDOwFpb4wUO6w1+Xmtm+CeQcoVLcyvENrOXL6M3+faunuDCzP7dXBqqrPN5MzfcGhwvOP/KtXSOF0ItjYYLrgIAzvJMtb07dwXHl9d4tuK1/+Z6ahse41mJw2M3UdvSQnj9l87xlmf5iByZcZ5R2WhHskV5sihajfDxHUkSpK3j3kVS34WD390/RUwffRf7EUJcYegXfkIkioJfiERR8AuRKAp+IRJFwS9EovS0gKfD0bKwHOItXlCRyRqlIi/6OTjEpaFTC1xWfG12gdpy+bAfhXneV299nm/v+iku5330w1z2evXk5lSLf2VoJlwgdWJHuKAmAJxe4EU6R0cjsleb+18gBStPL4Sz7AAgV1ymtoXlOWo7Ocez8PL58HEwOsy1t2qVC2ae49dLi2hz7YgMmLHwPItkmEbaPHaNrvxCJIqCX4hEUfALkSgKfiESRcEvRKIo+IVIlJ5KfdlsBqOjg0FbM8elvnI5nJHmDS6fnFvlWVuvv8GlrXKZy0alYvhcOfcazy6cLvKijjMzV1Pb6O5rqC2/GkkRI0VN99x6B5/yJpffSk0uVbbAMwXX1sK2Xf1hKRIA6i3+umwgfNwAwJ6B3dQ2NBqWOFfPvknnnJ4/S20N4/Lmep0XBUWGa3MDfeEs03o1ImGSgqBGZMOgS10/UwjxvkLBL0SiKPiFSBQFvxCJouAXIlF6ere/3WpidTl8JzVX57Xu8qQ1EXgJOeSy3FgpcyVgbIgnsowOhO/KVpf43f6p3bwG3swt/47anputU9vLR7ntrl3jwfHlZT5nen+47h8AZFChtnqNKwGjHr5zv3Ka30kv1XktwV3j4dcFAMstXlcvf8tYcLwaSRT6x0ceprbZE/w1ZyMtuWKNtFgeUSPWVq4RXiuWBBfcRtfPFEK8r1DwC5EoCn4hEkXBL0SiKPiFSBQFvxCJ0k27rgcB/DaA0+7+K52xLwL4AwBv6R6fd/dHutlhligerUgSgxOZJEPaeAFAy7jUt8QVJaysROq31cJy2a4RLg/+6kc+Qm17bryT2n74Fw9S285Ikku2Hq5PePLYq3x7195MbcUd11HbgHN5trIY7t1aaoelNwCoV7mseGaV20YneRLUjp37guPV8jCdk+EmtAo8mSlWw6/R4FKrNcMJauY8ca3ZDIfupZb6/hLAPYHxr7n7gc6/rgJfCHHlcMHgd/fHAPBysUKI9yRb+c7/WTN7xsweNDP+WU4IcUVyscH/DQD7ARwAMAfgK+yJZnbIzI6Y2ZFyhX/vEUL0losKfnefd/eWu7cBfBMALRPj7ofd/aC7Hxzs51VthBC95aKC38x2nfffTwB47tK4I4ToFd1Ifd8G8GEAE2Y2C+ALAD5sZgcAOIDjAP6wm50ZACNKRItkKQG8bVGkcxK8GtlepATe+A7e5mtnf1havP3gDXTOTXdxOW/pNJc3+5o88/DaPXuorU1e3M4pXjuvuc4l00okG7De5PMa1fCh1QKXKV89OUttzz53hNruupP7uGNnOKtyZTUsRQIA6fAFAJjYx2Xddqy9Vj0i2xEJ+dwCb19WWw072SbZlCEuGPzu/qnA8ANd70EIcUWiX/gJkSgKfiESRcEvRKIo+IVIFAW/EInS0wKe7kCbZDBVa1yiKJAstlyOF0zMZrj8c91O/mvkYomfD/ddvTc4fuuv8cy9XTfeQm1P/9NfUNtVe7mPOz/wQWorTO4Pjuf6R+icyjqXHKsrPHNv/tQJaluaD8t2rQbPzisNhQukAsDEBH+vT5x6itqmd80Ex5uVSBZplbfdsrUlamt5OKMSAJxp3ABKfeHXVtjJX/NKH8l0fRcRrSu/EImi4BciURT8QiSKgl+IRFHwC5EoCn4hEqWnUp+ZIZ8N73IpUqCxtR6WNUr9JTonm+HSylQkc+/EHM+k2n97qJQhsOeD4fENuGTXWF2jtpEhLs1N3nCA2tZy4Z52zz/1BJ1Tq3I/Vlb4epw5+Qa1ZVthqbVY5IfczDVhWQ4AbrmBFxJtZnmmXT47Gh4v8KzP3Dov0ll5/SS1MRkbAJqRy2yZ9JXs38Ff1zTpAZnPd38915VfiERR8AuRKAp+IRJFwS9Eoij4hUiU3ib2tNuoVcN3Uvv7uCtWDN8NzWd4DTlvcVtpkLfy+p3/+DvUdtdvfTQ4PjwxTefMH3uR2rIR/5dXeQ2/heP/Qm2nVsN3nH/2139N5wyWeALJeo0nwOyc5orE8FD4TvVrszwZqB5Zj/Hd+6jthg9+iNrQ6gsOLy7zeoEVoi4BwFKV+2jOj+H1Kk9cK5MWW17mqsNNYRED7e67denKL0SqKPiFSBQFvxCJouAXIlEU/EIkioJfiETppl3XXgB/BWAngDaAw+7+dTMbB/BdAPuw0bLr992dFzgD4HC0ndTWa/OkCGuGZZKmR1pyRWqmFfuGqe3Ah7hs1JcPS2IvPM1ryC2depXaajUu5awuLVLbiaMvUFvZw8lO+Rbf12COS5/DRZ5cMjnGpb65+TeD481IW7bKKpcVT7zGk4iA56mlXA7XICzm+PHR7JuitrNNfuyUSrwGYf8QT0Ir5cJy5Gplhc5ptsOS47tQ+rq68jcB/Km73wTgTgB/bGY3A7gfwKPufj2ARzv/F0K8R7hg8Lv7nLv/ovN4FcCLAGYA3Avgoc7THgLw8cvlpBDi0vOuvvOb2T4AtwF4HMC0u88BGycIAPyzkhDiiqPr4DezQQA/APA5d+dfRt4575CZHTGzI2tVXktfCNFbugp+M8tjI/C/5e4/7AzPm9mujn0XgGDDc3c/7O4H3f3gQKlwKXwWQlwCLhj8ZmYAHgDwort/9TzTwwDu6zy+D8CPL717QojLRTdZfXcD+DSAZ83s6c7Y5wF8CcD3zOwzAN4A8HsX3pRjQy18J+0m/0qQy4dr7rUiNdPq4NlX0yO8rt7fPvw31DY+HZaUpnaF23gBQL3Cs/Py+bDEAwCDA1xSymW4NDdA5MidU+GabwBQXeUKbSnLfTy7cIbaGvXwezNU5JJXvcylvleeOkJtcy+9TG21Jmmhledr2Iqt7x4ufWKAH8OZPi61FolsNwa+Vjd94JrgeKl4jM7ZzAWD393/AQDLcQznuAohrnj0Cz8hEkXBL0SiKPiFSBQFvxCJouAXIlF6WsATbmi3w8JBIZJZVsyR4ocZXmjRIy2c2nWeWXbmTDgbDQDKC2FbqcF/8NgGf13jY1x+G909SW3NVo3aTp4K++iRfK9Mhh8G9SaXTLPGC38OFMPyLEnQ3NhezBjJ0mzVuZyaIcfbSoXLm/U+Ig8CGNrN136txFubrba5DLi+Fr4G7xi+ls6ZINJtLt99SOvKL0SiKPiFSBQFvxCJouAXIlEU/EIkioJfiETprdQHQ8bCW
WLFPp7B5CRDb6AUlpMAYGBogtoqDZ5htWOI1xzIET/q5+bpnHaGb6+S59LW9HQ4awsA2nUuG914y57g+M9/+iidU/cKteWNy6nVMp83PBTOSizk+CGXtUg/u3X+nr02x2W75eXwe1azNTpn8gZ+TZwZjWQlOn+vl87wtSqshyXTgZlIJmYlnDXZjqilm9GVX4hEUfALkSgKfiESRcEvRKIo+IVIlJ7e7c8YUMiFzzeVGk+YyJKWUe1IfblKgydnZPM8SaSvwO/m5vNhPwr9vG3VyDBPMHpzgasElZnwXXsAmNp7HbWdPB2uq/eBX72bzikvnKK2Yy/zVlhrZZ7IksuG139khNcmNFLfEQDmTnIf33g9ktjTF17/4WmuFE2OR3yMqA62yN/rsSUeajNT48HxPaP8GDj6QjiBq1blSWub0ZVfiERR8AuRKAp+IRJFwS9Eoij4hUgUBb8QiXJBqc/M9gL4KwA7sdFr67C7f93MvgjgDwAsdJ76eXd/JLqznGF6Mny+aZw9S+dVW2EJaI3nZsAzvJVXLpJcMjzMkykKpBVWdY3X8CvFaqrVue3Iz39ObdfeyCXC2dmwBJSJ1Dvs7+O1+LIRObVU4tLWWjks9VWrXIJtRlq2DZa4H3fddgO1FUmCUTPLaxO2GjwJp3qCS32Z1SK1TfUPUdttN3wgPGd0ms55cu614HizwV/XZrrR+ZsA/tTdf2FmQwCeNLOfdGxfc/f/1vXehBBXDN306psDMNd5vGpmLwKYudyOCSEuL+/qO7+Z7QNwG4DHO0OfNbNnzOxBM+Otb4UQVxxdB7+ZDQL4AYDPufsKgG8A2A/gADY+GXyFzDtkZkfM7MhKhX+nE0L0lq6C38zy2Aj8b7n7DwHA3efdveXubQDfBHBHaK67H3b3g+5+cLifVzoRQvSWCwa/mRmABwC86O5fPW9813lP+wSA5y69e0KIy0U3d/vvBvBpAM+a2dOdsc8D+JSZHQDgAI4D+MMLbahQMFy1N3z1HzEukxw9EZZe5hd4dl69xaWhwUH+stcqPEOs1S4Hx7ORc+jiApcwV8tclllvcD+yzm1Dg+FbL/NvLtI5s2tcvmo7lwinJ7ksau1wdtnSMq+31zfA37PRES6VFbJ8/Wt1IvnmuLy5VuPbq5cjLcrafN51e3dS2+6d4XU8Mcsl3bML4ZhoxlqebaKbu/3/ACB0BEQ1fSHElY1+4SdEoij4hUgUBb8QiaLgFyJRFPxCJEpPC3hmc4bhMZIZR6QLABibyoYNA7wI45l5XhB0PdLuKlfgxRvZtHaDZxA2WtyPc1Uuew1EstjWK1yaq66HC3jWIz62IjZ3svYAyiuRdl3D4UKow8O82Gm1yrd35ixfq8FBnl1omfD1zZpcJi7keBHXPq5Io1Dga7Xvun3UVq2EfXnssRfonGdePh3e1nr3WX268guRKAp+IRJFwS9Eoij4hUgUBb8QiaLgFyJReir1mRlyxfAui8M81398MHyOylW5jJYv8eymlUjfNLT4+bBUnApPyfN9tWq8n12hn/uRz/H1yGa5xFnzsC/1Bpc3PZK5Z1wRg9e55Ngipnwkmw4FLm8uL3Gpr1rn/elGRsPSbY5IgACQiax9BVxKmz+zSm1LkQzO1bVwlubf/+wlvi+iiq7XJfUJIS6Agl+IRFHwC5EoCn4hEkXBL0SiKPiFSJSeSn3ttqHMCiBmB+m8wYGwbpQvcR1qIJJ+NTLCpbnyCu8lV14JF1QsVyJZfevcNlTgBTCLpC8gADRrXOLM5cLn80LkNJ/v49loZnxif6QQaoaYmi0uRRVKkR6Ko1zeXFzkEtsqkT6Hx/naVyI9A185zguyvvTsCWqbHufZotN7yGvL8ON0ghQ0nV/lsuc7Nt/1M4UQ7ysU/EIkioJfiERR8AuRKAp+IRLlgnf7zawI4DEAfZ3nf9/dv2Bm1wD4DoBxAL8A8Gl3j7bhrdeB2dfDttoyvzs/NBm+Q1wsRRI6uHiA8XH+sstrvI7c8nLYtnSWJ4Is8ZvDyLb5Xfa2cyWj1eIKAtphW+wsbxme2JPN8bWqRpKgnNzUz5M2XgDQrPCWYq1Ifb9WJFlouRyex7p4AcBiRPE5fpS/octn16itvsZ3uHMk3Mrrpqtn6Bzm4itvrtA5m+nmyl8D8Bvufis22nHfY2Z3AvgygK+5+/UAlgB8puu9CiG2nQsGv2/wVofKfOefA/gNAN/vjD8E4OOXxUMhxGWhq+/8ZpbtdOg9DeAnAF4FsOz+/z/czQLgn1GEEFccXQW/u7fc/QCAPQDuAHBT6GmhuWZ2yMyOmNmRc2Ve/EEI0Vve1d1+d18G8DMAdwIYNbO37gbtAXCKzDns7gfd/eDIYKTjgRCip1ww+M1s0sxGO49LAP49gBcB/BTA73aedh+AH18uJ4UQl55uEnt2AXjIzLLYOFl8z93/xsxeAPAdM/svAJ4C8MCFNuSWQys/EbQ1CgfpvFo7nMiSaYZbUwFAcYTLV6OT/BPIWIYnnoxXwokWy4u8vdPyGS7nVdf48reaXD6E83N2uxn2cb3Kv3IVCpF6gTnu/+o6Tzypkq94+YgaPJQJJ6sAQDvDJaxGg69j30BYMi3meb3A0QL38VqMUtsHb+Vtw2685VZq23fddcHxO+7k8ubsqXJw/B9f5TGxmQsGv7s/A+C2wPgxbHz/F0K8B9Ev/IRIFAW/EImi4BciURT8QiSKgl+IRDGPZI9d8p2ZLQB4K69vAkD3usTlQ368Hfnxdt5rflzt7pPdbLCnwf+2HZsdcXcu7ssP+SE/Lqsf+tgvRKIo+IVIlO0M/sPbuO/zkR9vR368nfetH9v2nV8Isb3oY78QibItwW9m95jZv5jZUTO7fzt86Phx3MyeNbOnzexID/f7oJmdNrPnzhsbN7OfmNkrnb9j2+THF83sZGdNnjazj/XAj71m9lMze9HMnjezP+mM93RNIn70dE3MrGhm/2xmv+z48Z8749eY2eOd9fiumUVSP7vA3Xv6D0AWG2XArgVQAPBLADf32o+OL8cBTGzDfn8dwO0Anjtv7L8CuL/z+H4AX94mP74I4M96vB67ANzeeTwE4GUAN/d6TSJ+9HRNABiAwc7jPIDHsVFA53sAPtkZ/x8A/mgr+9mOK/8dAI66+zHfKPX9HQD3boMf24a7PwZgc53qe7FRCBXoUUFU4kfPcfc5d/9F5/EqNorFzKDHaxLxo6f4Bpe9aO52BP8MgPPbmW5n8U8H8Hdm9qSZHdomH95i2t3ngI2DEMDUNvryWTN7pvO14LJ//TgfM9uHjfoRj2Mb12STH0CP16QXRXO3I/hDJXa2S3K4291vB/BbAP7YzH59m/y4kvgGgP3Y6NEwB+ArvdqxmQ0C+AGAz7l7990nLr8fPV8T30LR3G7ZjuCfBbD3vP/T4p+XG3c/1fl7GsCPsL2ViebNbBcAdP6e3g4n3H2+c+C1AXwTPVoTM8tjI+C+5e4/7Az3fE1CfmzXmnT2/a6L5nbLdgT/EwCu79y5LAD4JICHe+2EmQ2Y2dBbjwH8JoDn4rMuKw9joxAq
sI0FUd8Ktg6fQA/WxMwMGzUgX3T3r55n6umaMD96vSY9K5rbqzuYm+5mfgwbd1JfBfDn2+TDtdhQGn4J4Ple+gHg29j4+NjAxiehzwDYAeBRAK90/o5vkx//C8CzAJ7BRvDt6oEfv4aNj7DPAHi68+9jvV6TiB89XRMAt2CjKO4z2DjR/Kfzjtl/BnAUwP8B0LeV/egXfkIkin7hJ0SiKPiFSBQFvxCJouAXIlEU/EIkioJfiERR8AuRKAp+IRLl/wHCOW2RBgdIrQAAAABJRU5ErkJggg==\n", 114 | "text/plain": [ 115 | "
" 116 | ] 117 | }, 118 | "metadata": { 119 | "needs_background": "light" 120 | }, 121 | "output_type": "display_data" 122 | } 123 | ], 124 | "source": [ 125 | "# FROG\n", 126 | "plt.imshow(x_train[0])" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": 6, 132 | "metadata": {}, 133 | "outputs": [ 134 | { 135 | "data": { 136 | "text/plain": [ 137 | "" 138 | ] 139 | }, 140 | "execution_count": 6, 141 | "metadata": {}, 142 | "output_type": "execute_result" 143 | }, 144 | { 145 | "data": { 146 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAHNxJREFUeJztnWts3Nd55p93LuRweJMoUjKti2WpcmI78S2qN103bXpJ6gYtnABtkXxI/SGoit0GuwHaD0YW2KTYXSBdbBLkwyILZWPUXWTjpLk0RhEkMdwkTrZb23Riyxc1seXIupAWRYmkeBnO9e2HGe/KzHkOR0NqSOU8P0DQ8Lxz5pz5z/+Z/8x55n2PuTuEEOmR2ewJCCE2B4lfiESR+IVIFIlfiESR+IVIFIlfiESR+IVIFIlfiESR+IVIlNx6OpvZvQA+AyAL4H+6+ydi9x/cPupju/cFY5380tCMx2KPZoh0FFuTa/yXqLHZO4lGnzEJXpg8jcW5C22d4B2L38yyAP47gHcBOAPgKTN7xN1fZH3Gdu/Df/rKD4KxRr3WyRxorFPxx95QaJ9r+7y8JrgmfoYemWLDGzRWRThWa/A+qIYH+y9//C7eZxXr+dh/N4CX3f0Vd68AeBjAfet4PCFEF1mP+HcDOH3Z32dabUKIa4D1iD/0AfnnPouY2REzmzCziUuzM+sYTgixkaxH/GcA7L3s7z0AJlffyd2Puvthdz88tH10HcMJITaS9Yj/KQCHzOxGM+sB8H4Aj2zMtIQQV5uOV/vdvWZmHwbwbTStvgfd/YVYHzMglw0vpzc6eR/qZGke4e8r7Twmi2Qii7KdT+Qap4PFeWZ5NYOdHazoY3ZAzCmKjeXOz+8McTKykefcIA93JUdpXT6/u38TwDfX8xhCiM1Bv/ATIlEkfiESReIXIlEkfiESReIXIlHWtdrf0YAZYvVFbI1YAs9WwDp9C93aT2tdbLQJ27FhR8+rjU8U8tizjgxnJKfNIslMDaKjKzkVdeUXIlEkfiESReIXIlEkfiESReIXIlG6utpvALIsIaGTgmVbBL2DbmG2iKPSiK3c18LL/Zl6nfZpVtFbHzpvhUgUiV+IRJH4hUgUiV+IRJH4hUgUiV+IROluYo8ZMiQLxi1ia1CrL+bjbLzHQ2v4RfM5YvPo0MLcYFu007p0ndFd762jnLD4dk+RbpEafg1+ftcr5WB7tcx3sbJcT3icK9jZSFd+IRJF4hciUSR+IRJF4hciUSR+IRJF4hciUdZl9ZnZSQALAOoAau5+eM1OmXA2kju3QnIglkd0d6eNf19jmYexbZXqEa+pcRWyFQ3hvcPipmhntfNi82d1F2P1GGOZnRtuEEbGykTORW6+ASBb0QFAJmJlV8uLwfbKCh+qtxC2+q7klNoIn/833F17bwtxjaGP/UIkynrF7wC+Y2ZPm9mRjZiQEKI7rPdj/z3uPmlmOwE8amb/7O6PX36H1pvCEQAY271vncMJITaKdV353X2y9f80gK8DuDtwn6PuftjdDw+NjK5nOCHEBtKx+M2s38wGX78N4N0Ant+oiQkhri7r+di/C8DXW9ZNDsD/dvdvxToY+BZEFnkfMr/y96irUfKTOTmVxQXaxyL2T09fH43VI9lZMRvTO0hj6zRzL7NV1os7yMLrNNcyviVX5HVxYs0BKC3NB9tXlku0T2+eWX1hqzdEx+J391cA3N5pfyHE5rJF3rqFEN1G4hciUSR+IRJF4hciUSR+IRKly3v1OfKoBGONBp8K35eMZ0plIpZHzMrJZPj74fz5c8H2x77+FdpncGCAxm5685torG/7MI31j43RWHFgJNhej2QeuvFjFbs6xC1YcpQ79GCjV6kOUv5ilmg9cg7EnkAmZs86P79nL0wF20+e4D+b+de/8rtkIK6J1ejKL0SiSPxCJIrEL0SiSPxCJIrEL0SidHe7Lq8j07gUnojx1W22Lsvq1QFrbJ0UWZXNWp7G5mZeC7Yf+6fv8bFWwu4GAPzs2F4aG9q9i8b2v/U2GvuVd/xOsN2sQPvUI6v9LBELiK9ucyJ1/yLL9vEF/Vi/8Hix1f5Y4lS9skRj5yYnaWzXTv5a1yvhxJ6TL/+Y9hkq9gfbSyWeZLYaXfmFSBSJX4hEkfiFSBSJX4hEkfiFSBSJX4hE6arVV62uYPL0i8HY+N5fpv0aJEmH2TjNWGfva17nGzLVa+Vg+3BvZJumOp/j0vQZGrtwKZzsAQDn587TWF9uKNh+21330D6Z3ogtGkmesg0+fTIRPy9ecy+2bxux+hr8EbM5fu6cefUnNPZP3/82jd1996/S2KkTLwTbz0++Svs8tRw+F5eWZPUJIdZA4hciUSR+IRJF4hciUSR+IRJF4hciUdb0aszsQQC/B2Da3d/SahsB8CUA+wGcBPBH7j671mOVV5Zx4qfPBWPX7+ab/2RYpl0kMytmDTWy/D2vthK2UADgp88+HWzPVJdpn52RGn4np7mdBwtnbQFAYz6cGQkA//DI3wXb+/P88W658600VovZbxFvjpUMrDe4LVeP1J/LRerqWSQLL0Ni2Yg9WCvz4/uTZ/4vjb344x/Q2OL8WRqbPHUq2D43zyVVbYSPVb3Gs0hX086V/68B3Luq7QEAj7n7IQCPtf4WQlxDrCl+d38cwMVVzfcBeKh1+yEA793geQkhrjKdfuff5e5TAND6f+fGTUkI0Q2u+oKfmR0xswkzm1ha4FVQhBDdpVPxnzOzcQBo/T/N7ujuR939sLsf7h/ki05CiO7SqfgfAXB/6/b9AL6xMdMRQnSLdqy+LwJ4J4BRMzsD4GMAPgHgy2b2IQCnAPxhO4PVazXMz4Q/JNRXuL2S6wsvKTR43UmYccvDM7xI50UyPwA4ceypYPtgDz+Mw729NHZhhmfn1ebnaGxkmT/x7aNhj+0nEz+kfV45/iyNDWzbTmO3v+0uGsv3hQuGNmJbYUVsRWZtAUC5xF/r0sJisH1x7gLtc/rVcJY
dALw4we28RqR45vTZkzS2QOZY6C/SPpkcOQeuYOuyNcXv7h8god9qfxghxFZDv/ATIlEkfiESReIXIlEkfiESReIXIlG6WsCzVqvg4oVw0cqfvXKM9nvTre8Itlumj/bJRzK9spE95k6fPEljc3Nh+23f+Cjtg6UqDcW2uosVEi0thfd2A4DtI2FrrjzPLcznn3qSxnp6+HGcfZlbhIX+8A+6+gb4a4ZIxt/ceW7NlSK/HD1DMuYWFyKFLnsimYc1nsGZiex5WMvw13OgdzDYXooUf200SuHAFeyfqCu/EIki8QuRKBK/EIki8QuRKBK/EIki8QuRKF21+rxRR6UULko4eTa8hx8AHHrTHcH2pUVidwCoRaytTGQvtsWZczRWroSLe5YjGWezkSzB+eVwNhcAFIu89kEuFylc6uEMt3rEHhzr51mO2QYvaDp7IlyMFQDKpbAlVqvyx4u5VH39vBDqyCDPfmtceCU8j2WeCXjozbfSWKGHF61aJM8ZAF49v7oS3v9nrho+D6yf24OFQXIOX0FWn678QiSKxC9Eokj8QiSKxC9Eokj8QiRKV1f7G406KqTO2amf8bppr7x0PNjemx2jfV5+8ns0NtjHV7czVb7CWiNJHU8c+zHtMzbAa+CVIttT1Re5EzC6kz/vejW8ir20yGsC7ojU6atXIsvHlUgRxVL4OBYzfEk/V+ihsfH919FYtsYTe84WwolVl8o84apR4U7A4AB3YfaM7qCxkcFtNPbwtx4Ntu88xJ2FbbuHg+25bJb2WY2u/EIkisQvRKJI/EIkisQvRKJI/EIkisQvRKK0s13XgwB+D8C0u7+l1fZxAH8C4PX9pj7q7t9c87EAZEj2xtzF12i/1ybPBtvf8bZbaJ+b33kPjZ14kdeeWzw7Q2O5TNiamwO3B4d7ufUyfvAGGjt9/ASNlVf4ePmR8PZg+d7w9lkA4JFEoUqNz996eEJNGeHt17J1brEVstzqG+jh255lwZOFxraFLbHzC7wm4MxcOPkMAKweSUwq8y3nxndwe3a4EH5u5WU+Vh/pY9Z+Zk87V/6/BnBvoP3T7n5H69+awhdCbC3WFL+7Pw6A5yMKIa5J1vOd/8NmdszMHjQz/hMxIcSWpFPxfxbAQQB3AJgC8El2RzM7YmYTZjZRXuHf94QQ3aUj8bv7OXevu3sDwOcA3B2571F3P+zuh3sL/Df1Qoju0pH4zWz8sj/fB+D5jZmOEKJbtGP1fRHAOwGMmtkZAB8D8E4zuwOAAzgJ4E/bGczdUK+EraOycUspmw9Ps8a2LALQE8kQGyrypz0+wLPObhwLW1uFvsi2YYP7aOz2O8ZprLHC35crKys0lsuE+znJ9gOAmTleZ3Bqhq/1Fou8rl6vk694Zf6aFar8NZu/eJ7GrMpr5/Xmw69NpcK/gi5XeJYgcjyrb3aW28SLESu7x8JzyfTxsYZ2hJ9XNlKfcjVrit/dPxBo/nzbIwghtiT6hZ8QiSLxC5EoEr8QiSLxC5EoEr8QidLVAp6AwRG2c5aXuMVWWgkX/ZyeeZX2yZGsJwAoDHBr7s6bD9DY1NlwkdHzx07RPnt/idt5N4yP0lj2Nj6PiX98gsYW5sN2Uy6y/Ve9xLPRZs9N0thM5PQZJkVSCzn+OvcXudU3t8TnWFoInx8AsEQSIJciRTpry3ysGnh2XqHAz6ulC+HMVACo18L25/DQLtqnbyCcvUec3vB927+rEOIXCYlfiESR+IVIFIlfiESR+IVIFIlfiETpqtVX6Cvg0K03BWOzczwzqzR/Ltj+/DGeRfXkNM9Uy5d4Ztlf/Lt/S2PvGwrbZdt2fJ/2WZqZorH+6Zdo7KYBnrl3gtfixJlTYfszu3c/7VOtcfut7Pz6sHiJW2ylpbAVNRDbJzHLn9jCMi9aenGOnwdLJHtvbokf3x4+FE68eobG9u4IFwsFgHyeZ62W6+E9D3MZ3sdrbJL8tVyNrvxCJIrEL0SiSPxCJIrEL0SiSPxCJEpXV/uzuSx2XDcSjO3cxRMm0AivHF+a59sqnb/EV9kXzvJ+p6a4S3D96PXB9nf/+m/RPqeffZrGLk7ybcMyY9tobHyUb5Pw8onjwfZaeEG5GQPf4mkx4oxYpF5chaw6z5f4FlSlc3zVPmt8rIXyPI3limRbq4jrMBtxMZYW+fEol3jtv+vHeL3D5Wp4G7jePp7oxGr1WeS1XI2u/EIkisQvRKJI/EIkisQvRKJI/EIkisQvRKK0s13XXgB/A+A6AA0AR939M2Y2AuBLAPajuWXXH7k799AAwBywcEKCg9dUcwtbIayOGQDs2r2Txvoy4W23AKDaCI8FAIvEWjTnttEvv+sPaOylF3iNtnKVW2I9T/HahX2kPqEbP1Zz83M0VmtEslwskkTiJMbaAeSqfAsty/D5941GajL+q9uC7WMjvH7i977DayS+dppvG3b2In9uiyv89axmw8+tfwc/Txsk58fbd/rauvLXAPy5u98M4O0A/szMbgHwAIDH3P0QgMdafwshrhHWFL+7T7n7j1q3FwAcB7AbwH0AHmrd7SEA771akxRCbDxX9J3fzPYDuBPAEwB2ufsU0HyDAMA/Zwshthxti9/MBgB8FcBH3J0XNv/5fkfMbMLMJpYWeMEOIUR3aUv8ZpZHU/hfcPevtZrPmdl4Kz4OIPijeHc/6u6H3f1w/yBfwBBCdJc1xW9mBuDzAI67+6cuCz0C4P7W7fsBfGPjpyeEuFq0k9V3D4APAnjOzJ5ptX0UwCcAfNnMPgTgFIA/XOuBzIEMsY4qdW6F5HvD71HLS4u0T815Glu2wLOl/u6Rr9HYnQfC1tz0NM8q23nzO2isbzu3+ib+8R9o7NQMz34rDobrDJbL/Hj0F3ntvBq41bdj1w4ay2TDXlQ2x23RHtIHAHbvvo7G9tzKY6PjQ8H2XuOn/twcz+r79vQPaKzK/DcAC2Xuwe28ITz/nfvCGbAAYD3EGr8Cq29N8bv7DyMPyXNZhRBbGv3CT4hEkfiFSBSJX4hEkfiFSBSJX4hE6WoBz3qjjsXlsI2yvMJ//WfEQVlc4sUU4fyp1fPcvvrWo9+lsanj4QKe05Gijo0XTtBYzEYrR4pS9ozwLLbKa+HMw+VFnq1Ycj6PsYjd9PvvfzeNWSFsEGWykbkv8HlcFylaWsryH5yWqmE7uNjHf3B26OaDNPZ/vv8UjZUXIluRFfjzvunWNwXbd47wY1+qhnWUZWIJzantewohfqGQ+IVIFIlfiESR+IVIFIlfiESR+IVIlK5afWaGXD48pC/zrDNWU9Mi+7flCzzW18dtl0NvuYnGDozsDrZnLvH9/eYyvDDprh28iGRxx400Vl1eobHZybAFtHAxVqSTF56cn+eZkwsrfG+6LEmcrFS4LWd1bpWdm+c2YK2HHw/mfM1GbOJ6jh+PYqQmxfw0Px71yF6JszPh18ar4fMNALJ1VsGTj7MaXfmFSBSJX4hEkfiFSBSJX4hEkfiFSJSurva7N1Arh2v1DUQSLXK58DRXIltJ1as8kSWT4U97eySBZKEUXqk+ePs+Po
8h7iz0ZngSxuwyX2XPF4dpbPj68PYJkyd5otDenbwG3tT8azw2eYHGxnoHgu2NSDLT8DA/B7JZfp3KFcNjAUDdw+dBbw8fK1/opbE9B/fQ2NkTP6UxNPj8z5yaCraXym+mffL94Tlapv3rua78QiSKxC9Eokj8QiSKxC9Eokj8QiSKxC9Eoqxp9ZnZXgB/A+A6AA0AR939M2b2cQB/AuB8664fdfdvrvV4LO+gWOTWC0vgWVzkySoGnkmR6+FWTnEovN0VAIxsC29rVYwk6MyBJ/ZUq5EtxfJ8C60FYpcCwI49YasvP/gz2uf228M15ACgcoyPVa3w+Y/uCG/l5dkq7VPs4ce+WucZK408TwjKEYvQnT9eIVJv75duPkBjLzxxmsYGivy5sXO17vzavG1b2O5l26SFaMfnrwH4c3f/kZkNAnjazB5txT7t7v+t7dGEEFuGdvbqmwIw1bq9YGbHAfBcQyHENcEVfec3s/0A7gTwRKvpw2Z2zMweNDP+0zghxJajbfGb2QCArwL4iLtfAvBZAAcB3IHmJ4NPkn5HzGzCzCaWF3nRBSFEd2lL/GaWR1P4X3D3rwGAu59z97q7NwB8DsDdob7uftTdD7v74eIAX8QSQnSXNcVvZgbg8wCOu/unLmsfv+xu7wPw/MZPTwhxtWhntf8eAB8E8JyZPdNq+yiAD5jZHWi6dycB/OlaD+QAauTtpp4Jb+8EALlc2L7o6eUWT3mJ11MrFPknkJGdYYsKAArE9crmuXXokezCvoillI1kLFarPLZnfzhD7+R+bkcO7+LH49bbeU3DYj+f/+DQULB9eSVcYxAAKhX+tbAeOR6WCY8FAHViEZaWeJZjMfK69A2Q4oQArr+RH+N9N/A18skz4czJ8zOROV4Xtg4bEQtzNe2s9v8QQEiZa3r6Qoiti37hJ0SiSPxCJIrEL0SiSPxCJIrEL0SidHe7rkwG2b6wVbJc59lvvbmwDTgwzC2ebGTfomqdZ5ZZnr8fLi+Ebar+Brd/IrUggSq3tjLOM+Z2jvACnrVi2Ba99W3csmNbawHAge17aezUeV7cc352Ntie7+WDVSPZirU6P1bF3ojVVwtbrYN9kSy7yLHvJ4UzAWD3wTEa23conG0JAJeI7XjpErdFl0vh7cYajci+YKvQlV+IRJH4hUgUiV+IRJH4hUgUiV+IRJH4hUiUrlp9MCBDEvHKK9zqqy2Hrbl6JKsvW+BPzTKxwpm8AGKuuC3YvlLj1mFPJOPPiIUJANk6j+XZQQRg+bDFedNbb6R9UOeZh6jxeSw7z5w0UtxzeIgXar2wHLavAKBa4dZtJjL/bD2cDZjPxk59PlYsk7F/mNuYo7u4Pbt770iwvVzl1mcveVmMv1w/h678QiSKxC9Eokj8QiSKxC9Eokj8QiSKxC9EonTX6oMDHrZezCJZeLVwn3IlYvFkYwVB+dOuG7cBq2TPwEqVW30lMncAqNdj2WPcEqtGxsuRvdp6B7nlGM0Eq/HYngPhYqEAUCDZmxGXEn39vJBoPpIeWVpepLEaOf65DM/qy0TOgUyWP4HrrufFX4tFPv8DB8OZk9PnzwfbAaCXZJ9mrsDr05VfiESR+IVIFIlfiESR+IVIFIlfiERZc7XfzAoAHgfQ27r/V9z9Y2Z2I4CHAYwA+BGAD7o7z84BAHfUSRKMk22VAACN8Kp+KZIMhEwkEYSs2gNAJsNjNZJAslji9eViK/OR/BEMrgzQ2ECRr1T3F8MuQS7HV6lXYgkkPbxflSTNAEC9EX7eGd4FfYORpBnjSTMrJX4as+OfiWwP19PDXQeLSGbfjXxLrnok+ahvMPyajRe4m4Js+7X6GO1c+csAftPdb0dzO+57zeztAP4KwKfd/RCAWQAfWvdshBBdY03xe5PXjdR8658D+E0AX2m1PwTgvVdlhkKIq0Jb3/nNLNvaoXcawKMATgCYc/9/v9g5A4B/5hFCbDnaEr+71939DgB7ANwN4ObQ3UJ9zeyImU2Y2cTyAv9uLIToLle02u/ucwC+B+DtALaZ2eurH3sATJI+R939sLsfLg7yhRQhRHdZU/xmNmZm21q3+wD8NoDjAL4L4A9ad7sfwDeu1iSFEBtPO4k94wAeMrMsmm8WX3b3vzezFwE8bGb/GcCPAXx+7YdyWIMkWhivnccKk83MXuR9Iok9g0Oxbb74++GF2blg+8IS/zoTSyLK57l9dWmR18fzSCJOtRa2P4eGeQ25lUpkmyxi2TVj3Gp1klDTU+DWYW+ktmJvDz8/vMFjGWKJxZKqYs/ZEXnO4OdcJZIgxZKFcnl+7tRAXrMrqOG3pvjd/RiAOwPtr6D5/V8IcQ2iX/gJkSgSvxCJIvELkSgSvxCJIvELkSjmHkkt2+jBzM4DeLX15yiAma4NztE83ojm8UautXnc4O5j7TxgV8X/hoHNJtz98KYMrnloHpqHPvYLkSoSvxCJspniP7qJY1+O5vFGNI838gs7j037zi+E2Fz0sV+IRNkU8ZvZvWb2EzN72cwe2Iw5tOZx0syeM7NnzGyii+M+aGbTZvb8ZW0jZvaomb3U+n/7Js3j42Z2tnVMnjGz93RhHnvN7LtmdtzMXjCzf99q7+oxicyjq8fEzApm9qSZPduax1+22m80sydax+NLZpGqpu3g7l39ByCLZhmwAwB6ADwL4JZuz6M1l5MARjdh3F8DcBeA5y9r+68AHmjdfgDAX23SPD4O4C+6fDzGAdzVuj0I4KcAbun2MYnMo6vHBM3E3IHW7TyAJ9AsoPNlAO9vtf8PAP9mPeNsxpX/bgAvu/sr3iz1/TCA+zZhHpuGuz8OYHUxgvvQLIQKdKkgKplH13H3KXf/Uev2AprFYnajy8ckMo+u4k2uetHczRD/bgCnL/t7M4t/OoDvmNnTZnZkk+bwOrvcfQponoQAdm7iXD5sZsdaXwuu+tePyzGz/WjWj3gCm3hMVs0D6PIx6UbR3M0Qf6jWyGZZDve4+10AfhfAn5nZr23SPLYSnwVwEM09GqYAfLJbA5vZAICvAviIu1/q1rhtzKPrx8TXUTS3XTZD/GcAXL4hOS3+ebVx98nW/9MAvo7NrUx0zszGAaD1//RmTMLdz7VOvAaAz6FLx8TM8mgK7gvu/rVWc9ePSWgem3VMWmNfcdHcdtkM8T8F4FBr5bIHwPsBPNLtSZhZv5kNvn4bwLsBPB/vdVV5BM1CqMAmFkR9XWwt3ocuHBMzMzRrQB53909dFurqMWHz6PYx6VrR3G6tYK5azXwPmiupJwD8h02awwE0nYZnAbzQzXkA+CKaHx+raH4S+hCAHQAeA/BS6/+RTZrH/wLwHIBjaIpvvAvz+FU0P8IeA/BM6997un1MIvPo6jEBcBuaRXGPoflG8x8vO2efBPAygL8F0LuecfQLPyESRb/wEyJRJH4hEkXiFyJRJH4hEkXiFyJRJH4hEkXiFyJRJH4hEuVfADERd3rj30mgAAAAAElFTkSuQmCC\n", 147 | "text/plain": [ 148 | "
" 149 | ] 150 | }, 151 | "metadata": { 152 | "needs_background": "light" 153 | }, 154 | "output_type": "display_data" 155 | } 156 | ], 157 | "source": [ 158 | "# HORSE\n", 159 | "plt.imshow(x_train[12])" 160 | ] 161 | }, 162 | { 163 | "cell_type": "markdown", 164 | "metadata": {}, 165 | "source": [ 166 | "# PreProcessing" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 7, 172 | "metadata": {}, 173 | "outputs": [ 174 | { 175 | "data": { 176 | "text/plain": [ 177 | "array([[[ 59, 62, 63],\n", 178 | " [ 43, 46, 45],\n", 179 | " [ 50, 48, 43],\n", 180 | " ...,\n", 181 | " [158, 132, 108],\n", 182 | " [152, 125, 102],\n", 183 | " [148, 124, 103]],\n", 184 | "\n", 185 | " [[ 16, 20, 20],\n", 186 | " [ 0, 0, 0],\n", 187 | " [ 18, 8, 0],\n", 188 | " ...,\n", 189 | " [123, 88, 55],\n", 190 | " [119, 83, 50],\n", 191 | " [122, 87, 57]],\n", 192 | "\n", 193 | " [[ 25, 24, 21],\n", 194 | " [ 16, 7, 0],\n", 195 | " [ 49, 27, 8],\n", 196 | " ...,\n", 197 | " [118, 84, 50],\n", 198 | " [120, 84, 50],\n", 199 | " [109, 73, 42]],\n", 200 | "\n", 201 | " ...,\n", 202 | "\n", 203 | " [[208, 170, 96],\n", 204 | " [201, 153, 34],\n", 205 | " [198, 161, 26],\n", 206 | " ...,\n", 207 | " [160, 133, 70],\n", 208 | " [ 56, 31, 7],\n", 209 | " [ 53, 34, 20]],\n", 210 | "\n", 211 | " [[180, 139, 96],\n", 212 | " [173, 123, 42],\n", 213 | " [186, 144, 30],\n", 214 | " ...,\n", 215 | " [184, 148, 94],\n", 216 | " [ 97, 62, 34],\n", 217 | " [ 83, 53, 34]],\n", 218 | "\n", 219 | " [[177, 144, 116],\n", 220 | " [168, 129, 94],\n", 221 | " [179, 142, 87],\n", 222 | " ...,\n", 223 | " [216, 184, 140],\n", 224 | " [151, 118, 84],\n", 225 | " [123, 92, 72]]], dtype=uint8)" 226 | ] 227 | }, 228 | "execution_count": 7, 229 | "metadata": {}, 230 | "output_type": "execute_result" 231 | } 232 | ], 233 | "source": [ 234 | "x_train[0]" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": 8, 240 | "metadata": {}, 241 | "outputs": [ 242 | { 243 | "data": { 244 | "text/plain": [ 245 | "(32, 32, 3)" 246 | ] 247 | }, 248 | "execution_count": 8, 249 | "metadata": {}, 250 | "output_type": "execute_result" 251 | } 252 | ], 253 | "source": [ 254 | "x_train[0].shape" 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": 9, 260 | "metadata": {}, 261 | "outputs": [ 262 | { 263 | "data": { 264 | "text/plain": [ 265 | "255" 266 | ] 267 | }, 268 | "execution_count": 9, 269 | "metadata": {}, 270 | "output_type": "execute_result" 271 | } 272 | ], 273 | "source": [ 274 | "x_train.max()" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 10, 280 | "metadata": {}, 281 | "outputs": [], 282 | "source": [ 283 | "x_train = x_train/225" 284 | ] 285 | }, 286 | { 287 | "cell_type": "code", 288 | "execution_count": 11, 289 | "metadata": {}, 290 | "outputs": [], 291 | "source": [ 292 | "x_test = x_test/255" 293 | ] 294 | }, 295 | { 296 | "cell_type": "code", 297 | "execution_count": 12, 298 | "metadata": {}, 299 | "outputs": [ 300 | { 301 | "data": { 302 | "text/plain": [ 303 | "(50000, 32, 32, 3)" 304 | ] 305 | }, 306 | "execution_count": 12, 307 | "metadata": {}, 308 | "output_type": "execute_result" 309 | } 310 | ], 311 | "source": [ 312 | "x_train.shape" 313 | ] 314 | }, 315 | { 316 | "cell_type": "code", 317 | "execution_count": 13, 318 | "metadata": {}, 319 | "outputs": [ 320 | { 321 | "data": { 322 | "text/plain": [ 323 | "(10000, 32, 32, 3)" 324 | ] 325 | }, 326 | "execution_count": 13, 327 | "metadata": {}, 328 | "output_type": "execute_result" 329 | 
} 330 | ], 331 | "source": [ 332 | "x_test.shape" 333 | ] 334 | }, 335 | { 336 | "cell_type": "markdown", 337 | "metadata": {}, 338 | "source": [ 339 | "## Labels" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": 14, 345 | "metadata": {}, 346 | "outputs": [], 347 | "source": [ 348 | "from keras.utils import to_categorical" 349 | ] 350 | }, 351 | { 352 | "cell_type": "code", 353 | "execution_count": 15, 354 | "metadata": {}, 355 | "outputs": [ 356 | { 357 | "data": { 358 | "text/plain": [ 359 | "(50000, 1)" 360 | ] 361 | }, 362 | "execution_count": 15, 363 | "metadata": {}, 364 | "output_type": "execute_result" 365 | } 366 | ], 367 | "source": [ 368 | "y_train.shape" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": 16, 374 | "metadata": {}, 375 | "outputs": [ 376 | { 377 | "data": { 378 | "text/plain": [ 379 | "array([6], dtype=uint8)" 380 | ] 381 | }, 382 | "execution_count": 16, 383 | "metadata": {}, 384 | "output_type": "execute_result" 385 | } 386 | ], 387 | "source": [ 388 | "y_train[0]" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": 17, 394 | "metadata": {}, 395 | "outputs": [], 396 | "source": [ 397 | "y_cat_train = to_categorical(y_train,10)" 398 | ] 399 | }, 400 | { 401 | "cell_type": "code", 402 | "execution_count": 18, 403 | "metadata": {}, 404 | "outputs": [ 405 | { 406 | "data": { 407 | "text/plain": [ 408 | "(50000, 10)" 409 | ] 410 | }, 411 | "execution_count": 18, 412 | "metadata": {}, 413 | "output_type": "execute_result" 414 | } 415 | ], 416 | "source": [ 417 | "y_cat_train.shape" 418 | ] 419 | }, 420 | { 421 | "cell_type": "code", 422 | "execution_count": 19, 423 | "metadata": {}, 424 | "outputs": [ 425 | { 426 | "data": { 427 | "text/plain": [ 428 | "array([0., 0., 0., 0., 0., 0., 1., 0., 0., 0.], dtype=float32)" 429 | ] 430 | }, 431 | "execution_count": 19, 432 | "metadata": {}, 433 | "output_type": "execute_result" 434 | } 435 | ], 436 | "source": [ 437 | "y_cat_train[0]" 438 | ] 439 | }, 440 | { 441 | "cell_type": "code", 442 | "execution_count": 20, 443 | "metadata": {}, 444 | "outputs": [], 445 | "source": [ 446 | "y_cat_test = to_categorical(y_test,10)" 447 | ] 448 | }, 449 | { 450 | "cell_type": "markdown", 451 | "metadata": {}, 452 | "source": [ 453 | "----------\n", 454 | "# Building the Model" 455 | ] 456 | }, 457 | { 458 | "cell_type": "code", 459 | "execution_count": 21, 460 | "metadata": {}, 461 | "outputs": [], 462 | "source": [ 463 | "from keras.models import Sequential\n", 464 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten" 465 | ] 466 | }, 467 | { 468 | "cell_type": "code", 469 | "execution_count": 22, 470 | "metadata": {}, 471 | "outputs": [], 472 | "source": [ 473 | "model = Sequential()\n", 474 | "\n", 475 | "## FIRST SET OF LAYERS\n", 476 | "\n", 477 | "# CONVOLUTIONAL LAYER\n", 478 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 479 | "# POOLING LAYER\n", 480 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 481 | "\n", 482 | "## SECOND SET OF LAYERS\n", 483 | "\n", 484 | "# CONVOLUTIONAL LAYER\n", 485 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 486 | "# POOLING LAYER\n", 487 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 488 | "\n", 489 | "# FLATTEN IMAGES FROM 28 by 28 to 764 BEFORE FINAL LAYER\n", 490 | "model.add(Flatten())\n", 491 | "\n", 492 | "# 256 NEURONS IN DENSE HIDDEN LAYER (YOU CAN CHANGE THIS NUMBER OF NEURONS)\n", 493 | 
"model.add(Dense(256, activation='relu'))\n", 494 | "\n", 495 | "# LAST LAYER IS THE CLASSIFIER, THUS 10 POSSIBLE CLASSES\n", 496 | "model.add(Dense(10, activation='softmax'))\n", 497 | "\n", 498 | "\n", 499 | "model.compile(loss='categorical_crossentropy',\n", 500 | " optimizer='rmsprop',\n", 501 | " metrics=['accuracy'])" 502 | ] 503 | }, 504 | { 505 | "cell_type": "code", 506 | "execution_count": 23, 507 | "metadata": {}, 508 | "outputs": [ 509 | { 510 | "name": "stdout", 511 | "output_type": "stream", 512 | "text": [ 513 | "_________________________________________________________________\n", 514 | "Layer (type) Output Shape Param # \n", 515 | "=================================================================\n", 516 | "conv2d_1 (Conv2D) (None, 29, 29, 32) 1568 \n", 517 | "_________________________________________________________________\n", 518 | "max_pooling2d_1 (MaxPooling2 (None, 14, 14, 32) 0 \n", 519 | "_________________________________________________________________\n", 520 | "conv2d_2 (Conv2D) (None, 11, 11, 32) 16416 \n", 521 | "_________________________________________________________________\n", 522 | "max_pooling2d_2 (MaxPooling2 (None, 5, 5, 32) 0 \n", 523 | "_________________________________________________________________\n", 524 | "flatten_1 (Flatten) (None, 800) 0 \n", 525 | "_________________________________________________________________\n", 526 | "dense_1 (Dense) (None, 256) 205056 \n", 527 | "_________________________________________________________________\n", 528 | "dense_2 (Dense) (None, 10) 2570 \n", 529 | "=================================================================\n", 530 | "Total params: 225,610\n", 531 | "Trainable params: 225,610\n", 532 | "Non-trainable params: 0\n", 533 | "_________________________________________________________________\n" 534 | ] 535 | } 536 | ], 537 | "source": [ 538 | "model.summary()" 539 | ] 540 | }, 541 | { 542 | "cell_type": "code", 543 | "execution_count": 25, 544 | "metadata": {}, 545 | "outputs": [ 546 | { 547 | "name": "stdout", 548 | "output_type": "stream", 549 | "text": [ 550 | "Epoch 1/10\n", 551 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.8937 - acc: 0.6919\n", 552 | "Epoch 2/10\n", 553 | "50000/50000 [==============================] - 5s 108us/step - loss: 0.8094 - acc: 0.7249\n", 554 | "Epoch 3/10\n", 555 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.7420 - acc: 0.7461\n", 556 | "Epoch 4/10\n", 557 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.6902 - acc: 0.7656\n", 558 | "Epoch 5/10\n", 559 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.6466 - acc: 0.7780\n", 560 | "Epoch 6/10\n", 561 | "50000/50000 [==============================] - 5s 110us/step - loss: 0.6105 - acc: 0.7958\n", 562 | "Epoch 7/10\n", 563 | "50000/50000 [==============================] - 5s 110us/step - loss: 0.5891 - acc: 0.8020\n", 564 | "Epoch 8/10\n", 565 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.5631 - acc: 0.8124\n", 566 | "Epoch 9/10\n", 567 | "50000/50000 [==============================] - 6s 110us/step - loss: 0.5416 - acc: 0.8203\n", 568 | "Epoch 10/10\n", 569 | "50000/50000 [==============================] - 5s 109us/step - loss: 0.5252 - acc: 0.8275\n" 570 | ] 571 | }, 572 | { 573 | "data": { 574 | "text/plain": [ 575 | "" 576 | ] 577 | }, 578 | "execution_count": 25, 579 | "metadata": {}, 580 | "output_type": "execute_result" 581 | } 582 | ], 583 | "source": [ 584 | 
"model.fit(x_train,y_cat_train,verbose=1,epochs=10)" 585 | ] 586 | }, 587 | { 588 | "cell_type": "code", 589 | "execution_count": 26, 590 | "metadata": {}, 591 | "outputs": [], 592 | "source": [ 593 | "# Careful, don't overwrite our file!\n", 594 | "# model.save('cifar_10epochs.h5')" 595 | ] 596 | }, 597 | { 598 | "cell_type": "code", 599 | "execution_count": 27, 600 | "metadata": {}, 601 | "outputs": [ 602 | { 603 | "data": { 604 | "text/plain": [ 605 | "['loss', 'acc']" 606 | ] 607 | }, 608 | "execution_count": 27, 609 | "metadata": {}, 610 | "output_type": "execute_result" 611 | } 612 | ], 613 | "source": [ 614 | "model.metrics_names" 615 | ] 616 | }, 617 | { 618 | "cell_type": "code", 619 | "execution_count": 29, 620 | "metadata": {}, 621 | "outputs": [ 622 | { 623 | "name": "stdout", 624 | "output_type": "stream", 625 | "text": [ 626 | "10000/10000 [==============================] - 1s 56us/step\n" 627 | ] 628 | }, 629 | { 630 | "data": { 631 | "text/plain": [ 632 | "[1.3332478387832642, 0.6444]" 633 | ] 634 | }, 635 | "execution_count": 29, 636 | "metadata": {}, 637 | "output_type": "execute_result" 638 | } 639 | ], 640 | "source": [ 641 | "model.evaluate(x_test,y_cat_test)" 642 | ] 643 | }, 644 | { 645 | "cell_type": "code", 646 | "execution_count": 30, 647 | "metadata": {}, 648 | "outputs": [], 649 | "source": [ 650 | "from sklearn.metrics import classification_report\n", 651 | "\n", 652 | "predictions = model.predict_classes(x_test)" 653 | ] 654 | }, 655 | { 656 | "cell_type": "code", 657 | "execution_count": 31, 658 | "metadata": {}, 659 | "outputs": [ 660 | { 661 | "name": "stdout", 662 | "output_type": "stream", 663 | "text": [ 664 | " precision recall f1-score support\n", 665 | "\n", 666 | " 0 0.80 0.59 0.68 1000\n", 667 | " 1 0.83 0.76 0.80 1000\n", 668 | " 2 0.44 0.65 0.52 1000\n", 669 | " 3 0.50 0.40 0.44 1000\n", 670 | " 4 0.50 0.75 0.60 1000\n", 671 | " 5 0.52 0.57 0.54 1000\n", 672 | " 6 0.70 0.74 0.72 1000\n", 673 | " 7 0.88 0.56 0.69 1000\n", 674 | " 8 0.82 0.74 0.78 1000\n", 675 | " 9 0.81 0.68 0.74 1000\n", 676 | "\n", 677 | "avg / total 0.68 0.64 0.65 10000\n", 678 | "\n" 679 | ] 680 | } 681 | ], 682 | "source": [ 683 | "print(classification_report(y_test,predictions))" 684 | ] 685 | }, 686 | { 687 | "cell_type": "markdown", 688 | "metadata": {}, 689 | "source": [ 690 | "## Optional: Large Model" 691 | ] 692 | }, 693 | { 694 | "cell_type": "code", 695 | "execution_count": 33, 696 | "metadata": {}, 697 | "outputs": [], 698 | "source": [ 699 | "model = Sequential()\n", 700 | "\n", 701 | "## FIRST SET OF LAYERS\n", 702 | "\n", 703 | "# CONVOLUTIONAL LAYER\n", 704 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 705 | "# CONVOLUTIONAL LAYER\n", 706 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 707 | "\n", 708 | "# POOLING LAYER\n", 709 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 710 | "\n", 711 | "## SECOND SET OF LAYERS\n", 712 | "\n", 713 | "# CONVOLUTIONAL LAYER\n", 714 | "model.add(Conv2D(filters=64, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 715 | "# CONVOLUTIONAL LAYER\n", 716 | "model.add(Conv2D(filters=64, kernel_size=(4,4),input_shape=(32, 32, 3), activation='relu',))\n", 717 | "\n", 718 | "# POOLING LAYER\n", 719 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 720 | "\n", 721 | "# FLATTEN IMAGES FROM 28 by 28 to 764 BEFORE FINAL LAYER\n", 722 | "model.add(Flatten())\n", 723 | "\n", 724 | "# 512 NEURONS IN DENSE HIDDEN LAYER (YOU CAN 
CHANGE THIS NUMBER OF NEURONS)\n", 725 | "model.add(Dense(512, activation='relu'))\n", 726 | "\n", 727 | "# LAST LAYER IS THE CLASSIFIER, THUS 10 POSSIBLE CLASSES\n", 728 | "model.add(Dense(10, activation='softmax'))\n", 729 | "\n", 730 | "\n", 731 | "model.compile(loss='categorical_crossentropy',\n", 732 | " optimizer='rmsprop',\n", 733 | " metrics=['accuracy'])" 734 | ] 735 | }, 736 | { 737 | "cell_type": "code", 738 | "execution_count": 34, 739 | "metadata": {}, 740 | "outputs": [ 741 | { 742 | "name": "stdout", 743 | "output_type": "stream", 744 | "text": [ 745 | "Epoch 1/20\n", 746 | "50000/50000 [==============================] - 7s 148us/step - loss: 1.6025 - acc: 0.4197\n", 747 | "Epoch 2/20\n", 748 | "50000/50000 [==============================] - 8s 159us/step - loss: 1.1793 - acc: 0.5849\n", 749 | "Epoch 3/20\n", 750 | "50000/50000 [==============================] - 8s 155us/step - loss: 1.0083 - acc: 0.6508\n", 751 | "Epoch 4/20\n", 752 | "50000/50000 [==============================] - 8s 156us/step - loss: 0.9479 - acc: 0.6782\n", 753 | "Epoch 5/20\n", 754 | "50000/50000 [==============================] - 8s 154us/step - loss: 0.9145 - acc: 0.6952\n", 755 | "Epoch 6/20\n", 756 | "50000/50000 [==============================] - 8s 151us/step - loss: 0.9005 - acc: 0.7015\n", 757 | "Epoch 7/20\n", 758 | "50000/50000 [==============================] - 8s 151us/step - loss: 0.8867 - acc: 0.7061\n", 759 | "Epoch 8/20\n", 760 | "50000/50000 [==============================] - 8s 152us/step - loss: 0.8796 - acc: 0.7121\n", 761 | "Epoch 9/20\n", 762 | "50000/50000 [==============================] - 8s 151us/step - loss: 0.8693 - acc: 0.7158\n", 763 | "Epoch 10/20\n", 764 | "50000/50000 [==============================] - 8s 151us/step - loss: 0.8591 - acc: 0.7194\n", 765 | "Epoch 11/20\n", 766 | "50000/50000 [==============================] - 7s 145us/step - loss: 0.8586 - acc: 0.7235\n", 767 | "Epoch 12/20\n", 768 | "50000/50000 [==============================] - 8s 151us/step - loss: 0.8497 - acc: 0.7248\n", 769 | "Epoch 13/20\n", 770 | "50000/50000 [==============================] - 7s 148us/step - loss: 0.8462 - acc: 0.7296\n", 771 | "Epoch 14/20\n", 772 | "50000/50000 [==============================] - 7s 139us/step - loss: 0.8422 - acc: 0.7263\n", 773 | "Epoch 15/20\n", 774 | "50000/50000 [==============================] - 7s 147us/step - loss: 0.8321 - acc: 0.7330\n", 775 | "Epoch 16/20\n", 776 | "50000/50000 [==============================] - 7s 143us/step - loss: 0.8154 - acc: 0.7363\n", 777 | "Epoch 17/20\n", 778 | "50000/50000 [==============================] - 7s 147us/step - loss: 0.8241 - acc: 0.7353\n", 779 | "Epoch 18/20\n", 780 | "50000/50000 [==============================] - 7s 138us/step - loss: 0.8061 - acc: 0.7411\n", 781 | "Epoch 19/20\n", 782 | "50000/50000 [==============================] - 7s 140us/step - loss: 0.8120 - acc: 0.7409\n", 783 | "Epoch 20/20\n", 784 | "50000/50000 [==============================] - 7s 142us/step - loss: 0.8097 - acc: 0.7437\n" 785 | ] 786 | }, 787 | { 788 | "data": { 789 | "text/plain": [ 790 | "" 791 | ] 792 | }, 793 | "execution_count": 34, 794 | "metadata": {}, 795 | "output_type": "execute_result" 796 | } 797 | ], 798 | "source": [ 799 | "model.fit(x_train,y_cat_train,verbose=1,epochs=20)" 800 | ] 801 | }, 802 | { 803 | "cell_type": "code", 804 | "execution_count": 35, 805 | "metadata": {}, 806 | "outputs": [ 807 | { 808 | "name": "stdout", 809 | "output_type": "stream", 810 | "text": [ 811 | "10000/10000 
[==============================] - 1s 75us/step\n" 812 | ] 813 | }, 814 | { 815 | "data": { 816 | "text/plain": [ 817 | "[0.9843294318199157, 0.6902]" 818 | ] 819 | }, 820 | "execution_count": 35, 821 | "metadata": {}, 822 | "output_type": "execute_result" 823 | } 824 | ], 825 | "source": [ 826 | "model.evaluate(x_test,y_cat_test)" 827 | ] 828 | }, 829 | { 830 | "cell_type": "code", 831 | "execution_count": 36, 832 | "metadata": {}, 833 | "outputs": [], 834 | "source": [ 835 | "from sklearn.metrics import classification_report\n", 836 | "\n", 837 | "predictions = model.predict_classes(x_test)" 838 | ] 839 | }, 840 | { 841 | "cell_type": "code", 842 | "execution_count": 37, 843 | "metadata": {}, 844 | "outputs": [ 845 | { 846 | "name": "stdout", 847 | "output_type": "stream", 848 | "text": [ 849 | " precision recall f1-score support\n", 850 | "\n", 851 | " 0 0.82 0.61 0.70 1000\n", 852 | " 1 0.77 0.88 0.82 1000\n", 853 | " 2 0.64 0.57 0.60 1000\n", 854 | " 3 0.66 0.30 0.41 1000\n", 855 | " 4 0.66 0.67 0.66 1000\n", 856 | " 5 0.45 0.81 0.58 1000\n", 857 | " 6 0.80 0.75 0.77 1000\n", 858 | " 7 0.76 0.69 0.72 1000\n", 859 | " 8 0.78 0.81 0.80 1000\n", 860 | " 9 0.75 0.82 0.79 1000\n", 861 | "\n", 862 | "avg / total 0.71 0.69 0.69 10000\n", 863 | "\n" 864 | ] 865 | } 866 | ], 867 | "source": [ 868 | "print(classification_report(y_test,predictions))" 869 | ] 870 | }, 871 | { 872 | "cell_type": "code", 873 | "execution_count": 38, 874 | "metadata": {}, 875 | "outputs": [], 876 | "source": [ 877 | "model.save('larger_CIFAR10_model.h5')" 878 | ] 879 | }, 880 | { 881 | "cell_type": "markdown", 882 | "metadata": {}, 883 | "source": [ 884 | "___" 885 | ] 886 | } 887 | ], 888 | "metadata": { 889 | "kernelspec": { 890 | "display_name": "Python 3", 891 | "language": "python", 892 | "name": "python3" 893 | }, 894 | "language_info": { 895 | "codemirror_mode": { 896 | "name": "ipython", 897 | "version": 3 898 | }, 899 | "file_extension": ".py", 900 | "mimetype": "text/x-python", 901 | "name": "python", 902 | "nbconvert_exporter": "python", 903 | "pygments_lexer": "ipython3", 904 | "version": "3.6.5" 905 | } 906 | }, 907 | "nbformat": 4, 908 | "nbformat_minor": 2 909 | } 910 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/Keras-CNN-MNIST.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Convolutional Neural Networks for Image Classification" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "from keras.datasets import mnist\n", 17 | "\n", 18 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "## Visualizing the Image Data" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import matplotlib.pyplot as plt\n", 35 | "%matplotlib inline" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 3, 41 | "metadata": {}, 42 | "outputs": [ 43 | { 44 | "data": { 45 | "text/plain": [ 46 | "(60000, 28, 28)" 47 | ] 48 | }, 49 | "execution_count": 3, 50 | "metadata": {}, 51 | "output_type": "execute_result" 52 | } 53 | ], 54 | "source": [ 55 | "x_train.shape" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | 
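Note that `x_train.shape` comes back as `(60000, 28, 28)`, with no channel axis yet. Keras `Conv2D` layers expect a trailing channels dimension (the Fashion-MNIST model earlier used `input_shape=(28, 28, 1)`), so grayscale MNIST batches are typically reshaped before training. A sketch of that usual step:

```python
# Add the single grayscale channel that Conv2D expects; the pixel
# values are untouched, only the shape changes.
x_train = x_train.reshape(60000, 28, 28, 1)
x_test = x_test.reshape(10000, 28, 28, 1)
```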
"execution_count": 4, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "single_image = x_train[0]" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 5, 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "data": { 74 | "text/plain": [ 75 | "array([[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 76 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 77 | " 0, 0],\n", 78 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 79 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 80 | " 0, 0],\n", 81 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 82 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 83 | " 0, 0],\n", 84 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 85 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 86 | " 0, 0],\n", 87 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 88 | " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 89 | " 0, 0],\n", 90 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3,\n", 91 | " 18, 18, 18, 126, 136, 175, 26, 166, 255, 247, 127, 0, 0,\n", 92 | " 0, 0],\n", 93 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 30, 36, 94, 154, 170,\n", 94 | " 253, 253, 253, 253, 253, 225, 172, 253, 242, 195, 64, 0, 0,\n", 95 | " 0, 0],\n", 96 | " [ 0, 0, 0, 0, 0, 0, 0, 49, 238, 253, 253, 253, 253,\n", 97 | " 253, 253, 253, 253, 251, 93, 82, 82, 56, 39, 0, 0, 0,\n", 98 | " 0, 0],\n", 99 | " [ 0, 0, 0, 0, 0, 0, 0, 18, 219, 253, 253, 253, 253,\n", 100 | " 253, 198, 182, 247, 241, 0, 0, 0, 0, 0, 0, 0, 0,\n", 101 | " 0, 0],\n", 102 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 80, 156, 107, 253, 253,\n", 103 | " 205, 11, 0, 43, 154, 0, 0, 0, 0, 0, 0, 0, 0,\n", 104 | " 0, 0],\n", 105 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 1, 154, 253,\n", 106 | " 90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 107 | " 0, 0],\n", 108 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 253,\n", 109 | " 190, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 110 | " 0, 0],\n", 111 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 190,\n", 112 | " 253, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 113 | " 0, 0],\n", 114 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35,\n", 115 | " 241, 225, 160, 108, 1, 0, 0, 0, 0, 0, 0, 0, 0,\n", 116 | " 0, 0],\n", 117 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 118 | " 81, 240, 253, 253, 119, 25, 0, 0, 0, 0, 0, 0, 0,\n", 119 | " 0, 0],\n", 120 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 121 | " 0, 45, 186, 253, 253, 150, 27, 0, 0, 0, 0, 0, 0,\n", 122 | " 0, 0],\n", 123 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 124 | " 0, 0, 16, 93, 252, 253, 187, 0, 0, 0, 0, 0, 0,\n", 125 | " 0, 0],\n", 126 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 127 | " 0, 0, 0, 0, 249, 253, 249, 64, 0, 0, 0, 0, 0,\n", 128 | " 0, 0],\n", 129 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 130 | " 0, 46, 130, 183, 253, 253, 207, 2, 0, 0, 0, 0, 0,\n", 131 | " 0, 0],\n", 132 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39,\n", 133 | " 148, 229, 253, 253, 253, 250, 182, 0, 0, 0, 0, 0, 0,\n", 134 | " 0, 0],\n", 135 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 114, 221,\n", 136 | " 253, 253, 253, 253, 201, 78, 0, 0, 0, 0, 0, 0, 0,\n", 137 | " 0, 0],\n", 138 | " [ 0, 0, 0, 0, 0, 0, 0, 0, 23, 66, 213, 253, 253,\n", 139 | " 253, 253, 198, 81, 2, 0, 0, 0, 0, 0, 0, 0, 0,\n", 140 | " 0, 0],\n", 141 | " [ 0, 0, 0, 0, 0, 0, 18, 171, 219, 253, 253, 253, 253,\n", 142 | " 195, 80, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 143 | " 0, 0],\n", 144 | " [ 0, 0, 0, 0, 55, 172, 226, 253, 253, 253, 253, 244, 133,\n", 145 | " 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", 146 | " 0, 0],\n", 147 | " [ 0, 0, 0, 0, 136, 253, 253, 253, 212, 135, 132, 16, 0,\n", 148 | " 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0,\n", 149 | "         0, 0],\n", 150 | "       [  0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 151 | "          0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 152 | "          0,   0],\n", 153 | "       [  0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 154 | "          0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 155 | "          0,   0],\n", 156 | "       [  0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 157 | "          0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,   0,\n", 158 | "          0,   0]], dtype=uint8)" 159 | ] 160 | }, 161 | "execution_count": 5, 162 | "metadata": {}, 163 | "output_type": "execute_result" 164 | } 165 | ], 166 | "source": [ 167 | "single_image" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 6, 173 | "metadata": {}, 174 | "outputs": [ 175 | { 176 | "data": { 177 | "text/plain": [ 178 | "(28, 28)" 179 | ] 180 | }, 181 | "execution_count": 6, 182 | "metadata": {}, 183 | "output_type": "execute_result" 184 | } 185 | ], 186 | "source": [ 187 | "single_image.shape" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 7, 193 | "metadata": {}, 194 | "outputs": [ 195 | { 196 | "data": { 197 | "text/plain": [ 198 | "" 199 | ] 200 | }, 201 | "execution_count": 7, 202 | "metadata": {}, 203 | "output_type": "execute_result" 204 | }, 205 | { 206 | "data": { 207 | "image/png": "<base64 PNG data omitted: matplotlib rendering of the MNIST digit, single_image>\n", 208 | "text/plain": [ 209 | "
" 210 | ] 211 | }, 212 | "metadata": { 213 | "needs_background": "light" 214 | }, 215 | "output_type": "display_data" 216 | } 217 | ], 218 | "source": [ 219 | "plt.imshow(single_image)" 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": {}, 225 | "source": [ 226 | "# PreProcessing Data\n", 227 | "\n", 228 | "We first need to make sure the labels will be understandable by our CNN." 229 | ] 230 | }, 231 | { 232 | "cell_type": "markdown", 233 | "metadata": {}, 234 | "source": [ 235 | "## Labels" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": 8, 241 | "metadata": {}, 242 | "outputs": [ 243 | { 244 | "data": { 245 | "text/plain": [ 246 | "array([5, 0, 4, ..., 5, 6, 8], dtype=uint8)" 247 | ] 248 | }, 249 | "execution_count": 8, 250 | "metadata": {}, 251 | "output_type": "execute_result" 252 | } 253 | ], 254 | "source": [ 255 | "y_train" 256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": 9, 261 | "metadata": {}, 262 | "outputs": [ 263 | { 264 | "data": { 265 | "text/plain": [ 266 | "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)" 267 | ] 268 | }, 269 | "execution_count": 9, 270 | "metadata": {}, 271 | "output_type": "execute_result" 272 | } 273 | ], 274 | "source": [ 275 | "y_test" 276 | ] 277 | }, 278 | { 279 | "cell_type": "markdown", 280 | "metadata": {}, 281 | "source": [ 282 | "Hmmm, looks like our labels are literally categories of numbers. We need to translate this to be \"one hot encoded\" so our CNN can understand, otherwise it will think this is some sort of regression problem on a continuous axis. Luckily , Keras has an easy to use function for this:" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 10, 288 | "metadata": {}, 289 | "outputs": [], 290 | "source": [ 291 | "from keras.utils.np_utils import to_categorical" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": 11, 297 | "metadata": {}, 298 | "outputs": [ 299 | { 300 | "data": { 301 | "text/plain": [ 302 | "(60000,)" 303 | ] 304 | }, 305 | "execution_count": 11, 306 | "metadata": {}, 307 | "output_type": "execute_result" 308 | } 309 | ], 310 | "source": [ 311 | "y_train.shape" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": 12, 317 | "metadata": {}, 318 | "outputs": [], 319 | "source": [ 320 | "y_example = to_categorical(y_train)" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": 13, 326 | "metadata": {}, 327 | "outputs": [ 328 | { 329 | "data": { 330 | "text/plain": [ 331 | "array([[0., 0., 0., ..., 0., 0., 0.],\n", 332 | " [1., 0., 0., ..., 0., 0., 0.],\n", 333 | " [0., 0., 0., ..., 0., 0., 0.],\n", 334 | " ...,\n", 335 | " [0., 0., 0., ..., 0., 0., 0.],\n", 336 | " [0., 0., 0., ..., 0., 0., 0.],\n", 337 | " [0., 0., 0., ..., 0., 1., 0.]], dtype=float32)" 338 | ] 339 | }, 340 | "execution_count": 13, 341 | "metadata": {}, 342 | "output_type": "execute_result" 343 | } 344 | ], 345 | "source": [ 346 | "y_example" 347 | ] 348 | }, 349 | { 350 | "cell_type": "code", 351 | "execution_count": 14, 352 | "metadata": {}, 353 | "outputs": [ 354 | { 355 | "data": { 356 | "text/plain": [ 357 | "(60000, 10)" 358 | ] 359 | }, 360 | "execution_count": 14, 361 | "metadata": {}, 362 | "output_type": "execute_result" 363 | } 364 | ], 365 | "source": [ 366 | "y_example.shape" 367 | ] 368 | }, 369 | { 370 | "cell_type": "code", 371 | "execution_count": 15, 372 | "metadata": {}, 373 | "outputs": [ 374 | { 375 | "data": { 376 | "text/plain": [ 377 | "array([0., 0., 
0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32)" 378 | ] 379 | }, 380 | "execution_count": 15, 381 | "metadata": {}, 382 | "output_type": "execute_result" 383 | } 384 | ], 385 | "source": [ 386 | "y_example[0]" 387 | ] 388 | }, 389 | { 390 | "cell_type": "code", 391 | "execution_count": 16, 392 | "metadata": {}, 393 | "outputs": [], 394 | "source": [ 395 | "y_cat_test = to_categorical(y_test,10)" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": 17, 401 | "metadata": {}, 402 | "outputs": [], 403 | "source": [ 404 | "y_cat_train = to_categorical(y_train,10)" 405 | ] 406 | }, 407 | { 408 | "cell_type": "code", 409 | "execution_count": null, 410 | "metadata": {}, 411 | "outputs": [], 412 | "source": [] 413 | }, 414 | { 415 | "cell_type": "markdown", 416 | "metadata": {}, 417 | "source": [ 418 | "### Processing X Data\n", 419 | "\n", 420 | "We should normalize the X data" 421 | ] 422 | }, 423 | { 424 | "cell_type": "code", 425 | "execution_count": 18, 426 | "metadata": {}, 427 | "outputs": [ 428 | { 429 | "data": { 430 | "text/plain": [ 431 | "255" 432 | ] 433 | }, 434 | "execution_count": 18, 435 | "metadata": {}, 436 | "output_type": "execute_result" 437 | } 438 | ], 439 | "source": [ 440 | "single_image.max()" 441 | ] 442 | }, 443 | { 444 | "cell_type": "code", 445 | "execution_count": 19, 446 | "metadata": {}, 447 | "outputs": [ 448 | { 449 | "data": { 450 | "text/plain": [ 451 | "0" 452 | ] 453 | }, 454 | "execution_count": 19, 455 | "metadata": {}, 456 | "output_type": "execute_result" 457 | } 458 | ], 459 | "source": [ 460 | "single_image.min()" 461 | ] 462 | }, 463 | { 464 | "cell_type": "code", 465 | "execution_count": 20, 466 | "metadata": {}, 467 | "outputs": [], 468 | "source": [ 469 | "x_train = x_train/255\n", 470 | "x_test = x_test/255" 471 | ] 472 | }, 473 | { 474 | "cell_type": "code", 475 | "execution_count": 21, 476 | "metadata": {}, 477 | "outputs": [], 478 | "source": [ 479 | "scaled_single = x_train[0]" 480 | ] 481 | }, 482 | { 483 | "cell_type": "code", 484 | "execution_count": 22, 485 | "metadata": {}, 486 | "outputs": [ 487 | { 488 | "data": { 489 | "text/plain": [ 490 | "1.0" 491 | ] 492 | }, 493 | "execution_count": 22, 494 | "metadata": {}, 495 | "output_type": "execute_result" 496 | } 497 | ], 498 | "source": [ 499 | "scaled_single.max()" 500 | ] 501 | }, 502 | { 503 | "cell_type": "code", 504 | "execution_count": 23, 505 | "metadata": {}, 506 | "outputs": [ 507 | { 508 | "data": { 509 | "text/plain": [ 510 | "" 511 | ] 512 | }, 513 | "execution_count": 23, 514 | "metadata": {}, 515 | "output_type": "execute_result" 516 | }, 517 | { 518 | "data": { 519 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAADolJREFUeJzt3X2MXOV1x/HfyXq9jo1JvHVsHOJgxzgBYhqTjgzICFwhXKdCMqgCYkWRQ5M4LzgprStBraq4FancKiF1CUVamq1tifcEiv+gSZAVAVFhy+IQXuLwErMli7e7mA3YEOKX3dM/9m60MTvPrGfuzJ3d8/1I1szcc+/co4Hf3pl55t7H3F0A4nlP0Q0AKAbhB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1LRG7my6tfkMzWrkLoFQfqu3dcQP20TWrSn8ZrZG0jZJLZL+3d23ptafoVk61y6uZZcAErp894TXrfptv5m1SLpF0qcknSVpnZmdVe3zAWisWj7zr5D0krvvc/cjku6StDaftgDUWy3hP1XSr8Y87s2W/R4z22Bm3WbWfVSHa9gdgDzVEv7xvlR41/nB7t7h7iV3L7WqrYbdAchTLeHvlbRwzOMPSdpfWzsAGqWW8D8haamZLTaz6ZI+LWlXPm0BqLeqh/rc/ZiZbZT0Q40M9XW6+3O5dQagrmoa53f3ByU9mFMvABqIn/cCQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QVE2z9JpZj6RDkoYkHXP3Uh5NIT82Lf2fuOUDc+u6/+f/elHZ2tDM4eS2py0ZSNZnftWS9f+7aXrZ2p7S3cltDwy9nayfe++mZP30v3o8WW8GNYU/88fufiCH5wHQQLztB4KqNfwu6Udm9qSZbcijIQCNUevb/pXuvt/M5kl6yMx+4e6PjF0h+6OwQZJmaGaNuwOQl5qO/O6+P7sdkHS/pBXjrNPh7iV3L7WqrZbdAchR1eE3s1lmNnv0vqTVkp7NqzEA9VXL2/75ku43s9HnucPdf5BLVwDqrurwu/s+SZ/IsZcpq+XMpcm6t7Um6/sven+y/s555cek29+XHq9+9BPp8e4i/ddvZifr//SdNcl619l3lK29fPSd5LZb+y9J1j/4qCfrkwFDfUBQhB8IivADQRF+ICjCDwRF+IGg8jirL7yhVZ9M1m/afkuy/tHW8qeeTmVHfShZ/7ubP5esT3s7Pdx2/r0by9Zmv3osuW3bgfRQ4MzurmR9MuDIDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBMc6fg7bn9yfrT/52YbL+0db+PNvJ1aa+85L1fW+lL/29fcn3ytbeHE6P08//1/9O1utp8p+wWxlHfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IytwbN6J5srX7uXZxw/bXLAavPj9ZP7gmfXntlqdPStZ/9tWbT7inUTce+MNk/YmL0uP4Q2+8maz7+eWv7t7z9eSmWrzuZ+kV8C5dvlsHfTA9d3mGIz8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBFVxnN/MOiVdKmnA3Zdly9ol3S1pkaQeSVe6+68r7SzqOH8lLXP/IFkfen0wWX/5jvJj9c9d2JncdsU/fi1Zn3dLcefU48TlPc6/XdLxE6FfL2m3uy+VtDt7DGASqRh+d39E0vGHnrWSdmT3d0i6LOe+ANRZtZ/557t7nyRlt/PyawlAI9T9Gn5mtkHSBkmaoZn13h2ACar2yN9vZgskKbsdKLeiu3e4e8ndS61qq3J3APJWbfh3SVqf3V8v6YF82gHQKBXDb2Z3SnpM0sfMrNfMPi9pq6RLzOxFSZdkjwFMIhU/87v7ujIlBuxzMnTg9Zq2P3pwetXbfvwzP0/WX7u1Jf0Ew0NV7xvF4hd+QFCEHwiK8ANBEX4gKMIPBEX4gaCYonsKOPO6F8rWrj47PSL7H6ftTtYvuuKaZH323Y8n62heHPmBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjG+aeA1DTZr3/lzOS2r+x6J1m//sadyfrfXHl5su4/fV/Z2sJvPJbcVg2cPj4ijvxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EFTFKbrzxBTdzWfwz89P1m+/4ZvJ+uJpM6re98d3bkzWl97Wl6wf29dT9b6nqryn6AYwBRF+ICjCDwRF+IGgCD8QFOEHgiL8QFAVx/nNrFPSpZIG3H1ZtmyLpC9Kei1bbbO7P1hpZ4zzTz6+cnmyfvLW3mT9zo/8sOp9n/HjLyTrH/v78tcxkKShF/dVve/JKu9x/u2S1oyz/Nvuvjz7VzH4AJpLxfC7+yOSBhvQC4AGquUz/0Yze9rMOs1sTm4dAWiIasN/q6QlkpZL6pP0rXIrmtkGM+s2s+6jOlzl7gDkrarwu3u/uw+5+7Ck2yStSKzb4e4ldy+1qq3aPgHkrKrwm9mCMQ8vl/RsPu0AaJSKl+42szslrZI018x6Jd0gaZWZLZfkknokfamOPQKoA87nR01a5s9L1vdfdXrZWtd125LbvqfCG9PPvLw6WX/zgteT9amI8/kBVET4gaAIPxAU4QeCIvxAUIQfCIqhPhTmnt70FN0zbXqy/hs/kqxf+rVryz/3/V3JbScrhvoAVET4gaAIPxAU4QeCIvxAUIQfCIrwA0FVPJ8fsQ1fkL509y+vSE/RvWx5T9lapXH8Sm4ePCdZn/lAd03PP9Vx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoBjnn+KstCxZf+Hr6bH221buSNYvnJE+p74Wh/1osv744OL0Ewz35djN1MORHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCqjjOb2YLJe2UdIqkYUkd7r7NzNol3S1pkaQeSVe6+6/r12pc0xaflqz/8uoPlq1tuequ5LZ/dtKBqnrKw+b+UrL+8LbzkvU5O9LX/UfaRI78xyRtcvczJZ0n6RozO0vS9ZJ2u/tSSbuzxwAmiYrhd/c+d9+T3T8kaa+kUyWtlTT6868dki6rV5MA8ndCn/nNbJGkcyR1SZrv7n3SyB8ISfPybg5A/Uw4/GZ2kqTvS7rW3Q+ewHYbzKzbzLqP6nA1PQKogwmF38xaNRL82939vmxxv5ktyOoLJA2Mt627d7h7yd1LrWrLo2cAOagYfjMzSd+VtNfdbxpT2iVpfXZ/vaQH8m8PQL1M5JTelZI+K+kZM3sqW7ZZ0lZJ95jZ5yW9IumK+rQ4+U1b9OFk/c0/WpCsX/UPP0jWv/z++5L1etrUlx6Oe+zfyg/ntW//n+S2c4YZyquniuF3959IKjff98X5tgOgUfiFHxAU4QeCIvxAUIQfCIrwA0ERfiAoLt09QdMWnFK2Ntg5K7ntVxY/nKyvm91fVU952PjqBcn6nlvTU3TP/d6zyXr7IcbqmxVHfiAowg8ERfiBoA
g/EBThB4Ii/EBQhB8IKsw4/5E/SV8m+shfDibrm09/sGxt9XvfrqqnvPQPvVO2duGuTcltz/jbXyTr7W+kx+mHk1U0M478QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxBUmHH+nsvSf+deOPveuu37ljeWJOvbHl6drNtQuSunjzjjxpfL1pb2dyW3HUpWMZVx5AeCIvxAUIQfCIrwA0ERfiAowg8ERfiBoMzd0yuYLZS0U9IpGjl9u8Pdt5nZFklflPRatupmdy9/0rukk63dzzVm9Qbqpct366APpn8YkpnIj3yOSdrk7nvMbLakJ83soaz2bXf/ZrWNAihOxfC7e5+kvuz+ITPbK+nUejcGoL5O6DO/mS2SdI6k0d+MbjSzp82s08zmlNlmg5l1m1n3UR2uqVkA+Zlw+M3sJEnfl3Stux+UdKukJZKWa+SdwbfG287dO9y95O6lVrXl0DKAPEwo/GbWqpHg3+7u90mSu/e7+5C7D0u6TdKK+rUJIG8Vw29mJum7kva6+01jli8Ys9rlktLTtQJoKhP5tn+lpM9KesbMnsqWbZa0zsyWS3JJPZK+VJcOAdTFRL7t/4mk8cYNk2P6AJobv/ADgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EVfHS3bnuzOw1Sf87ZtFcSQca1sCJadbemrUvid6qlWdvp7n7ByayYkPD/66dm3W7e6mwBhKatbdm7Uuit2oV1Rtv+4GgCD8QVNHh7yh4/ynN2luz9iXRW7UK6a3Qz/wAilP0kR9AQQoJv5mtMbPnzewlM7u+iB7KMbMeM3vGzJ4ys+6Ce+k0swEze3bMsnYze8jMXsxux50mraDetpjZq9lr95SZ/WlBvS00sx+b2V4ze87M/iJbXuhrl+irkNet4W/7zaxF0guSLpHUK+kJSevc/ecNbaQMM+uRVHL3wseEzexCSW9J2unuy7Jl/yxp0N23Zn8457j7dU3S2xZJbxU9c3M2ocyCsTNLS7pM0udU4GuX6OtKFfC6FXHkXyHpJXff5+5HJN0laW0BfTQ9d39E0uBxi9dK2pHd36GR/3karkxvTcHd+9x9T3b/kKTRmaULfe0SfRWiiPCfKulXYx73qrmm/HZJPzKzJ81sQ9HNjGN+Nm366PTp8wru53gVZ25upONmlm6a166aGa/zVkT4x5v9p5mGHFa6+yclfUrSNdnbW0zMhGZubpRxZpZuCtXOeJ23IsLfK2nhmMcfkrS/gD7G5e77s9sBSfer+WYf7h+dJDW7HSi4n99pppmbx5tZWk3w2jXTjNdFhP8JSUvNbLGZTZf0aUm7CujjXcxsVvZFjMxslqTVar7Zh3dJWp/dXy/pgQJ7+T3NMnNzuZmlVfBr12wzXhfyI59sKONfJLVI6nT3bzS8iXGY2Uc0crSXRiYxvaPI3szsTkmrNHLWV7+kGyT9p6R7JH1Y0iuSrnD3hn/xVqa3VRp56/q7mZtHP2M3uLcLJD0q6RlJw9nizRr5fF3Ya5foa50KeN34hR8QFL/wA4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1P8D6+E2hIAP97kAAAAASUVORK5CYII=\n", 520 | "text/plain": [ 521 | "
" 522 | ] 523 | }, 524 | "metadata": { 525 | "needs_background": "light" 526 | }, 527 | "output_type": "display_data" 528 | } 529 | ], 530 | "source": [ 531 | "plt.imshow(scaled_single)" 532 | ] 533 | }, 534 | { 535 | "cell_type": "markdown", 536 | "metadata": {}, 537 | "source": [ 538 | "## Reshaping the Data\n", 539 | "\n", 540 | "Right now our data is 60,000 images stored in 28 by 28 pixel array formation. \n", 541 | "\n", 542 | "This is correct for a CNN, but we need to add one more dimension to show we're dealing with 1 RGB channel (since technically the images are in black and white, only showing values from 0-255 on a single channel), an color image would have 3 dimensions." 543 | ] 544 | }, 545 | { 546 | "cell_type": "code", 547 | "execution_count": 24, 548 | "metadata": {}, 549 | "outputs": [ 550 | { 551 | "data": { 552 | "text/plain": [ 553 | "(60000, 28, 28)" 554 | ] 555 | }, 556 | "execution_count": 24, 557 | "metadata": {}, 558 | "output_type": "execute_result" 559 | } 560 | ], 561 | "source": [ 562 | "x_train.shape" 563 | ] 564 | }, 565 | { 566 | "cell_type": "code", 567 | "execution_count": 25, 568 | "metadata": {}, 569 | "outputs": [ 570 | { 571 | "data": { 572 | "text/plain": [ 573 | "(10000, 28, 28)" 574 | ] 575 | }, 576 | "execution_count": 25, 577 | "metadata": {}, 578 | "output_type": "execute_result" 579 | } 580 | ], 581 | "source": [ 582 | "x_test.shape" 583 | ] 584 | }, 585 | { 586 | "cell_type": "markdown", 587 | "metadata": {}, 588 | "source": [ 589 | "Reshape to include channel dimension (in this case, 1 channel)" 590 | ] 591 | }, 592 | { 593 | "cell_type": "code", 594 | "execution_count": 26, 595 | "metadata": {}, 596 | "outputs": [], 597 | "source": [ 598 | "x_train = x_train.reshape(60000, 28, 28, 1)" 599 | ] 600 | }, 601 | { 602 | "cell_type": "code", 603 | "execution_count": 27, 604 | "metadata": {}, 605 | "outputs": [ 606 | { 607 | "data": { 608 | "text/plain": [ 609 | "(60000, 28, 28, 1)" 610 | ] 611 | }, 612 | "execution_count": 27, 613 | "metadata": {}, 614 | "output_type": "execute_result" 615 | } 616 | ], 617 | "source": [ 618 | "x_train.shape" 619 | ] 620 | }, 621 | { 622 | "cell_type": "code", 623 | "execution_count": 28, 624 | "metadata": {}, 625 | "outputs": [], 626 | "source": [ 627 | "x_test = x_test.reshape(10000,28,28,1)" 628 | ] 629 | }, 630 | { 631 | "cell_type": "code", 632 | "execution_count": 29, 633 | "metadata": {}, 634 | "outputs": [ 635 | { 636 | "data": { 637 | "text/plain": [ 638 | "(10000, 28, 28, 1)" 639 | ] 640 | }, 641 | "execution_count": 29, 642 | "metadata": {}, 643 | "output_type": "execute_result" 644 | } 645 | ], 646 | "source": [ 647 | "x_test.shape" 648 | ] 649 | }, 650 | { 651 | "cell_type": "markdown", 652 | "metadata": {}, 653 | "source": [ 654 | "# Training the Model" 655 | ] 656 | }, 657 | { 658 | "cell_type": "code", 659 | "execution_count": 30, 660 | "metadata": {}, 661 | "outputs": [], 662 | "source": [ 663 | "from keras.models import Sequential\n", 664 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten" 665 | ] 666 | }, 667 | { 668 | "cell_type": "code", 669 | "execution_count": 31, 670 | "metadata": {}, 671 | "outputs": [], 672 | "source": [ 673 | "model = Sequential()\n", 674 | "\n", 675 | "# CONVOLUTIONAL LAYER\n", 676 | "model.add(Conv2D(filters=32, kernel_size=(4,4),input_shape=(28, 28, 1), activation='relu',))\n", 677 | "# POOLING LAYER\n", 678 | "model.add(MaxPool2D(pool_size=(2, 2)))\n", 679 | "\n", 680 | "# FLATTEN IMAGES FROM 28 by 28 to 764 BEFORE FINAL LAYER\n", 681 | 
"model.add(Flatten())\n", 682 | "\n", 683 | "# 128 NEURONS IN DENSE HIDDEN LAYER (YOU CAN CHANGE THIS NUMBER OF NEURONS)\n", 684 | "model.add(Dense(128, activation='relu'))\n", 685 | "\n", 686 | "# LAST LAYER IS THE CLASSIFIER, THUS 10 POSSIBLE CLASSES\n", 687 | "model.add(Dense(10, activation='softmax'))\n", 688 | "\n", 689 | "\n", 690 | "model.compile(loss='categorical_crossentropy',\n", 691 | " optimizer='rmsprop',\n", 692 | " metrics=['accuracy'])" 693 | ] 694 | }, 695 | { 696 | "cell_type": "code", 697 | "execution_count": 32, 698 | "metadata": {}, 699 | "outputs": [ 700 | { 701 | "name": "stdout", 702 | "output_type": "stream", 703 | "text": [ 704 | "_________________________________________________________________\n", 705 | "Layer (type) Output Shape Param # \n", 706 | "=================================================================\n", 707 | "conv2d_1 (Conv2D) (None, 25, 25, 32) 544 \n", 708 | "_________________________________________________________________\n", 709 | "max_pooling2d_1 (MaxPooling2 (None, 12, 12, 32) 0 \n", 710 | "_________________________________________________________________\n", 711 | "flatten_1 (Flatten) (None, 4608) 0 \n", 712 | "_________________________________________________________________\n", 713 | "dense_1 (Dense) (None, 128) 589952 \n", 714 | "_________________________________________________________________\n", 715 | "dense_2 (Dense) (None, 10) 1290 \n", 716 | "=================================================================\n", 717 | "Total params: 591,786\n", 718 | "Trainable params: 591,786\n", 719 | "Non-trainable params: 0\n", 720 | "_________________________________________________________________\n" 721 | ] 722 | } 723 | ], 724 | "source": [ 725 | "model.summary()" 726 | ] 727 | }, 728 | { 729 | "cell_type": "markdown", 730 | "metadata": {}, 731 | "source": [ 732 | "## Train the Model" 733 | ] 734 | }, 735 | { 736 | "cell_type": "code", 737 | "execution_count": 33, 738 | "metadata": {}, 739 | "outputs": [ 740 | { 741 | "name": "stdout", 742 | "output_type": "stream", 743 | "text": [ 744 | "Epoch 1/2\n", 745 | "60000/60000 [==============================] - 7s 124us/step - loss: 0.1344 - acc: 0.9593\n", 746 | "Epoch 2/2\n", 747 | "60000/60000 [==============================] - 6s 92us/step - loss: 0.0488 - acc: 0.9858\n" 748 | ] 749 | }, 750 | { 751 | "data": { 752 | "text/plain": [ 753 | "" 754 | ] 755 | }, 756 | "execution_count": 33, 757 | "metadata": {}, 758 | "output_type": "execute_result" 759 | } 760 | ], 761 | "source": [ 762 | "# THIS WILL TAKE AWHILE ON MOST COMPUTERS!!!\n", 763 | "# CHANGE NUMBER OF EPOCHS IF NECESSARY\n", 764 | "# YOUR ACCURACY MAY ALSO BE LOWER THAN WHAT IS SHOWN HERE SINCE THIS WAS TRAINED ON GPU\n", 765 | "model.fit(x_train,y_cat_train,epochs=2)" 766 | ] 767 | }, 768 | { 769 | "cell_type": "markdown", 770 | "metadata": {}, 771 | "source": [ 772 | "## Evaluate the Model" 773 | ] 774 | }, 775 | { 776 | "cell_type": "code", 777 | "execution_count": 34, 778 | "metadata": {}, 779 | "outputs": [ 780 | { 781 | "data": { 782 | "text/plain": [ 783 | "['loss', 'acc']" 784 | ] 785 | }, 786 | "execution_count": 34, 787 | "metadata": {}, 788 | "output_type": "execute_result" 789 | } 790 | ], 791 | "source": [ 792 | "model.metrics_names" 793 | ] 794 | }, 795 | { 796 | "cell_type": "code", 797 | "execution_count": 35, 798 | "metadata": {}, 799 | "outputs": [ 800 | { 801 | "name": "stdout", 802 | "output_type": "stream", 803 | "text": [ 804 | "10000/10000 [==============================] - 0s 39us/step\n" 805 | ] 806 | }, 807 
| { 808 | "data": { 809 | "text/plain": [ 810 | "[0.043689755835279356, 0.9855]" 811 | ] 812 | }, 813 | "execution_count": 35, 814 | "metadata": {}, 815 | "output_type": "execute_result" 816 | } 817 | ], 818 | "source": [ 819 | "model.evaluate(x_test,y_cat_test)" 820 | ] 821 | }, 822 | { 823 | "cell_type": "code", 824 | "execution_count": 36, 825 | "metadata": {}, 826 | "outputs": [], 827 | "source": [ 828 | "from sklearn.metrics import classification_report" 829 | ] 830 | }, 831 | { 832 | "cell_type": "code", 833 | "execution_count": 37, 834 | "metadata": {}, 835 | "outputs": [], 836 | "source": [ 837 | "predictions = model.predict_classes(x_test)" 838 | ] 839 | }, 840 | { 841 | "cell_type": "code", 842 | "execution_count": 38, 843 | "metadata": {}, 844 | "outputs": [ 845 | { 846 | "data": { 847 | "text/plain": [ 848 | "(10000, 10)" 849 | ] 850 | }, 851 | "execution_count": 38, 852 | "metadata": {}, 853 | "output_type": "execute_result" 854 | } 855 | ], 856 | "source": [ 857 | "y_cat_test.shape" 858 | ] 859 | }, 860 | { 861 | "cell_type": "code", 862 | "execution_count": 39, 863 | "metadata": {}, 864 | "outputs": [ 865 | { 866 | "data": { 867 | "text/plain": [ 868 | "array([0., 0., 0., 0., 0., 0., 0., 1., 0., 0.], dtype=float32)" 869 | ] 870 | }, 871 | "execution_count": 39, 872 | "metadata": {}, 873 | "output_type": "execute_result" 874 | } 875 | ], 876 | "source": [ 877 | "y_cat_test[0]" 878 | ] 879 | }, 880 | { 881 | "cell_type": "code", 882 | "execution_count": 40, 883 | "metadata": {}, 884 | "outputs": [ 885 | { 886 | "data": { 887 | "text/plain": [ 888 | "7" 889 | ] 890 | }, 891 | "execution_count": 40, 892 | "metadata": {}, 893 | "output_type": "execute_result" 894 | } 895 | ], 896 | "source": [ 897 | "predictions[0]" 898 | ] 899 | }, 900 | { 901 | "cell_type": "code", 902 | "execution_count": 41, 903 | "metadata": {}, 904 | "outputs": [ 905 | { 906 | "data": { 907 | "text/plain": [ 908 | "array([7, 2, 1, ..., 4, 5, 6], dtype=uint8)" 909 | ] 910 | }, 911 | "execution_count": 41, 912 | "metadata": {}, 913 | "output_type": "execute_result" 914 | } 915 | ], 916 | "source": [ 917 | "y_test" 918 | ] 919 | }, 920 | { 921 | "cell_type": "code", 922 | "execution_count": 42, 923 | "metadata": { 924 | "scrolled": true 925 | }, 926 | "outputs": [ 927 | { 928 | "name": "stdout", 929 | "output_type": "stream", 930 | "text": [ 931 | " precision recall f1-score support\n", 932 | "\n", 933 | " 0 0.98 1.00 0.99 980\n", 934 | " 1 1.00 1.00 1.00 1135\n", 935 | " 2 0.97 0.99 0.98 1032\n", 936 | " 3 0.98 0.99 0.99 1010\n", 937 | " 4 0.99 0.99 0.99 982\n", 938 | " 5 0.98 0.98 0.98 892\n", 939 | " 6 1.00 0.98 0.99 958\n", 940 | " 7 0.99 0.98 0.98 1028\n", 941 | " 8 0.99 0.97 0.98 974\n", 942 | " 9 0.98 0.98 0.98 1009\n", 943 | "\n", 944 | "avg / total 0.99 0.99 0.99 10000\n", 945 | "\n" 946 | ] 947 | } 948 | ], 949 | "source": [ 950 | "print(classification_report(y_test,predictions))" 951 | ] 952 | }, 953 | { 954 | "cell_type": "markdown", 955 | "metadata": {}, 956 | "source": [ 957 | "Looks like the CNN performed quite well!" 
958 | ] 959 | } 960 | ], 961 | "metadata": { 962 | "kernelspec": { 963 | "display_name": "Python 3", 964 | "language": "python", 965 | "name": "python3" 966 | }, 967 | "language_info": { 968 | "codemirror_mode": { 969 | "name": "ipython", 970 | "version": 3 971 | }, 972 | "file_extension": ".py", 973 | "mimetype": "text/x-python", 974 | "name": "python", 975 | "nbconvert_exporter": "python", 976 | "pygments_lexer": "ipython3", 977 | "version": "3.6.5" 978 | } 979 | }, 980 | "nbformat": 4, 981 | "nbformat_minor": 2 982 | } 983 | -------------------------------------------------------------------------------- /7_Deep Learning for Computer Vision/myfirstmodel.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/7_Deep Learning for Computer Vision/myfirstmodel.h5 -------------------------------------------------------------------------------- /8_Capston Project/.ipynb_checkpoints/Capstone Project-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [], 3 | "metadata": {}, 4 | "nbformat": 4, 5 | "nbformat_minor": 2 6 | } 7 | -------------------------------------------------------------------------------- /8_Capston Project/Capstone Project.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Capston Project " 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import cv2\n", 17 | "import numpy as np\n", 18 | "\n", 19 | "from sklearn.metrics import pairwise" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 2, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "# Global Variables\n", 29 | "\n", 30 | "background = None\n", 31 | "accumulated_weight = 0.5\n", 32 | "\n", 33 | "roi_top = 20\n", 34 | "roi_bottom = 300\n", 35 | "roi_right = 300\n", 36 | "roi_left = 600" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 3, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "#function to find average background value\n", 46 | "\n", 47 | "def cal_accum_avg(frame,accumulated_weight):\n", 48 | " \n", 49 | " global background\n", 50 | " \n", 51 | " if background is None:\n", 52 | " background = frame.copy().astype('float')\n", 53 | " return None\n", 54 | " \n", 55 | " cv2.accumulateWeighted(frame,background,accumulated_weight)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": 4, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "#function for segment in ROI\n", 65 | "\n", 66 | "def segment(frame,threshold = 25):\n", 67 | " \n", 68 | " diff = cv2.absdiff(background.astype('uint8'),frame)\n", 69 | " \n", 70 | " ret,thresholded_img = cv2.threshold(diff,threshold,255,\n", 71 | " cv2.THRESH_BINARY)\n", 72 | " \n", 73 | " image,contours,hierarchy = cv2.findContours(thresholded_img.copy(),\n", 74 | " cv2.RETR_EXTERNAL,\n", 75 | " cv2.CHAIN_APPROX_SIMPLE)\n", 76 | " \n", 77 | " if len(contours) == 0:\n", 78 | " return None\n", 79 | " \n", 80 | " else:\n", 81 | " #Assuming the largest external contour in roi is the hand\n", 82 | " \n", 83 | " hand_segment = max(contours,key = cv2.contourArea)\n", 84 | " \n", 85 | " return (thresholded_img,hand_segment)" 86 | ] 87 | }, 
88 | { 89 | "cell_type": "code", 90 | "execution_count": 5, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [ 94 | "#Function to count fingers using a convex hull\n", 95 | "\n", 96 | "def count_fingers(thresholded_img,hand_segment):\n", 97 | " \n", 98 | " conv_hull = cv2.convexHull(hand_segment)\n", 99 | " \n", 100 | " #most extreme top, bottom, left and right points\n", 101 | " top = tuple(conv_hull[conv_hull[:, :, 1].argmin()][0])\n", 102 | " bottom = tuple(conv_hull[conv_hull[:, :, 1].argmax()][0])\n", 103 | " left = tuple(conv_hull[conv_hull[:, :, 0].argmin()][0])\n", 104 | " right = tuple(conv_hull[conv_hull[:, :, 0].argmax()][0])\n", 105 | " \n", 106 | " #finding center point\n", 107 | " cx = (left[0] + right[0]) // 2\n", 108 | " cy = (top[1] + bottom[1]) // 2\n", 109 | " \n", 110 | " #calculate distance from center to all extreme points\n", 111 | " distance = pairwise.euclidean_distances([(cx,cy)],\n", 112 | " Y=[left,right,top,bottom])[0]\n", 113 | " \n", 114 | " #take the maximum of these distances\n", 115 | " max_distance = distance.max()\n", 116 | " \n", 117 | " #create a circular ROI around the hand's center\n", 118 | " radius = int(0.8 * max_distance)\n", 119 | " circumference = (2 * np.pi * radius)\n", 120 | " \n", 121 | " circular_roi = np.zeros(thresholded_img.shape[:2],dtype='uint8')\n", 122 | " \n", 123 | " cv2.circle(circular_roi,(cx,cy),radius,255,10)\n", 124 | " \n", 125 | " circular_roi = cv2.bitwise_and(thresholded_img,thresholded_img,\n", 126 | " mask = circular_roi)\n", 127 | " \n", 128 | " image,contours,hierarchy= cv2.findContours(circular_roi.copy(),\n", 129 | " cv2.RETR_EXTERNAL,\n", 130 | " cv2.CHAIN_APPROX_NONE)\n", 131 | " \n", 132 | " count = 0\n", 133 | " \n", 134 | " for cnt in contours:\n", 135 | " (x,y,w,h) = cv2.boundingRect(cnt)\n", 136 | " \n", 137 | " out_of_wrist = ((cy + (cy * 0.25)) > (y + h)) # contour must sit above the wrist (bottom) region\n", 138 | " \n", 139 | " limit_points = ((circumference * 0.25) > cnt.shape[0]) # count only contours covering less than 25% of the circle\n", 140 | " \n", 141 | " if out_of_wrist and limit_points:\n", 142 | " count += 1\n", 143 | " \n", 144 | " return count" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": 6, 150 | "metadata": {}, 151 | "outputs": [], 152 | "source": [ 153 | "cam = cv2.VideoCapture(0)\n", 154 | "\n", 155 | "num_frames = 0\n", 156 | "\n", 157 | "while True:\n", 158 | " \n", 159 | " ret,frame = cam.read()\n", 160 | " \n", 161 | " frame = cv2.flip(frame, 1)\n", 162 | " frame_copy = frame.copy()\n", 163 | " \n", 164 | " roi = frame[roi_top:roi_bottom,roi_right:roi_left]\n", 165 | " \n", 166 | " gray = cv2.cvtColor(roi,cv2.COLOR_BGR2GRAY)\n", 167 | " \n", 168 | " gray_blur = cv2.GaussianBlur(gray,(7,7),0)\n", 169 | " \n", 170 | " if num_frames < 60:\n", 171 | " cal_accum_avg(gray_blur,accumulated_weight)\n", 172 | " \n", 173 | " if num_frames <= 59:\n", 174 | " cv2.putText(frame_copy,'WAIT, GETTING BACKGROUND',(200,300),\n", 175 | " cv2.FONT_HERSHEY_COMPLEX,1,(255,0,0),2)\n", 176 | " cv2.imshow('finger count',frame_copy)\n", 177 | " \n", 178 | " else:\n", 179 | " hand = segment(gray_blur)\n", 180 | " \n", 181 | " if hand is not None:\n", 182 | " \n", 183 | " thresholded, hand_segment = hand\n", 184 | " \n", 185 | " #draw contours around real hand in live stream\n", 186 | " cv2.drawContours(frame_copy,\n", 187 | " [hand_segment+[roi_right,roi_top]],\n", 188 | " -1,\n", 189 | " (0,0,255),\n", 190 | " 5)\n", 191 | " fingers = count_fingers(thresholded,hand_segment)\n", 192 | "\n", 193 | " \n", 194 | " cv2.putText(frame_copy,\n", 195 | " str(fingers),\n", 196 | " (70,50),\n", 197 | 
cv2.FONT_HERSHEY_SIMPLEX,\n", 198 | " 1,\n", 199 | " (255,0,0),\n", 200 | " 2)\n", 201 | " \n", 202 | " cv2.imshow('thresholded',thresholded)\n", 203 | " \n", 204 | " cv2.rectangle(frame_copy,\n", 205 | " (roi_left,roi_top),\n", 206 | " (roi_right,roi_bottom),\n", 207 | " (0,255,0),\n", 208 | " 5)\n", 209 | " \n", 210 | " num_frames += 1\n", 211 | " \n", 212 | " cv2.imshow('finger count', frame_copy)\n", 213 | " \n", 214 | " k = cv2.waitKey(1) & 0xFF\n", 215 | " \n", 216 | " if k == 27:\n", 217 | " break\n", 218 | " \n", 219 | "cv2.destroyAllWindows()\n", 220 | "cam.release()" 221 | ] 222 | } 223 | ], 224 | "metadata": { 225 | "kernelspec": { 226 | "display_name": "Python 3", 227 | "language": "python", 228 | "name": "python3" 229 | }, 230 | "language_info": { 231 | "codemirror_mode": { 232 | "name": "ipython", 233 | "version": 3 234 | }, 235 | "file_extension": ".py", 236 | "mimetype": "text/x-python", 237 | "name": "python", 238 | "nbconvert_exporter": "python", 239 | "pygments_lexer": "ipython3", 240 | "version": "3.6.5" 241 | } 242 | }, 243 | "nbformat": 4, 244 | "nbformat_minor": 2 245 | } 246 | -------------------------------------------------------------------------------- /Data/00-puppy.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/00-puppy.jpg -------------------------------------------------------------------------------- /Data/Denis_Mukwege.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/Denis_Mukwege.jpg -------------------------------------------------------------------------------- /Data/Nadia_Murad.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/Nadia_Murad.jpg -------------------------------------------------------------------------------- /Data/bricks.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/bricks.jpg -------------------------------------------------------------------------------- /Data/car_plate.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/car_plate.jpg -------------------------------------------------------------------------------- /Data/crossword.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/crossword.jpg -------------------------------------------------------------------------------- /Data/dog_backpack.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/dog_backpack.jpg 
-------------------------------------------------------------------------------- /Data/dog_backpack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/dog_backpack.png -------------------------------------------------------------------------------- /Data/dot_grid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/dot_grid.png -------------------------------------------------------------------------------- /Data/flat_chessboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/flat_chessboard.png -------------------------------------------------------------------------------- /Data/giraffes.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/giraffes.jpg -------------------------------------------------------------------------------- /Data/gorilla.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/gorilla.jpg -------------------------------------------------------------------------------- /Data/horse.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/horse.jpg -------------------------------------------------------------------------------- /Data/internal_external.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/internal_external.png -------------------------------------------------------------------------------- /Data/many_cereals.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/many_cereals.jpg -------------------------------------------------------------------------------- /Data/pennies.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/pennies.jpg -------------------------------------------------------------------------------- /Data/rainbow.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/rainbow.jpg -------------------------------------------------------------------------------- /Data/real_chessboard.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/real_chessboard.jpg -------------------------------------------------------------------------------- /Data/reeses_puffs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/reeses_puffs.png -------------------------------------------------------------------------------- /Data/road_image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/road_image.jpg -------------------------------------------------------------------------------- /Data/sammy.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/sammy.jpg -------------------------------------------------------------------------------- /Data/sammy_face.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/sammy_face.jpg -------------------------------------------------------------------------------- /Data/sammy_noise.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/sammy_noise.jpg -------------------------------------------------------------------------------- /Data/separate_coins.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/separate_coins.jpg -------------------------------------------------------------------------------- /Data/solvay_conference.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/solvay_conference.jpg -------------------------------------------------------------------------------- /Data/sudoku.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/sudoku.jpg -------------------------------------------------------------------------------- /Data/watermark_no_copy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RamjiB/Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning/8a11b49752917924110bae2b7bd0546bd52da712/Data/watermark_no_copy.png -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 
Python-for-Computer-Vision-with-OpenCV-and-Deep-Learning 2 | 3 | Udemy Course (https://www.udemy.com/course/python-for-computer-vision-with-opencv-and-deep-learning/) 4 | 5 | - Understand basics of NumPy 6 | - Manipulate and open Images with NumPy 7 | - Use OpenCV to work with image files 8 | - Use Python and OpenCV to draw shapes on images and videos 9 | - Perform image manipulation with OpenCV, including smoothing, blurring, thresholding, and morphological operations 10 | - Create Color Histograms with OpenCV 11 | - Open and Stream video with Python and OpenCV 12 | - Detect Objects, including corner, edge, and grid detection techniques with OpenCV and Python 13 | - Create Face Detection Software 14 | - Segment Images with the Watershed Algorithm 15 | - Track Objects in Video 16 | - Use Python and Deep Learning to build image classifiers 17 | 18 | # Certificate 19 | http://ude.my/UC-4GWIGE4L 20 | --------------------------------------------------------------------------------
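
For quick reference, the sketch below condenses the MNIST walkthrough from `7_Deep Learning for Computer Vision/Keras-CNN-MNIST.ipynb` into one self-contained script. It reuses the notebook's own layers and hyperparameters; a Keras 2.x install with a TensorFlow backend is assumed, and this is a minimal sketch rather than part of the original course material.

```python
# Condensed from Keras-CNN-MNIST.ipynb: train and evaluate a small CNN on MNIST.
# Assumes Keras 2.x (TensorFlow backend); mirrors the notebook's architecture.
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Conv2D, MaxPool2D, Flatten
from keras.utils.np_utils import to_categorical

(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Scale pixel values to [0, 1] and add the single grayscale channel dimension.
x_train = x_train.reshape(60000, 28, 28, 1) / 255
x_test = x_test.reshape(10000, 28, 28, 1) / 255

# One-hot encode the 10 digit classes so this is treated as classification.
y_cat_train = to_categorical(y_train, 10)
y_cat_test = to_categorical(y_test, 10)

model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(4, 4), input_shape=(28, 28, 1), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Flatten())                        # 12 x 12 x 32 --> 4608
model.add(Dense(128, activation='relu'))
model.add(Dense(10, activation='softmax'))  # one output per digit class
model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

model.fit(x_train, y_cat_train, epochs=2)
print(model.evaluate(x_test, y_cat_test))   # [loss, accuracy] on the test set
```

As in the notebook, two epochs are enough to reach roughly 98-99% test accuracy on MNIST.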