├── .ipynb_checkpoints ├── Approximating Contours-checkpoint.ipynb ├── Blob Detection-checkpoint.ipynb ├── Contour Detection-checkpoint.ipynb ├── Convex Hull-checkpoint.ipynb ├── Counting Circles and Ellipses-checkpoint.ipynb ├── Creating images and drawing shapes in OpenCV-checkpoint.ipynb ├── Grayscaling-checkpoint.ipynb ├── Histograms in OpenCV-checkpoint.ipynb ├── Image Brightening and Darkening-checkpoint.ipynb ├── Image Color Spaces-checkpoint.ipynb ├── Image Contours-checkpoint.ipynb ├── Image Convolution and Blurring-checkpoint.ipynb ├── Image Cropping-checkpoint.ipynb ├── Image Dilation and Erosion-checkpoint.ipynb ├── Image Edge Detection-checkpoint.ipynb ├── Image Masking-checkpoint.ipynb ├── Image Pyramids or Image Scaling-checkpoint.ipynb ├── Image Resizing or Scaling-checkpoint.ipynb ├── Image Rotations-checkpoint.ipynb ├── Image Sharpening-checkpoint.ipynb ├── Image Thresholding-checkpoint.ipynb ├── Image Translations-checkpoint.ipynb ├── Non-Affine Image Transformations-checkpoint.ipynb ├── Read Write and Display Images using OpenCV-checkpoint.ipynb └── Sorting Contours-checkpoint.ipynb ├── Approximating Contours.ipynb ├── Blob Detection.ipynb ├── Contour Detection.ipynb ├── Convex Hull.ipynb ├── Counting Circles and Ellipses.ipynb ├── Creating images and drawing shapes in OpenCV.ipynb ├── Grayscaling.ipynb ├── Histograms in OpenCV.ipynb ├── Image Brightening and Darkening.ipynb ├── Image Color Spaces.ipynb ├── Image Contours.ipynb ├── Image Convolution and Blurring.ipynb ├── Image Cropping.ipynb ├── Image Dilation and Erosion.ipynb ├── Image Edge Detection.ipynb ├── Image Masking.ipynb ├── Image Pyramids or Image Scaling.ipynb ├── Image Resizing or Scaling.ipynb ├── Image Rotations.ipynb ├── Image Sharpening.ipynb ├── Image Thresholding.ipynb ├── Image Translations.ipynb ├── Non-Affine Image Transformations.ipynb ├── README.md ├── Read Write and Display Images using OpenCV.ipynb ├── Sorting Contours.ipynb ├── daisies.jpg ├── inpu1.jpg ├── input.jpg ├── input2.jpeg ├── input3.png ├── square-01.png └── sudoku.jpg /.ipynb_checkpoints/Approximating Contours-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import cv2\n", 22 | "\n", 23 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\input3.png')\n", 24 | "\n", 25 | "original_image = image.copy()\n", 26 | "\n", 27 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 28 | "ret , thresh = cv2.threshold(gray , 127 , 255 , cv2.THRESH_BINARY_INV)\n", 29 | "\n", 30 | "contours , heirarchy = cv2.findContours(thresh.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 31 | "\n", 32 | "for c in contours: #Drawing rectangle boxes\n", 33 | " x , y , w , h = cv2.boundingRect(c)\n", 34 | " cv2.rectangle(original_image , (x,y) , (x+w , y+h) , (0,0,255) , 2)\n", 35 | " cv2.imshow('rectangle' , original_image)\n", 36 | " \n", 37 | "cv2.waitKey()\n", 38 | "\n", 39 | "\n", 40 | " " 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "for c in contours:\n", 50 | " #calculate accuracy as a percent of perimeter\n", 51 | " accuracy = 0.01 * 
cv2.arcLength(c , True) #lower accuracy higher precision when approximating\n", 52 | " approx = cv2.approxPolyDP(c , accuracy , True)\n", 53 | " cv2.drawContours(image , [approx] , 0 , (0,255,0) , 2)\n", 54 | " cv2.imshow('approx' , image)\n", 55 | " \n", 56 | "cv2.waitKey()\n", 57 | "cv2.destroyAllWindows()" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [] 66 | } 67 | ], 68 | "metadata": { 69 | "kernelspec": { 70 | "display_name": "Python 3", 71 | "language": "python", 72 | "name": "python3" 73 | }, 74 | "language_info": { 75 | "codemirror_mode": { 76 | "name": "ipython", 77 | "version": 3 78 | }, 79 | "file_extension": ".py", 80 | "mimetype": "text/x-python", 81 | "name": "python", 82 | "nbconvert_exporter": "python", 83 | "pygments_lexer": "ipython3", 84 | "version": "3.7.1" 85 | } 86 | }, 87 | "nbformat": 4, 88 | "nbformat_minor": 2 89 | } 90 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Blob Detection-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 10, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "\n", 11 | "daisies = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\daisies.jpg' , cv2.IMREAD_GRAYSCALE)\n" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 13, 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "ename": "AttributeError", 21 | "evalue": "module 'cv2.cv2' has no attribute 'drawKeypoints'", 22 | "output_type": "error", 23 | "traceback": [ 24 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 25 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 26 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 9\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 10\u001b[0m \u001b[0mblank\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 11\u001b[1;33m blobs = cv2.drawKeypoints(daisies, keypoints, blank, (255,0,0),\n\u001b[0m\u001b[0;32m 12\u001b[0m cv2.DRAW_MATCHES_FLAGS_DEFAULT)\n\u001b[0;32m 13\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Blobs'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0mblobs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 27 | "\u001b[1;31mAttributeError\u001b[0m: module 'cv2.cv2' has no attribute 'drawKeypoints'" 28 | ] 29 | } 30 | ], 31 | "source": [ 32 | "#setup a detector \n", 33 | "detector = cv2.SimpleBlobDetector_create()\n", 34 | "\n", 35 | "#Detect blobs\n", 36 | "keypoints = detector.detect(daisies)\n", 37 | "\n", 38 | "#draw the detected blobs\n", 39 | "#cv2.draw_matches_flags_draw_rich_keypoints makes sure the size of circle is same as size of blob\n", 40 | "\n", 41 | "blank = np.zeros((1,1)) \n", 42 | "blobs = cv2.drawKeypoints(daisies, keypoints, blank, (255,0,0),\n", 43 | " cv2.DRAW_MATCHES_FLAGS_DEFAULT) #openCV 4.0.0 error\n", 44 | "cv2.imshow('Blobs' , blobs)\n", 45 | "cv2.waitKey()\n", 46 | "cv2.destroyAllWindows()" 47 | ] 
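Note on the AttributeError above: cv2.drawKeypoints is part of the regular OpenCV Python bindings, and the failure recorded here appears to be specific to the opencv-python 4.0.0 build mentioned in the comment; upgrading opencv-python is the usual remedy. A minimal sketch of the intended call, assuming a later 4.x build and the repository's daisies.jpg (note that numpy must be imported, which this notebook's first cell omits):

import cv2
import numpy as np

daisies = cv2.imread('daisies.jpg', cv2.IMREAD_GRAYSCALE)  # path assumed relative to the repo root

detector = cv2.SimpleBlobDetector_create()   # default-parameter blob detector
keypoints = detector.detect(daisies)         # blobs come back as keypoints

blank = np.zeros((1, 1))                     # placeholder for the output-image argument
blobs = cv2.drawKeypoints(daisies, keypoints, blank, (255, 0, 0),
                          cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)  # circle size follows blob size

cv2.imshow('Blobs', blobs)
cv2.waitKey()
cv2.destroyAllWindows()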
48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [] 55 | } 56 | ], 57 | "metadata": { 58 | "kernelspec": { 59 | "display_name": "Python 3", 60 | "language": "python", 61 | "name": "python3" 62 | }, 63 | "language_info": { 64 | "codemirror_mode": { 65 | "name": "ipython", 66 | "version": 3 67 | }, 68 | "file_extension": ".py", 69 | "mimetype": "text/x-python", 70 | "name": "python", 71 | "nbconvert_exporter": "python", 72 | "pygments_lexer": "ipython3", 73 | "version": "3.7.1" 74 | } 75 | }, 76 | "nbformat": 4, 77 | "nbformat_minor": 2 78 | } 79 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Contour Detection-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 2, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "contour_one = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\square-01.png', 0) #Reference image contour\n", 24 | "cv2.imshow(\"Contour Tempalte\", contour_one)\n", 25 | "cv2.waitKey()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stdout", 35 | "output_type": "stream", 36 | "text": [ 37 | "1\n", 38 | "0.3767457291322354\n", 39 | "0.39325765426692477\n", 40 | "0.37723717269282037\n", 41 | "0.3151969672568337\n" 42 | ] 43 | } 44 | ], 45 | "source": [ 46 | "#target image to identify similar to reference image\n", 47 | "targer = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 48 | "target_gray = cv2.cvtColor(targer , cv2.COLOR_BGR2GRAY)\n", 49 | "cv2.imshow('Target Image',target_gray)\n", 50 | "cv2.waitKey()\n", 51 | "\n", 52 | "#Threshold both images \n", 53 | "ret , thres1 = cv2.threshold(contour_one , 127 , 255 , 0)\n", 54 | "ret , thres2 = cv2.threshold(target_gray , 127 , 255 , 0)\n", 55 | "\n", 56 | "contours , hierarchy = cv2.findContours(thres1 , cv2.RETR_CCOMP , cv2.CHAIN_APPROX_SIMPLE)\n", 57 | "\n", 58 | "sorted_contours = sorted(contours , key = cv2.contourArea , reverse = True)\n", 59 | "print(len(contours))\n", 60 | "template_contour = contours[0]\n", 61 | "\n", 62 | "contours , heirarchy = cv2.findContours(thres2 , cv2.RETR_CCOMP , cv2.CHAIN_APPROX_SIMPLE)\n", 63 | "\n", 64 | "for c in contours:\n", 65 | " #iterate through each contour in image and use cv2.matchShapes to compare\n", 66 | " match = cv2.matchShapes(template_contour , c , 2 , 0.0)\n", 67 | " print(match)\n", 68 | " if match < 0.32:\n", 69 | " closest_contour = c\n", 70 | " break\n", 71 | " else:\n", 72 | " closest_contour = []\n", 73 | "\n", 74 | "cv2.drawContours(targer , [closest_contour] , -1 , ( 0,255,0) , 3)\n", 75 | "cv2.imshow('Output' , targer)\n", 76 | "cv2.waitKey()\n", 77 | "cv2.destroyAllWindows()\n", 78 | " " 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [] 87 | } 88 | ], 89 | "metadata": { 90 | "kernelspec": { 91 | "display_name": "Python 3", 92 | "language": "python", 93 | "name": "python3" 94 | }, 95 | "language_info": { 96 | "codemirror_mode": 
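In the Contour Detection cell above, the bare 2 passed to cv2.matchShapes selects one of the Hu-moment comparison methods (cv2.CONTOURS_MATCH_I2 in current builds), and smaller return values mean more similar shapes, which is why the loop accepts matches below 0.32. A minimal standalone sketch using the named constant, assuming the repository's square-01.png and input3.png:

import cv2

template = cv2.imread('square-01.png', 0)   # assumed relative paths
target = cv2.imread('input3.png', 0)

_, t1 = cv2.threshold(template, 127, 255, 0)
_, t2 = cv2.threshold(target, 127, 255, 0)

cnts1, _ = cv2.findContours(t1, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
cnts2, _ = cv2.findContours(t2, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)

template_contour = max(cnts1, key=cv2.contourArea)   # largest contour in the template image
for c in cnts2:
    score = cv2.matchShapes(template_contour, c, cv2.CONTOURS_MATCH_I2, 0.0)
    print(score)                                     # lower score = closer shape match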
{ 97 | "name": "ipython", 98 | "version": 3 99 | }, 100 | "file_extension": ".py", 101 | "mimetype": "text/x-python", 102 | "name": "python", 103 | "nbconvert_exporter": "python", 104 | "pygments_lexer": "ipython3", 105 | "version": "3.7.1" 106 | } 107 | }, 108 | "nbformat": 4, 109 | "nbformat_minor": 2 110 | } 111 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Convex Hull-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Smallest Polygon that can fit outside an object \n", 10 | "import numpy as np\n", 11 | "import cv2\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 14 | "\n", 15 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 16 | "\n", 17 | "ret , thresh = cv2.threshold(gray , 175 , 255 , 0)\n", 18 | "\n", 19 | "contours , heirarchy = cv2.findContours(thresh.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 20 | "\n", 21 | "n = len(contours) -1 #to remove largest frame contour\n", 22 | "contours = sorted(contours , key = cv2.contourArea , reverse = False)[:n] #index to remove outmost frame\n", 23 | " \n", 24 | "for c in contours:\n", 25 | " hull = cv2.convexHull(c)\n", 26 | " cv2.drawContours(image ,[hull] , 0 , (0,255 , 0) , 2)\n", 27 | " cv2.imshow('hull' , image)\n", 28 | "\n", 29 | "cv2.waitKey()\n", 30 | "cv2.destroyAllWindows()" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "Python 3", 44 | "language": "python", 45 | "name": "python3" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.7.1" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 2 62 | } 63 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Counting Circles and Ellipses-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "error", 10 | "evalue": "OpenCV(4.0.0) C:\\projects\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:350: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31merror\u001b[0m Traceback (most recent call last)", 15 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[1;31m# Load image\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mimage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"images/blobs.jpg\"\u001b[0m\u001b[1;33m,\u001b[0m 
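Building on the Convex Hull notebook above: cv2.convexHull can also return hull point indices rather than coordinates, which is the form cv2.convexityDefects expects. A minimal sketch, assuming the same input3.png and thresholding as in that notebook:

import cv2

image = cv2.imread('input3.png')   # assumed relative path
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(gray, 175, 255, 0)
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)

for c in contours:
    hull_idx = cv2.convexHull(c, returnPoints=False)   # indices into c, not (x, y) points
    if hull_idx is not None and len(hull_idx) > 3:
        defects = cv2.convexityDefects(c, hull_idx)    # each row: start, end, farthest point, depth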
\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 6\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Original Image'\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mimage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 7\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 8\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", 16 | "\u001b[1;31merror\u001b[0m: OpenCV(4.0.0) C:\\projects\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:350: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n" 17 | ] 18 | } 19 | ], 20 | "source": [ 21 | "import cv2\n", 22 | "import numpy as np\n", 23 | " \n", 24 | "# Load image\n", 25 | "image = cv2.imread(\"images/blobs.jpg\", 0)\n", 26 | "cv2.imshow('Original Image',image)\n", 27 | "cv2.waitKey(0)\n", 28 | " \n", 29 | "# Intialize the detector using the default parameters\n", 30 | "detector = cv2.SimpleBlobDetector_create()\n", 31 | " \n", 32 | "# Detect blobs\n", 33 | "keypoints = detector.detect(image)\n", 34 | " \n", 35 | "# Draw blobs on our image as red circles\n", 36 | "blank = np.zeros((1,1)) \n", 37 | "blobs = cv2.drawKeypoints(image, keypoints, blank, (0,0,255),\n", 38 | " cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)\n", 39 | " \n", 40 | "number_of_blobs = len(keypoints)\n", 41 | "text = \"Total Number of Blobs: \" + str(len(keypoints))\n", 42 | "cv2.putText(blobs, text, (20, 550), cv2.FONT_HERSHEY_SIMPLEX, 1, (100, 0, 255), 2)\n", 43 | " \n", 44 | "# Display image with blob keypoints\n", 45 | "cv2.imshow(\"Blobs using default parameters\", blobs)\n", 46 | "cv2.waitKey(0)\n", 47 | " \n", 48 | " \n", 49 | "# Set our filtering parameters\n", 50 | "# Initialize parameter settiing using cv2.SimpleBlobDetector\n", 51 | "params = cv2.SimpleBlobDetector_Params()\n", 52 | " \n", 53 | "# Set Area filtering parameters\n", 54 | "params.filterByArea = True\n", 55 | "params.minArea = 100\n", 56 | " \n", 57 | "# Set Circularity filtering parameters\n", 58 | "params.filterByCircularity = True \n", 59 | "params.minCircularity = 0.9\n", 60 | " \n", 61 | "# Set Convexity filtering parameters\n", 62 | "params.filterByConvexity = False\n", 63 | "params.minConvexity = 0.2\n", 64 | " \n", 65 | "# Set inertia filtering parameters\n", 66 | "params.filterByInertia = True\n", 67 | "params.minInertiaRatio = 0.01\n", 68 | " \n", 69 | "# Create a detector with the parameters\n", 70 | "detector = cv2.SimpleBlobDetector_create(params)\n", 71 | " \n", 72 | "# Detect blobs\n", 73 | "keypoints = detector.detect(image)\n", 74 | " \n", 75 | "# Draw blobs on our image as red circles\n", 76 | "blank = np.zeros((1,1)) \n", 77 | "blobs = cv2.drawKeypoints(image, keypoints, blank, (0,255,0),\n", 78 | " cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)\n", 79 | " \n", 80 | "number_of_blobs = len(keypoints)\n", 81 | "text = \"Number of Circular Blobs: \" + str(len(keypoints))\n", 82 | "cv2.putText(blobs, text, (20, 550), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 100, 255), 2)\n", 83 | " \n", 84 | "# Show blobs\n", 85 | "cv2.imshow(\"Filtering Circular Blobs Only\", blobs)\n", 86 | "cv2.waitKey(0)\n", 87 | "cv2.destroyAllWindows()" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "metadata": {}, 94 | 
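The assertion failure at the top of this notebook is the usual symptom of cv2.imread silently returning None when the path cannot be found (images/blobs.jpg is not part of this repository), so cv2.imshow then receives an empty array. A small guard, with the path left as an assumption:

import cv2

image = cv2.imread("images/blobs.jpg", 0)   # assumed path; adjust to wherever blobs.jpg actually lives
if image is None:                           # imread does not raise on a bad path, it returns None
    raise FileNotFoundError("cv2.imread returned None - check the image path")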
"outputs": [], 95 | "source": [] 96 | } 97 | ], 98 | "metadata": { 99 | "kernelspec": { 100 | "display_name": "Python 3", 101 | "language": "python", 102 | "name": "python3" 103 | }, 104 | "language_info": { 105 | "codemirror_mode": { 106 | "name": "ipython", 107 | "version": 3 108 | }, 109 | "file_extension": ".py", 110 | "mimetype": "text/x-python", 111 | "name": "python", 112 | "nbconvert_exporter": "python", 113 | "pygments_lexer": "ipython3", 114 | "version": "3.7.1" 115 | } 116 | }, 117 | "nbformat": 4, 118 | "nbformat_minor": 2 119 | } 120 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Creating images and drawing shapes in OpenCV-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "#creating a black image\n", 13 | "image = np.zeros((512 , 512 , 3) , np.uint8) #512 width and height with 3 rgb colors \n", 14 | "\n", 15 | "#black and white image\n", 16 | "image_b = np.zeros((512 , 512 ) , np.uint8)\n", 17 | "\n", 18 | "cv2.imshow(\"Color black image\" , image)\n", 19 | "cv2.imshow(\"Black and white image\" , image_b)\n", 20 | "cv2.waitKey()\n", 21 | "cv2.destroyAllWindows()" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "#creating a line over the square\n", 31 | "\n", 32 | "cv2.line(image , (0,0) , (512, 512) , (255 , 127 , 0) , 5)\n", 33 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 34 | "cv2.imshow('Blue Line' , image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 3, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "#Draw a rectangle\n", 46 | "\n", 47 | "cv2.rectangle(image , (100,100) , (300,250) , (127,50,127) , 5)\n", 48 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 49 | "cv2.imshow('Rectangle' , image)\n", 50 | "cv2.waitKey()\n", 51 | "cv2.destroyAllWindows()" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 4, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "cv2.rectangle(image , (100,100) , (300,250) , (127,50,127) , -1) #-1 is to fill the image\n", 61 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 62 | "cv2.imshow('Rectangle' , image)\n", 63 | "cv2.waitKey()\n", 64 | "cv2.destroyAllWindows()" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 6, 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "#drawing a circle\n", 74 | "\n", 75 | "cv2.circle(image , (350,350) , 100 , (127,50,127) , -1) #center of circle and radius no start and end\n", 76 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 77 | "cv2.imshow('Circle' , image)\n", 78 | "cv2.waitKey()\n", 79 | "cv2.destroyAllWindows()" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 7, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "#drawing a polygon\n", 89 | "\n", 90 | "pts = np.array([[10,50] ,[200,30] , [123,24]] , np.int32) #array of points in the image\n", 91 | "\n", 92 | "pts = 
pts.reshape((-1 , 1 , 2))\n", 93 | "\n", 94 | "cv2.polylines(image , [pts] , True , (0,0,244) , 3) #True if image is closed or not\n", 95 | "cv2.imshow('Polygons' , image)\n", 96 | "cv2.waitKey()\n", 97 | "cv2.destroyAllWindows()" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 10, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "#adding text to images\n", 107 | "\n", 108 | "\n", 109 | "cv2.putText(image , \"Hey dude\" , (25,200) , cv2.FONT_HERSHEY_COMPLEX , 2 ,(100,244,0) , 3)\n", 110 | "cv2.imshow('Text' , image)\n", 111 | "cv2.waitKey()\n", 112 | "cv2.destroyAllWindows()" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 3, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "#Making a Square\n", 122 | "\n", 123 | "square = np.zeros((300,300) , np.uint8) #array of zeros of height and width 300\n", 124 | "cv2.rectangle(square , (50,50) , (250,250) , 255 , -2)\n", 125 | "\n", 126 | "#Making a ellipse\n", 127 | "ellipse = np.zeros((300,300) , np.uint8)\n", 128 | "cv2.ellipse(ellipse , (150,150) , (150,150) , 30 , 0 , 180 , 255 , -1)\n", 129 | "\n", 130 | "cv2.imshow('Square' , square)\n", 131 | "cv2.imshow('Ellipse' , ellipse)\n", 132 | "cv2.waitKey()\n", 133 | "cv2.destroyAllWindows()" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [] 142 | } 143 | ], 144 | "metadata": { 145 | "kernelspec": { 146 | "display_name": "Python 3", 147 | "language": "python", 148 | "name": "python3" 149 | }, 150 | "language_info": { 151 | "codemirror_mode": { 152 | "name": "ipython", 153 | "version": 3 154 | }, 155 | "file_extension": ".py", 156 | "mimetype": "text/x-python", 157 | "name": "python", 158 | "nbconvert_exporter": "python", 159 | "pygments_lexer": "ipython3", 160 | "version": "3.7.1" 161 | } 162 | }, 163 | "nbformat": 4, 164 | "nbformat_minor": 2 165 | } 166 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Grayscaling-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Grayscaling" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import cv2" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 3, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 26 | "cv2.imshow('Color image', input)\n", 27 | "cv2.waitKey()\n", 28 | "\n", 29 | "#cvtColor is used to convert to grayscale\n", 30 | "\n", 31 | "gray_image = cv2.cvtColor(input , cv2.COLOR_BGR2GRAY)\n", 32 | "cv2.imshow('Grayscale', gray_image)\n", 33 | "cv2.waitKey()\n", 34 | "cv2.destroyAllWindows()" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 4, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "#Faster method to convert to grayscale\n", 44 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg' , 0) #Read images using OpenCV\n", 45 | "cv2.imshow('Faster Grayscale', input)\n", 46 | "cv2.waitKey()\n", 47 | "cv2.destroyAllWindows()" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | 
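The 0 passed to cv2.imread in the faster-grayscale cell above is simply the numeric value of cv2.IMREAD_GRAYSCALE; the named flag reads more clearly. An equivalent one-liner, assuming the same input.jpg:

import cv2

gray = cv2.imread('input.jpg', cv2.IMREAD_GRAYSCALE)   # identical to passing 0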
"outputs": [], 55 | "source": [] 56 | } 57 | ], 58 | "metadata": { 59 | "kernelspec": { 60 | "display_name": "Python 3", 61 | "language": "python", 62 | "name": "python3" 63 | }, 64 | "language_info": { 65 | "codemirror_mode": { 66 | "name": "ipython", 67 | "version": 3 68 | }, 69 | "file_extension": ".py", 70 | "mimetype": "text/x-python", 71 | "name": "python", 72 | "nbconvert_exporter": "python", 73 | "pygments_lexer": "ipython3", 74 | "version": "3.7.1" 75 | } 76 | }, 77 | "nbformat": 4, 78 | "nbformat_minor": 2 79 | } 80 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Brightening and Darkening-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": {}, 20 | "outputs": [ 21 | { 22 | "name": "stdout", 23 | "output_type": "stream", 24 | "text": [ 25 | "[[[75 75 75]\n", 26 | " [75 75 75]\n", 27 | " [75 75 75]\n", 28 | " ...\n", 29 | " [75 75 75]\n", 30 | " [75 75 75]\n", 31 | " [75 75 75]]\n", 32 | "\n", 33 | " [[75 75 75]\n", 34 | " [75 75 75]\n", 35 | " [75 75 75]\n", 36 | " ...\n", 37 | " [75 75 75]\n", 38 | " [75 75 75]\n", 39 | " [75 75 75]]\n", 40 | "\n", 41 | " [[75 75 75]\n", 42 | " [75 75 75]\n", 43 | " [75 75 75]\n", 44 | " ...\n", 45 | " [75 75 75]\n", 46 | " [75 75 75]\n", 47 | " [75 75 75]]\n", 48 | "\n", 49 | " ...\n", 50 | "\n", 51 | " [[75 75 75]\n", 52 | " [75 75 75]\n", 53 | " [75 75 75]\n", 54 | " ...\n", 55 | " [75 75 75]\n", 56 | " [75 75 75]\n", 57 | " [75 75 75]]\n", 58 | "\n", 59 | " [[75 75 75]\n", 60 | " [75 75 75]\n", 61 | " [75 75 75]\n", 62 | " ...\n", 63 | " [75 75 75]\n", 64 | " [75 75 75]\n", 65 | " [75 75 75]]\n", 66 | "\n", 67 | " [[75 75 75]\n", 68 | " [75 75 75]\n", 69 | " [75 75 75]\n", 70 | " ...\n", 71 | " [75 75 75]\n", 72 | " [75 75 75]\n", 73 | " [75 75 75]]]\n" 74 | ] 75 | } 76 | ], 77 | "source": [ 78 | "\n", 79 | "M = np.ones(image.shape , dtype = 'uint8') * 75\n", 80 | "print(M)\n", 81 | "#Creates a numpy array with ones of same size of the image of type integer" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 3, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "#When we add to our color values of our image the brightness increases\n", 91 | "#when we subtract the brightness decreases\n", 92 | "\n", 93 | "bright_image = cv2.add(image , M)\n", 94 | "dark_image = cv2.subtract(image , M)\n", 95 | "\n", 96 | "cv2.imshow('Bright' , bright_image)\n", 97 | "cv2.imshow('Dark' , dark_image)\n", 98 | "\n", 99 | "cv2.waitKey()\n", 100 | "cv2.destroyAllWindows()" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [] 109 | } 110 | ], 111 | "metadata": { 112 | "kernelspec": { 113 | "display_name": "Python 3", 114 | "language": "python", 115 | "name": "python3" 116 | }, 117 | "language_info": { 118 | "codemirror_mode": { 119 | "name": "ipython", 120 | "version": 3 121 | }, 122 | "file_extension": ".py", 123 | "mimetype": "text/x-python", 124 | "name": "python", 125 | "nbconvert_exporter": "python", 126 | "pygments_lexer": 
"ipython3", 127 | "version": "3.7.1" 128 | } 129 | }, 130 | "nbformat": 4, 131 | "nbformat_minor": 2 132 | } 133 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Color Spaces-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "HSV - \n", 11 | "Hue - color value(0 -179)\n", 12 | "Saturation - vibrancy (0-255)\n", 13 | "Value - brightness (0 - 255)\n", 14 | "\n", 15 | "'''\n", 16 | "\n", 17 | "#Opencv uses BGR or RGB\n", 18 | "\n", 19 | "import cv2\n", 20 | "import numpy as np" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 8, 26 | "metadata": {}, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "69 78 115\n", 33 | "92 98 135\n", 34 | "(450, 800, 3)\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 40 | "\n", 41 | "B , G , R = input[0 , 0] #BGR values at the first 0,0 pixel\n", 42 | "print(B,G,R)\n", 43 | "\n", 44 | "B , G , R = input[10 , 12] #BGR values at the first 10,12 pixel\n", 45 | "print(B,G,R)\n", 46 | "\n", 47 | "print(input.shape)" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 9, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "(450, 800)\n", 60 | "88\n", 61 | "111\n" 62 | ] 63 | } 64 | ], 65 | "source": [ 66 | "#Convert images to grayscale\n", 67 | "\n", 68 | "gray_img = cv2.cvtColor(input , cv2.COLOR_BGR2GRAY)\n", 69 | "print(gray_img.shape)\n", 70 | "\n", 71 | "print(gray_img[0,0])\n", 72 | "print(gray_img[21,42])" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 11, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "#Convert images to HSV\n", 82 | "\n", 83 | "hsv_image = cv2.cvtColor(input , cv2.COLOR_BGR2HSV)\n", 84 | "\n", 85 | "cv2.imshow('HSV Image' , hsv_image)\n", 86 | "cv2.imshow('Hue Channel' , hsv_image[:,:,0]) #first two are height and width , third one is color\n", 87 | "cv2.imshow('Saturation Channel' , hsv_image[:,:,1])\n", 88 | "cv2.imshow('Value Channel' , hsv_image[:,:,2])\n", 89 | "cv2.waitKey()\n", 90 | "cv2.destroyAllWindows()" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 13, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "(450, 800, 3)\n" 103 | ] 104 | } 105 | ], 106 | "source": [ 107 | "#individual channels in RGB image\n", 108 | "\n", 109 | "print(input.shape)\n", 110 | "B,G,R = cv2.split(input)\n", 111 | "\n", 112 | "cv2.imshow('Red' , R)\n", 113 | "cv2.imshow('Greem' , G)\n", 114 | "cv2.imshow('Blue' , B)\n", 115 | "cv2.waitKey(0)\n", 116 | "cv2.destroyAllWindows()\n", 117 | "\n", 118 | "merged = cv2.merge([B, G , R]) #Merge all the three channels\n", 119 | "cv2.imshow('Merged', merged)\n", 120 | "\n", 121 | "merged_amplified = cv2.merge([B+100 , G , R])\n", 122 | "cv2.imshow('Amplified' , merged_amplified)\n", 123 | "\n", 124 | "\n", 125 | "cv2.waitKey(0)\n", 126 | "cv2.destroyAllWindows()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": 17, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | " #Zero Matrix with dimensions 
of image \n", 136 | "zeros = np.zeros(input.shape[:2] , dtype = 'uint8')\n", 137 | "\n", 138 | "cv2.imshow('Red' , cv2.merge([zeros , zeros , R]))#display image with only Red \n", 139 | "cv2.imshow('Green' , cv2.merge([zeros , G , zeros])) #display image with only green\n", 140 | "cv2.imshow('Blue' , cv2.merge([B , zeros , zeros]))\n", 141 | "cv2.waitKey()\n", 142 | "cv2.destroyAllWindows()" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [] 151 | } 152 | ], 153 | "metadata": { 154 | "kernelspec": { 155 | "display_name": "Python 3", 156 | "language": "python", 157 | "name": "python3" 158 | }, 159 | "language_info": { 160 | "codemirror_mode": { 161 | "name": "ipython", 162 | "version": 3 163 | }, 164 | "file_extension": ".py", 165 | "mimetype": "text/x-python", 166 | "name": "python", 167 | "nbconvert_exporter": "python", 168 | "pygments_lexer": "ipython3", 169 | "version": "3.7.1" 170 | } 171 | }, 172 | "nbformat": 4, 173 | "nbformat_minor": 2 174 | } 175 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Contours-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 16, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 16, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 24 | "cv2.imshow('Original', image)\n", 25 | "cv2.waitKey()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 17, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "data": { 35 | "text/plain": [ 36 | "-1" 37 | ] 38 | }, 39 | "execution_count": 17, 40 | "metadata": {}, 41 | "output_type": "execute_result" 42 | } 43 | ], 44 | "source": [ 45 | "grayscaled = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 46 | "edges = cv2.Canny(grayscaled , 30 , 200)\n", 47 | "cv2.imshow('Canny' , edges)\n", 48 | "cv2.waitKey()" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 18, 54 | "metadata": {}, 55 | "outputs": [ 56 | { 57 | "name": "stdout", 58 | "output_type": "stream", 59 | "text": [ 60 | "Number of Contours 12\n" 61 | ] 62 | } 63 | ], 64 | "source": [ 65 | "contours , heirarchy = cv2.findContours(edges.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 66 | "#findcontours() takes in image then retrieval mode and approximation mode , .copy() creates a new edge image\n", 67 | "cv2.imshow('Canny Edges with contouring' , edges)\n", 68 | "cv2.waitKey()\n", 69 | "\n", 70 | "print('Number of Contours ' + str(len(contours)))" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 19, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "-1" 82 | ] 83 | }, 84 | "execution_count": 19, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | } 88 | ], 89 | "source": [ 90 | "# -1 to draw all contours or 1 to draw first contour or 2 to draw two contours\n", 91 | "cv2.drawContours(image , contours , -1 ,(0 , 255 , 0) , 3)\n", 92 | "cv2.imshow('contours' , image)\n", 93 | "cv2.waitKey()" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 11, 99 | "metadata": {}, 100 | 
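Referring back to the Image Brightening and Darkening notebook above: cv2.add and cv2.subtract are used there because they saturate at 0 and 255, whereas plain NumPy arithmetic on uint8 arrays wraps around modulo 256. A quick illustration of the difference:

import cv2
import numpy as np

px = np.array([[250]], dtype=np.uint8)
bump = np.array([[10]], dtype=np.uint8)

print(cv2.add(px, bump))   # [[255]] - clipped (saturated) at 255
print(px + bump)           # [[4]]   - uint8 wrap-around: (250 + 10) % 256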
"outputs": [ 101 | { 102 | "data": { 103 | "text/plain": [ 104 | "'\\nCHAIN_APPROX_NONE returns boundary points\\nCHAIN_APPROX_SIMPLE stores ending points\\n\\n\\nRetrieval modes talks about heirarchy\\nRETR_LIST - all contours\\nRETR_EXTERNAL - external\\nRETR_COMP - Retrieves in 2 Level Hierarchy\\nRETR_TREE - Retrieves Full Hierarchy\\n\\n'" 105 | ] 106 | }, 107 | "execution_count": 11, 108 | "metadata": {}, 109 | "output_type": "execute_result" 110 | } 111 | ], 112 | "source": [ 113 | "'''\n", 114 | "CHAIN_APPROX_NONE returns boundary points\n", 115 | "CHAIN_APPROX_SIMPLE stores ending points\n", 116 | "\n", 117 | "\n", 118 | "Retrieval modes talks about heirarchy\n", 119 | "RETR_LIST - all contours\n", 120 | "RETR_EXTERNAL - external\n", 121 | "RETR_COMP - Retrieves in 2 Level Hierarchy\n", 122 | "RETR_TREE - Retrieves Full Hierarchy\n", 123 | "\n", 124 | "'''" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [] 133 | } 134 | ], 135 | "metadata": { 136 | "kernelspec": { 137 | "display_name": "Python 3", 138 | "language": "python", 139 | "name": "python3" 140 | }, 141 | "language_info": { 142 | "codemirror_mode": { 143 | "name": "ipython", 144 | "version": 3 145 | }, 146 | "file_extension": ".py", 147 | "mimetype": "text/x-python", 148 | "name": "python", 149 | "nbconvert_exporter": "python", 150 | "pygments_lexer": "ipython3", 151 | "version": "3.7.1" 152 | } 153 | }, 154 | "nbformat": 4, 155 | "nbformat_minor": 2 156 | } 157 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Convolution and Blurring-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "#Kernels are used to normalize image and to specify a matrix to apply convolution or blurring\n", 22 | "\n", 23 | "kernel_3x3 = np.ones((3,3) , np.float32) / 9\n", 24 | "\n", 25 | "#to convolve image using opencv \n", 26 | "blurred = cv2.filter2D(image , -1 , kernel_3x3)\n", 27 | "\n", 28 | "\n", 29 | "kernel_7x7 = np.ones((7,7) , np.float32) / 49\n", 30 | "\n", 31 | "blurred1 = cv2.filter2D(image , -1 , kernel_7x7)\n", 32 | "\n", 33 | "cv2.imshow('Original', image)\n", 34 | "cv2.imshow('Blurred 1' , blurred)\n", 35 | "cv2.imshow('Blurred 2' , blurred1)\n", 36 | "cv2.waitKey()\n", 37 | "cv2.destroyAllWindows()" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 4, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "#Other blurring techniques\n", 47 | "\n", 48 | "#averaging by convolving the image with a box filter\n", 49 | "#average values over a specified windows\n", 50 | "blur = cv2.blur(image , (3,3)) #averaging with 3x3 box filter\n", 51 | "\n", 52 | "#gaussian kernel\n", 53 | "#similar to a normal 1's kernel , but has more emphasis or weighting around the center\n", 54 | "gaussian = cv2.GaussianBlur(image , (7,7) , 0)\n", 55 | "\n", 56 | "#medianBlur\n", 57 | "#finding median value of each kernal , paint effect\n", 58 | "median = cv2.medianBlur(image , 5) #uses the median 
values of a box filter instead of averaging\n", 59 | "\n", 60 | "\n", 61 | "#bilateral effective for noise removal and to keep edges sharp\n", 62 | "biateral = cv2.bilateralFilter(image , 9,75,75)\n", 63 | "\n", 64 | "cv2.imshow('Averaging' ,blur)\n", 65 | "cv2.imshow('Gaussian', gaussian)\n", 66 | "cv2.imshow('Median' , median)\n", 67 | "cv2.imshow('Bilateral' , biateral)\n", 68 | "\n", 69 | "cv2.waitKey()\n", 70 | "cv2.destroyAllWindows()" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 7, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "#Image Denoising\n", 80 | "\n", 81 | "denoising = cv2.fastNlMeansDenoisingColored(image , None , 6 , 6, 7, 21)\n", 82 | "\n", 83 | "cv2.imshow('Denoising' ,denoising)\n", 84 | "cv2.waitKey()\n", 85 | "cv2.destroyAllWindows()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [] 94 | } 95 | ], 96 | "metadata": { 97 | "kernelspec": { 98 | "display_name": "Python 3", 99 | "language": "python", 100 | "name": "python3" 101 | }, 102 | "language_info": { 103 | "codemirror_mode": { 104 | "name": "ipython", 105 | "version": 3 106 | }, 107 | "file_extension": ".py", 108 | "mimetype": "text/x-python", 109 | "name": "python", 110 | "nbconvert_exporter": "python", 111 | "pygments_lexer": "ipython3", 112 | "version": "3.7.1" 113 | } 114 | }, 115 | "nbformat": 4, 116 | "nbformat_minor": 2 117 | } 118 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Cropping-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2 \n", 10 | "import numpy as np\n", 11 | "\n", 12 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 13 | "\n", 14 | "height , width = image.shape[:2]" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "#for cropping mention starting coordinates and ending coordinates and use indexing to crop out the image\n", 24 | "\n", 25 | "\n", 26 | "#starting coordinates\n", 27 | "start_row , start_col = int(height *.25) , int(width * .25) #25 % of height and width\n", 28 | "\n", 29 | "end_row , end_col = int(height * .75) , int(width * .75) #75%of height and width\n", 30 | "\n", 31 | "cropped_image = image[start_row:end_row , start_col:end_col]\n", 32 | "\n", 33 | "cv2.imshow('Original Image', image)\n", 34 | "cv2.imshow('Cropped Image ' , cropped_image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [] 45 | } 46 | ], 47 | "metadata": { 48 | "kernelspec": { 49 | "display_name": "Python 3", 50 | "language": "python", 51 | "name": "python3" 52 | }, 53 | "language_info": { 54 | "codemirror_mode": { 55 | "name": "ipython", 56 | "version": 3 57 | }, 58 | "file_extension": ".py", 59 | "mimetype": "text/x-python", 60 | "name": "python", 61 | "nbconvert_exporter": "python", 62 | "pygments_lexer": "ipython3", 63 | "version": "3.7.1" 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Dilation and 
Erosion-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 12, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#erosion removes pixels at the boundaries\n", 10 | "#dilation adds pixels to the boundaries\n", 11 | "import cv2\n", 12 | "import numpy as np\n", 13 | "\n", 14 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\inpu1.jpg') #Read images using OpenCV" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 13, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "kernel = np.ones((5,5) , np.uint8)\n", 24 | "cv2.imshow(\"Original\" , image)\n", 25 | "\n", 26 | "erosion = cv2.erode(image , kernel , iterations = 1)\n", 27 | "cv2.imshow(\"Erosion\" , erosion)\n", 28 | "\n", 29 | "dilation = cv2.dilate(image , kernel , iterations = 1)\n", 30 | "cv2.imshow('Dilation' , dilation)\n", 31 | "\n", 32 | "\n", 33 | "#Opening - Good for removing noise - Erosion followed by dilation\n", 34 | "opening = cv2.morphologyEx(image , cv2.MORPH_OPEN , kernel)\n", 35 | "cv2.imshow('Opening', opening)\n", 36 | "\n", 37 | "\n", 38 | "#Closing - Also good for removing noise , Dilation followed by erosion\n", 39 | "closing = cv2.morphologyEx(image , cv2.MORPH_CLOSE , kernel)\n", 40 | "cv2.imshow(\"Closing\" , closing)\n", 41 | "\n", 42 | "\n", 43 | "cv2.waitKey()\n", 44 | "cv2.destroyAllWindows()" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [] 53 | } 54 | ], 55 | "metadata": { 56 | "kernelspec": { 57 | "display_name": "Python 3", 58 | "language": "python", 59 | "name": "python3" 60 | }, 61 | "language_info": { 62 | "codemirror_mode": { 63 | "name": "ipython", 64 | "version": 3 65 | }, 66 | "file_extension": ".py", 67 | "mimetype": "text/x-python", 68 | "name": "python", 69 | "nbconvert_exporter": "python", 70 | "pygments_lexer": "ipython3", 71 | "version": "3.7.1" 72 | } 73 | }, 74 | "nbformat": 4, 75 | "nbformat_minor": 2 76 | } 77 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Edge Detection-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Edges are discontinuities in images\n", 11 | "\n", 12 | "Edge Detection Algorithms\n", 13 | "Sobel - Emphasizes vertical and horizontal edges\n", 14 | "Canny - Lowest Error Rate\n", 15 | "Laplacian - Gets orientations\n", 16 | "\n", 17 | "\n", 18 | "Canny\n", 19 | "1) Applies Gaussian Blurring\n", 20 | "2) Finds intensity gradient\n", 21 | "3) Removes pixels that are not edges - Non-maximum suppression\n", 22 | "4) If a pixel is within the upper and lower thresholds, consider it an edge - Hysteresis applies the thresholds\n", 23 | "\n", 24 | "'''\n", 25 | "\n", 26 | "import cv2\n", 27 | "import numpy as np" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 13, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "ename": "error", 37 | "evalue": "OpenCV(4.0.0) c:\\projects\\opencv-python\\opencv\\modules\\imgproc\\src\\color.hpp:261: error: (-2:Unspecified error) in function '__cdecl cv::CvtHelper,struct cv::Set<3,4,-1>,struct cv::Set<0,2,5>,2>::CvtHelper(const class cv::_InputArray &,const class cv::_OutputArray &,int)'\n> 
Unsupported depth of input image:\n> 'VDepth::contains(depth)'\n> where\n> 'depth' is 6 (CV_64F)\n", 38 | "output_type": "error", 39 | "traceback": [ 40 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 41 | "\u001b[1;31merror\u001b[0m Traceback (most recent call last)", 42 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 19\u001b[0m \u001b[0msobel_NOT\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbitwise_not\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msobel_x\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 20\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 21\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Sobel OR Image'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0msobel_OR\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 22\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 23\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Sobel And Image'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0msobel_AND\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 43 | "\u001b[1;31merror\u001b[0m: OpenCV(4.0.0) c:\\projects\\opencv-python\\opencv\\modules\\imgproc\\src\\color.hpp:261: error: (-2:Unspecified error) in function '__cdecl cv::CvtHelper,struct cv::Set<3,4,-1>,struct cv::Set<0,2,5>,2>::CvtHelper(const class cv::_InputArray &,const class cv::_OutputArray &,int)'\n> Unsupported depth of input image:\n> 'VDepth::contains(depth)'\n> where\n> 'depth' is 6 (CV_64F)\n" 44 | ] 45 | } 46 | ], 47 | "source": [ 48 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg',0)\n", 49 | "\n", 50 | "height , width = image.shape\n", 51 | "\n", 52 | "#Sobel Filter\n", 53 | "#Extract Sobel Edges\n", 54 | "sobel_x = cv2.Sobel(image , cv2.CV_64F , 0 , 1 , ksize=5) #ksize = kernel size to get different strength\n", 55 | "sobel_y = cv2.Sobel(image , cv2.CV_64F , 1 , 0 , ksize=5)\n", 56 | "\n", 57 | "cv2.imshow('Original Image' , image)\n", 58 | "cv2.waitKey(0)\n", 59 | "#cv2.imshow('Sobel X Image' , sobel_x)\n", 60 | "#cv2.waitKey(0)\n", 61 | "#cv2.imshow('Sobel Y Image' , sobel_y)\n", 62 | "#cv2.waitKey(0)\n", 63 | "\n", 64 | "sobel_OR = cv2.bitwise_or(sobel_x ,sobel_y)\n", 65 | "sobel_AND = cv2.bitwise_and(sobel_x , sobel_y)\n", 66 | "sobel_NOT = cv2.bitwise_not(sobel_x)\n", 67 | "\n", 68 | "cv2.imshow('Sobel OR Image' , sobel_OR)\n", 69 | "cv2.waitKey()\n", 70 | "cv2.imshow('Sobel And Image' , sobel_AND)\n", 71 | "cv2.waitKey()\n", 72 | "cv2.imshow('Sobel Not Image' , sobel_NOT)\n", 73 | "cv2.waitKey()\n", 74 | "\n", 75 | "\n", 76 | "#Laplacian\n", 77 | "laplacian = cv2.Laplacian(image , cv2.CV_64F)\n", 78 | "cv2.imshow('Laplacian Image' , laplacian)\n", 79 | "cv2.waitKey()\n", 80 | "\n", 81 | "canny = cv2.Canny(image , 20 , 170)\n", 82 | "cv2.imshow('Canny Image' , canny)\n", 83 | "\n", 84 | "cv2.waitKey()\n", 85 | "cv2.destroyAllWindows()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | 
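The 'Unsupported depth of input image' error above comes from handing the CV_64F (float64) Sobel results straight to cv2.imshow and the bitwise operators; converting the gradients back to 8-bit first avoids it. A sketch, assuming the same grayscale input.jpg:

import cv2

image = cv2.imread('input.jpg', 0)                      # assumed relative path, grayscale

sobel_x = cv2.Sobel(image, cv2.CV_64F, 1, 0, ksize=5)   # horizontal gradient (d/dx)
sobel_y = cv2.Sobel(image, cv2.CV_64F, 0, 1, ksize=5)   # vertical gradient (d/dy)

sobel_x8 = cv2.convertScaleAbs(sobel_x)                 # |gradient| scaled into uint8
sobel_y8 = cv2.convertScaleAbs(sobel_y)

sobel_or = cv2.bitwise_or(sobel_x8, sobel_y8)           # 8-bit images, so imshow accepts the result
cv2.imshow('Sobel OR', sobel_or)
cv2.waitKey()
cv2.destroyAllWindows()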
"metadata": {}, 99 | "outputs": [], 100 | "source": [] 101 | } 102 | ], 103 | "metadata": { 104 | "kernelspec": { 105 | "display_name": "Python 3", 106 | "language": "python", 107 | "name": "python3" 108 | }, 109 | "language_info": { 110 | "codemirror_mode": { 111 | "name": "ipython", 112 | "version": 3 113 | }, 114 | "file_extension": ".py", 115 | "mimetype": "text/x-python", 116 | "name": "python", 117 | "nbconvert_exporter": "python", 118 | "pygments_lexer": "ipython3", 119 | "version": "3.7.1" 120 | } 121 | }, 122 | "nbformat": 4, 123 | "nbformat_minor": 2 124 | } 125 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Masking-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import cv2\n", 11 | "\n", 12 | "\n", 13 | "#Making a Square\n", 14 | "\n", 15 | "square = np.zeros((300,300) , np.uint8) #array of zeros of height and width 300\n", 16 | "cv2.rectangle(square , (50,50) , (250,250) , 255 , -2)\n", 17 | "\n", 18 | "#Making a ellipse\n", 19 | "ellipse = np.zeros((300,300) , np.uint8)\n", 20 | "cv2.ellipse(ellipse , (150,150) , (150,150) , 30 , 0 , 180 , 255 , -1)\n", 21 | "\n", 22 | "cv2.imshow('Square' , square)\n", 23 | "cv2.imshow('Ellipse' , ellipse)\n", 24 | "cv2.waitKey()\n", 25 | "cv2.destroyAllWindows()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 5, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "#experimenting with some bitwise operations\n", 35 | "#only works on black and white images\n", 36 | "\n", 37 | "And = cv2.bitwise_and(square , ellipse) #intersection of images \n", 38 | "cv2.imshow('AND' , And)\n", 39 | "cv2.waitKey()\n", 40 | "\n", 41 | "Or = cv2.bitwise_or(square , ellipse) #combination of images\n", 42 | "cv2.imshow('OR' , Or)\n", 43 | "cv2.waitKey()\n", 44 | "\n", 45 | "Xor = cv2.bitwise_xor(square , ellipse) #combination of and and or\n", 46 | "cv2.imshow('XOR' , Xor)\n", 47 | "cv2.waitKey()\n", 48 | "\n", 49 | "\n", 50 | "Not = cv2.bitwise_not(square) #changes white to black of square\n", 51 | "cv2.imshow('NOT' , Not)\n", 52 | "cv2.waitKey()\n", 53 | "\n", 54 | "cv2.destroyAllWindows()" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [] 63 | } 64 | ], 65 | "metadata": { 66 | "kernelspec": { 67 | "display_name": "Python 3", 68 | "language": "python", 69 | "name": "python3" 70 | }, 71 | "language_info": { 72 | "codemirror_mode": { 73 | "name": "ipython", 74 | "version": 3 75 | }, 76 | "file_extension": ".py", 77 | "mimetype": "text/x-python", 78 | "name": "python", 79 | "nbconvert_exporter": "python", 80 | "pygments_lexer": "ipython3", 81 | "version": "3.7.1" 82 | } 83 | }, 84 | "nbformat": 4, 85 | "nbformat_minor": 2 86 | } 87 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Pyramids or Image Scaling-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Scaling images\n", 10 | "\n", 11 | "import cv2\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 
14 | "\n", 15 | "smaller = cv2.pyrDown(image)\n", 16 | "larger = cv2.pyrUp(image)\n", 17 | "scale_back = cv2.pyrUp(smaller)\n", 18 | "\n", 19 | "cv2.imshow('Smaller' , smaller)\n", 20 | "cv2.imshow('Larger' , larger)\n", 21 | "cv2.imshow('Scale Back' , scale_back)\n", 22 | "cv2.waitKey(0)\n", 23 | "cv2.destroyAllWindows()\n" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [] 32 | } 33 | ], 34 | "metadata": { 35 | "kernelspec": { 36 | "display_name": "Python 3", 37 | "language": "python", 38 | "name": "python3" 39 | }, 40 | "language_info": { 41 | "codemirror_mode": { 42 | "name": "ipython", 43 | "version": 3 44 | }, 45 | "file_extension": ".py", 46 | "mimetype": "text/x-python", 47 | "name": "python", 48 | "nbconvert_exporter": "python", 49 | "pygments_lexer": "ipython3", 50 | "version": "3.7.1" 51 | } 52 | }, 53 | "nbformat": 4, 54 | "nbformat_minor": 2 55 | } 56 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Resizing or Scaling-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [], 3 | "metadata": {}, 4 | "nbformat": 4, 5 | "nbformat_minor": 2 6 | } 7 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Rotations-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Rotation Matrix \n", 10 | "# M = [cosx -sinx\n", 11 | "# sinx cosx]\n", 12 | "# x - the angle of rotation\n", 13 | "\n", 14 | "import numpy as np\n", 15 | "import cv2" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 2, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 25 | "\n", 26 | "height , width = image.shape[:2]\n", 27 | "\n", 28 | "rotation_matrix = cv2.getRotationMatrix2D((width/2 , height/2) , 90 , 1)\n", 29 | "#rotation matrix parameters = ( center of image , rotation angle , scaling factor)\n", 30 | "\n", 31 | "rotated_image = cv2.warpAffine(image , rotation_matrix , (width , height))\n", 32 | "\n", 33 | "cv2.imshow('Rotated Image' , rotated_image)\n", 34 | "cv2.waitKey()\n", 35 | "cv2.destroyAllWindows()" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 3, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "#Alternated method to rotate image to avoid black spaces around image as shown previously\n", 45 | "\n", 46 | "rotated_image = cv2.transpose(image)\n", 47 | "\n", 48 | "cv2.imshow('Rotated Image' , rotated_image)\n", 49 | "cv2.waitKey(0)\n", 50 | "cv2.destroyAllWindows()" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [] 59 | } 60 | ], 61 | "metadata": { 62 | "kernelspec": { 63 | "display_name": "Python 3", 64 | "language": "python", 65 | "name": "python3" 66 | }, 67 | "language_info": { 68 | "codemirror_mode": { 69 | "name": "ipython", 70 | "version": 3 71 | }, 72 | "file_extension": ".py", 73 | "mimetype": "text/x-python", 74 | "name": "python", 75 | "nbconvert_exporter": "python", 76 | "pygments_lexer": "ipython3", 77 | "version": "3.7.1" 78 | } 79 | }, 80 | "nbformat": 4, 81 | "nbformat_minor": 2 82 
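The Image Resizing or Scaling checkpoint above is empty, so for completeness here is a minimal cv2.resize sketch (same input.jpg assumed); pyrDown/pyrUp halve or double the dimensions, whereas resize accepts an arbitrary scale factor or explicit target size:

import cv2

image = cv2.imread('input.jpg')   # assumed relative path

half = cv2.resize(image, None, fx=0.5, fy=0.5, interpolation=cv2.INTER_AREA)   # shrink by a scale factor
fixed = cv2.resize(image, (640, 360), interpolation=cv2.INTER_LINEAR)          # resize to an explicit (width, height)

cv2.imshow('Half size', half)
cv2.imshow('Fixed size', fixed)
cv2.waitKey()
cv2.destroyAllWindows()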
| } 83 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Sharpening-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Sharpening Kernel = [ -1 -1 -1\n", 11 | " -1 9 -1 \n", 12 | " -1 -1 -1]\n", 13 | "\n", 14 | "'''\n", 15 | "\n", 16 | "import cv2\n", 17 | "import numpy as np" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 27 | "\n", 28 | "sharpening_kernel = np.array([[-1 , -1 , -1],\n", 29 | " [-1 , 9 , -1],\n", 30 | " [-1 , -1 , -1]])\n", 31 | "\n", 32 | "sharpened_image = cv2.filter2D(image , -1 , sharpening_kernel)\n", 33 | "\n", 34 | "cv2.imshow('Sharpened Image' , sharpened_image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [] 45 | } 46 | ], 47 | "metadata": { 48 | "kernelspec": { 49 | "display_name": "Python 3", 50 | "language": "python", 51 | "name": "python3" 52 | }, 53 | "language_info": { 54 | "codemirror_mode": { 55 | "name": "ipython", 56 | "version": 3 57 | }, 58 | "file_extension": ".py", 59 | "mimetype": "text/x-python", 60 | "name": "python", 61 | "nbconvert_exporter": "python", 62 | "pygments_lexer": "ipython3", 63 | "version": "3.7.1" 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Thresholding-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Thresholding only works with grayscale images\n", 11 | "cv2.threshold(image , Threshold Value , Max Value , Threshold Type)\n", 12 | "cv2.THRESH_BINARY - Most common thresholding\n", 13 | "cv2.THRESH_BINARY_INV - Another common\n", 14 | "'''\n", 15 | "\n", 16 | "import cv2\n", 17 | "import numpy as np\n", 18 | "\n", 19 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg' , 0)" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 4, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "cv2.imshow('Gray Scaled', image)\n", 29 | "cv2.waitKey()\n", 30 | "cv2.destroyAllWindows()" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 5, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "#Values below 127 goes to 0 , above goes to 255\n", 40 | "ret, thresh1 = cv2.threshold(image , 127 , 255 , cv2.THRESH_BINARY)\n", 41 | "cv2.imshow('Threshold Binary' , thresh1)\n", 42 | "\n", 43 | "# input parameters image , threshold value , max value and type\n", 44 | "\n", 45 | "#Values below 127 got to 255 above 127 goes to 0 \n", 46 | "ret, thresh2 = cv2.threshold(image , 127 , 255 , cv2.THRESH_BINARY_INV)\n", 47 | "cv2.imshow('Threshold Inverse Binary' , thresh2)\n", 48 | "\n", 49 | "#values above 127 goes to same value 127 has.\n", 50 | "ret, thresh3 = cv2.threshold(image , 127 , 255 , cv2.THRESH_TRUNC)\n", 51 | 
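As an alternative to the fixed 3x3 kernel in the Image Sharpening notebook above, unsharp masking sharpens by blending the original with a blurred copy; the weights below are illustrative, not canonical. A sketch assuming the same input.jpg:

import cv2

image = cv2.imread('input.jpg')                           # assumed relative path
blurred = cv2.GaussianBlur(image, (0, 0), sigmaX=3)       # kernel size derived automatically from sigma
unsharp = cv2.addWeighted(image, 1.5, blurred, -0.5, 0)   # 1.5*original - 0.5*blur keeps overall brightness

cv2.imshow('Unsharp mask', unsharp)
cv2.waitKey()
cv2.destroyAllWindows()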
"cv2.imshow('Threshold Truncated' , thresh3)\n", 52 | "\n", 53 | "#values below 127 goes to zero , rest remain unchanged\n", 54 | "ret, thresh4 = cv2.threshold(image , 127 , 255 , cv2.THRESH_TOZERO)\n", 55 | "cv2.imshow('Threshold ToZero' , thresh4)\n", 56 | "\n", 57 | "#values below 127 remains unchanged rest goes to zero\n", 58 | "ret, thresh5 = cv2.threshold(image , 127 , 255 , cv2.THRESH_TOZERO_INV)\n", 59 | "cv2.imshow('Threshold ToZero Inverse' , thresh5)\n", 60 | "\n", 61 | "\n", 62 | "cv2.waitKey()\n", 63 | "cv2.destroyAllWindows()" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 9, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "#Adaptive Thresholding\n", 73 | "\n", 74 | "#adaptivethreshold parameters are image , max value , adaptive type , threshold type , block size , constant that is subratcted from mean\n", 75 | "thresh6 = cv2.adaptiveThreshold(image , 127 , cv2.ADAPTIVE_THRESH_MEAN_C , cv2.THRESH_BINARY , 3 , 5)\n", 76 | "cv2.imshow('Adaptive Mean Threshold' , thresh6) #Based on mean of neighbourhood of pixels\n", 77 | "\n", 78 | "_ , thresh7 = cv2.threshold(image , 0 , 255 , cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n", 79 | "cv2.imshow('Otsu Thresholding' , thresh7)\n", 80 | "\n", 81 | "#Otsu looks into histogram and finds peaks and finds an optimal values to seperate peaks\n", 82 | "#Otsu Thresholding with Gaussian Filtering\n", 83 | "blur = cv2.GaussianBlur(image , (5 , 53) , 0)\n", 84 | "_ , thresh8 = cv2.threshold(image , 0 , 255 , cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n", 85 | "cv2.imshow('Otsu Gaussian Thresholding' , thresh8)\n", 86 | "\n", 87 | "cv2.waitKey()\n", 88 | "cv2.destroyAllWindows()\n" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": {}, 95 | "outputs": [], 96 | "source": [] 97 | } 98 | ], 99 | "metadata": { 100 | "kernelspec": { 101 | "display_name": "Python 3", 102 | "language": "python", 103 | "name": "python3" 104 | }, 105 | "language_info": { 106 | "codemirror_mode": { 107 | "name": "ipython", 108 | "version": 3 109 | }, 110 | "file_extension": ".py", 111 | "mimetype": "text/x-python", 112 | "name": "python", 113 | "nbconvert_exporter": "python", 114 | "pygments_lexer": "ipython3", 115 | "version": "3.7.1" 116 | } 117 | }, 118 | "nbformat": 4, 119 | "nbformat_minor": 2 120 | } 121 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Image Translations-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "[[ 1. 0. 200. ]\n", 13 | " [ 0. 1. 
112.5]]\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "#Translation Matrix = [ 1 0 x\n", 19 | "# 0 1 y]\n", 20 | "# x distance along x axis , y along y axis\n", 21 | "\n", 22 | "import cv2\n", 23 | "import numpy as np \n", 24 | "\n", 25 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 26 | "\n", 27 | "height , width = image.shape[:2] #Store height and width of image\n", 28 | "\n", 29 | "quarter_height , quarter_width = height/4 , width/4\n", 30 | "\n", 31 | "T = np.float32([[1 , 0 , quarter_width], [0 , 1, quarter_height ]])\n", 32 | "\n", 33 | "image_translated = cv2.warpAffine(image , T , (width , height))\n", 34 | "cv2.imshow('translated image' , image_translated)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()\n", 37 | "\n", 38 | "print(T)" 39 | ] 40 | }, 41 | { 42 | "cell_type": "code", 43 | "execution_count": null, 44 | "metadata": {}, 45 | "outputs": [], 46 | "source": [] 47 | } 48 | ], 49 | "metadata": { 50 | "kernelspec": { 51 | "display_name": "Python 3", 52 | "language": "python", 53 | "name": "python3" 54 | }, 55 | "language_info": { 56 | "codemirror_mode": { 57 | "name": "ipython", 58 | "version": 3 59 | }, 60 | "file_extension": ".py", 61 | "mimetype": "text/x-python", 62 | "name": "python", 63 | "nbconvert_exporter": "python", 64 | "pygments_lexer": "ipython3", 65 | "version": "3.7.1" 66 | } 67 | }, 68 | "nbformat": 4, 69 | "nbformat_minor": 2 70 | } 71 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Non-Affine Image Transformations-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "import matplotlib.pyplot as plt" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 3, 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/plain": [ 22 | "-1" 23 | ] 24 | }, 25 | "execution_count": 3, 26 | "metadata": {}, 27 | "output_type": "execute_result" 28 | } 29 | ], 30 | "source": [ 31 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 32 | "\n", 33 | "cv2.imshow('Original' , image)\n", 34 | "cv2.waitKey()\n", 35 | "\n", 36 | "#Coordinated of the 4 corners of the original image that need to be skewed and aligned\n", 37 | "points_A = np.float32([[320,14] , [70,213] , [53,134] , [24,42]])\n", 38 | "\n", 39 | "points_B = np.float32([[0,0] , [23,145] , [0,504], [56,425]])\n", 40 | "\n", 41 | "M = cv2.getPerspectiveTransform(points_A , points_B) #Transform Matrix\n", 42 | "\n", 43 | "warpped = cv2.warpPerspective(image , M , (420 , 504)) #Final size of image\n", 44 | "\n", 45 | "cv2.imshow(\"Warpped Image\" , warpped)\n", 46 | "cv2.waitKey()" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [] 55 | } 56 | ], 57 | "metadata": { 58 | "kernelspec": { 59 | "display_name": "Python 3", 60 | "language": "python", 61 | "name": "python3" 62 | }, 63 | "language_info": { 64 | "codemirror_mode": { 65 | "name": "ipython", 66 | "version": 3 67 | }, 68 | "file_extension": ".py", 69 | "mimetype": "text/x-python", 70 | "name": "python", 71 | "nbconvert_exporter": "python", 72 | "pygments_lexer": "ipython3", 73 | "version": "3.7.1" 74 | } 75 | }, 76 | "nbformat": 4, 77 | 
"nbformat_minor": 2 78 | } 79 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Read Write and Display Images using OpenCV-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Reading and Displaying Images" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "#import libaries\n", 17 | "import cv2\n", 18 | "import numpy as np" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 11, 24 | "metadata": {}, 25 | "outputs": [], 26 | "source": [ 27 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 28 | "cv2.imshow('Hello World' , input) #imshow() - first parameter title of image , followed by image source\n", 29 | "cv2.waitKey() #Necessary when showing images using openCV , placing numbers in the parameter allows a delay for the image to be kept open\n", 30 | "cv2.destroyAllWindows() #closes all open windows" 31 | ] 32 | }, 33 | { 34 | "cell_type": "markdown", 35 | "metadata": {}, 36 | "source": [ 37 | "# Storing Images" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 13, 43 | "metadata": {}, 44 | "outputs": [ 45 | { 46 | "name": "stdout", 47 | "output_type": "stream", 48 | "text": [ 49 | "(450, 800, 3)\n" 50 | ] 51 | } 52 | ], 53 | "source": [ 54 | "print(input.shape)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 14, 60 | "metadata": {}, 61 | "outputs": [ 62 | { 63 | "name": "stdout", 64 | "output_type": "stream", 65 | "text": [ 66 | "Height of Image 450\n", 67 | "Width of Image 800\n" 68 | ] 69 | } 70 | ], 71 | "source": [ 72 | "print('Height of Image' , int(input.shape[0]))\n", 73 | "print('Width of Image' , int(input.shape[1]))" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 15, 79 | "metadata": {}, 80 | "outputs": [ 81 | { 82 | "data": { 83 | "text/plain": [ 84 | "True" 85 | ] 86 | }, 87 | "execution_count": 15, 88 | "metadata": {}, 89 | "output_type": "execute_result" 90 | } 91 | ], 92 | "source": [ 93 | "#Writing Images\n", 94 | "cv2.imwrite('inpu1.jpg' , input) #First parameter of name and extension followed by image to save" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "metadata": {}, 101 | "outputs": [], 102 | "source": [] 103 | } 104 | ], 105 | "metadata": { 106 | "kernelspec": { 107 | "display_name": "Python 3", 108 | "language": "python", 109 | "name": "python3" 110 | }, 111 | "language_info": { 112 | "codemirror_mode": { 113 | "name": "ipython", 114 | "version": 3 115 | }, 116 | "file_extension": ".py", 117 | "mimetype": "text/x-python", 118 | "name": "python", 119 | "nbconvert_exporter": "python", 120 | "pygments_lexer": "ipython3", 121 | "version": "3.7.1" 122 | } 123 | }, 124 | "nbformat": 4, 125 | "nbformat_minor": 2 126 | } 127 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Sorting Contours-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 2, 15 | "metadata": {}, 16 | 
"output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 24 | "cv2.imshow('Original Image', image)\n", 25 | "cv2.waitKey()\n", 26 | "\n", 27 | "blank_image = np.zeros((image.shape[0] , image.shape[1] , 3))\n", 28 | "\n", 29 | "original_image = image\n", 30 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 31 | "canny = cv2.Canny(gray , 30 ,200)\n", 32 | "\n", 33 | "contours , hierarchy = cv2.findContours(canny.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 34 | "cv2.imshow('Contours' , canny)\n", 35 | "cv2.waitKey()\n", 36 | "\n", 37 | "cv2.drawContours(image , contours , -1 , (0,255,0) , 3)\n", 38 | "cv2.imshow('All Contours' , image)\n", 39 | "cv2.waitKey()" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 7, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stdout", 49 | "output_type": "stream", 50 | "text": [ 51 | "Before Sorting\n", 52 | "[1407.0, 1453.0, 1595.0, 1597.0, 952.0, 996.0, 1658.5, 1696.5, 1158.0, 1213.0, 22258.0, 22261.0]\n", 53 | "After sorting\n", 54 | "[22261.0, 22258.0, 1696.5, 1658.5, 1597.0, 1595.0, 1453.0, 1407.0, 1213.0, 1158.0, 996.0, 952.0]\n" 55 | ] 56 | } 57 | ], 58 | "source": [ 59 | "#sorting contours by area\n", 60 | "\n", 61 | "def get_contour_areas(contours):\n", 62 | " all_areas = []\n", 63 | " for cnt in contours:\n", 64 | " area = cv2.contourArea(cnt) #area of contours\n", 65 | " all_areas.append(area)\n", 66 | " return all_areas\n", 67 | "\n", 68 | "print(\"Before Sorting\")\n", 69 | "print(get_contour_areas(contours))\n", 70 | "\n", 71 | "sorted_contours = sorted(contours , key = cv2.contourArea , reverse = True)\n", 72 | "#reverse = true means big to small , key is the function that we are passing the input to\n", 73 | "\n", 74 | "print(\"After sorting\")\n", 75 | "print(get_contour_areas(sorted_contours))\n", 76 | "\n", 77 | "for c in sorted_contours:\n", 78 | " cv2.drawContours(original_image , [c] , -1 , (255 , 0 , 0) , 3)\n", 79 | " cv2.waitKey()\n", 80 | " cv2.imshow('Contour sorted' , original_image)\n", 81 | " \n", 82 | "cv2.waitKey()\n", 83 | "cv2.destroyAllWindows()" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 8, 89 | "metadata": {}, 90 | "outputs": [ 91 | { 92 | "data": { 93 | "text/plain": [ 94 | "-1" 95 | ] 96 | }, 97 | "execution_count": 8, 98 | "metadata": {}, 99 | "output_type": "execute_result" 100 | } 101 | ], 102 | "source": [ 103 | "#spatial mode sorting\n", 104 | "\n", 105 | "def x_cord_contour(contours):\n", 106 | " #Returning X coordinate for the contour centroid\n", 107 | " if cv2.contourArea(contours) > 10:\n", 108 | " M = cv2.moments(contours)\n", 109 | " return (int(M['m10']/M['m00']))\n", 110 | " \n", 111 | "def label_contour_center(image , c):\n", 112 | " #place a red circle on center\n", 113 | " M = cv2.moments(c) #to get center\n", 114 | " cx = int(M['m10'] / M['m00'])\n", 115 | " cy = int(M['m01'] / M['m00'])\n", 116 | " \n", 117 | " cv2.circle(image , (cx , cy) , 10 , (0 , 0 , 255) , -1)\n", 118 | " return image\n", 119 | "\n", 120 | "\n", 121 | "for(i,c ) in enumerate(contours):\n", 122 | " orig = label_contour_center(image , c)\n", 123 | " \n", 124 | "cv2.imshow('contours center' , image)\n", 125 | "cv2.waitKey()" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [] 134 | } 135 
| ], 136 | "metadata": { 137 | "kernelspec": { 138 | "display_name": "Python 3", 139 | "language": "python", 140 | "name": "python3" 141 | }, 142 | "language_info": { 143 | "codemirror_mode": { 144 | "name": "ipython", 145 | "version": 3 146 | }, 147 | "file_extension": ".py", 148 | "mimetype": "text/x-python", 149 | "name": "python", 150 | "nbconvert_exporter": "python", 151 | "pygments_lexer": "ipython3", 152 | "version": "3.7.1" 153 | } 154 | }, 155 | "nbformat": 4, 156 | "nbformat_minor": 2 157 | } 158 | -------------------------------------------------------------------------------- /Approximating Contours.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import cv2\n", 22 | "\n", 23 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\input3.png')\n", 24 | "\n", 25 | "original_image = image.copy()\n", 26 | "\n", 27 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 28 | "ret , thresh = cv2.threshold(gray , 127 , 255 , cv2.THRESH_BINARY_INV)\n", 29 | "\n", 30 | "contours , heirarchy = cv2.findContours(thresh.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 31 | "\n", 32 | "for c in contours: #Drawing rectangle boxes\n", 33 | " x , y , w , h = cv2.boundingRect(c)\n", 34 | " cv2.rectangle(original_image , (x,y) , (x+w , y+h) , (0,0,255) , 2)\n", 35 | " cv2.imshow('rectangle' , original_image)\n", 36 | " \n", 37 | "cv2.waitKey()\n", 38 | "\n", 39 | "\n", 40 | " " 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "for c in contours:\n", 50 | " #calculate accuracy as a percent of perimeter\n", 51 | " accuracy = 0.01 * cv2.arcLength(c , True) #lower accuracy higher precision when approximating\n", 52 | " approx = cv2.approxPolyDP(c , accuracy , True)\n", 53 | " cv2.drawContours(image , [approx] , 0 , (0,255,0) , 2)\n", 54 | " cv2.imshow('approx' , image)\n", 55 | " \n", 56 | "cv2.waitKey()\n", 57 | "cv2.destroyAllWindows()" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": null, 63 | "metadata": {}, 64 | "outputs": [], 65 | "source": [] 66 | } 67 | ], 68 | "metadata": { 69 | "kernelspec": { 70 | "display_name": "Python 3", 71 | "language": "python", 72 | "name": "python3" 73 | }, 74 | "language_info": { 75 | "codemirror_mode": { 76 | "name": "ipython", 77 | "version": 3 78 | }, 79 | "file_extension": ".py", 80 | "mimetype": "text/x-python", 81 | "name": "python", 82 | "nbconvert_exporter": "python", 83 | "pygments_lexer": "ipython3", 84 | "version": "3.7.1" 85 | } 86 | }, 87 | "nbformat": 4, 88 | "nbformat_minor": 2 89 | } 90 | -------------------------------------------------------------------------------- /Blob Detection.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 10, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "\n", 11 | "daisies = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\daisies.jpg' , cv2.IMREAD_GRAYSCALE)\n" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | 
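# A hedged illustration of how the accuracy term in Approximating Contours behaves:
# the epsilon passed to cv2.approxPolyDP is a fraction of the contour perimeter, and a
# larger fraction keeps fewer vertices. Reuses the contours list from the cells above.
import cv2

for fraction in (0.01, 0.03, 0.10):
    vertex_counts = [len(cv2.approxPolyDP(c, fraction * cv2.arcLength(c, True), True))
                     for c in contours]
    print('epsilon = %.2f * perimeter -> vertices per contour:' % fraction, vertex_counts)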
"execution_count": 13, 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "ename": "AttributeError", 21 | "evalue": "module 'cv2.cv2' has no attribute 'drawKeypoints'", 22 | "output_type": "error", 23 | "traceback": [ 24 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 25 | "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)", 26 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 9\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 10\u001b[0m \u001b[0mblank\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 11\u001b[1;33m blobs = cv2.drawKeypoints(daisies, keypoints, blank, (255,0,0),\n\u001b[0m\u001b[0;32m 12\u001b[0m cv2.DRAW_MATCHES_FLAGS_DEFAULT)\n\u001b[0;32m 13\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Blobs'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0mblobs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 27 | "\u001b[1;31mAttributeError\u001b[0m: module 'cv2.cv2' has no attribute 'drawKeypoints'" 28 | ] 29 | } 30 | ], 31 | "source": [ 32 | "#setup a detector \n", 33 | "detector = cv2.SimpleBlobDetector_create()\n", 34 | "\n", 35 | "#Detect blobs\n", 36 | "keypoints = detector.detect(daisies)\n", 37 | "\n", 38 | "#draw the detected blobs\n", 39 | "#cv2.draw_matches_flags_draw_rich_keypoints makes sure the size of circle is same as size of blob\n", 40 | "\n", 41 | "blank = np.zeros((1,1)) \n", 42 | "blobs = cv2.drawKeypoints(daisies, keypoints, blank, (255,0,0),\n", 43 | " cv2.DRAW_MATCHES_FLAGS_DEFAULT) #openCV 4.0.0 error\n", 44 | "cv2.imshow('Blobs' , blobs)\n", 45 | "cv2.waitKey()\n", 46 | "cv2.destroyAllWindows()" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [] 55 | } 56 | ], 57 | "metadata": { 58 | "kernelspec": { 59 | "display_name": "Python 3", 60 | "language": "python", 61 | "name": "python3" 62 | }, 63 | "language_info": { 64 | "codemirror_mode": { 65 | "name": "ipython", 66 | "version": 3 67 | }, 68 | "file_extension": ".py", 69 | "mimetype": "text/x-python", 70 | "name": "python", 71 | "nbconvert_exporter": "python", 72 | "pygments_lexer": "ipython3", 73 | "version": "3.7.1" 74 | } 75 | }, 76 | "nbformat": 4, 77 | "nbformat_minor": 2 78 | } 79 | -------------------------------------------------------------------------------- /Contour Detection.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 2, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "contour_one = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\square-01.png', 0) #Reference image contour\n", 24 | "cv2.imshow(\"Contour Tempalte\", contour_one)\n", 25 | "cv2.waitKey()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 33, 31 | "metadata": {}, 32 | "outputs": [ 
33 | { 34 | "name": "stdout", 35 | "output_type": "stream", 36 | "text": [ 37 | "1\n", 38 | "0.3767457291322354\n", 39 | "0.39325765426692477\n", 40 | "0.37723717269282037\n", 41 | "0.3151969672568337\n" 42 | ] 43 | } 44 | ], 45 | "source": [ 46 | "#target image to identify similar to reference image\n", 47 | "targer = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 48 | "target_gray = cv2.cvtColor(targer , cv2.COLOR_BGR2GRAY)\n", 49 | "cv2.imshow('Target Image',target_gray)\n", 50 | "cv2.waitKey()\n", 51 | "\n", 52 | "#Threshold both images \n", 53 | "ret , thres1 = cv2.threshold(contour_one , 127 , 255 , 0)\n", 54 | "ret , thres2 = cv2.threshold(target_gray , 127 , 255 , 0)\n", 55 | "\n", 56 | "contours , hierarchy = cv2.findContours(thres1 , cv2.RETR_CCOMP , cv2.CHAIN_APPROX_SIMPLE)\n", 57 | "\n", 58 | "sorted_contours = sorted(contours , key = cv2.contourArea , reverse = True)\n", 59 | "print(len(contours))\n", 60 | "template_contour = contours[0]\n", 61 | "\n", 62 | "contours , heirarchy = cv2.findContours(thres2 , cv2.RETR_CCOMP , cv2.CHAIN_APPROX_SIMPLE)\n", 63 | "\n", 64 | "for c in contours:\n", 65 | " #iterate through each contour in image and use cv2.matchShapes to compare\n", 66 | " match = cv2.matchShapes(template_contour , c , 2 , 0.0)\n", 67 | " print(match)\n", 68 | " if match < 0.32:\n", 69 | " closest_contour = c\n", 70 | " break\n", 71 | " else:\n", 72 | " closest_contour = []\n", 73 | "\n", 74 | "cv2.drawContours(targer , [closest_contour] , -1 , ( 0,255,0) , 3)\n", 75 | "cv2.imshow('Output' , targer)\n", 76 | "cv2.waitKey()\n", 77 | "cv2.destroyAllWindows()\n", 78 | " " 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [] 87 | } 88 | ], 89 | "metadata": { 90 | "kernelspec": { 91 | "display_name": "Python 3", 92 | "language": "python", 93 | "name": "python3" 94 | }, 95 | "language_info": { 96 | "codemirror_mode": { 97 | "name": "ipython", 98 | "version": 3 99 | }, 100 | "file_extension": ".py", 101 | "mimetype": "text/x-python", 102 | "name": "python", 103 | "nbconvert_exporter": "python", 104 | "pygments_lexer": "ipython3", 105 | "version": "3.7.1" 106 | } 107 | }, 108 | "nbformat": 4, 109 | "nbformat_minor": 2 110 | } 111 | -------------------------------------------------------------------------------- /Convex Hull.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Smallest Polygon that can fit outside an object \n", 10 | "import numpy as np\n", 11 | "import cv2\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 14 | "\n", 15 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 16 | "\n", 17 | "ret , thresh = cv2.threshold(gray , 175 , 255 , 0)\n", 18 | "\n", 19 | "contours , heirarchy = cv2.findContours(thresh.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 20 | "\n", 21 | "n = len(contours) -1 #to remove largest frame contour\n", 22 | "contours = sorted(contours , key = cv2.contourArea , reverse = False)[:n] #index to remove outmost frame\n", 23 | " \n", 24 | "for c in contours:\n", 25 | " hull = cv2.convexHull(c)\n", 26 | " cv2.drawContours(image ,[hull] , 0 , (0,255 , 0) , 2)\n", 27 | " cv2.imshow('hull' , image)\n", 28 | "\n", 29 | "cv2.waitKey()\n", 30 | 
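# A small hedged extension of the hull loop above: solidity (contour area divided by
# convex hull area) is a common convexity measure, and values near 1.0 mean the contour
# is already nearly convex. Reuses the contours list from this cell.
import cv2

for c in contours:
    hull = cv2.convexHull(c)
    hull_area = cv2.contourArea(hull)
    if hull_area > 0:
        print('solidity:', cv2.contourArea(c) / hull_area)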
"cv2.destroyAllWindows()" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [] 39 | } 40 | ], 41 | "metadata": { 42 | "kernelspec": { 43 | "display_name": "Python 3", 44 | "language": "python", 45 | "name": "python3" 46 | }, 47 | "language_info": { 48 | "codemirror_mode": { 49 | "name": "ipython", 50 | "version": 3 51 | }, 52 | "file_extension": ".py", 53 | "mimetype": "text/x-python", 54 | "name": "python", 55 | "nbconvert_exporter": "python", 56 | "pygments_lexer": "ipython3", 57 | "version": "3.7.1" 58 | } 59 | }, 60 | "nbformat": 4, 61 | "nbformat_minor": 2 62 | } 63 | -------------------------------------------------------------------------------- /Counting Circles and Ellipses.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "ename": "error", 10 | "evalue": "OpenCV(4.0.0) C:\\projects\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:350: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n", 11 | "output_type": "error", 12 | "traceback": [ 13 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 14 | "\u001b[1;31merror\u001b[0m Traceback (most recent call last)", 15 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[1;31m# Load image\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mimage\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"images/blobs.jpg\"\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 6\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Original Image'\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mimage\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 7\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 8\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", 16 | "\u001b[1;31merror\u001b[0m: OpenCV(4.0.0) C:\\projects\\opencv-python\\opencv\\modules\\highgui\\src\\window.cpp:350: error: (-215:Assertion failed) size.width>0 && size.height>0 in function 'cv::imshow'\n" 17 | ] 18 | } 19 | ], 20 | "source": [ 21 | "import cv2\n", 22 | "import numpy as np\n", 23 | " \n", 24 | "# Load image\n", 25 | "image = cv2.imread(\"images/blobs.jpg\", 0)\n", 26 | "cv2.imshow('Original Image',image)\n", 27 | "cv2.waitKey(0)\n", 28 | " \n", 29 | "# Intialize the detector using the default parameters\n", 30 | "detector = cv2.SimpleBlobDetector_create()\n", 31 | " \n", 32 | "# Detect blobs\n", 33 | "keypoints = detector.detect(image)\n", 34 | " \n", 35 | "# Draw blobs on our image as red circles\n", 36 | "blank = np.zeros((1,1)) \n", 37 | "blobs = cv2.drawKeypoints(image, keypoints, blank, (0,0,255),\n", 38 | " cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)\n", 39 | " \n", 40 | "number_of_blobs = len(keypoints)\n", 41 | "text = \"Total Number of Blobs: \" + str(len(keypoints))\n", 42 | "cv2.putText(blobs, text, 
(20, 550), cv2.FONT_HERSHEY_SIMPLEX, 1, (100, 0, 255), 2)\n", 43 | " \n", 44 | "# Display image with blob keypoints\n", 45 | "cv2.imshow(\"Blobs using default parameters\", blobs)\n", 46 | "cv2.waitKey(0)\n", 47 | " \n", 48 | " \n", 49 | "# Set our filtering parameters\n", 50 | "# Initialize parameter settiing using cv2.SimpleBlobDetector\n", 51 | "params = cv2.SimpleBlobDetector_Params()\n", 52 | " \n", 53 | "# Set Area filtering parameters\n", 54 | "params.filterByArea = True\n", 55 | "params.minArea = 100\n", 56 | " \n", 57 | "# Set Circularity filtering parameters\n", 58 | "params.filterByCircularity = True \n", 59 | "params.minCircularity = 0.9\n", 60 | " \n", 61 | "# Set Convexity filtering parameters\n", 62 | "params.filterByConvexity = False\n", 63 | "params.minConvexity = 0.2\n", 64 | " \n", 65 | "# Set inertia filtering parameters\n", 66 | "params.filterByInertia = True\n", 67 | "params.minInertiaRatio = 0.01\n", 68 | " \n", 69 | "# Create a detector with the parameters\n", 70 | "detector = cv2.SimpleBlobDetector_create(params)\n", 71 | " \n", 72 | "# Detect blobs\n", 73 | "keypoints = detector.detect(image)\n", 74 | " \n", 75 | "# Draw blobs on our image as red circles\n", 76 | "blank = np.zeros((1,1)) \n", 77 | "blobs = cv2.drawKeypoints(image, keypoints, blank, (0,255,0),\n", 78 | " cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)\n", 79 | " \n", 80 | "number_of_blobs = len(keypoints)\n", 81 | "text = \"Number of Circular Blobs: \" + str(len(keypoints))\n", 82 | "cv2.putText(blobs, text, (20, 550), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 100, 255), 2)\n", 83 | " \n", 84 | "# Show blobs\n", 85 | "cv2.imshow(\"Filtering Circular Blobs Only\", blobs)\n", 86 | "cv2.waitKey(0)\n", 87 | "cv2.destroyAllWindows()" 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "execution_count": null, 93 | "metadata": {}, 94 | "outputs": [], 95 | "source": [] 96 | } 97 | ], 98 | "metadata": { 99 | "kernelspec": { 100 | "display_name": "Python 3", 101 | "language": "python", 102 | "name": "python3" 103 | }, 104 | "language_info": { 105 | "codemirror_mode": { 106 | "name": "ipython", 107 | "version": 3 108 | }, 109 | "file_extension": ".py", 110 | "mimetype": "text/x-python", 111 | "name": "python", 112 | "nbconvert_exporter": "python", 113 | "pygments_lexer": "ipython3", 114 | "version": "3.7.1" 115 | } 116 | }, 117 | "nbformat": 4, 118 | "nbformat_minor": 2 119 | } 120 | -------------------------------------------------------------------------------- /Creating images and drawing shapes in OpenCV.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "#creating a black image\n", 13 | "image = np.zeros((512 , 512 , 3) , np.uint8) #512 width and height with 3 rgb colors \n", 14 | "\n", 15 | "#black and white image\n", 16 | "image_b = np.zeros((512 , 512 ) , np.uint8)\n", 17 | "\n", 18 | "cv2.imshow(\"Color black image\" , image)\n", 19 | "cv2.imshow(\"Black and white image\" , image_b)\n", 20 | "cv2.waitKey()\n", 21 | "cv2.destroyAllWindows()" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "#creating a line over the square\n", 31 | "\n", 32 | "cv2.line(image , (0,0) , (512, 512) , (255 , 127 , 0) , 5)\n", 33 | "#line properties are image , start of our line , end point of our line , color of 
our line , thickness\n", 34 | "cv2.imshow('Blue Line' , image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 3, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "#Draw a rectangle\n", 46 | "\n", 47 | "cv2.rectangle(image , (100,100) , (300,250) , (127,50,127) , 5)\n", 48 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 49 | "cv2.imshow('Rectangle' , image)\n", 50 | "cv2.waitKey()\n", 51 | "cv2.destroyAllWindows()" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 4, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "cv2.rectangle(image , (100,100) , (300,250) , (127,50,127) , -1) #-1 is to fill the image\n", 61 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 62 | "cv2.imshow('Rectangle' , image)\n", 63 | "cv2.waitKey()\n", 64 | "cv2.destroyAllWindows()" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 6, 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "#drawing a circle\n", 74 | "\n", 75 | "cv2.circle(image , (350,350) , 100 , (127,50,127) , -1) #center of circle and radius no start and end\n", 76 | "#line properties are image , start of our line , end point of our line , color of our line , thickness\n", 77 | "cv2.imshow('Circle' , image)\n", 78 | "cv2.waitKey()\n", 79 | "cv2.destroyAllWindows()" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 7, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "#drawing a polygon\n", 89 | "\n", 90 | "pts = np.array([[10,50] ,[200,30] , [123,24]] , np.int32) #array of points in the image\n", 91 | "\n", 92 | "pts = pts.reshape((-1 , 1 , 2))\n", 93 | "\n", 94 | "cv2.polylines(image , [pts] , True , (0,0,244) , 3) #True if image is closed or not\n", 95 | "cv2.imshow('Polygons' , image)\n", 96 | "cv2.waitKey()\n", 97 | "cv2.destroyAllWindows()" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 10, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "#adding text to images\n", 107 | "\n", 108 | "\n", 109 | "cv2.putText(image , \"Hey dude\" , (25,200) , cv2.FONT_HERSHEY_COMPLEX , 2 ,(100,244,0) , 3)\n", 110 | "cv2.imshow('Text' , image)\n", 111 | "cv2.waitKey()\n", 112 | "cv2.destroyAllWindows()" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": 3, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "#Making a Square\n", 122 | "\n", 123 | "square = np.zeros((300,300) , np.uint8) #array of zeros of height and width 300\n", 124 | "cv2.rectangle(square , (50,50) , (250,250) , 255 , -2)\n", 125 | "\n", 126 | "#Making a ellipse\n", 127 | "ellipse = np.zeros((300,300) , np.uint8)\n", 128 | "cv2.ellipse(ellipse , (150,150) , (150,150) , 30 , 0 , 180 , 255 , -1)\n", 129 | "\n", 130 | "cv2.imshow('Square' , square)\n", 131 | "cv2.imshow('Ellipse' , ellipse)\n", 132 | "cv2.waitKey()\n", 133 | "cv2.destroyAllWindows()" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [] 142 | } 143 | ], 144 | "metadata": { 145 | "kernelspec": { 146 | "display_name": "Python 3", 147 | "language": "python", 148 | "name": "python3" 149 | }, 150 | "language_info": { 151 | "codemirror_mode": { 152 | "name": "ipython", 153 | "version": 3 154 | }, 155 | "file_extension": ".py", 156 | "mimetype": 
"text/x-python", 157 | "name": "python", 158 | "nbconvert_exporter": "python", 159 | "pygments_lexer": "ipython3", 160 | "version": "3.7.1" 161 | } 162 | }, 163 | "nbformat": 4, 164 | "nbformat_minor": 2 165 | } 166 | -------------------------------------------------------------------------------- /Grayscaling.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Grayscaling" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import cv2" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 3, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 26 | "cv2.imshow('Color image', input)\n", 27 | "cv2.waitKey()\n", 28 | "\n", 29 | "#cvtColor is used to convert to grayscale\n", 30 | "\n", 31 | "gray_image = cv2.cvtColor(input , cv2.COLOR_BGR2GRAY)\n", 32 | "cv2.imshow('Grayscale', gray_image)\n", 33 | "cv2.waitKey()\n", 34 | "cv2.destroyAllWindows()" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 4, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "#Faster method to convert to grayscale\n", 44 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg' , 0) #Read images using OpenCV\n", 45 | "cv2.imshow('Faster Grayscale', input)\n", 46 | "cv2.waitKey()\n", 47 | "cv2.destroyAllWindows()" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [] 56 | } 57 | ], 58 | "metadata": { 59 | "kernelspec": { 60 | "display_name": "Python 3", 61 | "language": "python", 62 | "name": "python3" 63 | }, 64 | "language_info": { 65 | "codemirror_mode": { 66 | "name": "ipython", 67 | "version": 3 68 | }, 69 | "file_extension": ".py", 70 | "mimetype": "text/x-python", 71 | "name": "python", 72 | "nbconvert_exporter": "python", 73 | "pygments_lexer": "ipython3", 74 | "version": "3.7.1" 75 | } 76 | }, 77 | "nbformat": 4, 78 | "nbformat_minor": 2 79 | } 80 | -------------------------------------------------------------------------------- /Histograms in OpenCV.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Visualize Individual Color Components" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import cv2\n", 17 | "import numpy as np\n", 18 | "\n", 19 | "from matplotlib import pyplot as plt # import matplotlib to create histograms " 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 6, 25 | "metadata": {}, 26 | "outputs": [ 27 | { 28 | "data": { 29 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYcAAAD8CAYAAACcjGjIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAF7BJREFUeJzt3X+sZOV93/H3pxBo7DgBzNqhu0t3nWzcYittyA2mdWu1poUFR14q2dWiqmzdlVZysZs0jeKlrkRkGwnSNMSoNtXGbL1YFmtEnLIq2GSFnVqVzI/FxvwwwWyAwhriXWsxcWUFB/vbP+a58fieuT925u6duXfeL+nqznzPc2aeZ8/d+cxzzpk5qSokSer3N8bdAUnS5DEcJEkdhoMkqcNwkCR1GA6SpA7DQZLUYThIkjoMB0lSh+EgSeo4ddwdGNbZZ59dmzZtGnc3JGlVefDBB79dVesWa7dqw2HTpk0cOnRo3N2QpFUlyf9dSjt3K0mSOgwHSVKH4SBJ6jAcJEkdhoMkqcNwkCR1GA6SpA7DQZLUYThIkjoMB2kFbNp957i7IJ0Qw0GS1LFoOCTZm+Rokkfn1N+f5IkkjyX5nb761UkOt2WX9NW3ttrhJLv76puT3JfkySSfSXLacg1OmiTOHrSaLGXm8Elga38hyT8FtgG/WFVvAn631c8DtgNvaut8PMkpSU4BPgZcCpwHXNHaAlwP3FBVW4AXgZ2jDkqSNJpFw6GqvgQcn1N+L3BdVb3c2hxt9W3A/qp6uaqeBg4DF7Sfw1X1VFV9H9gPbEsS4O3A7W39fcDlI45JkjSiYY85/ALwj9vuoP+d5FdafT3wXF+7I602X/21wHeq6pU5dUnSGA17PYdTgTOBC4FfAW5L8gYgA9oWg0OoFmg/UJJdwC6Ac8899wS7LI2Hxxq0Gg07czgCfLZ67gd+CJzd6hv72m0Anl+g/m3gjCSnzqkPVFV7qmqmqmbWrVv0QkaSpCENGw7/k96xApL8AnAavRf6A8D2JKcn2QxsAe4HHgC2tDOTTqN30PpAVRXwReBd7XF3AHcMOxhJ0vJYyqmstwJfBt6Y5EiSncBe4A3t9Nb9wI42i3gMuA34OvB54Kqq+kE7pvA+4G7gceC21hbgA8BvJDlM7xjEzcs7RGlyuItJq0V6b95Xn5mZmfIa0ppk8wXBM9e9Y4V7Iv1Ikgeramaxdn5CWpLUYThIkjoMB0lSh+EgSeowHKRl5hlJWgsMB+kkWCggNu2+0wDRxDMcJEkdhoM0Js4eNMkMB2mMDAhNKsNBktRhOEiSOgwHSVKH4SBJ6jAcJEkdhoM0Zp6xpElkOEjLyBd6rRVLuRLc3iRH21Xf5i77zSSV5Ox2P0luTHI4ycNJzu9ruyPJk+1nR1/9l5M80ta5MUmWa3DSamGoaNIsZebwSWDr3GKSjcA/B57tK19K77rRW4BdwE2t7VnANcBbgAuAa5Kc2da5qbWdXa/zXJKklbVoOFTVl4DjAxbdAPwW0H+d0W3ALe160vcCZyQ5B7gEOFhVx6vqReAgsLUt++mq+nL1rld6C3D5aEOSxsN3/1pLhjrmkOSdwDer6mtzFq0Hnuu7f6TVFqofGVCXpo7f1qpJcuqJrpDkVcAHgYsHLR5QqyHq8z33Lnq7oDj33HMX7askaTjDzBx+DtgMfC3JM8AG4CtJfpbeO/+NfW03AM8vUt8woD5QVe2pqpmqmlm3bt0QXZckLcUJh0NVPVJVr6uqTVW1id4L/PlV9efAAeDKdtbShcBLVfUCcDdwcZIz24Hoi4G727LvJrmwnaV0JXDHMo1NkjSkpZzKeivwZeCNSY4k2blA87uAp4DDwB8A/w6gqo4DHwYeaD8fajWA9wKfaOv8GfC54YYiSVouix5zqKorFlm+qe92AVfN024vsHdA/RDw5sX6IUlaOX5CWloGy3mWkWcsaRIYDpKkjhM+lVXSj/guX2uVMwdJUofhIEnqMBwkSR2GgySpw3CQJHUYDpKkDsNBGpKnsWotMxwkSR2GgySpw3CQJHUYDpKkDsNBktRhOEiSOgwHaQJ5mqzGbSmXCd2b5GiSR/tq/yXJnyZ5OMkfJTmjb9nVSQ4neSLJJX31ra12OMnuvvrmJPcleTLJZ5KctpwDlCSduKXMHD4JbJ1TOwi8uap+EfgGcDVAkvOA7cCb2jofT3JKklOAjwGXAucBV7S2ANcDN1TVFuBFYKFrVEuSVsCi4VBVXwKOz6n9cVW90u7eC2xot7cB+6vq5ap6GjgMXNB+DlfVU1X1fWA/sC1JgLcDt7f19wGXjzgmSdKIluOYw78FPtdurwee61t2pNXmq78W+E5f0MzWB0qyK8mhJIeOHTu2DF2XJA0yUjgk+SDwCvDp2dKAZjVEfaCq2lNVM1U1s27duhPtriRpiYa+hnSSHcCvAhdV1ewL+hFgY1+zDcDz7fag+reBM5Kc2mYP/e0lSWMy1MwhyVbgA8A7q+p7fYsOANuTnJ5kM7AFuB94ANjSzkw6jd5B6wMtVL4IvKutvwO4Y7ihSCvHU0211i3lVNZbgS8Db0xyJMlO4L8BrwEOJnkoyX8HqKrHgNuArwOfB66qqh+0WcH7gLuBx4HbWlvohcxvJDlM7xjEzcs6QknSCVt0t1JVXTGgPO8LeFVdC1w7oH4XcNeA+lP0zmaSJE0IPyEtSeowHKQT5PEGTQPDQZpQhpDGyXCQJHUYDpKkDsNBktRhOEgnwOMAmhZDf32GNE0MBU0bZw6SpA7DQZLUYThIi3CXkqaR4SBJ6jAcJEkdhoMkqcNwkCR1GA7SPDwQrWm2lCvB7U1yNMmjfbWzkhxM8mT7fWarJ8mNSQ4neTjJ+X3r7Gjtn2zXn56t/3KSR9o6NybJcg9SOlGzwTDugBj382t6LWXm8Elg65zabuCeqtoC3NPuA1xK77rRW4BdwE3QCxPgGuAt9K76ds1soLQ2u/rWm/tckqQVtmg4VNWXgONzytuAfe32PuDyvvot1XMvcEaSc4BLgINVdbyqXgQOAlvbsp+uqi9XVQG39D2WJGlMhj3m8PqqegGg/X5dq68Hnutrd6TVFqofGVAfKMmuJIeSHDp27NiQXZckLWa5v3hv0PGCGqI+UFXtAfYAzMzMzNtOGpb7+KWeYWcO32q7hGi/j7b6EWBjX7sNwPOL1DcMqEuSxmjYcDgAzJ5xtAO4o69+ZTtr6ULgpbbb6W7g4iRntgPRFwN3t2XfTXJhO0vpyr7HkiSNyaK7lZLcCvwT4OwkR+iddXQdcFuSncCzwLtb87uAy4DDwPeA9wBU1fEkHwYeaO0+VFWzB7nfS++MqJ8EPtd+JEljtGg4VNUV8yy6aEDbAq6a53H2AnsH1A8Bb16sH5KkleMnpCVJHYaD1HimkvQjhoMkqcNwkCR1GA6SpA7DQWKyjzds2n3nRPdPa5PhIK0SqyEgVkMftTSGgySpw3CQJHUYDpKWhbuU1hbDQVPPFzWpy3CQJHUYDtIq4ixHK8VwkCR1GA6SpA7DQVPN3TTDm/3k9t
x/Q/9N14aRwiHJf0jyWJJHk9ya5G8m2ZzkviRPJvlMktNa29Pb/cNt+aa+x7m61Z9IcsloQ5K00gyEtWfocEiyHvj3wExVvRk4BdgOXA/cUFVbgBeBnW2VncCLVfXzwA2tHUnOa+u9CdgKfDzJKcP2S9LJZxisfaPuVjoV+MkkpwKvAl4A3g7c3pbvAy5vt7e1+7TlFyVJq++vqper6ml615++YMR+SZJGMHQ4VNU3gd8FnqUXCi8BDwLfqapXWrMjwPp2ez3wXFv3ldb+tf31Aev8mCS7khxKcujYsWPDdl3SCJw1TIdRdiudSe9d/2bgbwGvBi4d0LRmV5ln2Xz1brFqT1XNVNXMunXrTrzTkqQlGWW30j8Dnq6qY1X1V8BngX8InNF2MwFsAJ5vt48AGwHa8p8BjvfXB6wjaUI4Y5guo4TDs8CFSV7Vjh1cBHwd+CLwrtZmB3BHu32g3act/0JVVatvb2czbQa2APeP0C9pSVbri91K9Hu+51jqc3uBotXv1MWbDFZV9yW5HfgK8ArwVWAPcCewP8lHWu3mtsrNwKeSHKY3Y9jeHuexJLfRC5ZXgKuq6gfD9kuSNLqhwwGgqq4BrplTfooBZxtV1V8C757nca4Frh2lL5KWl+/8p5ufkJYkdRgO0ip0Mt/VO2MQGA6SpAEMB0lSh+EgrVLu/tHJNNLZStJq5IuqtDhnDpKkDmcOkgBnVPpxzhykKWUYaCGGg6aKL4g/bvbf42T9u/jvvXoZDtIqNuwX3PWv4wu4BjEcJEkdhoOmhu+QpaXzbCVpShiOOhHOHDQV1voL40LjW+tj18lhOEiSOkYKhyRnJLk9yZ8meTzJP0hyVpKDSZ5sv89sbZPkxiSHkzyc5Py+x9nR2j+ZZMf8zyhpPnPPQPKMJI1i1JnDR4HPV9XfAf4e8DiwG7inqrYA97T7AJfSuz70FmAXcBNAkrPoXU3uLfSuIHfNbKBIOjFzQ8BQ0LCGDockPw28jXaN6Kr6flV9B9gG7GvN9gGXt9vbgFuq517gjCTnAJcAB6vqeFW9CBwEtg7bL2muaXuBnLTxTlp/tDSjzBzeABwD/keSryb5RJJXA6+vqhcA2u/Xtfbrgef61j/SavPVJUljMsqprKcC5wPvr6r7knyUH+1CGiQDarVAvfsAyS56u6Q499xzT6y3miq+W5VGM8rM4QhwpKrua/dvpxcW32q7i2i/j/a139i3/gbg+QXqHVW1p6pmqmpm3bp1I3Rda5nBII1u6HCoqj8Hnkvyxla6CPg6cACYPeNoB3BHu30AuLKdtXQh8FLb7XQ3cHGSM9uB6ItbTZI0JqN+Qvr9wKeTnAY8BbyHXuDclmQn8Czw7tb2LuAy4DDwvdaWqjqe5MPAA63dh6rq+Ij9kiSNYKRwqKqHgJkBiy4a0LaAq+Z5nL3A3lH6IoG7lKTl4iektWYYDNLyMRwkSR2GgySpw6/s1qrn7iRp+TlzkHTSGeCrj+EgSeowHCRJHYaDVjV3V0gnh+EgSeowHCStCGd5q4vhIEnqMBy0avlOVDp5DAdJUsdUhoPvOFc/t6F0ck1lOGh1Mxikk89wkCR1jBwOSU5J8tUk/6vd35zkviRPJvlMu0ocSU5v9w+35Zv6HuPqVn8iySWj9knSZNq0+05nfqvEcswcfg14vO/+9cANVbUFeBHY2eo7gRer6ueBG1o7kpwHbAfeBGwFPp7klGXol9YYX1iklTNSOCTZALwD+ES7H+DtwO2tyT7g8nZ7W7tPW35Ra78N2F9VL1fV0/SuMX3BKP2SJI1m1JnD7wO/Bfyw3X8t8J2qeqXdPwKsb7fXA88BtOUvtfZ/XR+wjiRpDIYOhyS/Chytqgf7ywOa1iLLFlpn7nPuSnIoyaFjx46dUH8lSUs3yszhrcA7kzwD7Ke3O+n3gTOSzF5hbgPwfLt9BNgI0Jb/DHC8vz5gnR9TVXuqaqaqZtatWzdC1yVJCxk6HKrq6qraUFWb6B1Q/kJV/Svgi8C7WrMdwB3t9oF2n7b8C1VVrb69nc20GdgC3D9svyRJozsZ15D+ALA/yUeArwI3t/rNwKeSHKY3Y9gOUFWPJbkN+DrwCnBVVf3gJPRLq5RnKEkrb1nCoar+BPiTdvspBpxtVFV/Cbx7nvWvBa5djr5IkkbnJ6QlSR2Ggyaau5TWJrfr5DMcJEkdhoMkqcNwkCR1GA6aWO6XlsbHcNBEMhjWPrfxZDMcJEkdJ+MT0tLQfDcpTQZnDpoYBoM0OQwHTQSDQZoshoMkqcNw0Ng5a5hebvvJZThorHxxkH8Dk8lwkCR1GA6Sxm7T7judQUyYoT/nkGQjcAvws8APgT1V9dEkZwGfATYBzwD/sqpeTBLgo8BlwPeAf1NVX2mPtQP4z+2hP1JV+4btlyabLwDS6jDKzOEV4D9W1d8FLgSuSnIesBu4p6q2APe0+wCX0rs+9BZgF3ATQAuTa4C30LuC3DVJzhyhX5pQBoMW49/I5Bg6HKrqhdl3/lX1XeBxYD2wDZh9578PuLzd3gbcUj33AmckOQe4BDhYVcer6kXgILB12H5Jkka3LMcckmwCfgm4D3h9Vb0AvQABXtearQee61vtSKvNV9ca4jtCLZXHHybDyOGQ5KeAPwR+var+YqGmA2q1QH3Qc+1KcijJoWPHjp14Z7Xi/I+uYfl3M14jhUOSn6AXDJ+uqs+28rfa7iLa76OtfgTY2Lf6BuD5BeodVbWnqmaqambdunWjdF3SKjD75mI2KAyMlTN0OLSzj24GHq+q3+tbdADY0W7vAO7oq1+ZnguBl9pup7uBi5Oc2Q5EX9xqWuX8j6zl5N/TyhrlK7vfCvxr4JEkD7XafwKuA25LshN4Fnh3W3YXvdNYD9M7lfU9AFV1PMmHgQdauw9V1fER+qUJ4H9kaXUbOhyq6v8w+HgBwEUD2hdw1TyPtRfYO2xfJE2PTbvv5Jnr3vHXb0Ceue4dY+7R2uTFfrSsnDFoJfh3dvIZDhqZ/1E1Tv1/f84ilo/fraShGQqaNHPPbtLwDActqv80wrmnFPqfUJPKv9HRGA5LNPcPrP8dyqBlg9oOWnYyDHqOpTzvUsfkfzatFgbE8NI7iWj1mZmZqUOHDg217uzZDkttu9L6+zZ3f+rc/szWZtcZtHxQfbFl0lo17cclkjxYVTOLtjMcBi+XtPbNfXM1DZYaDlN/tpJBIE2vQbudpikoFjLVxxwMBklzzT1GOK2vE1O7W0mSTtRamFW4W0mSltl8byzXQmjMZThI0ojmO0vwZDzPSgXRVB9zkKSTYS0cq3DmIEknyWo+C8pwkKQVsNAHUee2m4QgMRwkaUzm2/W0UscwFjIxxxySbE3yRJLDSXaPuz+SNCnGcfxiIsIhySnAx4BLgfOAK5KcN95eSdLkWOmAmIhwAC4ADlfVU1X1fWA/sG3MfZKkqTUp4bAeeK7v/pFWkySNwaQckM6AWud7PZLsAna1u/8vyRNDPt/ZwLeHXHe1maaxwnSNd5rGCtM13nnHmutHfuy/vZRGkxIOR4CNffc3AM/PbVRVe4A9o
z5ZkkNL+W6RtWCaxgrTNd5pGitM13gnYayTslvpAWBLks1JTgO2AwfG3CdJmloTMXOoqleSvA+4GzgF2FtVj425W5I0tSYiHACq6i7grhV6upF3Ta0i0zRWmK7xTtNYYbrGO/axrtrrOUiSTp5JOeYgSZogUxUO0/AVHUmeSfJIkoeSHGq1s5IcTPJk+33muPs5jCR7kxxN8mhfbeDY0nNj29YPJzl/fD0fzjzj/e0k32zb96Ekl/Utu7qN94kkl4yn18NJsjHJF5M8nuSxJL/W6mtu+y4w1snatlU1FT/0DnT/GfAG4DTga8B54+7XSRjnM8DZc2q/A+xut3cD14+7n0OO7W3A+cCji40NuAz4HL3P0FwI3Dfu/i/TeH8b+M0Bbc9rf9OnA5vb3/op4x7DCYz1HOD8dvs1wDfamNbc9l1grBO1badp5jDNX9GxDdjXbu8DLh9jX4ZWVV8Cjs8pzze2bcAt1XMvcEaSc1amp8tjnvHOZxuwv6perqqngcP0/uZXhap6oaq+0m5/F3ic3rckrLntu8BY5zOWbTtN4TAtX9FRwB8nebB9ohzg9VX1AvT+MIHXja13y2++sa3l7f2+titlb98uwjUz3iSbgF8C7mONb985Y4UJ2rbTFA5L+oqONeCtVXU+vW+4vSrJ28bdoTFZq9v7JuDngL8PvAD811ZfE+NN8lPAHwK/XlV/sVDTAbVVNd4BY52obTtN4bCkr+hY7arq+fb7KPBH9Kaf35qdcrffR8fXw2U339jW5Pauqm9V1Q+q6ofAH/Cj3QurfrxJfoLei+Wnq+qzrbwmt++gsU7atp2mcFjzX9GR5NVJXjN7G7gYeJTeOHe0ZjuAO8bTw5NivrEdAK5sZ7VcCLw0u3tiNZuzX/1f0Nu+0Bvv9iSnJ9kMbAHuX+n+DStJgJuBx6vq9/oWrbntO99YJ27bjvvI/Ur+0DvD4Rv0jvZ/cNz9OQnjewO9sxq+Bjw2O0bgtcA9wJPt91nj7uuQ47uV3nT7r+i9m9o539joTcU/1rb1I8DMuPu/TOP9VBvPw/ReNM7pa//BNt4ngEvH3f8THOs/orer5GHgofZz2VrcvguMdaK2rZ+QliR1TNNuJUnSEhkOkqQOw0GS1GE4SJI6DAdJUofhIEnqMBwkSR2GgySp4/8D4KuAxnU0juwAAAAASUVORK5CYII=\n", 30 | "text/plain": [ 31 | "
" 32 | ] 33 | }, 34 | "metadata": { 35 | "needs_background": "light" 36 | }, 37 | "output_type": "display_data" 38 | }, 39 | { 40 | "data": { 41 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAEvJJREFUeJzt3X+s3Xd93/Hna05CNYiKU19YFNs4bFFFukKSXRmmVCVoxXGiDVOtVW21kFYgSxXp2v2SzJASZFSpP7RWYk0JbrkKdG3SDcjqCdPgFtpso2G+YSaJk4Y4LlvuHNUuZoENRubw3h/n6+1wfe6933vuub72/Twf0tH9fj+fz/d73p98ndf93u/5nnNSVUiS2vHX1roASdKFZfBLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGnPZWhcwyqZNm2rbtm1rXYYkXTIeffTRv6qqqT5jL8rg37ZtG7Ozs2tdhiRdMpL8175jvdQjSY0x+CWpMQa/JDXG4Jekxhj8ktSYJYM/yZYkn0vyVJJjSX5+xJgk+WCS40keS3LTUN8dSZ7pHndMegKSpOXpczvnWeCfVtUXk1wJPJrkcFU9OTTmNuC67vFG4EPAG5NcBdwNTAPVbXuwqr420VlIknpb8oy/qp6vqi92y98AngKumTdsF/CxGngEeGWSq4FbgcNVdaYL+8PAzonOQJK0LMu6xp9kG3Aj8IV5XdcAzw2tz3VtC7VLktZI7+BP8grgE8AvVNXX53eP2KQWaR+1/71JZpPMnj59um9Zught2/eptS5h2bbt+9T/q3s59S81dqX90mroFfxJLmcQ+r9bVZ8cMWQO2DK0vhk4uUj7earqQFVNV9X01FSvj5uQJI2hz109AT4CPFVVv7bAsIPAO7u7e94EvFBVzwMPATuSbEyyEdjRtUmS1kifu3puBt4BPJ7kaNf2L4CtAFV1L3AIuB04DnwT+Jmu70ySDwBHuu32V9WZyZUvSVquJYO/qv4jo6/VD48p4D0L9M0AM2NVJ0maON+5K0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY1Z8hu4kswAfx84VVV/e0T/Pwd+cmh/rwOmuq9d/ArwDeAl4GxVTU+qcEnSePqc8d8H7Fyos6p+tapuqKobgPcCfzrve3Xf0vUb+pJ0EVgy+KvqYaDvF6TvAe5fUUWSpFU1sWv8Sf46g78MPjHUXMBnkjyaZO+knkuSNL4lr/Evwz8A/tO8yzw3V9XJJK8CDif58+4viPN0vxj2AmzdunWCZUmShk3yrp7dzLvMU1Unu5+ngAeB7QttXFUHqmq6qqanpqYmWJYkadhEgj/J9wJvBv5gqO3lSa48twzsAJ6YxPNJksbX53bO+4FbgE1J5oC7gcsBqurebtiPAp+pqv81tOmrgQeTnHue36uqP5xc6ZKkcSwZ/FW1p8eY+xjc9jncdgJ4w7iFSZJWh+/claTGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYsGfxJZpKcSjLy+3KT3JLkhSRHu8ddQ307kzyd5HiSfZMsXJI0nj5n/PcBO5cY8x+q6obusR8gyQbgHuA24HpgT5LrV1KsJGnllgz+qnoYODPGvrcDx6vqRFW9CDwA7BpjP5KkCZrUNf6/m+RLST6d5Ae6tmuA54bGzHVtIyXZm2Q2yezp06cnVJYkab5JBP8XgddU1RuAfwX8u649I8bWQjupqgNVNV1V01NTUxMoS5I0yoqDv6q+XlX/s1s+BFyeZBODM/wtQ0M3AydX+nySpJVZcfAn+RtJ0i1v7/b5VeAIcF2Sa5NcAewGDq70+SRJK3PZUgOS3A/cAmxKMgfcDVwOUFX3Aj8G/GySs8C3gN1VVcDZJHcCDwEbgJmqOrYqs5Ak9bZk8FfVniX6fwP4jQX6DgGHxitNkrQafOeuJDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNWbJ4E8yk+RUkicW6P/JJI91j88necNQ31eSPJ7kaJLZSRYuSRpPnzP++4Cdi/T/BfDmqno98AHgwLz+t1TVDVU1PV6JkqRJ6vOduw8n2bZI/+eHVh8BNq+8LEnSapn0Nf53AZ8eWi/gM0keTbJ3sQ2T7E0ym2T29OnTEy5LknTOkmf8fSV5C4Pg/6Gh5pur6mSSVwGHk/x5VT08avuqOkB3mWh6eromVZck6btN5Iw/yeuB3wZ2VdVXz7VX1cnu5yngQWD7JJ5PkjS+FQd/kq3AJ4F3VNWXh9pfnuTKc8vADmDknUGSpAtnyUs9Se4HbgE2JZkD7gYuB6iqe4G7gO8DfjMJwNnuDp5XAw92bZcBv1dVf7gKc5AkLUOfu3r2LNH/buDdI9pPAG84fwtJ0lrynbuS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUmF7Bn2QmyakkI78zNwMfTHI8yWNJbhrquyPJM93jjkkVLkkaT98z/vuAnYv03wZc1z32Ah8CSHIVg+/ofSOwHbg7ycZxi5UkrVyv4K+qh4EziwzZBXysBh4BXpnkauBW4HBVnamqrwGHWfwXiCRplU3qGv81wHND63Nd20Lt50myN8lsktnTp09PqKy1sW3fp1Z9H6P6h9u27fvUeeujxi72PAv1LbXvvhbb/7jO1bbSYzD836fP/kaNGbXe57/bUvvpW8dS/0YWqms5zzWpfc3f5/x5LPe5JvH/4Eqtxn+fSZlU8GdEWy3Sfn5j1YGqmq6q6ampqQmVJUmab1LBPwdsGVrfDJxcpF2StEYmFfwHgXd2d/e8CXihqp4HHgJ2JNnYvai7o2uTJK2Ry/oMSnI/cAuwKckcgzt1LgeoqnuBQ8DtwHHgm8DPdH1nknwAONLtan9VLfYisSRplfUK/qras0R/Ae9ZoG8GmFl+aZKk1eA7dyWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjekV/El2Jnk6yfEk+0b0/3qSo93jy0n+x1DfS0N9BydZvCRp+Zb8Bq4kG4B7gLcy+PL0I0kOVtWT58ZU1T8eGv9zwI1Du/hWVd
0wuZIlSSvR54x/O3C8qk5U1YvAA8CuRcbvAe6fRHGSpMnrE/zXAM8Nrc91bedJ8hrgWuCzQ83fk2Q2ySNJ3j52pZKkiejzZesZ0VYLjN0NfLyqXhpq21pVJ5O8Fvhskser6tnzniTZC+wF2Lp1a4+yJEnj6HPGPwdsGVrfDJxcYOxu5l3mqaqT3c8TwJ/w3df/h8cdqKrpqpqemprqUZYkaRx9gv8IcF2Sa5NcwSDcz7s7J8n3AxuBPxtq25jkZd3yJuBm4Mn520qSLpwlL/VU1dkkdwIPARuAmao6lmQ/MFtV534J7AEeqKrhy0CvAz6c5DsMfsn80vDdQJKkC6/PNX6q6hBwaF7bXfPW3z9iu88DP7iC+iRJE+Y7dyWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxvYI/yc4kTyc5nmTfiP6fTnI6ydHu8e6hvjuSPNM97phk8ZKk5VvyqxeTbADuAd4KzAFHkhwc8d25v19Vd87b9irgbmAaKODRbtuvTaR6SdKy9Tnj3w4cr6oTVfUi8ACwq+f+bwUOV9WZLuwPAzvHK1WSNAl9gv8a4Lmh9bmubb5/mOSxJB9PsmWZ20qSLpA+wZ8RbTVv/d8D26rq9cAfAR9dxraDgcneJLNJZk+fPt2jLEnSOPoE/xywZWh9M3ByeEBVfbWqvt2t/hbwd/puO7SPA1U1XVXTU1NTfWqXJI2hT/AfAa5Lcm2SK4DdwMHhAUmuHlp9G/BUt/wQsCPJxiQbgR1dmyRpjSx5V09VnU1yJ4PA3gDMVNWxJPuB2ao6CPyjJG8DzgJngJ/utj2T5AMMfnkA7K+qM6swD0lST0sGP0BVHQIOzWu7a2j5vcB7F9h2BphZQY2SpAnynbuS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUmF7Bn2RnkqeTHE+yb0T/P0nyZJLHkvxxktcM9b2U5Gj3ODh/W0nShbXkVy8m2QDcA7wVmAOOJDlYVU8ODfsvwHRVfTPJzwK/AvxE1/etqrphwnVLksbU54x/O3C8qk5U1YvAA8Cu4QFV9bmq+ma3+giwebJlSpImpU/wXwM8N7Q+17Ut5F3Ap4fWvyfJbJJHkrx9jBolSRO05KUeICPaauTA5KeAaeDNQ81bq+pkktcCn03yeFU9O2LbvcBegK1bt/YoS5I0jj5n/HPAlqH1zcDJ+YOS/AjwPuBtVfXtc+1VdbL7eQL4E+DGUU9SVQeqarqqpqempnpPQJK0PH2C/whwXZJrk1wB7Aa+6+6cJDcCH2YQ+qeG2jcmeVm3vAm4GRh+UViSdIEteamnqs4muRN4CNgAzFTVsST7gdmqOgj8KvAK4N8mAfhvVfU24HXAh5N8h8EvmV+adzeQJOkC63ONn6o6BBya13bX0PKPLLDd54EfXEmBkqTJ8p27ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1JhewZ9kZ5KnkxxPsm9E/8uS/H7X/4Uk24b63tu1P53k1smVLkkax5LBn2QDcA9wG3A9sCfJ9fOGvQv4WlX9LeDXgV/utr2ewZez/wCwE/jNbn+SpDXS54x/O3C8qk5U1YvAA8CueWN2AR/tlj8O/L0MvnV9F/BAVX27qv4CON7tT5K0RvoE/zXAc0Prc13byDFVdRZ4Afi+nttKki6gVNXiA5IfB26tqnd36+8AtlfVzw2NOdaNmevWn2VwZr8f+LOq+tdd+0eAQ1X1iRHPsxfY261+P/D0mHPaBPzVmNtealqaK7Q135bmCm3Nd7Xm+pqqmuoz8LIeY+aALUPrm4GTC4yZS3IZ8L3AmZ7bAlBVB4ADfYpeTJLZqppe6X4uBS3NFdqab0tzhbbmezHMtc+lniPAdUmuTXIFgxdrD84bcxC4o1v+MeCzNfhT4iCwu7vr51rgOuA/T6Z0SdI4ljzjr6qzSe4EHgI2ADNVdSzJfmC2qg4CHwF+J8lxBmf6u7ttjyX5N8CTwFngPVX10irNRZLUQ59LPVTVIeDQvLa7hpb/N/DjC2z7i8AvrqDG5Vrx5aJLSEtzhbbm29Jcoa35rvlcl3xxV5K0vviRDZLUmHUT/Et9rMR6kOQrSR5PcjTJbNd2VZLDSZ7pfm5c6zrHkWQmyakkTwy1jZxbBj7YHevHkty0dpWPZ4H5vj/Jf++O79Ektw/1XbIffZJkS5LPJXkqybEkP9+1r7vju8hcL65jW1WX/IPBi87PAq8FrgC+BFy/1nWtwjy/Amya1/YrwL5ueR/wy2td55hz+2HgJuCJpeYG3A58GgjwJuALa13/hOb7fuCfjRh7ffdv+mXAtd2/9Q1rPYdlzPVq4KZu+Urgy92c1t3xXWSuF9WxXS9n/H0+VmK9Gv64jI8Cb1/DWsZWVQ8zuCNs2EJz2wV8rAYeAV6Z5OoLU+lkLDDfhVzSH31SVc9X1Re75W8ATzF4B/+6O76LzHUha3Js10vwt/LREAV8Jsmj3TudAV5dVc/D4B8d8Ko1q27yFprbej7ed3aXN2aGLtutm/l2n9x7I/AF1vnxnTdXuIiO7XoJ/oxoW4+3K91cVTcx+KTU9yT54bUuaI2s1+P9IeBvAjcAzwP/smtfF/NN8grgE8AvVNXXFxs6ou2Smu+IuV5Ux3a9BH/vj4a4lFXVye7nKeBBBn8S/uW5P4O7n6fWrsKJW2hu6/J4V9VfVtVLVfUd4Lf4/3/yX/LzTXI5gyD83ar6ZNe8Lo/vqLlebMd2vQR/n4+VuKQleXmSK88tAzuAJ/juj8u4A/iDtalwVSw0t4PAO7u7P94EvHDuksGlbN517B9lcHzhEv/okyRh8O7+p6rq14a61t3xXWiuF92xXetXwSf4avrtDF5BfxZ431rXswrzey2DV/+/BBw7N0cGH3/9x8Az3c+r1rrWMed3P4M/gf8Pg7Ogdy00NwZ/Ht/THevHgem1rn9C8/2dbj6PMQiEq4fGv6+b79PAbWtd/zLn+kMMLl88BhztHrevx+O7yFwvqmPrO3clqTHr5VKPJKkng1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMb8Xyhtm6m/1Zy+AAAAAElFTkSuQmCC\n", 42 | "text/plain": [ 43 | "
" 44 | ] 45 | }, 46 | "metadata": { 47 | "needs_background": "light" 48 | }, 49 | "output_type": "display_data" 50 | }, 51 | { 52 | "data": { 53 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAW4AAAD8CAYAAABXe05zAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAADBtJREFUeJzt3H+I5HUdx/HXq9vTSi/MbpLDk1ZDhCPIO4bDuDjIwh9XdAUFCpmFsf+oKBSx4j/2Z0FSgQibXlmZEv4g8dSUUkTIs1k9zzvXyx9deHl5I1Jqf2jquz/mu7kuMzvfnZ3vzrxnng9Ydnb2M7Pvj9+9JzOz39ERIQBAHh8Y9AAAgOUh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkpmo4k7Xr18fk5OTVdw1AIyk2dnZVyKiVmZtJeGenJxUo9Go4q4BYCTZ/nvZtbxUAgDJEG4ASIZwA0AyhBsAkiHcAJBMqXDbPsH2bbafsT1n+zNVDwYAaK/s6YA/lXRfRHzN9jGSPlzhTACAJXQNt+2PSNou6VuSFBFvSXqr2rEAAJ2UeankNElNSb+w/YTtG2wfV/FcAIAOyoR7QtIWSddHxGZJ/5E0vXiR7SnbDduNZrPZ80CT07t7vi0AjIMy4T4s6XBE7Cm+vk2tkL9PRMxERD0i6rVaqbfbAwB60DXcEfFPSS/aPqO46vOSnq50KgBAR2XPKrlc0s3FGSUvSPp2dSMBAJZSKtwRsVdSveJZAAAl8M5JAEiGcANAMoQbAJIh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMhNlFtk+JOl1Se9Iejsi6lUOBQDorFS4C5+LiFcqmwQAUAovlQBAMmXDHZLutz1re6rdAttTthu2G81ms38TAgDep2y4t0XEFknnS7rU9vbFCyJiJiLqEVGv1Wp9HRIA8J5S4Y6Il4rPRyXdKWlrlUMBADrrGm7bx9leN39Z0jmS9lc9GACgvTJnlZwk6U7b8+t/GxH3VToVAKCjruGOiBckfXoVZgEAlMDpgACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMoQbAJIh3ACQTOlw215j+wnbd1c5EABgact5xH2FpLmqBgEAlFMq3LY3SvqipBuqHQcA0E3ZR9w/kfR9Se9WOAsAoISu4bb9JUlHI2K2y7op2w3bjWaz2bcBAQDvV+YR9zZJX7Z9SNKtks62/ZvFiyJiJiLqEVGv1Wp9HhMAMK9ruCPiqojYGBGTki6Q9KeI+EblkwEA2uI8bgBIZmI5iyPiIUkPVTIJAKAUHnEDQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMoQbAJIh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCS6Rpu2x+0/ZjtJ20fsP2D1RgMANDeRIk1b0o6OyLesL1W0iO2742IRyueDQDQRtdwR0RIeqP4cm3xEVUOBQDorNRr3LbX2N4r6aikByJiT5s1U7YbthvNZnNFQ01O717R7QFglJUKd0S8ExFnStooaavtT7VZMxMR9Yio12q1fs8JACgs66ySiPiXpIcknVfJNACArsqcVVKzfUJx+UOSviDpmaoHAwC0V+askg2SbrK9Rq3Q/y4i7q52LABAJ2XOKtknafMqzAIAKIF3TgJAMoQbAJIh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMl3DbfsU2w/anrN9wPYVqzEYAKC9iRJr3pb03Yh43PY6SbO2H4iIpyueDQDQRtdH3BFxJCIeLy6/LmlO0slVDwYAaG9Zr3HbnpS0WdKeKoYBAHRXOty2j5d0u6QrI+K1Nt+fst2w3Wg2m/2cEUhlcnr3im7b6+1X8nP78fPHzSD/W5UKt+21akX75oi4o92aiJiJiHpE1Gu1Wj9nBAAsUOasEku6UdJcRFxb/UgAgKWUecS9TdJFks62vbf42FHxXACADrqeDhgRj0jyKswCACiBd04CQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMoQbAJIh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCS6Rpu27tsH7W9fzUGAgAsrcwj7l9KOq/iOQAAJXUNd0Q8LOnVVZgFAFBC317jtj1lu2G70Ww2+3W3/zc5vbv0uoVry95uqbXLuY/l/qzF8670/jp9r1976Od9Ledn9vNnt7uPldzvSubqdtulZi1z28Xf7/RvY7nzd1vfabaV/neu+nar/bvdq76FOyJmIqIeEfVardavuwUALMJZJQCQDOEGgGTKnA54i6Q/SzrD9mHbl1Q/FgCgk4luCyLiwtUYBABQDi+VAEAyhBsAkiHcAJAM4QaAZAg3ACRDuAEgGcINAMkQbgBIhnADQDKEGwCSIdwAkAzhBoBkCDcAJEO4ASAZwg0AyRBuAEiGcANAMoQbAJIh3ACQDOEGgGQINwAkQ7gBIBnCDQDJEG4ASIZwA0AypcJt+zzbB20/Z3u66qEAAJ11DbftNZKuk3S+pE2SLrS9qerBAADtlXnEvVXScxHxQkS8JelWSTurHQsA0EmZcJ8s6cUFXx8urgMADIAjYukF9tclnRsR3ym+vkjS1oi4fNG6KUlTxZdnSDrY40zrJb3S422zGae9SuO133HaqzRe+61qr5+IiFqZhRMl1hyWdMqCrzdKemnxooiYkTRTarwl2G5ERH2l95PBOO1VGq/9jtNepfHa7zDstcxLJX+RdLrtU20fI+kCSXdVOxYAoJOuj7gj4m3bl0n6g6Q1knZFxIHKJwMAtFXmpRJFxD2S7ql4lnkrfrklkXHaqzRe+x2nvUrjtd+B77XrHycBAMOFt7wDQDJDE+5xeFu97UO2n7K913ajuO5E2w/Yfrb4/NFBz9kL27tsH7W9f8F1bffmlp8Vx3qf7S2Dm7w3HfZ7je1/FMd3r+0dC7
53VbHfg7bPHczUvbF9iu0Hbc/ZPmD7iuL6kTu+S+x1uI5tRAz8Q60/ej4v6TRJx0h6UtKmQc9VwT4PSVq/6LofSZouLk9L+uGg5+xxb9slbZG0v9veJO2QdK8kSzpL0p5Bz9+n/V4j6Xtt1m4qfqePlXRq8bu+ZtB7WMZeN0jaUlxeJ+mvxZ5G7vgusdehOrbD8oh7nN9Wv1PSTcXlmyR9ZYCz9CwiHpb06qKrO+1tp6RfRcujkk6wvWF1Ju2PDvvtZKekWyPizYj4m6Tn1PqdTyEijkTE48Xl1yXNqfXu6ZE7vkvstZOBHNthCfe4vK0+JN1ve7Z4p6kknRQRR6TWL42kjw9suv7rtLdRPt6XFS8P7FrwstfI7Nf2pKTNkvZoxI/vor1KQ3RshyXcbnPdKJ7usi0itqj1f1q81Pb2QQ80IKN6vK+X9ElJZ0o6IunHxfUjsV/bx0u6XdKVEfHaUkvbXJdqv232OlTHdljCXept9dlFxEvF56OS7lTrKdXL808ji89HBzdh33Xa20ge74h4OSLeiYh3Jf1c7z1lTr9f22vVCtnNEXFHcfVIHt92ex22Yzss4R75t9XbPs72uvnLks6RtF+tfV5cLLtY0u8HM2ElOu3tLknfLM4+OEvSv+efcme26HXcr6p1fKXWfi+wfaztUyWdLumx1Z6vV7Yt6UZJcxFx7YJvjdzx7bTXoTu2g/4r7oK/zu5Q6y+4z0u6etDzVLC/09T66/OTkg7M71HSxyT9UdKzxecTBz1rj/u7Ra2nkP9V61HIJZ32ptbTy+uKY/2UpPqg5+/Tfn9d7GefWv+gNyxYf3Wx34OSzh/0/Mvc62fVevq/T9Le4mPHKB7fJfY6VMeWd04CQDLD8lIJAKAkwg0AyRBuAEiGcANAMoQbAJIh3ACQDOEGgGQINwAk8z/sbkgYCPWhLAAAAABJRU5ErkJggg==\n", 54 | "text/plain": [ 55 | "
" 56 | ] 57 | }, 58 | "metadata": { 59 | "needs_background": "light" 60 | }, 61 | "output_type": "display_data" 62 | }, 63 | { 64 | "data": { 65 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAEOVJREFUeJzt3X+MZXV5x/H3p8uKphKp7rRuYHGwkiZoFHBCMTSGqK1ADdtGSNakCgaziZGKiU2zaILKfzSpNhYjWQsRqFEMWru6EItVov7B6kCXhXWlrtaGLURGUJCo2NWnf9xDO73cmXtm5s7Oznffr+Rmzo9nznm+e+5+5syZc+9NVSFJastvrXUDkqTJM9wlqUGGuyQ1yHCXpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTpurXa8adOmmp6eXqvdS9K6dM899/y4qqbG1a1ZuE9PTzM7O7tWu5ekdSnJf/ap87KMJDXIcJekBhnuktQgw12SGmS4S1KDeod7kg1J/i3Jl0asOz7JrUkOJtmTZHqSTUqSlmYpZ+5XAgcWWHc58JOqehnwEeDalTYmSVq+XuGe5GTgT4F/WKBkK3BTN30b8PokWXl7kqTl6Hvm/nfAXwO/WWD9ScBDAFV1GHgCeNGKu5MkLcvYcE/yJuDRqrpnsbIRy571ydtJtieZTTI7Nze3hDYlaXHTO3Yvu356x+4lf//Rrs+Z+7nARUl+CHwGeF2SfxyqOQRsAUhyHPAC4PHhDVXVzqqaqaqZqamxb40gSVqmseFeVVdV1clVNQ1sA75aVX8xVLYLuLSbvriredaZuyTpyFj2G4cluQaYrapdwA3ALUkOMjhj3zah/iRJy7CkcK+qu4C7uumr5y3/JXDJJBuTJC2fr1CVpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBhrskNchwl6QGGe6S1CDDXZIaZLhLUoMMd0lqkOEuSQ0y3CWpQYa7JDXIcJekBo0N9yTPTfKtJPcl2Z/kQyNqLksyl2Rv93jH6rQrSeqjz8fsPQ28rqqeSrIR+GaSO6rq7qG6W6vqism3KElaqrHhXlUFPNXNbuwetZpNSZJWptc19yQbkuwFHgXurKo9I8renGRfktuSbJlol5KkJekV7lX166o6AzgZODvJK4ZKvghMV9Urga8AN43aTpLtSWaTzM7Nza2kb0nSIpZ0t0xV/RS4Czh/aPljVfV0N/sJ4NULfP/OqpqpqpmpqalltCtJ6qPP3TJTSU7spp8HvAH47lDN5nmzFwEHJtmkJGlp+twtsxm4KckGBj8MPltVX0pyDTBbVbuAdye5CDgMPA5ctloNS5LG63O3zD7gzBHLr543fRVw1WRbkyQtl69QlaQGGe6S1CDDXZIaZLhLUoMMd0lqkOEuSQ0y3CWpQYa7JDXIcJekBhnuktQgw12SGmS4S1KDDHdJapDhLkkNMtwlqUGGuyQ1yHCXpAb1+QzV5yb5VpL7kuxP8qERNccnuTXJwSR7kkyvRrOSpH76nLk/Dbyuql4FnAGcn+ScoZrLgZ9U1cuAjwDXTrZNSdJSjA33Gniqm93YPWqobCtwUzd9G/D6JJlYl5KkJel1zT3JhiR7gUeBO6tqz1DJScBDAFV1GHgCeNGI7WxPMptkdm5ubmWdSzqqTO/YvdYt9LYWvU7v2H1E99sr3Kvq11V1BnAycHaSVwyVjDpLHz67p6p2VtVMVc1MTU0tvVtJUi9Lulumqn4K3AWcP7TqELAFIMlxwAuAxyfQnyRpGfrcLTOV5MRu+nnAG4DvDpXtAi7tpi8GvlpVzzpzlyQdGcf1qNkM3JRkA4MfBp+tqi8luQaYrapdwA3ALUkOMjhj37ZqHUuSxhob7lW1DzhzxPKr503/Erhksq1JkpbLV6hKUoMMd0lqkOEuSQ0y3CWpQYa7JDXIcJekBhnuktQgw12SGmS4S1KDDHdJapDhLkkNMtwlqUGGuyQ1yHCXpAYZ7pLUIMNdkhpkuEtSg/p8huqWJF9LciDJ/iRXjqg5L8kTSfZ2j6tHbUuSdGT0+QzVw8B7q+reJCcA9yS5s6q+M1T3jap60+RblCQt1dgz96p6pKru7aZ/BhwATlrtxiRJy7eka+5Jphl8WPaeEatfk+S+JHckefkC3789yWyS2bm5uSU3K0nqp3e4J3k+8DngPVX15NDqe4GXVNWrgL8HvjBqG1W1s6pmqmpmampquT1LksboFe5JNjII9k9V1eeH11fVk1X1VDd9O7AxyaaJdipJ6q3P3TIBbgAOVNWHF6h5cVdHkrO77T42yUYlSf31uVvmXOCtwP1J9nbL3gecAlBV1wMXA+9Mchj4BbCtqmoV+pUk9TA23Kvqm0DG1FwHXDeppiRJK+MrVCWpQYa7JDXIcJekBhnuktQgw12SGmS4S1KDDHdJapDhLkkNMtwlqUGGuyQ1yHCXpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBfT5DdUuSryU5kGR/kitH1CTJR5McTLIvyVmr064kqY8+n6F6GHhvVd2b5ATgniR3VtV35tVcAJzWPf4Q+Hj3VZK0BsaeuVfVI1V1bzf9M+AAcNJQ2Vbg5hq4GzgxyeaJdytJ6mVJ19yTTANnAnuGVp0EPDRv/hDP/gFAku1JZpPMzs3NLa1TSSsyvWP3WrfQ2zO9HomeR+1jesfuBZeP+97l7G819A73JM8HPge8p6qeHF494lvqWQuqdlbVTFXNTE1NLa1TSVJvvcI9yUYGwf6pqvr8iJJDwJZ58ycDD6+8PUnScvS5WybADcCBqvrwAmW7gLd1d82cAzxRVY9MsE9J0hL0uVvmXOCtwP1J9nbL3gecAlBV1wO3AxcCB4GfA2+ffKuSpL7GhntVfZPR19Tn1xTwrkk1JUlaGV+hKkkNMtwlqUGGuyQ1yHCXpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBhrskNchwl6QGGe6S1CDDXZIaZLhLUoMMd0lqUJ+P2bsxyaNJHlhg/XlJnkiyt3tcPfk2JUlL0edj9j4JXAfcvEjNN6rqTRPpSJK0YmPP3Kvq68DjR6AXSdKETOqa+2uS3JfkjiQvn9A2JUnL1OeyzDj3Ai+pqqeSXAh8AThtVGGS7cB2gFNOOWUCu5YkjbLiM/eqerKqnuqmbwc2Jtm0QO3OqpqpqpmpqamV7lqStIAVh3uSFydJN312t83HVrpdSdLyjb0sk+TTwHnApiSHgA8AGwGq6nrgYuCdSQ4DvwC2VVWtWseSpLHGhntVvWXM+usY3CopSTpK+ApVSWqQ4S5JDTLcJalBhrskNchwl6QGGe6S1CDDXZIaZLhLUoMMd0lqkOEuSQ0y3C
WpQYa7JDXIcJekBhnuktQgw12SGmS4S1KDDHdJatDYcE9yY5JHkzywwPok+WiSg0n2JTlr8m1Kkpaiz5n7J4HzF1l/AXBa99gOfHzlbUmSVmJsuFfV14HHFynZCtxcA3cDJybZPKkGJUlLN4lr7icBD82bP9QtkyStkUmEe0Ysq5GFyfYks0lm5+bmJrDro8P0jt3LWjfJ/ax0WyvZ9vSO3f/7WM6+J2Wh7c5f/sz0/H7H9TNq/ST+vYan+25/sX/rPn2t9Fgtdx+jniej5oenF/r3mr+9hZYt9FhoLEt5Di3Faj3nFzOJcD8EbJk3fzLw8KjCqtpZVTNVNTM1NTWBXUuSRplEuO8C3tbdNXMO8ERVPTKB7UqSlum4cQVJPg2cB2xKcgj4ALARoKquB24HLgQOAj8H3r5azUqS+hkb7lX1ljHrC3jXxDqSJK2Yr1CVpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBhrskNchwl6QGGe6S1CDDXZIaZLhLUoMMd0lqkOEuSQ0y3CWpQYa7JDXIcJekBvUK9yTnJ3kwycEkO0asvyzJXJK93eMdk29VktRXn89Q3QB8DPhj4BDw7SS7quo7Q6W3VtUVq9CjJGmJ+py5nw0crKofVNWvgM8AW1e3LUnSSvQJ95OAh+bNH+qWDXtzkn1JbkuyZSLdSZKWpU+4Z8SyGpr/IjBdVa8EvgLcNHJDyfYks0lm5+bmltapJKm3PuF+CJh/Jn4y8PD8gqp6rKqe7mY/Abx61IaqamdVzVTVzNTU1HL6lST10Cfcvw2cluTUJM8BtgG75hck2Txv9iLgwORalCQt1di7ZarqcJIrgC8DG4Abq2p/kmuA2araBbw7yUXAYeBx4LJV7FmSNMbYcAeoqtuB24eWXT1v+irgqsm2JklaLl+hKkkNMtwlqUGGuyQ1yHCXpAYZ7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBhrskNchwl6QGGe6S1CDDXZIaZLhLUoMMd0lqkOEuSQ3qFe5Jzk/yYJKDSXaMWH98klu79XuSTE+6UUlSf2PDPckG4GPABcDpwFuSnD5Udjnwk6p6GfAR4NpJNypJ6q/PmfvZwMGq+kFV/Qr4DLB1qGYrcFM3fRvw+iSZXJuSpKXoE+4nAQ/Nmz/ULRtZU1WHgSeAF02iQUnS0qWqFi9ILgHeWFXv6ObfCpxdVX85r2Z/V3Oom/9+V/PY0La2A9u72T8AHlxm35uAHy/ze9ejY2m8x9JY4dga77E0Vli98b6kqqbGFR3XY0OHgC3z5k8GHl6g5lCS44AXAI8Pb6iqdgI7e+xzUUlmq2pmpdtZL46l8R5LY4Vja7zH0lhh7cfb57LMt4HTkpya5DnANmDXUM0u4NJu+mLgqzXuVwJJ0qoZe+ZeVYeTXAF8GdgA3FhV+5NcA8xW1S7gBuCWJAcZnLFvW82mJUmL63NZhqq6Hbh9aNnV86Z/CVwy2dYWteJLO+vMsTTeY2mscGyN91gaK6zxeMf+QVWStP749gOS1KB1F+7j3gphvUvywyT3J9mbZLZb9sIkdyb5Xvf1d9a6z+VKcmOSR5M8MG/ZyPFl4KPdsd6X5Ky163zpFhjrB5P8V3d89ya5cN66q7qxPpjkjWvT9fIl2ZLka0kOJNmf5MpueXPHd5GxHj3Ht6rWzYPBH3S/D7wUeA5wH3D6Wvc14TH+ENg0tOxvgB3d9A7g2rXucwXjey1wFvDAuPEBFwJ3AAHOAfasdf8TGOsHgb8aUXt693w+Hji1e55vWOsxLHG8m4GzuukTgH/vxtXc8V1krEfN8V1vZ+593gqhRfPf3uEm4M/WsJcVqaqv8+zXQCw0vq3AzTVwN3Biks1HptOVW2CsC9kKfKaqnq6q/wAOMni+rxtV9UhV3dtN/ww4wODV680d30XGupAjfnzXW7j3eSuE9a6Af0lyT/eKXoDfq6pHYPCkAn53zbpbHQuNr9XjfUV3GeLGeZfYmhpr986wZwJ7aPz4Do0VjpLju97CfdSbkbV2u8+5VXUWg3fhfFeS1651Q2uoxeP9ceD3gTOAR4C/7ZY3M9Ykzwc+B7ynqp5crHTEsnU15hFjPWqO73oL9z5vhbCuVdXD3ddHgX9i8Kvbj575dbX7+ujadbgqFhpfc8e7qn5UVb+uqt8An+D/fjVvYqxJNjIIu09V1ee7xU0e31FjPZqO73oL9z5vhbBuJfntJCc8Mw38CfAA///tHS4F/nltOlw1C41vF/C27q6Kc4Annvn1fr0auqb85wyOLwzGui2DD745FTgN+NaR7m8lkoTBq9UPVNWH561q7vguNNaj6viu9V+dl/FX6gsZ/GX6+8D717qfCY/tpQz+on4fsP+Z8TF4++R/Bb7XfX3hWve6gjF+msGvq//N4Gzm8oXGx+BX2Y91x/p+YGat+5/AWG/pxrKPwX/4zfPq39+N9UHggrXufxnj/SMGlxr2AXu7x4UtHt9FxnrUHF9foSpJDVpvl2UkST0Y7pLUIMNdkhpkuEtSgwx3SWqQ4S5JDTLcJalBhrskNeh/AIN/eF8POG+4AAAAAElFTkSuQmCC\n", 66 | "text/plain": [ 67 | "
" 68 | ] 69 | }, 70 | "metadata": { 71 | "needs_background": "light" 72 | }, 73 | "output_type": "display_data" 74 | }, 75 | { 76 | "data": { 77 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD8CAYAAAB+UHOxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzsnXl8VcXZx7+TlSyEJBASkgBhCfsmBkQ2FZBFEdBqi7WKVqW11tetb19t3UrVqm3VUpcKYsUVdwU3RFYXdpAQwpKQsGQhCRAIkIUs8/7xnEsScpPcwE3uSTLfzyefc++cOXPm3ntyfud5ZuZ5lNYag8FgMLQ+vDzdAYPBYDB4BiMABoPB0EoxAmAwGAytFCMABoPB0EoxAmAwGAytFCMABoPB0EoxAmAwGAytFCMABoPB0EoxAmAwGAytFB9Pd6AuOnTooOPi4jzdDYPBYGhWbN68+bDWOqK+erYWgLi4ODZt2uTpbhgMBkOzQim135V6xgVkMBgMrRQjAAaDwdBKMQJgMBgMrRQjAAaDwdBKMQJgMBgMrRQjAAaDwdBKMQJgMBgMrRQjAAaDwT1s2gQ//ODpXhgagEsCoJS6Vym1QymVpJR6VynVRinVTSm1XimVopR6TynlZ9X1t96nWvvjqrTzoFW+Wyk1qXE+ksFg8Ai33AJ33eXpXhgaQL0CoJSKAf4HSNBaDwC8gZnA08BzWut4IB+41TrkViBfa90TeM6qh1Kqn3Vcf2Ay8JJSytu9H8dgMHiEffsgKQlycjzdE0MDcNUF5AMEKKV8gEAgGxgHfGjtXwjMsF5Pt95j7R+vlFJW+SKtdYnWOh1IBYaf/0cwGAweZ8kS2eblgdae7YvBZeoVAK11JvAP4ABy4z8ObAaOaa3LrGoZQIz1OgY4aB1bZtVvX7XcyTEGg6E58/nnsi0thYICz/bF4DKuuIDCkKf3bkA0EARMcVLVIfuqln21lZ99vtlKqU1KqU15eXn1dc9gsA0HD7ZSD0hpKaxaBZGR8t783zYbXHEBTQDStdZ5WutS4GNgJBBquYQAYoEs63UG0BnA2t8OOFq13MkxZ9Baz9NaJ2itEyIi6o1majDYhp/9DG67zdO98ACHDsHp0zBqlLw3AtBscEUADgAjlFKBli9/PJAMrASuterMAj6zXi+23mPtX6G11lb5TGuWUDcgHtjgno9hMHiW0lLYtg0SEz3dEw+QnS3bQYNkawSg2VBvPgCt9Xql1IfAFqAM2ArMA74AFimlHrfKFliHLADeVEqlIk/+M612diil3kfEowy4U2td7ubPYzB4hNRUeQg+cABOnYKgIE/3qAkxAtBscSkhjNb6UeDRs4rTcDKLR2tdDFxXSztPAE80sI8Gg+1JSqp8vWcPXHCB5/rS5GRZnlyHAOTmeq4vhgZh64xgBoPdOXAAXn8dSkoqy3bvbmUCkJ0NSkHXrmL6GAug2WAEwGA4D955Bx59FMLD5f534ADs2uXpXjUx2dnQsSP4+EBEhBGAZoSJBWQwNJCKCrj9dli3DtLSpOzoUUhIgG7dWqkAREfLayMAzQpjARgMDSQrC159FQICKgUAoH9/KCoSF1CrIjsbOnWS1xERMi3U0CwwAmAwNJDUVNnu2AHp6TBtGvj6wjXXwMmTsHKlRENQzpY+tkSysysHPSIiYPt2z/bH4DLGBWQwNJC9e2WbmCg+/0GD4MMPYfBg6NxZrIBjxzzbxyajvFyWP1e1AEw8oGaDEQCDoYE4LIDDh+X+17175T7H4vVWMRMyK0vMnYqKSgHo2BGKi2UxhMH2GAEwGBqIwwJw0KNH5euOHWXb4sdB164Vk+fyy+W9QwAc8YDMOECzwAiAwdBAUlOhT5/K963OAigthauugnbtKj+8QwAcs4EyMz3TN0ODMAJgMDQArcUCGDcOAgPBz6/yngeVAtCiLYB16+DIEXjmGfj6a7jzThgyRPbFWBHes2rEeTTYEDMLyGBoAEeOSLj7+Hjo21dm/XhVeYxqFQKwdCl4e8P48WIFvPBC5T5jATQrjAAYDA3AMQDcsyf85S9QWFh9v5+f3BNbtAto6VIYMUI+6NmEhEg4CCMAzQIjAAZDA1ixQrZ9+1Yf/K1Kx44t2AI4fBg2b4Y5c5zvV0rcQMYF1CwwYwAGg4ucOAHPPgtXXFH7zR/EDdRiLYA1a2QgZMKE2utERxsLoJlgBMBgcJGXX5YxgEfPDox+Fi06HM66deLnqivcqbEAmg2u5ATurZT6qcpfgVLqHqVUuFJqmVIqxdqGWfWVUmquUipVKZWolBpapa1ZVv0UpdSs2s9qMNiPxYth2DAYXiMLRnXOdgFtztpMu6fasSV7CwDHi4/zw4EfKKsoa8TeNhLr18vN39+/9jrR0SIAZjWw7alXALTWu7XWQ7TWQ4ALgULgE+ABYLnWOh5Ybr0HSRgfb/3NBl4GUEqFI0llLkISyTzqEA2Dwe6Ulorr25H2ti4cFkBFhbyft3keBSUFvLTxJV7a+BKd/tmJ0f8dzYCXBpByJKVxO+5Oyspg40YZAK6LmBhJkHDkSNP0y3DONNQFNB7Yq7XeD0wHFlrlC4EZ1uvpwBtaWIckj+8ETAKWaa2Paq3zgWXA5PP+BAZDE5CYKBEO6rv3gQhAebnEAyouK+b95PdRKN5Nepd7l97LxZ0vZsG0BaQcTWHhtoX1N2gXtm+XQEcXXVR3PcdUUOMGsj0NFYCZwLvW60itdTaAtbUWwRMDHKxyTIZVVlu5wWB71q2TbX0CUFZRRmHIVojeRG6u5os9X3Cs+BiPXvIohaWFBPgE8PY1b/PrC35NZFAkOSdzGr/z7mL9etnWJwCOxWBmINj2uDwNVCnlB0wDHqyvqpMyXUf52eeZjbiO6NKli6vdMxgalfXrJcxNXZdk6tFUrv/oejZlbYLZ8Pq2BWwr+4DottH8eeyfOVhwkIk9JhIVHAVAVHAUh041o5g5778PsbGS9aYujAXQbGiIBTAF2KK1djyy5FiuHaytY+JbBtC5ynGxQFYd5dXQWs/TWidorRMiHMsqDQYPojX8+KM8/dcW47+wtJAJb0xg79G9PDTkFcgZyKt7H+Pr1K+ZPXQ2Pl4+vDrtVX7e/+dnjokKjuLQyWYiAGvXSuTPe++tP9FB+/ayzc9v/H4ZzouGCMD1VLp/ABYDjpk8s4DPqpTfZM0GGgEct1xES4GJSqkwa/B3olVmMNiaN96Q+D/Tp9de5+nvn2b/8f18OvNT7hk9G374X46UHsTHy4fZF852ekyzEICyMpn3+utfy439N7+p/5jgYImP0WqSIjRfXBIApVQgcDnwcZXip4DLlVIp1r6nrPIvgTQgFZgP/A5Aa30U+Cuw0fqbY5UZDLbl6F
G4/36Z/TOrlonL+UX5PP3D01w/4HrGdh1LeDi02fsLQirimDlgJp3adnJ6XFRwFDknc6jQFY34Cc6TtWtl1W9ZmSyECAqq/xilJEzE8eON3z/DeeHSGIDWuhBof1bZEWRW0Nl1NXBnLe28BrzW8G4aDJ7hs89kNuNzz1UP+laVTVmbKCkv4bahtwFy/+sS40ffXT/x6sNtam07MiiS0opS8ovyaR/YvtZ6HiUxUbarVlUO7rqCEYBmgVkJbDDUwfLlsrArIaH2Oo4FXhdEVa6O7dwZDu1vh79P7QumHIPBtnYDJSZCeHj1mNeu0K6dcQE1A4wAGAy1oLUEfxs3ru5xz83Zm+kW2o2wgMp1jZ07w8GDtR8DzUgABg1qeIb70FBjATQDjAAYDLWwaxdkZ0vYe2dsz9nOhswNbMnewtBOQ6vt69JFji0trb192wtARYUs/ho0qOHHGgugWWAEwGCoheXLZVubAPzy41/K1M/8vTUEoHNnsSDqWgtlOwHQWka9HaSnS3L3cxEAYwE0C0w+AIOhFrZtE/+/s3VPuw7vIik36cz7CztdWG1/Z2vFy8GDEBfnvP0Q/xDa+LSxjwC8845MdVqwAHJyKpc/n6sFYATA9hgBMBhqIStLFr4646PkjwC4+6K7mbd5HgnR1UeJqwpAbSilJBzEKZuEg9iwQYIY3XyzvPf2lmmf/fs3vC2HAFRU1D59yuBxzC9jMNRCZmbtMx8/SP6AkZ1H8vzk58n5Q06NaZyuCADYbDFYcjIMGAB/+pNM+zx8WMoCAxveVmiouJROnnR7Nw3uwwiAwVALWVnOZz8eLjzMtpxtTOs1DYC2/m1r1GnbVh6CDxyo+xwdgzraxwJIToahQ+GJJ+CSS+Qmfq7xuBz5gs1AsK0xAmAwOOH0aYnp70wAtudsB+DC6Atr7qxCv36wdWvd52kf2J78IhvEzDl2TBSvXz/3tBcaKlszDmBrjAAYDE44ZHllnLmAEnNkdezAjgPrbGPMGNi0SULo10Z4m3COFNkgccrOnbJ1lwA4LAAjALbGCIDB4ATH9E1nFkBiTiIdgzoSGRxZZxtjxsg6AEcYfWeEB4RTWFpIcVnxefTWDSQny9bdAmBcQLbGCIDB4ARHKHunApCbWO/TP0gAOaXgu+9qr+MYPD5a5OG4iMnJ0KZN7XNWG4pxATULjAAYDE6oTQDKK8rZkbuDQZH1z40PC4OBA+sWgPCAcMAGArBrF/TuLVM/3YFxATULjAAYDE7IzARfX+jQoXr53vy9FJUVuSQAAKNHy3oqXSP3ndA+wCYWwP799Wf6agjGBdQsMAJgMDjBMQX07BhoH++UlBiDIwe71M6gQXDiBGRkON/vsACOFHpwIFhrEQB3pmBt0wb8/Y0FYHOMABgMTnC2BmDtwbU8vPJhZvSZwZCoIS614xhTdYyxno0tXEDHj8uCrc6d66/bEExAONvjakawUKXUh0qpXUqpnUqpi5VS4UqpZUqpFGsbZtVVSqm5SqlUpVSiUmpolXZmWfVTlFK15FcyGDzPgQM1BeCpH54iIjCC/07/L8rF8Mh9+8q2NgFwDAJ7dCqoY7WaOy0AMAHhmgGuWgD/Ar7WWvcBBgM7gQeA5VrreGC59R4keXy89TcbeBlAKRUOPApcBAwHHnWIhsFgJ376CVJSZBqnA6016zPWc3mPywltE+pyWx06QERE7QIQ5BuEr5evZy0AR7yKxhAAYwHYmnoFQCkVAowFFgBorU9rrY8B04GFVrWFwAzr9XTgDS2sA0KVUp2AScAyrfVRrXU+sAyY7NZPYzC4gVdeERf2TTdVlmUUZJBzKofh0cMb3F6/frULgFKK9oHtPSsAjWUBhIVBvg1WORtqxRULoDuQB/xXKbVVKfWqUioIiNRaZwNY245W/RigagisDKustvJqKKVmK6U2KaU25eXlNfgDGQznw8mT8Pbb8POfy/3LwYbMDQAMixnW4DYdAlDbTKDwAA+vBj5wQKY8RUW5t93w8Or5BQy2wxUB8AGGAi9rrS8ATlHp7nGGM+eorqO8eoHW87TWCVrrhIiICBe6ZzC4j40bZdbO9ddXL9+QuQFfL1+XZ/9UpV8/8YRkZzvfHx4Q7nkLICbG/WGbjQDYHld+8QwgQ2vtWND+ISIIOZZrB2ubW6V+1ekEsUBWHeUGg23YsUO2Z+dA2ZC1gSFRQ+pM8l4bffrIdvdu5/vbB9jABeRu9w+IAOTnS04Agy2pVwC01oeAg0qp3lbReCAZWAw4ZvLMAj6zXi8GbrJmA40AjlsuoqXARKVUmDX4O9EqMxhsw44dMnbZqVNlWX5RPhsyNzA8puH+f6gMKFeXBeDRdQAHDzaeAGhtZgLZGFczgt0FvK2U8gPSgFsQ8XhfKXUrcAC4zqr7JXAFkAoUWnXRWh9VSv0V2GjVm6O1NvahwVYkJ4vLpuosz+fXPU9haSG3D739nNp0iMmhWvK+eNQCKC2VVWruXgMAIgAgbqAwM+HPjrgkAFrrn4AEJ7tqpMvWWmvgzlraeQ14rSEdNBiaCq3FArjmmsqyY8XHeH7981zT9xoGRzXc/w+yHsrfv24LoKisiKLSIgJ8A87pHOfMTz9JGsghri1saxBVBaBHD/e3bzhvzEpgg8EiNxeOHKkeEXlp6lIKSgr4w8V/OOd2lRIroDYLwKOrgb//XrajRrm/7aoCYLAlRgAMBgvHXP2qOdA3ZG6gjU+bGknfG0qnTrVbAB4NCf399xIErrbkx+eDEQDbYwTAYLBISpJtVQtgQ9YGhnYaiq+373m1HRVVtwsIPBAOQmsRgKpLnt2JEQDbYwTAYLD4+GNxVTtiAJVVlLE5a/M5rf49m7pcQB4LCZ2aKn6v0aMbp33HwK8RANtiBMBgQGL/rFoFt95aOQNoR+4OisqKznn6Z1WiouQ+WFJSc59HQkJXVMCDD8qHveyyxjmHry+0bWsEwMYYATAYgNdek2RYs6rEqHWEf3CHADimgubk1NzXpIPAZWWynTMHPvoI/vEP6Nmz8c5nVgPbGiMAhlZPeTm88QZMmVI9BPS2nG20829H97Du530OhwA4GwcI9A3E39u/8QXgq68gJARmz4a//hVuvBHuvbdxz2kEwNYYATC0er7/XhLA3HBD9fKDBQfpGtrV5dj/deGIs+ZMAJRSjR8QLj8fbrtNXD7z50vy9xdfrJnyzN0YAbA1rq4ENhhaLIsWQWAgXHVV9fLMgkxi2rpnemS9q4EbOyT0s8/KydevlwTww4eLf76xCQ+H7dsb/zyGc8IIgKFVU1oKH34I06ZBUFD1fRkFGQztNNT5gQ2kY0d52K4zHlBjWgBbtsDAgZCQIH9NhbEAbI1xARlaNd9/D4cPS/z/qpwuP03uqVy3WQA+PrLWKi3N+f5GDwm9e3dlWNKmxCEAtSVDMHgUIwCGVsmuXbBtGyxeLHF6Jk6svj/7RDYaTWxIrNvOWVdmsEYNCFdcDOnp0Lt3/XXdTXi4zDw6ebLpz22oF+MCMrQ6Skth0iRJ0tK2LYwf79z9AxAT4r4QCf36wbx5MgX/7Nwrj
pDQWmu3DDpXIzVVTuopCwDECmiKMQdDgzAWgKHVsWiR5EApKIDMzJqDvwCZJzIB3G4BFBZWpuCtSvuA9pSUl1BUVuS2851h1y7ZeloADLbDCIChVaE1PP00DBgAjz0Gfn7OBeCMBeCmMQCojDHkzA3UqKuBHQLQq5f7264PIwC2xiUBUErtU0ptV0r9pJTaZJWFK6WWKaVSrG2YVa6UUnOVUqlKqUSl1NAq7cyy6qcopWbVdj6DobHYtUti/t95JzzySGU63LPJLMgk0DeQ0Dahbjt3376yrUsAGmUcYNcuyfh1tp+rKTACYGsaYgFcprUeorV2zCF7AFiutY4HllOZKH4KEG/9zQZeBhEM4FHgImA48KhDNAyGpmLFCtlOnCjTMiMjndfLOJFBTNsYt/rjw8NlQZgzAWjUkNDJyZ5x/4ARAJtzPi6g6cBC6/VCYEaV8je0sA4ItZLGTwKWaa2Paq3zgWXA5PM4v8HQYFasgK5dJQR+XWQUZLjV/++gb996XEDuXguweTNs3QqXXuredl3FRAS1Na4KgAa+UUptVkrNtsoirWTvWNuOVnkMcLDKsRlWWW3lBkOTUF4OK1fCuHH1R0DYd2wfndu5P09u374yJf/safGOkNBuHwN47DG5Cd/pNEtr4xMQIH9GAGyJq9NAR2mts5RSHYFlSqldddR19q+l6yivfrAIzGyALl26uNg9g6F+tm2TkDjja2Syrk5GQQZZJ7K4sNOFbu9Dr14y/fTwYYiIqCyPCJI3OaechAs9V1JT4fPPJfBbSIj72m0oZjWwbXHJAtBaZ1nbXOATxIefY7l2sLa5VvUMoOqjUyyQVUf52eeap7VO0FonRFT9DzEYzhOH/7++8PdrD64F4OLYi93eh/h42aakVC/38/YjrE0YOSfdKABbtsh26lT3tXkuGAGwLfUKgFIqSCnV1vEamAgkAYsBx0yeWcBn1uvFwE3WbKARwHHLRbQUmKiUCrMGfydaZQZDk7BihYyFVg357Iy1GWsJ8AlgSNQQt/fBMRNzz56a+yKDI91rASQlyYozTw0AOzACYFtccQFFAp9YsyF8gHe01l8rpTYC7yulbgUOANdZ9b8ErgBSgULgFgCt9VGl1F+BjVa9OVprc1UYmoTSUlizpnrCl9pYm7GWhOiE884D7Iy4OIkL5EwAooKj3CsAO3ZIspc2bdzX5rkQHl7T5DHYgnoFQGudBgx2Un4EqOFN1VprwOmIk9b6NeC1hnfTYDg/Nm6EU6dq9/8/t/Y5OgR24Lr+17E5azP3jmicRCk+PtC9ey0WQFAkW7K3uO9kSUnQv7/72jtXjAVgW0wsIEOrYPlymflzySU19/1r3b+475v78PHy4Zu0byitKOWybo2UJxdxAzl7II4McpML6MsvZZpRair84hfn3975YgTAtphQEIYWT2kp/Pe/MGIEtG9ffV96fjr3fXMfV/W6io5BHXkr8S1uGXILk3pMarT+OASgoqJ6eWRwJAUlBRSVnkc8IK0ls/2MGXKCAQPOr7PuIDxcIpIWNUKcI8N5YSwAQ4tn4UKJhvzvf9fc92Hyh1ToCuZOmUvWiSw+2PEBT014yv0ROasQHy/3wsxM6FxlXlxkkCxLzjmVQ1xo3Lk1npxcPe2YXVxAIFaAs7gbBo9hBMDQojl9WqbBDx8OV1whZT8c+IFtOdsYEjWED3d+SEJ0AnGhccSFxjGy88hG75NjFfL+/dUFICpYEgfnnDwPAfj2W9necQd8+mnlvFNPYgTAthgBMLRoFiyQgG/z5skYwKtbXuX2JbcD4K28KdflPDX+qSbtU6wVYSIjo3p5ZHClBXDOfPut3PRfeklMHm/vc2/LXZh4QLbFjAEYWizFxfDEEzBqlAR/25C5gd98/hsm95xM2v+kcUncJXgrb67td22T9sshAJmZ1cvPuIDOdTFYaSmsWgUTJsh7O9z8oXLgJS/Ps/0w1MBYAIYWy733yk32zTfl6f/r1K/RWvPuz94ltE0oX93wFQePH6RHeI8m7VdICAQH17QAOgZJOK1ztgC+/VZSL062WYxFR0iXffs82g1DTYwFYGiRLFoE//kP/N//VYZ+2Ji1kT4d+pyJ8e/n7dfkN38QMYqJqSkA/j7+hLYJPXcL4O23JfCb3QQgLEysgNRUT/fEcBZGAAwtki++kJAPTzwh77XWbMzcSEJ0Qt0HNhGxsTVdQCADwdknsxve4KlTMuh73XWS5sxu9OxpBMCGGAEwtEj275d7jsMNnnkik5xTOQyLHubZjlnExta0AAAGRw5mzf41lFWUNazBL74QEfjlL93TQXdjBMCWGAEwtEgOHKh0PQNszJQQVMNi7CEAMTGQlSU5Cqryi/6/IK8wj5XpKxvW4OrVMrAwerT7OulOevaUH6WkxNM9MVTBCIChxVFWJk/XXbtWlm3K2oSPlw+DI2uEtfIIsbFy88/NrV4+JX4Kbf3a8t6O9xrW4I8/wkUX2Wfmz9n07CmrlNPTPd0TQxWMABhaHI4n66oC8P3B7xkcOZgA3wDPdawKta0FaOPThhl9ZvDRzo9cdwOdPAmJiTCy8RexnTM9e8rWuIFshREAQ4tj/37ZOgTg1OlTrD24lvHd6kkF1oQ4FsQ6Gwe4Mv5KjhUfY2v2Vtca27hR4v5c7P4ENm7DCIAtMQJgaHE4BMAxBvDdge8orShlQvcJnuvUWdRmAQBcEichS9fsX+NaY2slgxkjRrihZ41E+/bQrp0RAJvhsgAopbyVUluVUp9b77sppdYrpVKUUu8ppfyscn/rfaq1P65KGw9a5buVUo0XbtHQqjlbAL5N+xZ/b39Gd7HPAGmHDpKsy9ni2KjgKHq178Xq/avrbygnB954Q7LNh4W5v6PuQikJfORs7qvBYzTEArgb2Fnl/dPAc1rreCAfuNUqvxXI11r3BJ6z6qGU6gfMBPoDk4GXlFI2HbEyNGcOHJCE64GB8v7btG8Z1WWUbfz/IDf/sLDaw+OM7TKW7w58R3lFufMKIKEfxo2Dgwedhzq1Gx071hz1NngUlwRAKRULXAm8ar1XwDjgQ6vKQmCG9Xq69R5r/3ir/nRgkda6RGudjqSMHO6OD2EwVGX//kr/f0ZBBttytnF598s92yknhIfDkSPO910SdwnHio+xPXd77Q2sWCHhn199tfZUZ3bCCIDtcNUCeB74I+BIYdEeOKa1dkxTyAAccV5jgIMA1v7jVv0z5U6OMRjcRnp6pftnye4lAEzvPd2DPXJOXYmyxnQZA8CPB3+svYH33pPAQldf3Qi9awQiIkxAOJtRrwAopaYCuVrrzVWLnVTV9eyr65iq55utlNqklNqUZy4WQwPZt0/y7TrGQxfvWUzP8J706dDHo/1yRl0C0KVdFzoGdWRj1kbnFUpK4JNPJPOXp5O+u0rHjnD8uFkMZiNcsQBGAdOUUvuARYjr53kgVCnliCYaC2RZrzOAzgDW/nbA0arlTo45g9Z6ntY6QWudEBER0eAPZGjdfPKJbK+5Bk6UnGBF+gqm9ZrWqBm+zpW6BEApxfCY4WzI3OC8wsKF
cOyYPXL+ukpHiXZqrAD7UK8AaK0f1FrHaq3jkEHcFVrrG4CVgCOQ+izgM+v1Yus91v4VWmttlc+0Zgl1A+KBWq5ug+Hc+PhjGDwYevQQ98np8tNcEX+Fp7vllPpypQ+PHs7OvJ0UlBRU37F+Pdx1lwwAT2pGk+kcAmDGAWzD+awD+D/gPqVUKuLjX2CVLwDaW+X3AQ8AaK13AO8DycDXwJ1a6zqmOBgMDSM7G374QZ7+Afbm7wWgX0Q/D/aqdsLD5SH+7HhADobHDEej2Zy1ufqOZ56RKUTvv2/f0A/OMBaA7WhQQhit9SpglfU6DSezeLTWxcB1tRz/BPBEQztpMLjC669LuBmHVyQtP40An4AzuXbthiNT4rFjlUmzquIIXLchcwOXdbuscsf27RL0zdlBdsbh0jUWgG0wGcEMzZ65cyUA3Pz5cOml0Lu3lO/N30v3sO629P9D9VS5zu7l4QHhxIXGsS1nW2VhURHs3WvfsM91YVxAtsMIgKFZk5EB998vAgDw5JOV+9Ly0+ge1t0zHXMBV3Kld2nXhYyCKvEidu2SuD/9+zdu5xqDkBBJVmMEwDaYWECGZs3cuXI/vPtuuOSSyinxWmv2Ht2GGt3XAAAgAElEQVTb7AUgum00mSeqhE9ISpLtgAGN17HGQimzGMxmGAvA0GwpLIR58+Daa+H556vvyyvM41TpKXqENX3OX1dxRQBi2saQWZCJ1lpcWTt2yFO0I7pmc8MsBrMVxgIwNFsSE2Vd0fXX19yXlp8G0OwtgJi2MZSUl5BfnC8FSUnQpw/4+jZ+BxsDYwHYCiMAhmaLwxsyaFDNfXuPyhTQHuH2tQBCQ2VbpwCESLSUzIJMmeKUmNg8/f8OjADYCuMCMjRbtm+HoCCIi6u5b2/+XhSKuFAnO22Cj4+EyK/PAgBJaj9wzzGJ/HnZZbUfYHciIowA2AgjAIZmS1KSPAx7nWXHlpaX8mbimwyJGkIbH3vHyalvNXA1C+D5L+WAG25oot41Ah06yFTWoiIIsE947taKcQEZmi3bt8PAgTXL522eR+rRVB4f93jTd6qB1CcA0W2jASjesQ0+/RR++9vKRAfNEceCh9riYBuaFFsLQHFx7cvkDa2b3FyZTOJsNuTz659nTJcxTOk5pek71kDCw+Hw4dr3+3n7Ee3Xgalz3hV/0V13NV3nGgMjALbC1gKwY4cskzcYzmbLFtmeLQCl5aWk5adxadyltl0BXJXISCcu8YoKKKgMAPe/G33pmnoYFiyAKHuGtXCZDh1kW5fqGZoMWwsASNY7gwHkvnjddZL+dupUGQAeMqR6nYMFB6nQFbYe/K1KVBQcOiQTfM7w8styo3z1VQBG7C9nb6c2zSfxS10YC8BWGAEwNBt27YIPP5R74wMPwMaNlQ+UDvYd2wdAt9BuTd/BcyAyUlydJ05UKfzhB7nwb78dvv2WbtnFJHWoQOsa+ZOaH0YAbIXtBcAR48Vg2GBlj5g3Dx5/XCyBs3EIQHOyAECsgDPs2CGx/n184Msvicg5wU/hp6vHBGquOATAuIBsge0FwFgABgfr10s8MUe0T2ek56fjrbzp3K5z7ZVsRA0BKCuD3bvhwgtlhdu77+JVoUmOgLUZaz3WT7fh5wdt2xoLwCa4khO4jVJqg1Jqm1Jqh1LqL1Z5N6XUeqVUilLqPaWUn1Xub71PtfbHVWnrQat8t1LKpVRGRgAMDjZsgGHDas77r8q+4/uIDYnFx6t5LHGpIQBpaZIzt18/uOiiMzv2RvmxLmOdZzrpbtq3NwJgE1yxAEqAcVrrwcAQYLJSagTwNPCc1joeyAduterfCuRrrXsCz1n1UEr1Q1JK9gcmAy8ppepNZ2QEwACybigxUe6JdZGen95s3D8gYwAAOTlWQXKybPv1g+FWviUvL0IGJrQMCwBEAIwLyBa4khNYa61PWm99rT+NJIf/0CpfCMywXk+33mPtH69kPt50YJHWukRrnQ6k4iSj2NkYATCATPssK6u8J9bGvmP76BbWPAaAQe6F3t5VLACHAPTtW6l2PXuS0G0UW7K3UFJW4pF+upUOHYwFYBNcGgNQSnkrpX4CcoFlwF7gmNbaMUSbAcRYr2OAgwDW/uNIzuAz5U6OqRUzCGwAePFFWQA7enTtdUrKSsg6kUVcu7gm69f54uUlVsAZAdixA7p0ET95796y+KtfP0bEjuB0+enq2cGaK8YFZBtccpRayduHKKVCgU8AJ/MvcMxRc7b6RtdRXg2l1Gxgtry70FgABn76Cd59Fx58sO40uPuP70ejm5ULCCrXAjB3Lnz0EVxxhezw8oJFiyAmhm6h8iSUWZDpwmOTzTEuINvQoFlAWutjSFL4EUCoUsohILFAlvU6A+gMYO1vBxytWu7kmKrnmKe1TtBaJ4BxAbV2KirgvvskdPL//m/ddddnrAdgcNTgJuiZ+4iMhOB92yWt2fjx8NJLlTsnT4aBA+kYJPl0c0+1gEiaHTrISmfzz+1xXJkFFGE9+aOUCgAmADuBlcC1VrVZwGfW68XWe6z9K7SsYFkMzLRmCXUD4oEN9Z3fXCOtm/nzYeVKeOYZCAuru+7q/asJbRPKwI5OIsTZmKgoSDj4qaRMfO01p+EeOgTKire8whaQTcssBrMNrriAOgELrRk7XsD7WuvPlVLJwCKl1OPAVmCBVX8B8KZSKhV58p8JoLXeoZR6H0gGyoA7LddSnRgBaL0cPw5//KOsibrttvrrr9m/hjFdxuDtVe/kMlsRFQWXnViMHnERyjEt6Cz8ffxp59+uZVgAVQWgucc2aubUKwBa60TgAiflaTiZxaO1Lgauq6WtJ4AnGtJBMwjcennlFfEU/P3v8nBcF9knskk5msJvLvxN03TOjfRok0kCmzg54W8E11GvY1DHliEAjvgdJjewxzErgQ22pKREEr1PmABDh9Zff/X+1QBcEndJI/fM/Qw59DUAGUOuqrNeRFBEy3ABxcbKNqMFhLZo5hgBMNiSr76C7GwZAHaFNfvX0NavLUOihtRf2WbE5m7hOCHs9e9XZ70WYwF06SLb/fs92w+DEQCDPfnmGwn3PH68a/VX71/NqC6jmk0IiKqEHthGIoPIyKzbz9UxsCN5p1qABRAYKLmBjQB4HCMABluybBlceqnEDquPvFN5JOclc0nX5uf+oaICv12JJKrB9XpEIoIiOFx4mApd0TR9a0y6djUCYANsLwBmELj1sW8fpKbC5Ze7Vv+7A98BMLbr2MbrVGOxfz/qxAn2t6tfADoGdaRcl5NflN80fWtMjADYAtsLgLEAWh/Llsm2qgDsP7af2UtmU1haWKP+6n2rCfAJICE6oYl66Ea2SWiHIzGD6rcAAiOAFrIYrGtXOHDgrFRohqbGCIDBVhw5IsleevWqnvDlHz/+g/lb5vPFni8A0FpTUFLA7sO7eTfpXUZ3GY2ftwv+IruxbRsoRXHPAS5ZANCCBKCoyEwF9TBGAAy24je/kdk/b71VOfe/uKyYt7e/DcDnKZ8D8Gbim7R7qh39X+qPUop/Tf6Xp7p87mgNq1ZBfDw
RcUEcPFj3A7FDAFrEVNCuXWVr3EAexfZTJowAtB5OnIBPP4V775XEL6XlpUx5ewrHS46TX5xPj7AefJXyFeUV5byz/R1i2sZwXb/rmH3hbPpGOItPaHNeeUUE4LnniC2DU6dk4Vu7ds6rRwS1MBcQiAAMG+bZvrRibG8BmEHg1sMPP0B5OUyycsUt3LaQ5enL2X14N3069GHOZXPIK8xjefpyVqSvYOaAmTw3+bnmefPPyJBFDpMmwf/8j0tro87EA2oJU0GNBWALjAVgsA2rVoGvL4wcKbH956yew0UxF7Hq5lWUVZRRWl6Kv7c/sz6dRWlFKTP6zKi3TdvyyCOidv/5D3h5VROA/v2dH+Lj5UP7gPYcOnnIeYXmRGio5DzYt8/TPWnV2NoCUMoIQGti5UpJghUYCEv2LOFgwUEeu/Qx2vi0IdgvmLCAMF6f8To5J3OICIzg4tiLPd3lc2PjRli4EH7/e4iLA1yPjtCpbSeyT2Y3bv+aAqWge3dIT/d0T1o1trYAjAC0HgoKYPNmSfoCIgDhAeFc3r36YoCZA2YS5BsE0OyifgKywGHqVLnj//nPZ4prJIevhU7BLUQAQARg1y5P96JVYwTAYAs+/lg8IhMnQnlFOV+mfMkV8Vc4vclf1bvuoGm25tZb5YN+8w2Eh58pbtNG8h1k13Nv79S2E7sOt5CbZvfuEvSpokKynxmaHFt/60qZQeDWwssvQ58+kvN3XcY6Dhce5qpezfhG74zVq2HNGnj0Ucn3exZnUkPWQXRwNIdOHkK3hAVU3btDcXH9H9rQaNheAIwF0PLZvBk2bIA77pDf/PM9n+Pj5cOkHpM83TX38te/Sv7HWrLbuCIAndp2orSilCNFLSCbVvfusk1L82w/WjGupITsrJRaqZTaqZTaoZS62yoPV0otU0qlWNswq1wppeYqpVKVUolKqaFV2ppl1U9RSs2q7ZyV9Y0AtHQ2bYLp02Xu+003SdmSPUsY23Us7drUMiG+OXL8OKxYAbNnQ0CA0yqdOrngAgruBEgCnGaPEQCP44oFUAbcr7XuiySDv1Mp1Q94AFiutY4HllvvAaYg+X7jgdnAyyCCATwKXIRkEnvUIRq1YQSg5XP77fI7r14tMwPT89PZkbej5bl/Nm+WZb6jRtVaxWEB1OXd6dTWEoCWMBDctav8+EYAPEa9AqC1ztZab7Fen0ASwscA04GFVrWFgGNS9nTgDS2sA0KVUp2AScAyrfVRrXU+sAyYXNe5jQC0bE6fhqQkuO7G4+xvs5jNWZtZvHsxAFN7TfVw79zMhg2yrWPVa1QUFBbCyZO1N9OiLAB/f+jc2QiAB2nQLCClVBySH3g9EKm1zgYRCaVUR6taDHCwymEZVllt5WefYzZiOeDrO9gMArdg9uyRQf517X/Lc4sWAaBQ9OnQh57hPT3cOzezYQP07Flt5s/ZdJJ7O9nZskbKaZ2WZAGAuIGMAHgMlweBlVLBwEfAPVrrgrqqOinTdZRXL9B6ntY6QWud4OvrYyyAFkxSEhC9kbUnF3HnsDt5Y8YbTOg+gftGuJgHsjmxYQMMH15nFVfWAgT6BhLiH9IyLACQZc9btkgYWEOT45IFoJTyRW7+b2utP7aKc5RSnayn/06AI0JVBtC5yuGxQJZVfulZ5avqPq9xAbVkkpKAcY8QERjBk+OfJMQ/hBsH3+jpbrmH7dshOVlucIGBkJlZrwA4LIBWtRjst7+FF1+UkBhVFsYZmgZXZgEpYAGwU2v9bJVdiwHHTJ5ZwGdVym+yZgONAI5brqKlwESlVJg1+DvRKqvj3EYAWiJaQ34+bNtRhOq2gpsG30SIf4inu+U+vv0WBg2CmTNh4EARAR8fmDChzsMcFoAri8FajAAMGCAB8f79b1kTYGhSXHEBjQJuBMYppX6y/q4AngIuV0qlAJdb7wG+BNKAVGA+8DsArfVR4K/ARutvjlVWK0YAWiYLF0pO8OV71qG9T3NZ3GWe7pL70BoeeEBmuGzdKtltbr4Zfvqp9ihvFuHhEgyvPgsgNiSW9Pz0lrEYDOD++yEnBz780NM9aXXU6wLSWn+Pc/89wHgn9TVwZy1tvQa85mrnzErglsnixRINoajjKhRejO4y2tNdch+ffCJTPl9/HYYMkT8XUcq1xWAXx17MW4lvkX4sne5h3c+vv3Zg/HiIjxc30K9+5enetCrMSmBDk1JRIWGfp0+HiOGr6Bc2tGUt+HrxRYnweY43spiY+iMkj+s2DoAV6SvO6Ry2w8tLxgJ++EHGTgxNhhEAQ5OybZv4/ydencfxtuuY0udST3fJfaSlyWrfX/8avM8tUumQIeI5qsu707t9bzoFd2o5AgAwaxb4+YnlZGgyjAAYmpQ3vt0EI55n3skr8FJe3DDoBk93yX28/rpctDfffM5NDB0qUSPqmhqvlGJct3GsSF/RcsYB2reHcePgs8/qVj+DWzECYGgy1qyBl/b/DibfS/LRbXxw3QcMiXLdR25rTp2SHL+TJ8vq1nPkwgtlu3lz3fXGdRtHzqkctuVsO+dz2Y7p02HvXti509M9aTXYXgDMIHDLICUFxk0s5nT4T1wT+3sO/eFQywr3MHcu5ObCww+fVzMDBognpD4BmNZ7Gj5ePryd+PZ5nc9WTJsm288+q7uewW3YXgCMBdC8SUmR3CePPgo+sdvAu5QbRl5GeEDtIRGaHSdOwDPPwFVXwcXnl6bSz0+WDtQnAB0CO3Bl/JW8tf0tyipayFNSdLTESlq0yLiBmggjAIZG4+RJyfA1aRK8+y6MmSkB0S6KucjDPXMzn34Kx47B//2fW5q78MLK4KF1cdPgmzh08hDfpn3rlvPagt/8BhIT5anB0OgYATA0CkVFcM89sH+/rPC/9loIHbCe6LbRxITUiAHYvHnnHVn4NXKkW5q7+GLRk6SkuutdGX8l7fzb8cGOD9xyXltw440yF/aJJ4z/twkwAmBwK+Xlsp6nSxdYsADuvVcWwy56r5ytuesYHlN3PJxmR14eLFsG118vF6wbGG8tr/y2ngd7fx9/rux1JYv3LKa8otwt5/Y4fn6ykvq772Rx2LBh8OSTnu5Vi8X2AmAeApoX994rqR0HDJAp8f/4B+w6vIue/+7J3vy9TOhWdzycZseiRaJ611/vtiY7d5Z73/Ll9ded0XsGhwsP8+PBH912fo9z553w8cfQq5eMrzz2WP2r4wznhO0FwFgAzYekJFkIO3u23Pwvu0x+wye/e5LDhYf54LoPuGPYHZ7upvvQGubPF6f9oEFubXrCBMmSVt/1P7nnZPy8/fh016duPb9HUQquvhqWLhUzSCl5snjzTfEtGtyG7QVAa3nAMtgbreG++yAkRCx2hzck91Qu7+14j5sH38y1/a7FS9n6kmsY69dL6ILZs93e9PjxMojuSCRWG2392zK552Tmb5nPzrwWOH8+NlZMyk8/laTRf/yjvH7oITNTyA3Y+r/RcRMxVoD90FpmPvbtC336iJ9/2TL4y19kUSdAYk4id3xxB6fLT3PncKfxAZs38+ZBcLBb3T8Oxo6V7d
q19dd9YcoLBPgGMOO9GRSXtcCQyk8/DRs3ygyhF16An/1MBonXrPF0z5o9thYAB2YcwH78+c8y6zEyUmasPPIIJCSI+xbg0MlDjHh1BJ/v+Zz7RtxHnw59PNthd3PsmPj/f/nL2vM3ngcRETKxaOPG+ut2bteZV696lT1H9rBk9xK398Xj+PvLxfWPf8i4wJgx0LEjPPVU/cca6sTWAmAsAHvy3Xfwt7/B7bfDypUS3XPqVPjvfytjoD279llKykvYfsd2/jnpnx7tb6Pw9tvij24E94+DYcNcEwCAK+KvIDYklte3vd5o/fE4wcHiclu5UuYYf/21rMA2N4hzxpWMYK8ppXKVUklVysKVUsuUUinWNswqV0qpuUqpVKVUolJqaJVjZln1U5RSs5yd62zyKvaAT5H5fW2C1vL35JPyhPr88yLSffrAkiUy8wfgSOERXtr4EjMHzKRX+16e7XRjoLW4f4YOrQze0wgMGwbp6XD4cP11vb28uXHQjSxNXdpy8gU7w89PLrq77pK4S3ffLWVTp5oxgXPAFQvgdWDyWWUPAMu11vHAcus9wBQg3vqbDbwMIhjAo8BFwHDgUYdo1EWJPgEBR40A2ICKCrjiCrG8v/5aJmUEBjqvO2f1HIrKivjT6D81bSebit27ZbXqLbc06mmGDZOtq1bAzUNuBiBhfgL3fn0vi5IWNU7H7EBwMHz5pUwXveUW+OIL+OorIwINpF4B0FqvAc5O3TgdWGi9XgjMqFL+hhbWAaFWwvhJwDKt9VGtdT6wjJqi4hwjALbg3/+WG3///jBiBPzudzXrlJaXsjJ9JS9ufJHbh95O/451p0Bstnz+uWynT2/U01x4oTzsuioAvdr3YuWslfTp0IdXNr/C9R9dz9z1cxu1jx7FMV30lVdk5eHdd0tKtUcf9XTPmg3nOgYQaSV6x9p2tMpjgINV6mVYZbWV10/AUTMI7GEyM2Vx5tSp4n5duxbanZXEq7yinJGvjWTcG+No16Ydf73sr57pbFPw+ecwePB5hX12hZAQWV7w1VeuHzOm6xiW37Sck386ydV9ruaer+/hm70tPK6Or6/MSEhNlddz5sgFu3ixXKxmHnmtuHsQ2NlaeF1Hec0GlJqtlNqklNoEGAvABjz5pMzE+ve/a4928PHOj9mUtYk5l85h+x3biQiKaNpONhX5+fD996KGTcANN8C6deJ1agheyos3r36TfhH9uOmTm8g9lds4HbQLd9wh6ebS0yWs9NNPi4U2cqT8VidPyqD9998bQajCuQpAjuXawdo6rq4MoOpjUSyQVUd5DbTW87TWCVrrBMAIgIdJT5fFrrfeKqlunVGhK3j6h6eJD4/nT2P+RHTb6CbtY5ORnAwzZsgNxBG7vpH51a9kZtW5ZEoM8gvinZ+9w7HiYwyfP5yHVjzEIysfYe/RvW7vp8dRSswlX19ZKJabK6vonn1WFqiEhckClTFjRBwMgta63j8gDkiq8v7vwAPW6weAZ6zXVwJfIU/8I4ANVnk4kA6EWX/pQHi95+2EZuQzetMmbfAAhYVaX3ih1sHBWh84UHP/4VOH9TPfP6N7/KuH5jH0/M3zm76Tjc3GjVq3b6/1mDFa+/hoHRam9YIFTdqFK6/UOiJCfoNDh7QuKWnY8SvTV+rh84drHkOrx5T2mePTMn+r2vj+e60ffFDru+/Wetw4rYOCtM7M9HSvGhVgk3bl3l5vBXgXyAZKkSf5W4H2yOyfFGsbbtVVwIvAXmA7kFClnV8DqdbfLS51Llppxj+g165tku/MUIWKCq1vuEFrpbT+7LOa+1elr9IRz0RoHkOP/e9Y/e72d3VFRUXTd7SxufJKrUNCtB44UOtf/1rr3Nwm78K2bdKFiAj5PX71q3Nr53TZaZ1VkKXHLRynAx4P0KlHUt3b0eZAaqrWfn5ad+ig9S23aH30qKd71Ci4TQA8+ecd7aOZOlt/9537vyBD3Tz9tFwdjz9ec9/ytOXaZ46P7v3v3npL1pam71xTsXWrfAlz5ni6J3rVKq379tX60kulSytWnHtbGcczdMjfQnTss7H6pk9u0tknst3X0ebA0qWior6+WnfpovXDD2udkuLpXrmVFiEAfjFtNNdde14Xu4PSUq3Lys6/nZZMebnWRUVaf/GFPGn+/OdiCVQlKSdJhz0Vpvu/2F/nF+V7pqONxaZN4ipIT5cLZswYefS20VNiYaHW3bpp3bGj1i+8UPP3cZWlqUv1FW9foQOfCNRxz8fpy16/TA94aYCe/u50/cOBH9zbabuydq3WI0Zo7eUl7r3775d/gNJS+WdoxrgqAErq2pPAzsG6aNxFLL1hORMnunZMRQV88gmkpYGPD5w+LetFNmyQNSLdukF4uKxeHTlSsi+Fhcm0xqoLm8rLwcvLbTk+mgWzZ8Nrr8n31revTJgICqrcvzJ9JVe/dzUBvgH8+Osf6RbWzXOddTe7d8Po0bLs1ttbFjwkJkoI4l/9ytO9q0ZiIvzP/0i46LlzZU3Gjh1yrSckNKytjZkb+eXHvyTIN4huYd1Ye3AtOady+Pvlf+cPI//QOB/Abhw6JMGs5s+XbGR5efJl9u4tsZ5++1tZAdmMUEpt1o6JNHXVs7MAtO0aqk9O6cbnV23lyitr7k9Kkn+AdeuguBg6dZLfbd266vWGDJHY9D4+IgxHjkhIkSNHqtfr2VN+77Fj4Re/qC4Y48fDjz+KwIwZA927y0QQP7/G+/xNwYEDcP/9Enjsn/+U1fUhIfD3v8vaGgdp+WkMfWUoMSExfHXDV3Rp16X2RpsThw/DpZfKHbR9e/joI5l4/847cOWV8PLLnu6hU7SW2Y0rVki3MzOlfP58uO22c2/31OlT3PLZLXyQ/AFvXv0mV/e5msW7FzOt9zTKKsrIOpFF34i+7vkQdmPJEok22r+/rDT+4QdZ+NKmjXypf/yjhKe2M1OnwsSJqLvvdkkAPO7mqeuvXdf2mnu66E8+EbMmJ0frGTO07t5d6+hocVMEB2s9ebLWM2dqPXy41p07a/3qq1oXFGh95Igc44yKCq0TE7V+6y2tX3pJfN2TJmntiHjTq5fWt96q9ZQpWvfpI2XR0Vr36FFZ57LLKr0Dx4833EzzNIcOaR0fLxYwyDhncXHNetknsvUF/7lAt/tbO512NK3pO9qYPPigXEiPP6717t2e7k2DyM7WumdPrS+/XOs339R64kTxZMTFaf3735+7e6iotEiPfm205jF06FOhmsfQV7x9hR740kDtM8dHr9m3xr0fxM7s3Kn1zTfLF9u2rQyOLVmidb4N3Z+lpVp7e2v90EMtYwwgvFtHzYPB+oMP5GK++moZwJ85U27Ojzyi9eHD7v0Oly6V3zsrq7KsokKm4JWWyvv8fK1fe03GkKKitB4/Xr7Jp56SukuXan3JJdLf++/Xes2ayuM8NVHmzW1v6pfWv6L/+1+thw6Vvg4erHVgoMyS+/FHmRl3ouSE/irlK33TJzfpOavm6Bc3vKgjnonQAY8H6CW7l3im843F0aPyT33ddZ7uiVvIz5dr13E9/vOf5
369lZSV6L+u/qset3Ccvn/p/ZrH0L5zfHWX57roiGci9Fvb3tIlZQ2cj9qcSUur/GJBbkTDhmn9u9/V/pTZ1OzbJ32bP79ljAFE9ojRuTdlEfvfEiI7+LF5s6zh+OMfPd0zYdMmCT+yd6/4zFetggsuEPdSTIyMKaSny3jC734n1mWfPpLPYvhwCa725ZfQo4e4n5YuFbfUsGHSnoPT5afRWuPv4++0H5uzNhMZHElRTiyvvy5jGqdPi+U6eDCkZOVw197ulOnT8EIykeFB5Bw7jo+PeDhGjYLC0kL+9v3f+Hjnx2g07fzbcbzkOAAXx17Mq9NepV9Ev8b/UpuKL7+UHy8tDbZudXtKR09SUSHr1ZYskcV7jz8urs3zGc96YcML9AjrQfew7sx4bwa7Du+ie1h3Hhn7CD/r9zNyTuYQFRxFkF9Q/Y01V7QWn+n+/fLlbtsm//Rt28Jbb4nvuLxcfKieYM0auOQS+OYb1MSJzX8MIDq+q87+1QH4+yHGDI0kIUF8046Y83aiokLCI3/2mYxFvPKKDCwfOybXxfbtMo5w/Dhs3izXUpcucj2Fh8tg9BdfAMGHwLuEfr3bkDCiiP+9J5hR/5nAKb80Lo/+BacTp7G/7Qf4BB4numgyh36cQMqEgQTpThS+uJrS6DXQcTv0+gK0glV/gR5LYegCvLU/HQIiyT29D+0kEkeATwB3Db+LsV3HMqH7BNLy0ygoKWB4zHBUSxgNLyuTVaILFkhku969RZUntLBE9ciY2DvvwH/+I8HkZsyQsauXX5YkPiNGwJQpMvzR0P+nCl3BVylf8ecVf2ZbzrYz5T5ePlzS9RKm9Z5G6tFU7hp+F/Ht4937wexGcrKoa2KiDDJ6e0uiDE+MF7z5ptpNuogAABLbSURBVKTN3LUL1adP8xeAuL7d9f6Z6cw8ksy7c10beNJa82Hyh4zuMppObTtV25eYk0jq0VSm9Z6Gj5dPY3TZKXl5ktv66p+V0sbPl+JiWLgQ/j63gC4/n8vmNR0o2DKJS+7/Dz9UPEuZrhL9rswftMI7dQblPZaA3ynU6WD0iU7QPgXvknDKOQ3ep8G7FJTGx8uHi2NGkVmQTdrxPQDcPOjX9GzfnYdXPswdCXcwtuvYGv0c2Xkknds1boAzj5CVJabYY4/JDxEZKQmM77mn+Y/i10N5uURDePhhKCmRBw0vLxGF06flHvWrX0nYnLw8mQz129/KGGh5ORQW1p7wrEJXsGb/GpanLadraFf2Ht3L29vf5mCBxH0cFDmIh8Y8xOr9q3lg9APEhsRyuPAwpeWlNf43mzWnTkkAuooKOHoU3nhDvuTYWBg4EK65Rp4Mb71VzP7G4vHH5YcuLEQFBjZ/Aeg1sJdOuTaF72/5nlFdRtVZ97Wtr7Ht0DZOlZ5iwdYF9G7fm18O/CUf7fyI8IBwTp4+yaYsiS+XEJ3Aq1e9yuCowTXaOV58nE93fcqU+Cl0DOpIaXkpuw7vYkDHAfU+BaccSSHAN4DYkErlT8pNIvdULok5iTy4/EEm9ZhEgG8AK9NXUlRWREFJQbU2bhlyCyM7j6S4rJj9+72Yv3QNv+p3G3ddOYH3Pz1BzwmrmdDnIkJ8OvDo6kd4eu3j/DpqLl0iOrBLL+b3w37P8Jjh+Hr7crr8NCvTV1JQUsDEHhMJ8Q8h91QukcGRrv4EzY+CArljOX6r1atlalNxsaQWfOEFiR9vRzOyEdmzR27uU6fKV1NYKBbnwoViDFWNj9a3r7gyV64Ui3XxYrFetZbkW2drZnm5fJ07d8KR/DJi+h4kKTeJaYsq4yUF+AQQ3Taa9GPpeCtvXpn6Ctf2u5ak3CSWpS0jLT+NP4z8AydPn8Tf258LOl3A6fLT+Hk3Q4Hetw9efFEePL76SgII+vqKBTplimRT+sMfKjMouYvbb5cfKyenZUwD7Te4n955zU5emfoKKUdSSMpL4k+j/8TwmOH4+/iTeyqXFza8wJ4je3hvx3soFBrNzAEz+XTXpxSXFTO6y2gqdAWBvoGM7jya7mHd+cOyP3Ck8Aiju4ymXJez9+heSspLuKbPNSQfTubHgz/i6+XLE+OeYFP2Jt7f8T7dQrsxvtt44tvHE+gbyMHjB3k/+X1ABMXP2493tr8DQGRQJG182qDRHDh+4MznGdl5JNsObUMpxdV9rsbP24/ZF84mvyif5Lxkpvaa2mCTef+x/XRp16VluGjOheJiSc9YUAApKeLj6N1b/GrZ2TLHu1s3ySfbp4/M3zVU4/Bhudn7+8vN/Xe/k5v90KEiGqmpMG4c7NolhlRoqDzs3nijjHU98YSsS3jxRfkZHME4v8p/juKyYn7W72f8e/2/ySvMIz48nh8zfmRF+ooz51coAn0DOVV66kzZqM6jWJ+5nkfGPsLDlzzsia/FPeTnw08/yZd5//2yIGnfPjhxojKA3Y03wqxZMqbQoYO4klatkn0XXyzvQa7lu++WB5y5c2tmZJo0Sc63YUPLEIBBFwzS22dsB+Qi6RDYgbzCPAAuiLqAo0VHOVhwkCDfIG694Fb+PPbPJOclM6bLGNZnrudEyQku73F5jXaPFh3lsVWPsfXQVgB6hffidMXpMzfwuZPnsjx9OZ/s+gSA2y64jcwTmazPXM/RoqNn+jOxx0TatWnHpqxNHDx+kN8P/z3RbaPZc2QPJeUllJaXMrLzSPp26Eu5Lufy7pdz4vQJFIq2/u5PJN5i+e47+OYb8VHExcnAyp49MoC7b588poL8Q918s5SVl8vihjZtJEFIjGvpJwzVOXJEQutv2ADR0XDRRVJ2+DAsshKOxcZCRob8NDffLOtJCgtl/Y23t0RiHjtWhlpOnoS8I6X8cOxDPl1xgDDVjTWvjycyqoJn1z5Ll5A49hxJ4YvUxXQI7MDajLVc0/caALqFdmP3kd0cLTpKfHg8/5r8L9q1aVdr323LkSNiejmU9+yFS1Xp3l38+oWFkoa0qEh8d/HxcO214n4aPx6uukoecAYOhA8+aBkCkJCQoB95+xFyT+UyLHoYPcN78vb2t8k+kc2HOz/kWPExPvnFJyREN3D5Yy38dOgnCkoKGNt1LBW6gsdWPYZC8dilj6GUQmvNqdJTFJYWEuIfQhufNnKg1lSg8VLuTq9wjnz/PfTq1fSrF8vKKm/GoaGVbpZjx+QGPnWqPLXs3SvLtYOC5CL+4gu5uKOi5OkmOVnC+Q4YIE/xW7dKW+3aiY/V11emTvXoITf2mTPFb1FYaJ7wm5C335af8k9/kvVzo0aJGOTmygPq+vViSfj5iYafPFn9+ClTpFwpOS4sTNbjHT8ul0Fx6WlCbrmB0sh1BHgHkleWRlBJPKHe0WT5raZvhz48NPYhooKjCPQN5IJOF1BeUc6ipEW09W97Rjhsz+bN8v8wfLg85BQXy9P85s0ygLNhg3xJ06dLco4DB+ShZsMGsQ5KS6Xen/8s5ts//tFyBGDTpk1O92mt0Z666Z46JbZxUZEM/qSkwAcfSA6/L7+Uq3jIENl6ecmNOChIlD8vDwIC5GantdjSERGyf/duaWvAAJlV
8PrrcPnlMiIXHCxm5IkT0s6RI+LmuPdeuSHu3y/zUt95Ry6mLl3gmWfkv+maa+ScIDfgpCTJlNS3rwyIPvSQxIHYsUNGB++5R87l5SUDWvfdBz//uayMXbJE/JpPPCFP4e3ayQ343XfFzZKTI+fx94frr5enkpdekos2Olq+l+XLZUQSZMpcQYHUU0ruEnFxUnfnTlnmOm2a9C8wUOoGBlaaxYZmQWmpXFrBwfJssGmTPLRu2iQPw3l5ckn37i2afvSoXH4rV4pnRGvw8dV0i1McOAAlMd/i9bNZVARVphVR5X74+ChKtVxbj476G6HBbfjp0E9cHHsxv0n4jac+/vlRXCx/oaE1y0FG8T/6SF7PnQt33dXyBeC80FpuNlrLU2NFhTxyrFolZtm118oNOCJC9icny/zaJ5+UQZZduyqTTwcHy00wL0+eupOS3NfPSy6R/xBvbxGbqplx/PzEFBw8WP5zMjKkPCAA7rpLxCM3t7Juaam00+H/2zv32CrLM4D/ntMb9gKWXmwpd2TZxDqKWryAWwyOyzIrRhP9w+miwWy6bH+QiPEfjUsGJlvUZFmUjUSIUUnUqJFlmoVl0QFSBlagLRRHufQO0va09Jz29N0fz3doYT3AzDnnO+v3/JLm+8573nPO8z59vvd53+e9lWosMU5h4cVNs/j35uVpi2TXLo2ht7ZqelaWVtDHjo3pMM6KFWMnZR0+rHOjBwf1qd6wAbZv19+urlbHee6cDobNmKF7GEzyGTnGt2NgQB+vmTPV74fD2s7auCnG/u49kD3EjbU9nHb1nDvnKDpzN33fexmu16MwJTINl9fLsoKfwXA+X589TqyolflV09j8k9cunF3dM9hDQ2cDneFOskPZLCpfdGHty0B0gN5IL5GRCPVt9ay8fiVT8yae7z84PMioG6UwtzA9CopGdaLDzp062+jeewPuAGIxHQzJz9eKb8sWjbPV1OiKq8ce042/2tu1ogdtmR44oPc336zdr/EVXE6Oftfq1RoIra7WJkptrVrlpk0aqrj7bo3ZNTaOnVDU1aVWW1o65lQ6O/XzoZBWiuGwVpTz5umCjuxsnaQ9Ojo2baOlRVsBJSXaY9i+Xc9Cra7W6WW33qr3eXn6m01N6ti2bVMnFYnoKN6KFVr+zZs1XPT669pzWLAAHnxQezNNTboyrahIr85pU6yiQh3Atm0a2G1t1U2S6urUGY1naEj/F/n5wdpVz0gLQ0OwcaOa3dq12jl+4QV9lK6bEaUjVM/pg/OZPqWM9yI/Z+C7m+H8tWSH5zJyZg6hObuQKb0UDSzhvHQRKfjvk9JqypdyU1kN7za/RXik90L63MLvsKi4lhODjcyeXkl7zwALy+YQdt18fPRjAFZUPsgv577GPcuLueaasXZnSjh7Vs9sXb8eCgoy1wGIyCrgFSAL+JNzbmOivJd1ALGYNg2GhuDFF8cq7MpKrbDCYa1Ep0zR+xkzdFoWaIilr08r4/vv14pxxw6tFEMhnSr4zDNakWZlaajkgw+0Z5DKebyGYaSEaBQONg0xs2IK5eUaxfzNyx38M7SR/Pn7KcouIbtzKf3NN3P7oip2fTFCX+kncOPbUHEAjq2Eo2vIyYsxfGYG/PgXuvam7VaypnYRG8pHSo+QlxtiasvjhM8PM3jTyxCKwfliCrp/QLRnFtOj36duwcMcP5pPNKpttiee0PkKeRMv9P+fGB7WNmdGOgARyQKOAPegp4vtBR52zh2eKP8FB9Dbq9u1xpc6i2jM6+RJLS1o5R2N6gyQVau0JdzRoS3tp57SVv3WrRrmeeWVxKtbYEyLhmFMauJrGC6lv1+DBi0tEImOMmtmiFOndIhu8WIomhqD0ChtJ3PYt08761u3Ofbvh2V3ChUVMGtpPa3Zn/LliWO0jPyd4ZweoqFeOF9MWfsjZEsuHYcW4hrvQyLF3FKTQ1mZ/n5JifZsdu/WSOmyZbp9fSikjf1YTN+fO3dseK+/XwMUa9fC+vWZ6QBuB553zq30Xj8L4Jz77UT5bykvd/WLF+tUgUhEQy/79ul/7I47NNzS3Q1PPnnx5jmGYRg+cLkwj3OOz058xqt7XuX9pvcJSYjhUR3XExdi2jc/ZMqZpeQMVRFuq+Kb1ipKZCEzS6+loeHi4bbxFBdr9DYa1Xkc77wDDzxwdQ4g3VMpqoCT416fApYmzH32rFbw69Zp3L6mJsWBNMMwjG/P5aomEWH5nOUsn7OcyEiE3Kxc9rbt5fMTn9MR7uCjIx9x5MxLxFwMqvUzZ4CRvGlUPlJIKFZAtmSTlQUOiAzB8IgGLA5H1AFUPgxHyq/+AKN0O4CJ1HORXxORdcA6gNmzZ+vA6sUZUiWbYRhGWojv7FtbVUttVS0Am+7ZRGw0RvdgN6f7TnOq7xRNPU209bcRjoYJD4cZdaNX/O6KwoqrliOzQ0CpmgZqGIYxibnaQeB0r6LaCywUkXkikgs8BHyYZhkMwzAM0hwCcs6NiMjTwF/RaaBbnHOH0imDYRiGoaR9Pb1zbgewI92/axiGYVxMhuxeZhiGYaQbcwCGYRgBxRyAYRhGQDEHYBiGEVDMARiGYQSUjN4OWkT6gWa/5cgASoEev4XwGdOB6SCO6eHKOpjjnCu70pdk+rFKzVezmm2yIyL1QdeD6cB0EMf0kDwdWAjIMAwjoJgDMAzDCCiZ7gBe91uADMH0YDoA00Ec00OSdJDRg8CGYRhG6sj0HoBhGIaRIjLWAYjIKhFpFpEWEdngtzzpQkSOi8hXInJAROq9tOki8qmIHPWuxX7LmWxEZIuIdInIwXFpE5ZblFc922gQkSX+SZ48EujgeRE57dnDARFZM+69Zz0dNIvISn+kTi4iMktEdopIo4gcEpFfeelBs4VEekiuPTjnMu4P3Sr6GDAfyAW+BG7wW640lf04UHpJ2kvABu9+A7DJbzlTUO67gCXAwSuVG1gD/AU9Ye42YI/f8qdQB88D6yfIe4P3XOQB87znJcvvMiRBB5XAEu++CDjilTVotpBID0m1h0ztAdQCLc65r51zUeBtoM5nmfykDnjDu38DuM9HWVKCc+4fwNlLkhOVuw7Y6pTdwLUiUpkeSVNHAh0kog542zkXcc79G2hBn5v/a5xz7c65f3n3/UAjepZ40GwhkR4S8a3sIVMdwESHx1+u8JMJB3wiIvu885EBrnPOtYMaBlDum3TpJVG5g2YfT3vhjS3jwn+TXgciMheoAfYQYFu4RA+QRHvIVAdwxcPjJzF3OueWAKuBp0TkLr8FykCCZB9/BBYAi4F24Hde+qTWgYgUAu8Cv3bO9V0u6wRpk1kPSbWHTHUAp4BZ417PBNp8kiWtOOfavGsX8D7ajeuMd2u9a5d/EqaVROUOjH045zqdczHn3CiwmbFu/aTVgYjkoJXem86597zkwNnCRHpItj1kqgMI5OHxIlIgIkXxe+BHwEG07I962R4FPvBHwrSTqNwfAj/1ZoDcBvTGwwOTjUvi2WtRewDVwUMikici84CFwBfpli/ZiIgAfwYanXO/H/dWoGwhkR6Sbg9+j3Z
fZhR8DTryfQx4zm950lTm+ehI/pfAoXi5gRLgb8BR7zrdb1lTUPa30C7tMNqaeTxRudHu7h882/gKuMVv+VOog21eGRu8h7xyXP7nPB00A6v9lj9JOliGhi4agAPe35oA2kIiPSTVHmwlsGEYRkDJ1BCQYRiGkWLMARiGYQQUcwCGYRgBxRyAYRhGQDEHYBiGEVDMARiGYQQUcwCGYRgBxRyAYRhGQPkPEyTxRnWknJ8AAAAASUVORK5CYII=\n", 78 | "text/plain": [ 79 | "
" 80 | ] 81 | }, 82 | "metadata": { 83 | "needs_background": "light" 84 | }, 85 | "output_type": "display_data" 86 | } 87 | ], 88 | "source": [ 89 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg')\n", 90 | "\n", 91 | "histogram = cv2.calcHist([input] , [0] , None , [256] , [0,256] ) #histogram of blue color of the image\n", 92 | "histogram_1 = cv2.calcHist([input] , [1] , None , [256] , [0,256] ) #histogram of Greens in image\n", 93 | "histogram_2 = cv2.calcHist([input] , [2] , None , [256] , [0,256] ) #histogram of reds\n", 94 | "\n", 95 | "''' Parameters of Histogram\n", 96 | "first input is the image , it is in square brackets since it needs to be in an array form\n", 97 | "second parameter is the number of channels to get 0 , 1 or 2 to get colors such as red green or blue.\n", 98 | "third parameter for mask\n", 99 | "fourth parameter is the histogram size\n", 100 | "fifth parameter is the range of the histogram\n", 101 | "'''\n", 102 | "#ravel() is used to convert 2d array to 1d array\n", 103 | "\n", 104 | "plt.hist(input.ravel() , 256 , [0,256])\n", 105 | "plt.show()\n", 106 | "\n", 107 | "plt.hist(histogram , 256 , [0,256])\n", 108 | "plt.show()\n", 109 | "\n", 110 | "plt.hist(histogram_1 , 256 , [0,256])\n", 111 | "plt.show()\n", 112 | "\n", 113 | "plt.hist(histogram_2 , 256 , [0,256])\n", 114 | "plt.show()\n", 115 | "\n", 116 | "color = ('b' , 'g' , 'r')\n", 117 | "#sepearting different color channels\n", 118 | "\n", 119 | "#seperating the colors and plotting each for the histogram\n", 120 | "\n", 121 | "for i, col in enumerate(color):\n", 122 | " histogram2 = cv2.calcHist([input] , [i] , None , [256] , [0,256])\n", 123 | " plt.plot(histogram2 , color = col )\n", 124 | " plt.xlim([0,256])\n", 125 | " \n", 126 | "plt.show()\n" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [] 135 | } 136 | ], 137 | "metadata": { 138 | "kernelspec": { 139 | "display_name": "Python 3", 140 | "language": "python", 141 | "name": "python3" 142 | }, 143 | "language_info": { 144 | "codemirror_mode": { 145 | "name": "ipython", 146 | "version": 3 147 | }, 148 | "file_extension": ".py", 149 | "mimetype": "text/x-python", 150 | "name": "python", 151 | "nbconvert_exporter": "python", 152 | "pygments_lexer": "ipython3", 153 | "version": "3.7.1" 154 | } 155 | }, 156 | "nbformat": 4, 157 | "nbformat_minor": 2 158 | } 159 | -------------------------------------------------------------------------------- /Image Brightening and Darkening.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": {}, 20 | "outputs": [ 21 | { 22 | "name": "stdout", 23 | "output_type": "stream", 24 | "text": [ 25 | "[[[75 75 75]\n", 26 | " [75 75 75]\n", 27 | " [75 75 75]\n", 28 | " ...\n", 29 | " [75 75 75]\n", 30 | " [75 75 75]\n", 31 | " [75 75 75]]\n", 32 | "\n", 33 | " [[75 75 75]\n", 34 | " [75 75 75]\n", 35 | " [75 75 75]\n", 36 | " ...\n", 37 | " [75 75 75]\n", 38 | " [75 75 75]\n", 39 | " [75 75 75]]\n", 40 | "\n", 41 | " [[75 75 75]\n", 42 | " [75 
75 75]\n", 43 | " [75 75 75]\n", 44 | " ...\n", 45 | " [75 75 75]\n", 46 | " [75 75 75]\n", 47 | " [75 75 75]]\n", 48 | "\n", 49 | " ...\n", 50 | "\n", 51 | " [[75 75 75]\n", 52 | " [75 75 75]\n", 53 | " [75 75 75]\n", 54 | " ...\n", 55 | " [75 75 75]\n", 56 | " [75 75 75]\n", 57 | " [75 75 75]]\n", 58 | "\n", 59 | " [[75 75 75]\n", 60 | " [75 75 75]\n", 61 | " [75 75 75]\n", 62 | " ...\n", 63 | " [75 75 75]\n", 64 | " [75 75 75]\n", 65 | " [75 75 75]]\n", 66 | "\n", 67 | " [[75 75 75]\n", 68 | " [75 75 75]\n", 69 | " [75 75 75]\n", 70 | " ...\n", 71 | " [75 75 75]\n", 72 | " [75 75 75]\n", 73 | " [75 75 75]]]\n" 74 | ] 75 | } 76 | ], 77 | "source": [ 78 | "\n", 79 | "M = np.ones(image.shape , dtype = 'uint8') * 75\n", 80 | "print(M)\n", 81 | "#Creates a numpy array with ones of same size of the image of type integer" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 3, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "#When we add to our color values of our image the brightness increases\n", 91 | "#when we subtract the brightness decreases\n", 92 | "\n", 93 | "bright_image = cv2.add(image , M)\n", 94 | "dark_image = cv2.subtract(image , M)\n", 95 | "\n", 96 | "cv2.imshow('Bright' , bright_image)\n", 97 | "cv2.imshow('Dark' , dark_image)\n", 98 | "\n", 99 | "cv2.waitKey()\n", 100 | "cv2.destroyAllWindows()" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [] 109 | } 110 | ], 111 | "metadata": { 112 | "kernelspec": { 113 | "display_name": "Python 3", 114 | "language": "python", 115 | "name": "python3" 116 | }, 117 | "language_info": { 118 | "codemirror_mode": { 119 | "name": "ipython", 120 | "version": 3 121 | }, 122 | "file_extension": ".py", 123 | "mimetype": "text/x-python", 124 | "name": "python", 125 | "nbconvert_exporter": "python", 126 | "pygments_lexer": "ipython3", 127 | "version": "3.7.1" 128 | } 129 | }, 130 | "nbformat": 4, 131 | "nbformat_minor": 2 132 | } 133 | -------------------------------------------------------------------------------- /Image Color Spaces.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "HSV - \n", 11 | "Hue - color value(0 -179)\n", 12 | "Saturation - vibrancy (0-255)\n", 13 | "Value - brightness (0 - 255)\n", 14 | "\n", 15 | "'''\n", 16 | "\n", 17 | "#Opencv uses BGR or RGB\n", 18 | "\n", 19 | "import cv2\n", 20 | "import numpy as np" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 8, 26 | "metadata": {}, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "69 78 115\n", 33 | "92 98 135\n", 34 | "(450, 800, 3)\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 40 | "\n", 41 | "B , G , R = input[0 , 0] #BGR values at the first 0,0 pixel\n", 42 | "print(B,G,R)\n", 43 | "\n", 44 | "B , G , R = input[10 , 12] #BGR values at the first 10,12 pixel\n", 45 | "print(B,G,R)\n", 46 | "\n", 47 | "print(input.shape)" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 9, 53 | "metadata": {}, 54 | "outputs": [ 55 | { 56 | "name": "stdout", 57 | "output_type": "stream", 58 | "text": [ 59 | "(450, 800)\n", 60 | "88\n", 61 | "111\n" 62 | ] 
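A side note on the brightening/darkening cell above: cv2.add and cv2.subtract saturate at 255 and 0, while plain NumPy arithmetic on uint8 arrays silently wraps around, which is why the notebook uses the OpenCV functions. The minimal sketch below is not part of the original notebooks; it uses a small synthetic array instead of input.jpg, with the same offset of 75.

import cv2
import numpy as np

# tiny 2x2 "image" with every channel set to 200
img = np.full((2, 2, 3), 200, dtype=np.uint8)
M = np.ones(img.shape, dtype=np.uint8) * 75          # same brightness offset as in the notebook

print(cv2.add(img, M)[0, 0])        # [255 255 255] -> saturated (clipped) at 255
print((img + M)[0, 0])              # [19 19 19]    -> uint8 overflow: (200 + 75) % 256
print(cv2.subtract(M, img)[0, 0])   # [0 0 0]       -> clipped at 0 instead of going negative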
63 | } 64 | ], 65 | "source": [ 66 | "#Convert images to grayscale\n", 67 | "\n", 68 | "gray_img = cv2.cvtColor(input , cv2.COLOR_BGR2GRAY)\n", 69 | "print(gray_img.shape)\n", 70 | "\n", 71 | "print(gray_img[0,0])\n", 72 | "print(gray_img[21,42])" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 11, 78 | "metadata": {}, 79 | "outputs": [], 80 | "source": [ 81 | "#Convert images to HSV\n", 82 | "\n", 83 | "hsv_image = cv2.cvtColor(input , cv2.COLOR_BGR2HSV)\n", 84 | "\n", 85 | "cv2.imshow('HSV Image' , hsv_image)\n", 86 | "cv2.imshow('Hue Channel' , hsv_image[:,:,0]) #first two are height and width , third one is color\n", 87 | "cv2.imshow('Saturation Channel' , hsv_image[:,:,1])\n", 88 | "cv2.imshow('Value Channel' , hsv_image[:,:,2])\n", 89 | "cv2.waitKey()\n", 90 | "cv2.destroyAllWindows()" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 13, 96 | "metadata": {}, 97 | "outputs": [ 98 | { 99 | "name": "stdout", 100 | "output_type": "stream", 101 | "text": [ 102 | "(450, 800, 3)\n" 103 | ] 104 | } 105 | ], 106 | "source": [ 107 | "#individual channels in RGB image\n", 108 | "\n", 109 | "print(input.shape)\n", 110 | "B,G,R = cv2.split(input)\n", 111 | "\n", 112 | "cv2.imshow('Red' , R)\n", 113 | "cv2.imshow('Greem' , G)\n", 114 | "cv2.imshow('Blue' , B)\n", 115 | "cv2.waitKey(0)\n", 116 | "cv2.destroyAllWindows()\n", 117 | "\n", 118 | "merged = cv2.merge([B, G , R]) #Merge all the three channels\n", 119 | "cv2.imshow('Merged', merged)\n", 120 | "\n", 121 | "merged_amplified = cv2.merge([B+100 , G , R])\n", 122 | "cv2.imshow('Amplified' , merged_amplified)\n", 123 | "\n", 124 | "\n", 125 | "cv2.waitKey(0)\n", 126 | "cv2.destroyAllWindows()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": 17, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | " #Zero Matrix with dimensions of image \n", 136 | "zeros = np.zeros(input.shape[:2] , dtype = 'uint8')\n", 137 | "\n", 138 | "cv2.imshow('Red' , cv2.merge([zeros , zeros , R]))#display image with only Red \n", 139 | "cv2.imshow('Green' , cv2.merge([zeros , G , zeros])) #display image with only green\n", 140 | "cv2.imshow('Blue' , cv2.merge([B , zeros , zeros]))\n", 141 | "cv2.waitKey()\n", 142 | "cv2.destroyAllWindows()" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [] 151 | } 152 | ], 153 | "metadata": { 154 | "kernelspec": { 155 | "display_name": "Python 3", 156 | "language": "python", 157 | "name": "python3" 158 | }, 159 | "language_info": { 160 | "codemirror_mode": { 161 | "name": "ipython", 162 | "version": 3 163 | }, 164 | "file_extension": ".py", 165 | "mimetype": "text/x-python", 166 | "name": "python", 167 | "nbconvert_exporter": "python", 168 | "pygments_lexer": "ipython3", 169 | "version": "3.7.1" 170 | } 171 | }, 172 | "nbformat": 4, 173 | "nbformat_minor": 2 174 | } 175 | -------------------------------------------------------------------------------- /Image Contours.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 16, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 | "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 16, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "image = 
cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 24 | "cv2.imshow('Original', image)\n", 25 | "cv2.waitKey()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 17, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "data": { 35 | "text/plain": [ 36 | "-1" 37 | ] 38 | }, 39 | "execution_count": 17, 40 | "metadata": {}, 41 | "output_type": "execute_result" 42 | } 43 | ], 44 | "source": [ 45 | "grayscaled = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 46 | "edges = cv2.Canny(grayscaled , 30 , 200)\n", 47 | "cv2.imshow('Canny' , edges)\n", 48 | "cv2.waitKey()" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 18, 54 | "metadata": {}, 55 | "outputs": [ 56 | { 57 | "name": "stdout", 58 | "output_type": "stream", 59 | "text": [ 60 | "Number of Contours 12\n" 61 | ] 62 | } 63 | ], 64 | "source": [ 65 | "contours , heirarchy = cv2.findContours(edges.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 66 | "#findcontours() takes in image then retrieval mode and approximation mode , .copy() creates a new edge image\n", 67 | "cv2.imshow('Canny Edges with contouring' , edges)\n", 68 | "cv2.waitKey()\n", 69 | "\n", 70 | "print('Number of Contours ' + str(len(contours)))" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 19, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "data": { 80 | "text/plain": [ 81 | "-1" 82 | ] 83 | }, 84 | "execution_count": 19, 85 | "metadata": {}, 86 | "output_type": "execute_result" 87 | } 88 | ], 89 | "source": [ 90 | "# -1 to draw all contours or 1 to draw first contour or 2 to draw two contours\n", 91 | "cv2.drawContours(image , contours , -1 ,(0 , 255 , 0) , 3)\n", 92 | "cv2.imshow('contours' , image)\n", 93 | "cv2.waitKey()" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 11, 99 | "metadata": {}, 100 | "outputs": [ 101 | { 102 | "data": { 103 | "text/plain": [ 104 | "'\\nCHAIN_APPROX_NONE returns boundary points\\nCHAIN_APPROX_SIMPLE stores ending points\\n\\n\\nRetrieval modes talks about heirarchy\\nRETR_LIST - all contours\\nRETR_EXTERNAL - external\\nRETR_COMP - Retrieves in 2 Level Hierarchy\\nRETR_TREE - Retrieves Full Hierarchy\\n\\n'" 105 | ] 106 | }, 107 | "execution_count": 11, 108 | "metadata": {}, 109 | "output_type": "execute_result" 110 | } 111 | ], 112 | "source": [ 113 | "'''\n", 114 | "CHAIN_APPROX_NONE returns boundary points\n", 115 | "CHAIN_APPROX_SIMPLE stores ending points\n", 116 | "\n", 117 | "\n", 118 | "Retrieval modes talks about heirarchy\n", 119 | "RETR_LIST - all contours\n", 120 | "RETR_EXTERNAL - external\n", 121 | "RETR_COMP - Retrieves in 2 Level Hierarchy\n", 122 | "RETR_TREE - Retrieves Full Hierarchy\n", 123 | "\n", 124 | "'''" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "metadata": {}, 131 | "outputs": [], 132 | "source": [] 133 | } 134 | ], 135 | "metadata": { 136 | "kernelspec": { 137 | "display_name": "Python 3", 138 | "language": "python", 139 | "name": "python3" 140 | }, 141 | "language_info": { 142 | "codemirror_mode": { 143 | "name": "ipython", 144 | "version": 3 145 | }, 146 | "file_extension": ".py", 147 | "mimetype": "text/x-python", 148 | "name": "python", 149 | "nbconvert_exporter": "python", 150 | "pygments_lexer": "ipython3", 151 | "version": "3.7.1" 152 | } 153 | }, 154 | "nbformat": 4, 155 | "nbformat_minor": 2 156 | } 157 | -------------------------------------------------------------------------------- /Image Convolution and 
Blurring.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "\n", 12 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "#Kernels are used to normalize image and to specify a matrix to apply convolution or blurring\n", 22 | "\n", 23 | "kernel_3x3 = np.ones((3,3) , np.float32) / 9\n", 24 | "\n", 25 | "#to convolve image using opencv \n", 26 | "blurred = cv2.filter2D(image , -1 , kernel_3x3)\n", 27 | "\n", 28 | "\n", 29 | "kernel_7x7 = np.ones((7,7) , np.float32) / 49\n", 30 | "\n", 31 | "blurred1 = cv2.filter2D(image , -1 , kernel_7x7)\n", 32 | "\n", 33 | "cv2.imshow('Original', image)\n", 34 | "cv2.imshow('Blurred 1' , blurred)\n", 35 | "cv2.imshow('Blurred 2' , blurred1)\n", 36 | "cv2.waitKey()\n", 37 | "cv2.destroyAllWindows()" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 4, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "#Other blurring techniques\n", 47 | "\n", 48 | "#averaging by convolving the image with a box filter\n", 49 | "#average values over a specified windows\n", 50 | "blur = cv2.blur(image , (3,3)) #averaging with 3x3 box filter\n", 51 | "\n", 52 | "#gaussian kernel\n", 53 | "#similar to a normal 1's kernel , but has more emphasis or weighting around the center\n", 54 | "gaussian = cv2.GaussianBlur(image , (7,7) , 0)\n", 55 | "\n", 56 | "#medianBlur\n", 57 | "#finding median value of each kernal , paint effect\n", 58 | "median = cv2.medianBlur(image , 5) #uses the median values of a box filter instead of averaging\n", 59 | "\n", 60 | "\n", 61 | "#bilateral effective for noise removal and to keep edges sharp\n", 62 | "biateral = cv2.bilateralFilter(image , 9,75,75)\n", 63 | "\n", 64 | "cv2.imshow('Averaging' ,blur)\n", 65 | "cv2.imshow('Gaussian', gaussian)\n", 66 | "cv2.imshow('Median' , median)\n", 67 | "cv2.imshow('Bilateral' , biateral)\n", 68 | "\n", 69 | "cv2.waitKey()\n", 70 | "cv2.destroyAllWindows()" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 7, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "#Image Denoising\n", 80 | "\n", 81 | "denoising = cv2.fastNlMeansDenoisingColored(image , None , 6 , 6, 7, 21)\n", 82 | "\n", 83 | "cv2.imshow('Denoising' ,denoising)\n", 84 | "cv2.waitKey()\n", 85 | "cv2.destroyAllWindows()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [] 94 | } 95 | ], 96 | "metadata": { 97 | "kernelspec": { 98 | "display_name": "Python 3", 99 | "language": "python", 100 | "name": "python3" 101 | }, 102 | "language_info": { 103 | "codemirror_mode": { 104 | "name": "ipython", 105 | "version": 3 106 | }, 107 | "file_extension": ".py", 108 | "mimetype": "text/x-python", 109 | "name": "python", 110 | "nbconvert_exporter": "python", 111 | "pygments_lexer": "ipython3", 112 | "version": "3.7.1" 113 | } 114 | }, 115 | "nbformat": 4, 116 | "nbformat_minor": 2 117 | } 118 | -------------------------------------------------------------------------------- /Image Cropping.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | 
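As a quick sanity check on the kernel arithmetic in the convolution/blurring notebook above: a 3x3 kernel of ones divided by 9 passed to cv2.filter2D should match OpenCV's own box filter, cv2.blur, up to rounding. A minimal sketch, using a random array rather than the notebook's input.jpg:

import cv2
import numpy as np

img = np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8)

kernel_3x3 = np.ones((3, 3), np.float32) / 9     # normalised (sums to 1) box kernel
convolved = cv2.filter2D(img, -1, kernel_3x3)    # manual convolution, keeping the source depth
box_blur = cv2.blur(img, (3, 3))                 # built-in 3x3 averaging

# the two results agree up to rounding of the float kernel
print(cv2.absdiff(convolved, box_blur).max())    # expected to be 0 or 1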
"cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2 \n", 10 | "import numpy as np\n", 11 | "\n", 12 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 13 | "\n", 14 | "height , width = image.shape[:2]" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 2, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "#for cropping mention starting coordinates and ending coordinates and use indexing to crop out the image\n", 24 | "\n", 25 | "\n", 26 | "#starting coordinates\n", 27 | "start_row , start_col = int(height *.25) , int(width * .25) #25 % of height and width\n", 28 | "\n", 29 | "end_row , end_col = int(height * .75) , int(width * .75) #75%of height and width\n", 30 | "\n", 31 | "cropped_image = image[start_row:end_row , start_col:end_col]\n", 32 | "\n", 33 | "cv2.imshow('Original Image', image)\n", 34 | "cv2.imshow('Cropped Image ' , cropped_image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [] 45 | } 46 | ], 47 | "metadata": { 48 | "kernelspec": { 49 | "display_name": "Python 3", 50 | "language": "python", 51 | "name": "python3" 52 | }, 53 | "language_info": { 54 | "codemirror_mode": { 55 | "name": "ipython", 56 | "version": 3 57 | }, 58 | "file_extension": ".py", 59 | "mimetype": "text/x-python", 60 | "name": "python", 61 | "nbconvert_exporter": "python", 62 | "pygments_lexer": "ipython3", 63 | "version": "3.7.1" 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /Image Dilation and Erosion.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 12, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#erosion removes pixels at the boundaries\n", 10 | "#dilation adds pixels to the boundaries\n", 11 | "import cv2\n", 12 | "import numpy as np\n", 13 | "\n", 14 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\inpu1.jpg') #Read images using OpenCV" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 13, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "kernel = np.ones((5,5) , np.uint8)\n", 24 | "cv2.imshow(\"Original\" , image)\n", 25 | "\n", 26 | "erosion = cv2.erode(image , kernel , iterations = 1)\n", 27 | "cv2.imshow(\"Erosion\" , erosion)\n", 28 | "\n", 29 | "dilation = cv2.dilate(image , kernel , iterations = 1)\n", 30 | "cv2.imshow('Dilation' , dilation)\n", 31 | "\n", 32 | "\n", 33 | "#Opening - GOod for removing noise - Erosion followed by diilation\n", 34 | "opening = cv2.morphologyEx(image , cv2.MORPH_OPEN , kernel)\n", 35 | "cv2.imshow('OPening', opening)\n", 36 | "\n", 37 | "\n", 38 | "#closing - Also good for removing noise , DIlation followed by erosion\n", 39 | "closing = cv2.morphologyEx(image , cv2.MORPH_CLOSE , kernel)\n", 40 | "cv2.imshow(\"closing\" , closing)\n", 41 | "\n", 42 | "\n", 43 | "cv2.waitKey()\n", 44 | "cv2.destroyAllWindows()" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [] 53 | } 54 | ], 55 | "metadata": { 56 | "kernelspec": { 57 | "display_name": "Python 3", 58 | "language": 
"python", 59 | "name": "python3" 60 | }, 61 | "language_info": { 62 | "codemirror_mode": { 63 | "name": "ipython", 64 | "version": 3 65 | }, 66 | "file_extension": ".py", 67 | "mimetype": "text/x-python", 68 | "name": "python", 69 | "nbconvert_exporter": "python", 70 | "pygments_lexer": "ipython3", 71 | "version": "3.7.1" 72 | } 73 | }, 74 | "nbformat": 4, 75 | "nbformat_minor": 2 76 | } 77 | -------------------------------------------------------------------------------- /Image Edge Detection.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Edges are discountinuities in images\n", 11 | "\n", 12 | "Edge Detection Algorithms\n", 13 | "Sobel = Emphasize Verical and Horizontal\n", 14 | "Canny - Lowest Error Rate\n", 15 | "Laplacian - Gets orientations\n", 16 | "\n", 17 | "\n", 18 | "Canny\n", 19 | "1) Applies Gaussian Blurring\n", 20 | "2) Finds intensity gradient\n", 21 | "3) Removes pixels that are not edges - Non maximum suppression\n", 22 | "4) If pixel is within upper and threshold consider it as an edge - Hysterisis Applies thresholds\n", 23 | "\n", 24 | "'''\n", 25 | "\n", 26 | "import cv2\n", 27 | "import numpy as np" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 13, 33 | "metadata": {}, 34 | "outputs": [ 35 | { 36 | "ename": "error", 37 | "evalue": "OpenCV(4.0.0) c:\\projects\\opencv-python\\opencv\\modules\\imgproc\\src\\color.hpp:261: error: (-2:Unspecified error) in function '__cdecl cv::CvtHelper,struct cv::Set<3,4,-1>,struct cv::Set<0,2,5>,2>::CvtHelper(const class cv::_InputArray &,const class cv::_OutputArray &,int)'\n> Unsupported depth of input image:\n> 'VDepth::contains(depth)'\n> where\n> 'depth' is 6 (CV_64F)\n", 38 | "output_type": "error", 39 | "traceback": [ 40 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 41 | "\u001b[1;31merror\u001b[0m Traceback (most recent call last)", 42 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 19\u001b[0m \u001b[0msobel_NOT\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mbitwise_not\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0msobel_x\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 20\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 21\u001b[1;33m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Sobel OR Image'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0msobel_OR\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 22\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mwaitKey\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 23\u001b[0m \u001b[0mcv2\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mimshow\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Sobel And Image'\u001b[0m \u001b[1;33m,\u001b[0m \u001b[0msobel_AND\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 43 | "\u001b[1;31merror\u001b[0m: OpenCV(4.0.0) c:\\projects\\opencv-python\\opencv\\modules\\imgproc\\src\\color.hpp:261: error: (-2:Unspecified error) in function '__cdecl cv::CvtHelper,struct cv::Set<3,4,-1>,struct cv::Set<0,2,5>,2>::CvtHelper(const class cv::_InputArray &,const class 
cv::_OutputArray &,int)'\n> Unsupported depth of input image:\n> 'VDepth::contains(depth)'\n> where\n> 'depth' is 6 (CV_64F)\n" 44 | ] 45 | } 46 | ], 47 | "source": [ 48 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg',0)\n", 49 | "\n", 50 | "height , width = image.shape\n", 51 | "\n", 52 | "#Sobel Filter\n", 53 | "#Extract Sobel Edges\n", 54 | "sobel_x = cv2.convertScaleAbs(cv2.Sobel(image , cv2.CV_64F , 1 , 0 , ksize=5)) #dx=1, dy=0 -> x gradient; convertScaleAbs turns CV_64F into uint8 so imshow can display it\n", 55 | "sobel_y = cv2.convertScaleAbs(cv2.Sobel(image , cv2.CV_64F , 0 , 1 , ksize=5)) #dx=0, dy=1 -> y gradient; ksize = kernel size to vary edge strength\n", 56 | "\n", 57 | "cv2.imshow('Original Image' , image)\n", 58 | "cv2.waitKey(0)\n", 59 | "#cv2.imshow('Sobel X Image' , sobel_x)\n", 60 | "#cv2.waitKey(0)\n", 61 | "#cv2.imshow('Sobel Y Image' , sobel_y)\n", 62 | "#cv2.waitKey(0)\n", 63 | "\n", 64 | "sobel_OR = cv2.bitwise_or(sobel_x , sobel_y)\n", 65 | "sobel_AND = cv2.bitwise_and(sobel_x , sobel_y)\n", 66 | "sobel_NOT = cv2.bitwise_not(sobel_x)\n", 67 | "\n", 68 | "cv2.imshow('Sobel OR Image' , sobel_OR)\n", 69 | "cv2.waitKey()\n", 70 | "cv2.imshow('Sobel And Image' , sobel_AND)\n", 71 | "cv2.waitKey()\n", 72 | "cv2.imshow('Sobel Not Image' , sobel_NOT)\n", 73 | "cv2.waitKey()\n", 74 | "\n", 75 | "\n", 76 | "#Laplacian\n", 77 | "laplacian = cv2.Laplacian(image , cv2.CV_64F)\n", 78 | "cv2.imshow('Laplacian Image' , cv2.convertScaleAbs(laplacian)) #convert from CV_64F before displaying to avoid the error above\n", 79 | "cv2.waitKey()\n", 80 | "\n", 81 | "canny = cv2.Canny(image , 20 , 170) #lower and upper hysteresis thresholds\n", 82 | "cv2.imshow('Canny Image' , canny)\n", 83 | "\n", 84 | "cv2.waitKey()\n", 85 | "cv2.destroyAllWindows()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "metadata": {}, 99 | "outputs": [], 100 | "source": [] 101 | } 102 | ], 103 | "metadata": { 104 | "kernelspec": { 105 | "display_name": "Python 3", 106 | "language": "python", 107 | "name": "python3" 108 | }, 109 | "language_info": { 110 | "codemirror_mode": { 111 | "name": "ipython", 112 | "version": 3 113 | }, 114 | "file_extension": ".py", 115 | "mimetype": "text/x-python", 116 | "name": "python", 117 | "nbconvert_exporter": "python", 118 | "pygments_lexer": "ipython3", 119 | "version": "3.7.1" 120 | } 121 | }, 122 | "nbformat": 4, 123 | "nbformat_minor": 2 124 | } 125 | -------------------------------------------------------------------------------- /Image Masking.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import cv2\n", 11 | "\n", 12 | "\n", 13 | "#Making a Square\n", 14 | "\n", 15 | "square = np.zeros((300,300) , np.uint8) #300x300 array of zeros (a black image)\n", 16 | "cv2.rectangle(square , (50,50) , (250,250) , 255 , -1) #negative thickness fills the rectangle\n", 17 | "\n", 18 | "#Making an ellipse\n", 19 | "ellipse = np.zeros((300,300) , np.uint8)\n", 20 | "cv2.ellipse(ellipse , (150,150) , (150,150) , 30 , 0 , 180 , 255 , -1)\n", 21 | "\n", 22 | "cv2.imshow('Square' , square)\n", 23 | "cv2.imshow('Ellipse' , ellipse)\n", 24 | "cv2.waitKey()\n", 25 | "cv2.destroyAllWindows()" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 5, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "#experimenting with some bitwise operations\n", 35 | "#these operate on single-channel (binary/grayscale) images\n", 36 | "\n", 37 | "And = cv2.bitwise_and(square , ellipse) 
#intersection of images \n", 38 | "cv2.imshow('AND' , And)\n", 39 | "cv2.waitKey()\n", 40 | "\n", 41 | "Or = cv2.bitwise_or(square , ellipse) #combination of images\n", 42 | "cv2.imshow('OR' , Or)\n", 43 | "cv2.waitKey()\n", 44 | "\n", 45 | "Xor = cv2.bitwise_xor(square , ellipse) #combination of and and or\n", 46 | "cv2.imshow('XOR' , Xor)\n", 47 | "cv2.waitKey()\n", 48 | "\n", 49 | "\n", 50 | "Not = cv2.bitwise_not(square) #changes white to black of square\n", 51 | "cv2.imshow('NOT' , Not)\n", 52 | "cv2.waitKey()\n", 53 | "\n", 54 | "cv2.destroyAllWindows()" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [] 63 | } 64 | ], 65 | "metadata": { 66 | "kernelspec": { 67 | "display_name": "Python 3", 68 | "language": "python", 69 | "name": "python3" 70 | }, 71 | "language_info": { 72 | "codemirror_mode": { 73 | "name": "ipython", 74 | "version": 3 75 | }, 76 | "file_extension": ".py", 77 | "mimetype": "text/x-python", 78 | "name": "python", 79 | "nbconvert_exporter": "python", 80 | "pygments_lexer": "ipython3", 81 | "version": "3.7.1" 82 | } 83 | }, 84 | "nbformat": 4, 85 | "nbformat_minor": 2 86 | } 87 | -------------------------------------------------------------------------------- /Image Pyramids or Image Scaling.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Scaling images\n", 10 | "\n", 11 | "import cv2\n", 12 | "\n", 13 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 14 | "\n", 15 | "smaller = cv2.pyrDown(image)\n", 16 | "larger = cv2.pyrUp(image)\n", 17 | "scale_back = cv2.pyrUp(smaller)\n", 18 | "\n", 19 | "cv2.imshow('Smaller' , smaller)\n", 20 | "cv2.imshow('Larger' , larger)\n", 21 | "cv2.imshow('Scale Back' , scale_back)\n", 22 | "cv2.waitKey(0)\n", 23 | "cv2.destroyAllWindows()\n" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [] 32 | } 33 | ], 34 | "metadata": { 35 | "kernelspec": { 36 | "display_name": "Python 3", 37 | "language": "python", 38 | "name": "python3" 39 | }, 40 | "language_info": { 41 | "codemirror_mode": { 42 | "name": "ipython", 43 | "version": 3 44 | }, 45 | "file_extension": ".py", 46 | "mimetype": "text/x-python", 47 | "name": "python", 48 | "nbconvert_exporter": "python", 49 | "pygments_lexer": "ipython3", 50 | "version": "3.7.1" 51 | } 52 | }, 53 | "nbformat": 4, 54 | "nbformat_minor": 2 55 | } 56 | -------------------------------------------------------------------------------- /Image Resizing or Scaling.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "cv2.INTER_AREA - For shrinking or down sampling\n", 11 | "cv2.INTER_NEAREST - Fastest \n", 12 | "cv2.INTER_CUBIC - Better\n", 13 | "cv2.INTER_LANCZOS4 - Best\n", 14 | "cv2.INTER_LINEAR - Good for zooming\n", 15 | "'''\n", 16 | "\n", 17 | "import cv2\n", 18 | "import numpy as np\n", 19 | "\n", 20 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 21 | "\n", 22 | "image_scaled = cv2.resize(image , None , fx=0.75 , fy = 0.75)\n", 23 | "#Default cv2.resize is 
set to Linear Interpolation \n", 24 | "#Parameters - image input , output image size , scale image factors of x scale , y scale , interpolation\n", 25 | "cv2.imshow('Scaling - Linear Interpolation' , image_scaled)\n", 26 | "cv2.waitKey()\n", 27 | "\n", 28 | "#doubling size of image\n", 29 | "\n", 30 | "image_scaled = cv2.resize(image , None , fx=2 , fy = 2 , interpolation = cv2.INTER_CUBIC)\n", 31 | "cv2.imshow('Scaling - Cubic Interpolation' , image_scaled)\n", 32 | "cv2.waitKey()\n", 33 | "\n", 34 | "#resizing by down sampling\n", 35 | "image_scaled = cv2.resize(image ,(900 , 400), interpolation = cv2.INTER_AREA)\n", 36 | "cv2.imshow('Scaling - AREA Interpolation' , image_scaled)\n", 37 | "cv2.waitKey()\n", 38 | "\n", 39 | "cv2.destroyAllWindows()\n" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [] 48 | } 49 | ], 50 | "metadata": { 51 | "kernelspec": { 52 | "display_name": "Python 3", 53 | "language": "python", 54 | "name": "python3" 55 | }, 56 | "language_info": { 57 | "codemirror_mode": { 58 | "name": "ipython", 59 | "version": 3 60 | }, 61 | "file_extension": ".py", 62 | "mimetype": "text/x-python", 63 | "name": "python", 64 | "nbconvert_exporter": "python", 65 | "pygments_lexer": "ipython3", 66 | "version": "3.7.1" 67 | } 68 | }, 69 | "nbformat": 4, 70 | "nbformat_minor": 2 71 | } 72 | -------------------------------------------------------------------------------- /Image Rotations.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "#Rotation Matrix \n", 10 | "# M = [cosx -sinx\n", 11 | "# sinx cosx]\n", 12 | "# x - the angle of rotation\n", 13 | "\n", 14 | "import numpy as np\n", 15 | "import cv2" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 2, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 25 | "\n", 26 | "height , width = image.shape[:2]\n", 27 | "\n", 28 | "rotation_matrix = cv2.getRotationMatrix2D((width/2 , height/2) , 90 , 1)\n", 29 | "#rotation matrix parameters = ( center of image , rotation angle , scaling factor)\n", 30 | "\n", 31 | "rotated_image = cv2.warpAffine(image , rotation_matrix , (width , height))\n", 32 | "\n", 33 | "cv2.imshow('Rotated Image' , rotated_image)\n", 34 | "cv2.waitKey()\n", 35 | "cv2.destroyAllWindows()" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 3, 41 | "metadata": {}, 42 | "outputs": [], 43 | "source": [ 44 | "#Alternated method to rotate image to avoid black spaces around image as shown previously\n", 45 | "\n", 46 | "rotated_image = cv2.transpose(image)\n", 47 | "\n", 48 | "cv2.imshow('Rotated Image' , rotated_image)\n", 49 | "cv2.waitKey(0)\n", 50 | "cv2.destroyAllWindows()" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [] 59 | } 60 | ], 61 | "metadata": { 62 | "kernelspec": { 63 | "display_name": "Python 3", 64 | "language": "python", 65 | "name": "python3" 66 | }, 67 | "language_info": { 68 | "codemirror_mode": { 69 | "name": "ipython", 70 | "version": 3 71 | }, 72 | "file_extension": ".py", 73 | "mimetype": "text/x-python", 74 | "name": "python", 75 | "nbconvert_exporter": "python", 76 | "pygments_lexer": "ipython3", 77 
| "version": "3.7.1" 78 | } 79 | }, 80 | "nbformat": 4, 81 | "nbformat_minor": 2 82 | } 83 | -------------------------------------------------------------------------------- /Image Sharpening.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Sharpening Kernel = [ -1 -1 -1\n", 11 | " -1 9 -1 \n", 12 | " -1 -1 -1]\n", 13 | "\n", 14 | "'''\n", 15 | "\n", 16 | "import cv2\n", 17 | "import numpy as np" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 27 | "\n", 28 | "sharpening_kernel = np.array([[-1 , -1 , -1],\n", 29 | " [-1 , 9 , -1],\n", 30 | " [-1 , -1 , -1]])\n", 31 | "\n", 32 | "sharpened_image = cv2.filter2D(image , -1 , sharpening_kernel)\n", 33 | "\n", 34 | "cv2.imshow('Sharpened Image' , sharpened_image)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": null, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [] 45 | } 46 | ], 47 | "metadata": { 48 | "kernelspec": { 49 | "display_name": "Python 3", 50 | "language": "python", 51 | "name": "python3" 52 | }, 53 | "language_info": { 54 | "codemirror_mode": { 55 | "name": "ipython", 56 | "version": 3 57 | }, 58 | "file_extension": ".py", 59 | "mimetype": "text/x-python", 60 | "name": "python", 61 | "nbconvert_exporter": "python", 62 | "pygments_lexer": "ipython3", 63 | "version": "3.7.1" 64 | } 65 | }, 66 | "nbformat": 4, 67 | "nbformat_minor": 2 68 | } 69 | -------------------------------------------------------------------------------- /Image Thresholding.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "'''\n", 10 | "Thresholding only works with grayscale images\n", 11 | "cv2.threshold(image , Threshold Value , Max Value , Threshold Type)\n", 12 | "cv2.THRESH_BINARY - Most common thresholding\n", 13 | "cv2.THRESH_BINARY_INV - Another common\n", 14 | "'''\n", 15 | "\n", 16 | "import cv2\n", 17 | "import numpy as np\n", 18 | "\n", 19 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg' , 0)" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 4, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "cv2.imshow('Gray Scaled', image)\n", 29 | "cv2.waitKey()\n", 30 | "cv2.destroyAllWindows()" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 5, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "#Values below 127 goes to 0 , above goes to 255\n", 40 | "ret, thresh1 = cv2.threshold(image , 127 , 255 , cv2.THRESH_BINARY)\n", 41 | "cv2.imshow('Threshold Binary' , thresh1)\n", 42 | "\n", 43 | "# input parameters image , threshold value , max value and type\n", 44 | "\n", 45 | "#Values below 127 got to 255 above 127 goes to 0 \n", 46 | "ret, thresh2 = cv2.threshold(image , 127 , 255 , cv2.THRESH_BINARY_INV)\n", 47 | "cv2.imshow('Threshold Inverse Binary' , thresh2)\n", 48 | "\n", 49 | "#values above 127 goes to same value 127 has.\n", 50 | "ret, thresh3 = cv2.threshold(image , 127 , 255 , 
cv2.THRESH_TRUNC)\n", 51 | "cv2.imshow('Threshold Truncated' , thresh3)\n", 52 | "\n", 53 | "#values below 127 goes to zero , rest remain unchanged\n", 54 | "ret, thresh4 = cv2.threshold(image , 127 , 255 , cv2.THRESH_TOZERO)\n", 55 | "cv2.imshow('Threshold ToZero' , thresh4)\n", 56 | "\n", 57 | "#values below 127 remains unchanged rest goes to zero\n", 58 | "ret, thresh5 = cv2.threshold(image , 127 , 255 , cv2.THRESH_TOZERO_INV)\n", 59 | "cv2.imshow('Threshold ToZero Inverse' , thresh5)\n", 60 | "\n", 61 | "\n", 62 | "cv2.waitKey()\n", 63 | "cv2.destroyAllWindows()" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 9, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "#Adaptive Thresholding\n", 73 | "\n", 74 | "#adaptivethreshold parameters are image , max value , adaptive type , threshold type , block size , constant that is subratcted from mean\n", 75 | "thresh6 = cv2.adaptiveThreshold(image , 127 , cv2.ADAPTIVE_THRESH_MEAN_C , cv2.THRESH_BINARY , 3 , 5)\n", 76 | "cv2.imshow('Adaptive Mean Threshold' , thresh6) #Based on mean of neighbourhood of pixels\n", 77 | "\n", 78 | "_ , thresh7 = cv2.threshold(image , 0 , 255 , cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n", 79 | "cv2.imshow('Otsu Thresholding' , thresh7)\n", 80 | "\n", 81 | "#Otsu looks into histogram and finds peaks and finds an optimal values to seperate peaks\n", 82 | "#Otsu Thresholding with Gaussian Filtering\n", 83 | "blur = cv2.GaussianBlur(image , (5 , 53) , 0)\n", 84 | "_ , thresh8 = cv2.threshold(image , 0 , 255 , cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n", 85 | "cv2.imshow('Otsu Gaussian Thresholding' , thresh8)\n", 86 | "\n", 87 | "cv2.waitKey()\n", 88 | "cv2.destroyAllWindows()\n" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": {}, 95 | "outputs": [], 96 | "source": [] 97 | } 98 | ], 99 | "metadata": { 100 | "kernelspec": { 101 | "display_name": "Python 3", 102 | "language": "python", 103 | "name": "python3" 104 | }, 105 | "language_info": { 106 | "codemirror_mode": { 107 | "name": "ipython", 108 | "version": 3 109 | }, 110 | "file_extension": ".py", 111 | "mimetype": "text/x-python", 112 | "name": "python", 113 | "nbconvert_exporter": "python", 114 | "pygments_lexer": "ipython3", 115 | "version": "3.7.1" 116 | } 117 | }, 118 | "nbformat": 4, 119 | "nbformat_minor": 2 120 | } 121 | -------------------------------------------------------------------------------- /Image Translations.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 4, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "[[ 1. 0. 200. ]\n", 13 | " [ 0. 1. 
112.5]]\n" 14 | ] 15 | } 16 | ], 17 | "source": [ 18 | "#Translation Matrix = [ 1 0 x\n", 19 | "# 0 1 y]\n", 20 | "# x distance along x axis , y along y axis\n", 21 | "\n", 22 | "import cv2\n", 23 | "import numpy as np \n", 24 | "\n", 25 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 26 | "\n", 27 | "height , width = image.shape[:2] #Store height and width of image\n", 28 | "\n", 29 | "quarter_height , quarter_width = height/4 , width/4\n", 30 | "\n", 31 | "T = np.float32([[1 , 0 , quarter_width], [0 , 1, quarter_height ]])\n", 32 | "\n", 33 | "image_translated = cv2.warpAffine(image , T , (width , height))\n", 34 | "cv2.imshow('translated image' , image_translated)\n", 35 | "cv2.waitKey()\n", 36 | "cv2.destroyAllWindows()\n", 37 | "\n", 38 | "print(T)" 39 | ] 40 | }, 41 | { 42 | "cell_type": "code", 43 | "execution_count": null, 44 | "metadata": {}, 45 | "outputs": [], 46 | "source": [] 47 | } 48 | ], 49 | "metadata": { 50 | "kernelspec": { 51 | "display_name": "Python 3", 52 | "language": "python", 53 | "name": "python3" 54 | }, 55 | "language_info": { 56 | "codemirror_mode": { 57 | "name": "ipython", 58 | "version": 3 59 | }, 60 | "file_extension": ".py", 61 | "mimetype": "text/x-python", 62 | "name": "python", 63 | "nbconvert_exporter": "python", 64 | "pygments_lexer": "ipython3", 65 | "version": "3.7.1" 66 | } 67 | }, 68 | "nbformat": 4, 69 | "nbformat_minor": 2 70 | } 71 | -------------------------------------------------------------------------------- /Non-Affine Image Transformations.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cv2\n", 10 | "import numpy as np\n", 11 | "import matplotlib.pyplot as plt" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 3, 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/plain": [ 22 | "-1" 23 | ] 24 | }, 25 | "execution_count": 3, 26 | "metadata": {}, 27 | "output_type": "execute_result" 28 | } 29 | ], 30 | "source": [ 31 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input.jpg')\n", 32 | "\n", 33 | "cv2.imshow('Original' , image)\n", 34 | "cv2.waitKey()\n", 35 | "\n", 36 | "#Coordinated of the 4 corners of the original image that need to be skewed and aligned\n", 37 | "points_A = np.float32([[320,14] , [70,213] , [53,134] , [24,42]])\n", 38 | "\n", 39 | "points_B = np.float32([[0,0] , [23,145] , [0,504], [56,425]])\n", 40 | "\n", 41 | "M = cv2.getPerspectiveTransform(points_A , points_B) #Transform Matrix\n", 42 | "\n", 43 | "warpped = cv2.warpPerspective(image , M , (420 , 504)) #Final size of image\n", 44 | "\n", 45 | "cv2.imshow(\"Warpped Image\" , warpped)\n", 46 | "cv2.waitKey()" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [] 55 | } 56 | ], 57 | "metadata": { 58 | "kernelspec": { 59 | "display_name": "Python 3", 60 | "language": "python", 61 | "name": "python3" 62 | }, 63 | "language_info": { 64 | "codemirror_mode": { 65 | "name": "ipython", 66 | "version": 3 67 | }, 68 | "file_extension": ".py", 69 | "mimetype": "text/x-python", 70 | "name": "python", 71 | "nbconvert_exporter": "python", 72 | "pygments_lexer": "ipython3", 73 | "version": "3.7.1" 74 | } 75 | }, 76 | "nbformat": 4, 77 | "nbformat_minor": 2 78 | } 79 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OpenCV Tutorials 2 | -------------------------------------------------------------------------------- /Read Write and Display Images using OpenCV.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Reading and Displaying Images" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "#import libaries\n", 17 | "import cv2\n", 18 | "import numpy as np" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 11, 24 | "metadata": {}, 25 | "outputs": [], 26 | "source": [ 27 | "input = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\OpenCV Tutorials\\\\input.jpg') #Read images using OpenCV\n", 28 | "cv2.imshow('Hello World' , input) #imshow() - first parameter title of image , followed by image source\n", 29 | "cv2.waitKey() #Necessary when showing images using openCV , placing numbers in the parameter allows a delay for the image to be kept open\n", 30 | "cv2.destroyAllWindows() #closes all open windows" 31 | ] 32 | }, 33 | { 34 | "cell_type": "markdown", 35 | "metadata": {}, 36 | "source": [ 37 | "# Storing Images" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 13, 43 | "metadata": {}, 44 | "outputs": [ 45 | { 46 | "name": "stdout", 47 | "output_type": "stream", 48 | "text": [ 49 | "(450, 800, 3)\n" 50 | ] 51 | } 52 | ], 53 | "source": [ 54 | "print(input.shape)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 14, 60 | "metadata": {}, 61 | "outputs": [ 62 | { 63 | "name": "stdout", 64 | "output_type": "stream", 65 | "text": [ 66 | "Height of Image 450\n", 67 | "Width of Image 800\n" 68 | ] 69 | } 70 | ], 71 | "source": [ 72 | "print('Height of Image' , int(input.shape[0]))\n", 73 | "print('Width of Image' , int(input.shape[1]))" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 15, 79 | "metadata": {}, 80 | "outputs": [ 81 | { 82 | "data": { 83 | "text/plain": [ 84 | "True" 85 | ] 86 | }, 87 | "execution_count": 15, 88 | "metadata": {}, 89 | "output_type": "execute_result" 90 | } 91 | ], 92 | "source": [ 93 | "#Writing Images\n", 94 | "cv2.imwrite('inpu1.jpg' , input) #First parameter of name and extension followed by image to save" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "metadata": {}, 101 | "outputs": [], 102 | "source": [] 103 | } 104 | ], 105 | "metadata": { 106 | "kernelspec": { 107 | "display_name": "Python 3", 108 | "language": "python", 109 | "name": "python3" 110 | }, 111 | "language_info": { 112 | "codemirror_mode": { 113 | "name": "ipython", 114 | "version": 3 115 | }, 116 | "file_extension": ".py", 117 | "mimetype": "text/x-python", 118 | "name": "python", 119 | "nbconvert_exporter": "python", 120 | "pygments_lexer": "ipython3", 121 | "version": "3.7.1" 122 | } 123 | }, 124 | "nbformat": 4, 125 | "nbformat_minor": 2 126 | } 127 | -------------------------------------------------------------------------------- /Sorting Contours.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "data": { 10 
| "text/plain": [ 11 | "-1" 12 | ] 13 | }, 14 | "execution_count": 2, 15 | "metadata": {}, 16 | "output_type": "execute_result" 17 | } 18 | ], 19 | "source": [ 20 | "import cv2\n", 21 | "import numpy as np\n", 22 | "\n", 23 | "image = cv2.imread('C:\\\\Users\\\\Zahid\\\\Desktop\\\\Deep Learning\\\\openCVtutorials\\\\input3.png')\n", 24 | "cv2.imshow('Original Image', image)\n", 25 | "cv2.waitKey()\n", 26 | "\n", 27 | "blank_image = np.zeros((image.shape[0] , image.shape[1] , 3))\n", 28 | "\n", 29 | "original_image = image\n", 30 | "gray = cv2.cvtColor(image , cv2.COLOR_BGR2GRAY)\n", 31 | "canny = cv2.Canny(gray , 30 ,200)\n", 32 | "\n", 33 | "contours , hierarchy = cv2.findContours(canny.copy() , cv2.RETR_LIST , cv2.CHAIN_APPROX_NONE)\n", 34 | "cv2.imshow('Contours' , canny)\n", 35 | "cv2.waitKey()\n", 36 | "\n", 37 | "cv2.drawContours(image , contours , -1 , (0,255,0) , 3)\n", 38 | "cv2.imshow('All Contours' , image)\n", 39 | "cv2.waitKey()" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 7, 45 | "metadata": {}, 46 | "outputs": [ 47 | { 48 | "name": "stdout", 49 | "output_type": "stream", 50 | "text": [ 51 | "Before Sorting\n", 52 | "[1407.0, 1453.0, 1595.0, 1597.0, 952.0, 996.0, 1658.5, 1696.5, 1158.0, 1213.0, 22258.0, 22261.0]\n", 53 | "After sorting\n", 54 | "[22261.0, 22258.0, 1696.5, 1658.5, 1597.0, 1595.0, 1453.0, 1407.0, 1213.0, 1158.0, 996.0, 952.0]\n" 55 | ] 56 | } 57 | ], 58 | "source": [ 59 | "#sorting contours by area\n", 60 | "\n", 61 | "def get_contour_areas(contours):\n", 62 | " all_areas = []\n", 63 | " for cnt in contours:\n", 64 | " area = cv2.contourArea(cnt) #area of contours\n", 65 | " all_areas.append(area)\n", 66 | " return all_areas\n", 67 | "\n", 68 | "print(\"Before Sorting\")\n", 69 | "print(get_contour_areas(contours))\n", 70 | "\n", 71 | "sorted_contours = sorted(contours , key = cv2.contourArea , reverse = True)\n", 72 | "#reverse = true means big to small , key is the function that we are passing the input to\n", 73 | "\n", 74 | "print(\"After sorting\")\n", 75 | "print(get_contour_areas(sorted_contours))\n", 76 | "\n", 77 | "for c in sorted_contours:\n", 78 | " cv2.drawContours(original_image , [c] , -1 , (255 , 0 , 0) , 3)\n", 79 | " cv2.waitKey()\n", 80 | " cv2.imshow('Contour sorted' , original_image)\n", 81 | " \n", 82 | "cv2.waitKey()\n", 83 | "cv2.destroyAllWindows()" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 8, 89 | "metadata": {}, 90 | "outputs": [ 91 | { 92 | "data": { 93 | "text/plain": [ 94 | "-1" 95 | ] 96 | }, 97 | "execution_count": 8, 98 | "metadata": {}, 99 | "output_type": "execute_result" 100 | } 101 | ], 102 | "source": [ 103 | "#spatial mode sorting\n", 104 | "\n", 105 | "def x_cord_contour(contours):\n", 106 | " #Returning X coordinate for the contour centroid\n", 107 | " if cv2.contourArea(contours) > 10:\n", 108 | " M = cv2.moments(contours)\n", 109 | " return (int(M['m10']/M['m00']))\n", 110 | " \n", 111 | "def label_contour_center(image , c):\n", 112 | " #place a red circle on center\n", 113 | " M = cv2.moments(c) #to get center\n", 114 | " cx = int(M['m10'] / M['m00'])\n", 115 | " cy = int(M['m01'] / M['m00'])\n", 116 | " \n", 117 | " cv2.circle(image , (cx , cy) , 10 , (0 , 0 , 255) , -1)\n", 118 | " return image\n", 119 | "\n", 120 | "\n", 121 | "for(i,c ) in enumerate(contours):\n", 122 | " orig = label_contour_center(image , c)\n", 123 | " \n", 124 | "cv2.imshow('contours center' , image)\n", 125 | "cv2.waitKey()" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | 
"execution_count": null, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [] 134 | } 135 | ], 136 | "metadata": { 137 | "kernelspec": { 138 | "display_name": "Python 3", 139 | "language": "python", 140 | "name": "python3" 141 | }, 142 | "language_info": { 143 | "codemirror_mode": { 144 | "name": "ipython", 145 | "version": 3 146 | }, 147 | "file_extension": ".py", 148 | "mimetype": "text/x-python", 149 | "name": "python", 150 | "nbconvert_exporter": "python", 151 | "pygments_lexer": "ipython3", 152 | "version": "3.7.1" 153 | } 154 | }, 155 | "nbformat": 4, 156 | "nbformat_minor": 2 157 | } 158 | -------------------------------------------------------------------------------- /daisies.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/daisies.jpg -------------------------------------------------------------------------------- /inpu1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/inpu1.jpg -------------------------------------------------------------------------------- /input.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/input.jpg -------------------------------------------------------------------------------- /input2.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/input2.jpeg -------------------------------------------------------------------------------- /input3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/input3.png -------------------------------------------------------------------------------- /square-01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/square-01.png -------------------------------------------------------------------------------- /sudoku.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zadahmed/openCVtutorials/032ca5b45e183a2c3474a54eb60d34e9a25b73d3/sudoku.jpg --------------------------------------------------------------------------------