└── Handpose Tutorial.ipynb

/Handpose Tutorial.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 1. Install and Import Dependencies"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!pip install mediapipe opencv-python"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import mediapipe as mp\n",
    "import cv2\n",
    "import numpy as np\n",
    "import uuid\n",
    "import os"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "mp_drawing = mp.solutions.drawing_utils\n",
    "mp_hands = mp.solutions.hands"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 2. Draw Hands"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cap = cv2.VideoCapture(0)\n",
    "\n",
    "with mp_hands.Hands(min_detection_confidence=0.8, min_tracking_confidence=0.5) as hands:\n",
    "    while cap.isOpened():\n",
    "        ret, frame = cap.read()\n",
    "\n",
    "        # Stop if the frame could not be read (e.g. webcam disconnected)\n",
    "        if not ret:\n",
    "            break\n",
    "\n",
    "        # BGR 2 RGB\n",
    "        image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n",
    "\n",
    "        # Flip on horizontal\n",
    "        image = cv2.flip(image, 1)\n",
    "\n",
    "        # Set flag\n",
    "        image.flags.writeable = False\n",
    "\n",
    "        # Detections\n",
    "        results = hands.process(image)\n",
    "\n",
    "        # Set flag to true\n",
    "        image.flags.writeable = True\n",
    "\n",
    "        # RGB 2 BGR\n",
    "        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n",
    "\n",
    "        # Print the raw results object\n",
    "        print(results)\n",
    "\n",
    "        # Rendering results\n",
    "        if results.multi_hand_landmarks:\n",
    "            for num, hand in enumerate(results.multi_hand_landmarks):\n",
    "                mp_drawing.draw_landmarks(image, hand, mp_hands.HAND_CONNECTIONS,\n",
    "                                          mp_drawing.DrawingSpec(color=(121, 22, 76), thickness=2, circle_radius=4),\n",
    "                                          mp_drawing.DrawingSpec(color=(250, 44, 250), thickness=2, circle_radius=2))\n",
    "\n",
    "        cv2.imshow('Hand Tracking', image)\n",
    "\n",
    "        if cv2.waitKey(10) & 0xFF == ord('q'):\n",
    "            break\n",
    "\n",
    "cap.release()\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "mp_drawing.DrawingSpec??"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 3. Output Images"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "os.makedirs('Output Images', exist_ok=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "cap = cv2.VideoCapture(0)\n",
    "\n",
    "with mp_hands.Hands(min_detection_confidence=0.8, min_tracking_confidence=0.5) as hands:\n",
    "    while cap.isOpened():\n",
    "        ret, frame = cap.read()\n",
    "\n",
    "        # Stop if the frame could not be read (e.g. webcam disconnected)\n",
    "        if not ret:\n",
    "            break\n",
    "\n",
    "        # BGR 2 RGB\n",
    "        image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n",
    "\n",
    "        # Flip on horizontal\n",
    "        image = cv2.flip(image, 1)\n",
    "\n",
    "        # Set flag\n",
    "        image.flags.writeable = False\n",
    "\n",
    "        # Detections\n",
    "        results = hands.process(image)\n",
    "\n",
    "        # Set flag to true\n",
    "        image.flags.writeable = True\n",
    "\n",
    "        # RGB 2 BGR\n",
    "        image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n",
    "\n",
    "        # Print the raw results object\n",
    "        print(results)\n",
    "\n",
    "        # Rendering results\n",
    "        if results.multi_hand_landmarks:\n",
    "            for num, hand in enumerate(results.multi_hand_landmarks):\n",
    "                mp_drawing.draw_landmarks(image, hand, mp_hands.HAND_CONNECTIONS,\n",
    "                                          mp_drawing.DrawingSpec(color=(121, 22, 76), thickness=2, circle_radius=4),\n",
    "                                          mp_drawing.DrawingSpec(color=(250, 44, 250), thickness=2, circle_radius=2))\n",
    "\n",
    "        # Save our image\n",
    "        cv2.imwrite(os.path.join('Output Images', '{}.jpg'.format(uuid.uuid1())), image)\n",
    "        cv2.imshow('Hand Tracking', image)\n",
    "\n",
    "        if cv2.waitKey(10) & 0xFF == ord('q'):\n",
    "            break\n",
    "\n",
    "cap.release()\n",
    "cv2.destroyAllWindows()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "hands",
   "language": "python",
   "name": "hands"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
--------------------------------------------------------------------------------
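A natural next step after this notebook is to read the detected landmarks back out as pixel coordinates, for example to track a fingertip or build simple gestures. The sketch below is illustrative rather than part of the original notebook: it reuses the mp_hands setup and the results object produced by hands.process() above, and the helper name get_fingertip_px is hypothetical. The HandLandmark.INDEX_FINGER_TIP index and the normalized .x/.y landmark fields are standard MediaPipe Hands attributes.

import cv2
import mediapipe as mp

mp_hands = mp.solutions.hands

def get_fingertip_px(hand_landmarks, image_shape):
    """Convert the normalized index-fingertip landmark to pixel coordinates.

    hand_landmarks is one element of results.multi_hand_landmarks; MediaPipe
    returns x/y normalized to [0, 1], so we scale by the frame width/height.
    """
    h, w = image_shape[:2]
    tip = hand_landmarks.landmark[mp_hands.HandLandmark.INDEX_FINGER_TIP]
    return int(tip.x * w), int(tip.y * h)

# Example use inside the notebook's rendering loop (after draw_landmarks):
#     for hand in results.multi_hand_landmarks:
#         x, y = get_fingertip_px(hand, image.shape)
#         cv2.circle(image, (x, y), 8, (0, 255, 0), -1)

Because the frame is flipped before hands.process() is called, these pixel coordinates already line up with the mirrored preview shown by cv2.imshow.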