├── Capture.png
├── Hand_Gesture_Ex1.py
├── Hand_Gesture_Ex2.py
└── README.md

/Capture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codeingschool/Hand-Gestures/c2986545eb51b2b251455a9521316c1f0b070eca/Capture.png
--------------------------------------------------------------------------------

/Hand_Gesture_Ex1.py:
--------------------------------------------------------------------------------
# Imports
import cv2
import numpy as np

# Load the sample hand image in grayscale
hand = cv2.imread('Capture.png', 0)
if hand is None:
    raise FileNotFoundError("Capture.png not found next to this script")

# Binarise the image: pixels brighter than 70 become white
ret, thresh = cv2.threshold(hand, 70, 255, cv2.THRESH_BINARY)

# cv2.findContours returns (contours, hierarchy) in OpenCV 4.x and
# (image, contours, hierarchy) in 3.x, so keep only the last two values
contours, _ = cv2.findContours(thresh.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)[-2:]

# Compute the convex hull of every contour and draw the hulls on a copy,
# so the original image stays unannotated
hull = [cv2.convexHull(c) for c in contours]
final = cv2.drawContours(hand.copy(), hull, -1, 255)

cv2.imshow('Original', hand)
cv2.imshow('Thresh', thresh)
cv2.imshow('Convex hull', final)

cv2.waitKey(0)
cv2.destroyAllWindows()
--------------------------------------------------------------------------------
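
Note: cv2.findContours returns three values in OpenCV 3.x but only two in OpenCV 4.x, which is why the tuple unpacking in both scripts can break depending on the installed version. A minimal sketch of a version-agnostic helper (the name find_contours is illustrative, not part of this repo):

    import cv2

    def find_contours(binary_img):
        # Keep only the last two return values so the call works on both the
        # OpenCV 3.x (image, contours, hierarchy) and 4.x (contours, hierarchy) APIs
        result = cv2.findContours(binary_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        return result[-2], result[-1]
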
/Hand_Gesture_Ex2.py:
--------------------------------------------------------------------------------
# Imports
import numpy as np
import cv2
import math

# Open the camera
capture = cv2.VideoCapture(0)

while capture.isOpened():

    # Capture a frame from the camera
    ret, frame = capture.read()
    if not ret:
        break

    # Get hand data from the rectangular sub-window
    cv2.rectangle(frame, (100, 100), (300, 300), (0, 255, 0), 0)
    crop_image = frame[100:300, 100:300]

    # Apply Gaussian blur
    blur = cv2.GaussianBlur(crop_image, (3, 3), 0)

    # Change colour space from BGR to HSV
    hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)

    # Create a binary mask where skin-coloured pixels are white and everything else is black
    mask2 = cv2.inRange(hsv, np.array([2, 0, 0]), np.array([20, 255, 255]))

    # Kernel for the morphological transformations
    kernel = np.ones((5, 5), np.uint8)

    # Apply morphological transformations to filter out background noise
    dilation = cv2.dilate(mask2, kernel, iterations=1)
    erosion = cv2.erode(dilation, kernel, iterations=1)

    # Apply Gaussian blur and threshold
    filtered = cv2.GaussianBlur(erosion, (3, 3), 0)
    ret, thresh = cv2.threshold(filtered, 127, 255, 0)

    # Show the thresholded image
    cv2.imshow("Thresholded", thresh)

    # Find contours (keep only the last two return values so this works
    # on both the OpenCV 3.x and 4.x APIs)
    contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)[-2:]

    # Blank canvas for the contour drawing, created before the try block so it
    # always exists even when no contour is found in this frame
    drawing = np.zeros(crop_image.shape, np.uint8)

    try:
        # Find the contour with the maximum area
        contour = max(contours, key=lambda x: cv2.contourArea(x))

        # Create a bounding rectangle around the contour
        x, y, w, h = cv2.boundingRect(contour)
        cv2.rectangle(crop_image, (x, y), (x + w, y + h), (0, 0, 255), 0)

        # Find the convex hull
        hull = cv2.convexHull(contour)

        # Draw the contour and its hull
        cv2.drawContours(drawing, [contour], -1, (0, 255, 0), 0)
        cv2.drawContours(drawing, [hull], -1, (0, 0, 255), 0)

        # Find convexity defects
        hull = cv2.convexHull(contour, returnPoints=False)
        defects = cv2.convexityDefects(contour, hull)

        # Use the cosine rule to find the angle at the far point between the
        # start and end points (the convex points, i.e. the fingertips) for every defect
        count_defects = 0

        for i in range(defects.shape[0]):
            s, e, f, d = defects[i, 0]
            start = tuple(contour[s][0])
            end = tuple(contour[e][0])
            far = tuple(contour[f][0])

            a = math.sqrt((end[0] - start[0]) ** 2 + (end[1] - start[1]) ** 2)
            b = math.sqrt((far[0] - start[0]) ** 2 + (far[1] - start[1]) ** 2)
            c = math.sqrt((end[0] - far[0]) ** 2 + (end[1] - far[1]) ** 2)
            angle = math.degrees(math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c)))

            # If the angle is 90 degrees or less, count the defect as a gap
            # between fingers and mark the far point with a circle
            if angle <= 90:
                count_defects += 1
                cv2.circle(crop_image, far, 1, [0, 0, 255], -1)

            cv2.line(crop_image, start, end, [0, 255, 0], 2)

        # Print the number of fingers (defects + 1)
        if count_defects == 0:
            cv2.putText(frame, "ONE", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
        elif count_defects == 1:
            cv2.putText(frame, "TWO", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
        elif count_defects == 2:
            cv2.putText(frame, "THREE", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
        elif count_defects == 3:
            cv2.putText(frame, "FOUR", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
        elif count_defects == 4:
            cv2.putText(frame, "FIVE", (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2)
        else:
            pass
    except Exception:
        pass

    # Show the required images
    cv2.imshow("Gesture", frame)
    all_image = np.hstack((drawing, crop_image))
    cv2.imshow('Contours', all_image)

    # Close the camera if 'q' is pressed
    if cv2.waitKey(1) == ord('q'):
        break

capture.release()
cv2.destroyAllWindows()
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
# Hand-Gestures
Hand gesture recognition using OpenCV and Python.

Put all the files in the same folder.

First, run Hand_Gesture_Ex1.py. It thresholds the sample image Capture.png and draws the convex hull of the detected hand, so you can check that your OpenCV setup works.

Then run Hand_Gesture_Ex2.py for the live result: put your hand inside the green box and the number of extended fingers is printed on the frame.
--------------------------------------------------------------------------------
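
The finger counting in Hand_Gesture_Ex2.py relies on the law of cosines: for each convexity defect, the angle at the far point between the two hull points is computed, and angles of 90 degrees or less are counted as gaps between fingers. A minimal sketch of that calculation on made-up coordinates (the points below are illustrative only, not taken from a real frame):

    import math

    def defect_angle(start, end, far):
        # Side lengths of the triangle formed by the two hull points and the defect point
        a = math.hypot(end[0] - start[0], end[1] - start[1])
        b = math.hypot(far[0] - start[0], far[1] - start[1])
        c = math.hypot(end[0] - far[0], end[1] - far[1])
        # Law of cosines: angle at the far (defect) point, in degrees
        return math.degrees(math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c)))

    print(defect_angle((0, 0), (100, 0), (50, 60)))  # ~79.6 degrees -> counted as a finger gap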