├── requirements.txt
├── LICENSE
├── README.md
└── main.py

/requirements.txt:
--------------------------------------------------------------------------------
opencv-python
mediapipe
numpy

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2025 Tuba Khan

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Hand Tracking AR UI

This project is an Augmented Reality (AR) hand-tracking user interface demo. It uses Python, OpenCV, and MediaPipe to detect your hand via webcam and overlays futuristic UI graphics (radial gauges, HUD elements, and gesture-based controls) directly onto your hand in real time.

## Features
- Real-time hand tracking using MediaPipe
- AR-style radial and pinch UI overlays
- Gesture-based switching (open hand, pinch, fist)
- Futuristic HUD graphics: concentric circles, radial ticks, core pattern, numeric overlays
- All graphics generated programmatically

## Technologies Used
- Python 3.8+
- OpenCV
- MediaPipe
- NumPy

## Installation
1. Clone this repository:
   ```bash
   git clone https://github.com//.git
   cd
   ```
2. Install dependencies:
   ```bash
   pip install -r requirements.txt
   ```
3. Run the project:
   ```bash
   python main.py
   ```

## Usage
- Allow webcam access when prompted.
- Move your hand in front of the camera to interact with the AR UI overlays.
- Try different gestures (open hand, pinch, fist) to see the UI change; the sketch below shows the rough logic.
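
## How the gestures are detected

The gesture switching described above comes down to two pixel distances taken from the MediaPipe hand landmarks in `main.py`: the mean distance from the palm (landmark 9, the middle-finger MCP joint) to the five fingertips, and the distance between the thumb and index fingertips. The sketch below mirrors that heuristic; the function name `classify_gesture` is only illustrative, and the thresholds (70 and 60, as used in `main.py`) are in pixels, so they may need tuning for your camera resolution and hand distance.

```python
import numpy as np

def classify_gesture(lm):
    """lm: list of 21 (x, y) pixel landmarks from MediaPipe Hands."""
    palm = np.array(lm[9])                                 # middle-finger MCP ~ palm centre
    tips = [np.array(lm[i]) for i in (4, 8, 12, 16, 20)]   # thumb..pinky fingertips
    avg_dist = np.mean([np.linalg.norm(t - palm) for t in tips])

    pinch_dist = np.linalg.norm(np.array(lm[4]) - np.array(lm[8]))
    pinch_val = int(100 - min(pinch_dist, 100))            # 0 = far apart, 100 = touching

    if avg_dist > 70:       # fingertips spread away from the palm -> full AR HUD
        return "open"
    if pinch_val < 60:      # hand not open, thumb/index still apart -> pinch readout
        return "pinch"
    return "fist"           # everything curled in -> simple glow circle
```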

## Author
Made by Tuba Khan

## License
MIT License

--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
import cv2
import mediapipe as mp
import numpy as np

# Initialize MediaPipe Hands
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(static_image_mode=False,
                       max_num_hands=1,
                       min_detection_confidence=0.7,
                       min_tracking_confidence=0.7)

# Colors for UI overlays (reference style)
CYAN = (255, 255, 0)
ORANGE = (0, 180, 255)
WHITE = (255, 255, 255)
RED = (0, 0, 255)
CORE = (0, 255, 180)

def draw_glow_circle(img, center, radius, color, thickness=2, glow=15):
    # Draw outer glow
    for g in range(glow, 0, -3):
        alpha = 0.08 + 0.12 * (g / glow)
        overlay = img.copy()
        cv2.circle(overlay, center, radius + g, color, thickness)
        cv2.addWeighted(overlay, alpha, img, 1 - alpha, 0, img)
    # Draw main circle
    cv2.circle(img, center, radius, color, thickness)

def draw_radial_ticks(img, center, radius, color, num_ticks=24, length=22, thickness=3):
    # Draw radial ticks (reference style)
    for i in range(num_ticks):
        angle = np.deg2rad(i * (360 / num_ticks))
        x1 = int(center[0] + (radius - length) * np.cos(angle))
        y1 = int(center[1] + (radius - length) * np.sin(angle))
        x2 = int(center[0] + radius * np.cos(angle))
        y2 = int(center[1] + radius * np.sin(angle))
        cv2.line(img, (x1, y1), (x2, y2), color, thickness)

def draw_core_pattern(img, center, radius):
    # Draw stylized core (reference style)
    for t in np.linspace(0, 2 * np.pi, 40):
        r = radius * (0.7 + 0.3 * np.sin(6 * t))
        x = int(center[0] + r * np.cos(t))
        y = int(center[1] + r * np.sin(t))
        cv2.circle(img, (x, y), 3, ORANGE, -1)
    cv2.circle(img, center, int(radius * 0.6), CYAN, 2)
    cv2.circle(img, center, int(radius * 0.4), ORANGE, 2)

def draw_hud_details(img, center):
    # Draw bottom HUD bars and segments (reference style)
    for i in range(8):
        angle = np.deg2rad(210 + i * 10)
        x1 = int(center[0] + 140 * np.cos(angle))
        y1 = int(center[1] + 140 * np.sin(angle))
        x2 = int(center[0] + 170 * np.cos(angle))
        y2 = int(center[1] + 170 * np.sin(angle))
        cv2.line(img, (x1, y1), (x2, y2), CYAN, 4)
    # Draw HUD blocks
    for i in range(4):
        angle = np.deg2rad(270 + i * 15)
        x = int(center[0] + 120 * np.cos(angle))
        y = int(center[1] + 120 * np.sin(angle))
        cv2.rectangle(img, (x - 10, y - 10), (x + 10, y + 10), CYAN, 2)

def draw_arc_segments(img, center):
    # Draw arc segments (reference style)
    cv2.ellipse(img, center, (110, 110), 0, -30, 210, CYAN, 3)
    cv2.ellipse(img, center, (100, 100), 0, -30, 210, ORANGE, 2)
    cv2.ellipse(img, center, (80, 80), 0, 0, 360, CYAN, 1)

# Start webcam
cap = cv2.VideoCapture(0)

while cap.isOpened():
    ret, frame = cap.read()
    if not ret:
        break
    frame = cv2.flip(frame, 1)
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    results = hands.process(rgb)

    if results.multi_hand_landmarks:
        for hand_landmarks in results.multi_hand_landmarks:
            h, w, _ = frame.shape
            lm = [(int(l.x * w), int(l.y * h)) for l in hand_landmarks.landmark]

            # Draw hand skeleton
            mp_drawing.draw_landmarks(frame, hand_landmarks,
                                      mp_hands.HAND_CONNECTIONS)
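
            # Gesture heuristic: landmark 9 (the middle-finger MCP joint) stands in
            # for the palm centre; the mean fingertip-to-palm distance separates an
            # open hand from a closed one, and the thumb-index tip distance drives
            # the pinch readout.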

            palm = lm[9]
            tips = [lm[i] for i in [4, 8, 12, 16, 20]]
            dists = [np.linalg.norm(np.array(tip) - np.array(palm)) for tip in tips]
            avg_dist = np.mean(dists)

            # Pinch detection (thumb tip to index tip)
            pinch_dist = np.linalg.norm(np.array(lm[4]) - np.array(lm[8]))
            pinch_val = int(100 - min(pinch_dist, 100))

            # Gesture logic
            if avg_dist > 70:
                # Open hand: full AR UI
                draw_glow_circle(frame, palm, 120, CYAN, 3, glow=30)
                draw_glow_circle(frame, palm, 90, CYAN, 2, glow=20)
                draw_glow_circle(frame, palm, 60, ORANGE, 2, glow=10)
                draw_radial_ticks(frame, palm, 120, CYAN, num_ticks=24, length=22, thickness=3)
                draw_core_pattern(frame, palm, 35)
                draw_hud_details(frame, palm)
                draw_arc_segments(frame, palm)
                # Dynamic lines to fingertips
                for i in [4, 8, 12, 16, 20]:
                    cv2.line(frame, palm, lm[i], CYAN, 2)
                    cv2.circle(frame, lm[i], 12, ORANGE, -1)
                # Numeric overlay (angle between thumb and index); clip the cosine
                # into [-1, 1] so floating-point error cannot push arccos to NaN
                v1 = np.array(lm[4]) - np.array(palm)
                v2 = np.array(lm[8]) - np.array(palm)
                denom = np.linalg.norm(v1) * np.linalg.norm(v2) + 1e-6
                cos_angle = np.clip(np.dot(v1, v2) / denom, -1.0, 1.0)
                angle = int(np.degrees(np.arccos(cos_angle)))
                # OpenCV's Hershey fonts are ASCII-only, so spell out "deg"
                cv2.putText(frame, f'{angle} deg', (palm[0] + 40, palm[1] - 40), cv2.FONT_HERSHEY_DUPLEX, 1.5, WHITE, 4)
            elif pinch_val < 60:
                # Pinch gesture: show orange arcs and value
                draw_glow_circle(frame, palm, 60, ORANGE, 3, glow=20)
                cv2.putText(frame, f'Pinch: {pinch_val}', (palm[0] - 40, palm[1] - 70), cv2.FONT_HERSHEY_SIMPLEX, 1, ORANGE, 3)
                for i in range(5):
                    cv2.ellipse(frame, (palm[0] + 80, palm[1]), (30, 30), 0, 180, 180 + pinch_val + i * 10, ORANGE, 2)
            else:
                # Fist: simple glowing circle
                draw_glow_circle(frame, palm, 60, CYAN, 3, glow=20)
                cv2.putText(frame, 'FIST', (palm[0] - 30, palm[1] - 70), cv2.FONT_HERSHEY_SIMPLEX, 1, ORANGE, 3)

    cv2.imshow('Hand Tracking AR UI', frame)
    if cv2.waitKey(1) & 0xFF == 27:
        break

# Release MediaPipe and camera resources
hands.close()
cap.release()
cv2.destroyAllWindows()

--------------------------------------------------------------------------------