├── requirements.txt ├── utils.py ├── LICENSE ├── README.md ├── main.py └── hand_overlay.py /requirements.txt: -------------------------------------------------------------------------------- 1 | mediapipe>=0.10.1 2 | opencv-python>=4.7.0 3 | numpy>=1.23.0 4 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | class Smoother: 4 | """Simple exponential smoother for 2D/3D points or scalars""" 5 | def __init__(self, alpha=0.6): 6 | self.alpha = alpha 7 | self.state = None 8 | 9 | def update(self, value): 10 | v = np.array(value, dtype=float) 11 | if self.state is None: 12 | self.state = v 13 | else: 14 | self.state = self.alpha * v + (1 - self.alpha) * self.state 15 | return self.state 16 | 17 | 18 | def angle_between(a, b): 19 | """Return angle in degrees between vectors a and b""" 20 | a = np.array(a, dtype=float) 21 | b = np.array(b, dtype=float) 22 | an = np.linalg.norm(a) 23 | bn = np.linalg.norm(b) 24 | if an == 0 or bn == 0: 25 | return 0.0 26 | cos = np.clip(np.dot(a, b) / (an * bn), -1.0, 1.0) 27 | return np.degrees(np.arccos(cos)) 28 | 29 | 30 | def project_point(pt, w, h): 31 | """Convert normalized MediaPipe point to image pixel coordinates""" 32 | return int(pt[0] * w), int(pt[1] * h) 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 tubakhxn 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Hand HUD — palm-anchored kinematic dashboard 2 | 3 | This project captures webcam input, detects a hand using MediaPipe, and draws a white, mechanical-style kinematic HUD over the hand (palm-centered radial UI, finger bones, rotation readout, small 3D cube and grid) similar to the reference images. 4 | 5 | ## Files 6 | 7 | - `requirements.txt` — Python dependencies 8 | - `main.py` — application entrypoint and webcam loop 9 | - `hand_overlay.py` — functions for drawing the HUD and computing kinematics 10 | - `utils.py` — small helpers (smoothing, geometry) 11 | 12 | ## How to run (Windows PowerShell) 13 | 14 | 1. 
Create and activate a virtual environment (optional but recommended): 15 | 16 | python -m venv .venv; .\.venv\Scripts\Activate.ps1 17 | 18 | 2. Install dependencies: 19 | 20 | pip install -r requirements.txt 21 | 22 | 3. Run: 23 | 24 | python main.py 25 | 26 | ## Notes 27 | 28 | - Works with a single hand in frame. For multiple hands, raise `max_num_hands` in `main.py` (see the "Multi-hand sketch" section below). 29 | - Tweak the smoothing factors (`Smoother(alpha=...)` in `main.py`) and the drawing constants in `hand_overlay.py`. 30 | - Frame recording and image export are not implemented yet; open an issue if you need them. 31 | 32 | ## Copyright & Attribution 33 | 34 | This repository is published publicly by the original creator: **tubakhxn**. 35 | 36 | You are welcome to fork, clone, and contribute to this project. If you reuse substantial parts of the code or publish derivative works, please give clear credit to the original author. A suggested credit line is: 37 | 38 | > Based on tubakhxn/hand-overlay — https://github.com/tubakhxn/hand-overlay 39 | 40 | Please keep a copy of this README and the `LICENSE` file in redistributed or forked versions so attribution remains visible. 41 | 42 | ## License 43 | 44 | This project is released under the MIT License — see the `LICENSE` file included in this repository. The MIT License permits reuse, modification, and redistribution; please preserve the copyright notice and give credit to the original author when distributing or republishing the work. 45 | 46 | If you would prefer a license that explicitly requires attribution (for example, Creative Commons Attribution 4.0), open an issue to discuss switching the license file. 47 | 48 | ## Contributing 49 | 50 | Contributions are welcome. To contribute: 51 | 52 | 1. Open an issue to discuss proposed changes or file a bug report. 53 | 2. Create a branch for your changes and open a pull request with a clear description. 54 | 3. Keep changes focused and include tests or usage notes when appropriate. 55 | 56 | By submitting a pull request you agree that your contribution will be made under the repository's license (MIT by default).
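## Multi-hand sketch

`main.py` currently hard-codes `max_num_hands=1` and only reads `results.multi_hand_landmarks[0]`. The snippet below is a minimal sketch of the two changes needed for two-hand tracking. It assumes a webcam at index 0 and processes a single frame for brevity; the per-hand drawing calls are elided because they are the same ones `run()` already makes.

```python
import cv2
import mediapipe as mp
from hand_overlay import landmarks_to_pixel

mp_hands = mp.solutions.hands
cap = cv2.VideoCapture(0)
with mp_hands.Hands(static_image_mode=False, max_num_hands=2,
                    min_detection_confidence=0.6, min_tracking_confidence=0.6) as hands:
    ok, frame = cap.read()
    if ok:
        h, w = frame.shape[:2]
        results = hands.process(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
        if results.multi_hand_landmarks:
            # iterate over every detected hand instead of indexing [0]
            for hand in results.multi_hand_landmarks:
                lm = [(l.x, l.y, l.z) for l in hand.landmark]
                pix = landmarks_to_pixel(lm, w, h)
                # ...run the same per-hand HUD drawing as in main.run()...
cap.release()
```

Note that the smoothers in `main.py` are module-level shared state, so a real two-hand version should keep one `Smoother` per hand to stop the readouts blending between hands.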
57 | 58 | ## Contact 59 | 60 | If you want to reach out, open an issue or visit the author's GitHub profile: https://github.com/tubakhxn 61 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import os 2 | import cv2 3 | import mediapipe as mp 4 | import numpy as np 5 | import time 6 | from hand_overlay import ( 7 | draw_skeleton, 8 | draw_palm_radial_ui, 9 | draw_rotation_text, 10 | draw_cube_and_grid, 11 | draw_fingertip_gears, 12 | draw_palm_data_text, 13 | landmarks_to_pixel, 14 | ) 15 | from utils import Smoother, angle_between 16 | 17 | # Quiet TensorFlow / TF logger from MediaPipe 18 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' 19 | 20 | mp_hands = mp.solutions.hands 21 | 22 | # smoothing objects 23 | palm_smoother = Smoother(alpha=0.6) 24 | rot_smoother = Smoother(alpha=0.6) 25 | openess_smoother = Smoother(alpha=0.4) 26 | 27 | 28 | def compute_palm_rotation(landmarks): 29 | """Estimate palm rotation using vector from wrist (0) to middle_finger_mcp (9) 30 | returns degrees of rotation around camera z (2D) 31 | """ 32 | w = np.array(landmarks[0][:2]) 33 | m = np.array(landmarks[9][:2]) 34 | v = m - w 35 | angle = np.degrees(np.arctan2(v[1], v[0])) 36 | # normalize to 0-360 37 | angle = float(angle % 360) 38 | return angle 39 | 40 | 41 | def run(): 42 | cap = cv2.VideoCapture(0) 43 | prev = time.time() 44 | fps = 0.0 45 | with mp_hands.Hands(static_image_mode=False, max_num_hands=1, min_detection_confidence=0.6, min_tracking_confidence=0.6) as hands: 46 | try: 47 | while True: 48 | ok, frame = cap.read() 49 | if not ok: 50 | break 51 | now = time.time() 52 | dt = now - prev if now - prev > 0 else 1e-6 53 | prev = now 54 | fps = 0.9 * fps + 0.1 * (1.0 / dt) 55 | 56 | h, w = frame.shape[:2] 57 | frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) 58 | results = hands.process(frame_rgb) 59 | 60 | overlay = frame.copy() 61 | t = now 62 | if results.multi_hand_landmarks: 63 | hand = results.multi_hand_landmarks[0] 64 | lm = [(l.x, l.y, l.z) for l in hand.landmark] 65 | pix = landmarks_to_pixel(lm, w, h) 66 | 67 | # compute palm center roughly as average of wrist and palm base 68 | palm = ((pix[0][0] + pix[9][0]) // 2, (pix[0][1] + pix[9][1]) // 2) 69 | palm = tuple(map(int, palm_smoother.update(palm))) 70 | 71 | rot = compute_palm_rotation(lm) 72 | rot = float(rot_smoother.update(rot)) 73 | 74 | # draw animated HUD 75 | draw_palm_radial_ui(overlay, palm, rot, t=t) 76 | draw_skeleton(overlay, pix, t=t) 77 | try: 78 | from hand_overlay import draw_decorative_bones 79 | draw_decorative_bones(overlay, pix) 80 | except Exception: 81 | pass 82 | draw_rotation_text(overlay, palm, rot) 83 | # minimal HUD (no extra decorative leaves/connectors) 84 | 85 | # cube near wrist 86 | wrist_anchor = (pix[0][0] - 80, pix[0][1] + 40) 87 | # estimate openness: mean distance of fingertips from palm center mapped to 0-100 88 | tips = [4, 8, 12, 16, 20] 89 | dists = [] 90 | for ti in tips: 91 | tx = int(lm[ti][0] * w) 92 | ty = int(lm[ti][1] * h) 93 | dists.append(np.hypot(tx - palm[0], ty - palm[1])) 94 | openness = 0 95 | if dists: 96 | mean_dist = float(np.mean(dists)) 97 | # dynamic calibration: closed ~ small fraction of frame, open ~ 0.55 * min(w,h) 98 | closed_ref = max(12.0, min(40.0, min(w, h) * 0.04)) 99 | open_ref = max(60.0, min(w, h) * 0.55) 100 | raw_openness = (mean_dist - closed_ref) / (open_ref - closed_ref) * 100.0 101 | raw_openness = float(np.clip(raw_openness, 0.0, 
100.0)) 102 | # smooth openness so it can still reach 0/100 but not jitter 103 | openness = float(openess_smoother.update(raw_openness)) 104 | draw_cube_and_grid(overlay, wrist_anchor, t=t) 105 | # pass scale into palm UI so fingertip gear size responds 106 | draw_palm_radial_ui(overlay, palm, rot, t=t, width=1.0 + openness / 160.0) 107 | draw_fingertip_gears(overlay, pix, rot, t=t) 108 | draw_palm_data_text(overlay, wrist_anchor, openness) 109 | 110 | # HUD: FPS and instructions 111 | cv2.putText(overlay, f"FPS: {int(fps)}", (10, 24), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (180, 180, 180), 2, cv2.LINE_AA) 112 | cv2.putText(overlay, "Press Esc to quit", (10, h - 16), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (200, 200, 200), 1, cv2.LINE_AA) 113 | 114 | cv2.imshow('Hand HUD', overlay) 115 | key = cv2.waitKey(1) & 0xFF 116 | if key == 27: 117 | break 118 | except KeyboardInterrupt: 119 | # graceful exit 120 | pass 121 | finally: 122 | cap.release() 123 | cv2.destroyAllWindows() 124 | 125 | 126 | if __name__ == '__main__': 127 | run() 128 | -------------------------------------------------------------------------------- /hand_overlay.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | from utils import project_point, angle_between 4 | 5 | # visual constants 6 | LINE_COLOR = (255, 255, 255) # white 7 | LINE_WIDTH = 1 8 | DOT_RADIUS = 3 9 | FONT = cv2.FONT_HERSHEY_SIMPLEX 10 | 11 | 12 | def _add_glow(img, draw_fn, alpha=0.15, layers=3): 13 | """Simple glow effect: draw thicker shapes on a transparent overlay and blend.""" 14 | overlay = img.copy() 15 | for i in range(layers, 0, -1): 16 | temp = img.copy() 17 | draw_fn(temp, width=LINE_WIDTH + i * 2, color=LINE_COLOR) 18 | cv2.addWeighted(temp, alpha * (i / layers), overlay, 1 - alpha * (i / layers), 0, overlay) 19 | return overlay 20 | 21 | 22 | def draw_skeleton(img, landmarks, t=0.0, width=LINE_WIDTH, color=LINE_COLOR, handedness=None): 23 | """Draw kinematic lines connecting finger joints and small dots for joints. 24 | landmarks: list of 21 (x,y) pixel tuples like MediaPipe hand landmarks projected to image coords 25 | t: time in seconds for animated accents 26 | """ 27 | # finger indices from MediaPipe 28 | fingers = [ 29 | [0, 1, 2, 3, 4], 30 | [0, 5, 6, 7, 8], 31 | [0, 9, 10, 11, 12], 32 | [0, 13, 14, 15, 16], 33 | [0, 17, 18, 19, 20], 34 | ] 35 | 36 | # draw lines 37 | for f in fingers: 38 | pts = [landmarks[i] for i in f] 39 | for i in range(len(pts) - 1): 40 | cv2.line(img, pts[i], pts[i+1], color, width, cv2.LINE_AA) 41 | 42 | # draw joints 43 | for p in landmarks: 44 | cv2.circle(img, p, DOT_RADIUS, color, -1, cv2.LINE_AA) 45 | # (NO extra decorative flow lines here) keep the skeleton clean and minimal 46 | 47 | 48 | def draw_decorative_bones(img, landmarks, color=LINE_COLOR): 49 | """Draw subtle decorative accents along each finger bone to match reference style. 50 | This adds a thin parallel stroke and small leaf/ellipse ornaments between joints. 51 | landmarks are pixel (x,y) tuples. 
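The parallel-stroke offset and the inner-dot radius are scaled by the wrist-to-middle-MCP distance (landmarks 0 and 9), so the ornaments keep roughly the same proportion as the hand moves toward or away from the camera.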
52 | """ 53 | fingers = [ 54 | [0, 1, 2, 3, 4], 55 | [0, 5, 6, 7, 8], 56 | [0, 9, 10, 11, 12], 57 | [0, 13, 14, 15, 16], 58 | [0, 17, 18, 19, 20], 59 | ] 60 | # scale accents based on wrist->middle mcp distance 61 | try: 62 | w = landmarks[0] 63 | m = landmarks[9] 64 | scale_ref = max(1.0, int(np.hypot(m[0]-w[0], m[1]-w[1]) / 40.0)) 65 | except Exception: 66 | scale_ref = 1 67 | 68 | for f in fingers: 69 | for i in range(1, len(f)): 70 | a = landmarks[f[i-1]] 71 | b = landmarks[f[i]] 72 | ax, ay = a 73 | bx, by = b 74 | vx = bx - ax 75 | vy = by - ay 76 | L = np.hypot(vx, vy) 77 | if L < 6: 78 | continue 79 | # perpendicular unit vector 80 | px = -vy / L 81 | py = vx / L 82 | # small offset for parallel stroke (scale with hand) 83 | off = int(4 * scale_ref) 84 | pa1 = (int(ax + px * off), int(ay + py * off)) 85 | pb1 = (int(bx + px * off), int(by + py * off)) 86 | pa2 = (int(ax - px * off), int(ay - py * off)) 87 | pb2 = (int(bx - px * off), int(by - py * off)) 88 | # draw the subtle parallel strokes 89 | cv2.line(img, pa1, pb1, color, 1, cv2.LINE_AA) 90 | cv2.line(img, pa2, pb2, color, 1, cv2.LINE_AA) 91 | 92 | # draw small leaf/ellipse at the segment midpoint 93 | mx = int((ax + bx) / 2) 94 | my = int((ay + by) / 2) 95 | leaf_w = max(6, int(L / 4)) 96 | leaf_h = max(3, int(leaf_w / 3)) 97 | angle = int(np.degrees(np.arctan2(vy, vx))) 98 | # main ellipse 99 | cv2.ellipse(img, (mx, my), (leaf_w, leaf_h), angle, 0, 360, color, 1, cv2.LINE_AA) 100 | # small inner dot 101 | cv2.circle(img, (mx, my), max(1, scale_ref), color, -1, cv2.LINE_AA) 102 | 103 | 104 | def draw_palm_radial_ui(img, palm_center, rotation_deg, t=0.0, width=1, color=LINE_COLOR): 105 | """Draw a palm-anchored radial UI: circular rings + spoke lines + animated mechanical ticks""" 106 | cx, cy = palm_center 107 | # allow scale via width param hack: if width > 1 treat as scale 108 | scale = 1.0 109 | try: 110 | scale = float(width) 111 | except Exception: 112 | scale = 1.0 113 | # thickness must be integer for OpenCV drawing functions 114 | thickness = 1 115 | # The reference uses minimal central mechanics without full concentric rings. 116 | # Remove outer rings/spokes/ticks to match the requested cleaner look. 117 | # Draw only a small central marker and a couple of tiny segmented accents. 118 | cv2.circle(img, (cx, cy), int(18 * scale), color, 1, cv2.LINE_AA) 119 | # small segmented accents (not full rings) 120 | spin = (rotation_deg + t * 40) % 360 121 | for a in [-30, 30]: 122 | theta1 = np.deg2rad(a + spin) 123 | theta2 = np.deg2rad(a + 8 + spin) 124 | pt1 = (int(cx + np.cos(theta1) * 36 * scale), int(cy + np.sin(theta1) * 36 * scale)) 125 | pt2 = (int(cx + np.cos(theta2) * 36 * scale), int(cy + np.sin(theta2) * 36 * scale)) 126 | cv2.line(img, pt1, pt2, color, 1, cv2.LINE_AA) 127 | 128 | 129 | def draw_rotation_text(img, palm_center, rotation_deg, color=LINE_COLOR): 130 | cx, cy = palm_center 131 | txt = f"rotation {int(rotation_deg)}" 132 | # larger, more visible rotation text 133 | red = (0, 0, 255) 134 | # small red triangle marker to the left of the text 135 | try: 136 | tri = np.array([[cx - 76, cy + 18], [cx - 60, cy + 10], [cx - 60, cy + 28]]) 137 | cv2.fillPoly(img, [tri], red) 138 | except Exception: 139 | pass 140 | cv2.putText(img, txt, (cx - 56, cy + 26), FONT, 0.9, red, 2, cv2.LINE_AA) 141 | 142 | 143 | def draw_cube_and_grid(img, anchor_point, t=0.0): 144 | """Draw a small orange cube and a blue grid beneath it, near the wrist area. 
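Colors are BGR tuples, so (0, 80, 255) renders the cube in orange and (255, 120, 0) renders the grid in blue.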
Adds small pulsing animation.""" 145 | ax, ay = anchor_point 146 | # cube (isometric-ish) 147 | orange = (0, 80, 255) 148 | blue = (255, 120, 0) 149 | size = 46 150 | pulse = 1 + 0.05 * np.sin(t * 6) 151 | size = int(size * pulse) 152 | # front square 153 | p1 = (ax, ay) 154 | p2 = (ax + size, ay) 155 | p3 = (ax + size, ay - size) 156 | p4 = (ax, ay - size) 157 | cv2.polylines(img, [np.array([p1, p2, p3, p4])], True, orange, 1, cv2.LINE_AA) 158 | # top square offset 159 | offset = (-14, -16) 160 | q1 = (p1[0] + offset[0], p1[1] + offset[1]) 161 | q2 = (p2[0] + offset[0], p2[1] + offset[1]) 162 | q3 = (p3[0] + offset[0], p3[1] + offset[1]) 163 | q4 = (p4[0] + offset[0], p4[1] + offset[1]) 164 | cv2.polylines(img, [np.array([q1, q2, q3, q4])], True, orange, 1, cv2.LINE_AA) 165 | # connect 166 | for pa, qb in zip([p1, p2, p3, p4], [q1, q2, q3, q4]): 167 | cv2.line(img, pa, qb, orange, 1, cv2.LINE_AA) 168 | 169 | # small grid beneath 170 | grid_origin = (ax + size + 8, ay + 14) 171 | gw = 5 172 | gh = 5 173 | cell = 12 174 | for r in range(gh): 175 | for c in range(gw): 176 | pt = (grid_origin[0] + c * cell, grid_origin[1] + r * cell) 177 | rect = np.array([pt, (pt[0]+cell, pt[1]), (pt[0]+cell, pt[1]+cell), (pt[0], pt[1]+cell)]) 178 | cv2.polylines(img, [rect], True, blue, 1, cv2.LINE_AA) 179 | 180 | 181 | def _draw_leaf_between(img, a, b, color=LINE_COLOR): 182 | """Draw a small leaf/ellipse between two points to mimic decorative links.""" 183 | ax, ay = a 184 | bx, by = b 185 | mx = (ax + bx) // 2 186 | my = (ay + by) // 2 187 | vx = bx - ax 188 | vy = by - ay 189 | L = max(1, int(np.hypot(vx, vy))) 190 | # perpendicular 191 | px = -vy 192 | py = vx 193 | # normalize and scale 194 | pn = np.hypot(px, py) 195 | if pn == 0: 196 | return 197 | px = int(px / pn * min(12, L // 3)) 198 | py = int(py / pn * min(12, L // 3)) 199 | pts = np.array([[mx - px, my - py], [mx + px, my + py]]) 200 | cv2.ellipse(img, (mx, my), (max(6, L//4), max(6, int(abs(L/6)))), int(np.degrees(np.arctan2(vy, vx))), 0, 360, color, 1, cv2.LINE_AA) 201 | 202 | 203 | def draw_finger_leaves(img, landmarks, color=LINE_COLOR): 204 | # removed: decorative leaves are omitted to keep lines minimal 205 | return 206 | 207 | 208 | def draw_connecting_fingertip_lines(img, landmarks, palm_center, color=LINE_COLOR): 209 | """Draw thin lines connecting fingertips to the palm center, as in the reference.""" 210 | # removed: connectors to palm (kept out to match reference) 211 | return 212 | 213 | 214 | def draw_palm_data_text(img, anchor_point, openness_pct, color=(0, 80, 255)): 215 | ax, ay = anchor_point 216 | txt = f"palm data\n{int(openness_pct)}%" 217 | # draw simple multiline by splitting 218 | lines = txt.split('\n') 219 | for i, line in enumerate(lines): 220 | cv2.putText(img, line, (ax - 20, ay + i * 22), cv2.FONT_HERSHEY_SIMPLEX, 0.8, color, 2, cv2.LINE_AA) 221 | # small rectangular accent near palm anchor 222 | cv2.rectangle(img, (ax - 8, ay + 28), (ax + 8, ay + 36), color, 1, cv2.LINE_AA) 223 | 224 | def draw_fingertip_gears(img, landmarks, rotation_deg, t=0.0, color=LINE_COLOR): 225 | """Draw small gear-like circles at fingertips that rotate with time and palm rotation.""" 226 | tips = [4, 8, 12, 16, 20] 227 | for i, idx in enumerate(tips): 228 | cx, cy = landmarks[idx][:2] 229 | r = 22 # even larger fingertip gears for the reference look 230 | # base circle 231 | cv2.circle(img, (cx, cy), r, color, 1, cv2.LINE_AA) 232 | # animated spokes 233 | spokes = 6 234 | phase = np.deg2rad(rotation_deg + t * 180 + i * 30) 235 | for s 
in range(spokes): 236 | ang = phase + s * (2 * np.pi / spokes) 237 | x2 = int(cx + np.cos(ang) * r) 238 | y2 = int(cy + np.sin(ang) * r) 239 | cv2.line(img, (cx, cy), (x2, y2), color, 1, cv2.LINE_AA) 240 | 241 | 242 | def landmarks_to_pixel(landmarks, w, h): 243 | return [(int(x * w), int(y * h)) for (x, y, z) in landmarks] 244 | --------------------------------------------------------------------------------