#!/usr/bin/python

import cv
import numpy
import pylab
import time


class Camera(object):
    """
    A simple web-cam wrapper around the legacy OpenCV capture interface.
    """

    def __init__(self, camera=0):
        """
        @param camera Index of the OpenCV capture device to open.
        @raises Exception If the capture device cannot be opened.
        """
        self.cam = cv.CaptureFromCAM(camera)

        if not self.cam:
            raise Exception("Camera not accessible.")

    def get_frame(self):
        """
        Return the most recent (successful) image from the webcam.

        Blocks, retrying, until QueryFrame yields a non-None frame.
        """
        frame = None
        while not frame:
            frame = cv.QueryFrame(self.cam)

        return frame

    def get_fps(self):
        """
        Nominal frame-rate reported by the driver, or 30.0 when the driver
        does not report one (GetCaptureProperty returns -1).
        """
        fps = cv.GetCaptureProperty(self.cam, cv.CV_CAP_PROP_FPS)
        return fps if fps != -1 else 30.0

    def get_size(self):
        """
        Return the capture frame size as a (width, height) tuple of ints.
        """
        w = int(cv.GetCaptureProperty(self.cam, cv.CV_CAP_PROP_FRAME_WIDTH))
        h = int(cv.GetCaptureProperty(self.cam, cv.CV_CAP_PROP_FRAME_HEIGHT))
        return (w, h)


class FaceDetector(object):
    """
    Haar-cascade based frontal-face detector for fixed-size images.
    """

    def __init__(self, width, height, cascade_file="haarcascade_frontalface_alt.xml"):
        """
        Detects faces in an image.
        @param width        Width of the images that will be supplied
        @param height       Height of the images that will be supplied
        @param cascade_file Haar cascade data file for fronts of faces
        """

        # Load the cascade
        self.cascade = cv.Load(cascade_file)

        # Storage for the algorithm to use
        self.storage = cv.CreateMemStorage()

        # A grayscale buffer to copy images for processing into
        self.gray = cv.CreateImage((width, height), 8, 1)

    def get_faces(self, image):
        """
        Given an opencv image, return a ((x,y,w,h), certainty) tuple for each
        face detected.
        """

        # Convert the image to grayscale and normalise
        cv.CvtColor(image, self.gray, cv.CV_BGR2GRAY)
        cv.EqualizeHist(self.gray, self.gray)

        # Detect faces
        return cv.HaarDetectObjects(self.gray, self.cascade, self.storage,
                                    scale_factor=1.3,
                                    min_neighbors=2,
                                    flags=cv.CV_HAAR_DO_CANNY_PRUNING,
                                    min_size=(40, 40))

    def get_best_face(self, image):
        """
        Wrapper around get_faces which returns the face with the highest
        certainty, or None if no faces were found.
        """
        try:
            # max() raises ValueError on an empty detection list
            return max(self.get_faces(image),
                       key=(lambda f: f[1]))
        except ValueError:
            return None


class HeartMonitor(object):
    """
    Detects heart-beats in a sequence of (timestamp, colour-sample) pairs by
    finding the dominant FFT bin within the plausible heart-rate band.
    """

    def __init__(self, window_duration, fps=30, min_bpm=50, max_bpm=200):
        """
        @param window_duration The number of seconds of samples to use
        @param fps             The nominal sample rate
        @param min_bpm         Minimum cut-off for possible heartrates
        @param max_bpm         Maximum cut-off for possible heartrates
        """

        self.min_bpm = min_bpm
        self.max_bpm = max_bpm

        # The maximum number of samples to buffer
        self.buf_size = int(window_duration * fps)

        # Buffer of (timestamp, value) tuples
        self.buf = []

    @property
    def fps(self):
        """
        The average framerate/samplerate of the buffer.

        Requires at least two samples with distinct timestamps (see `ready`);
        otherwise a ZeroDivisionError/IndexError would occur.
        """
        return float(len(self.buf)) / (self.buf[-1][0] - self.buf[0][0])

    def get_fft(self):
        """
        Perform a Fast-Fourier-Transform on the buffer and return (magnitude,
        phase) tuples for each of the bins.
        """
        # Get the "ideal" evenly spaced times
        even_times = numpy.linspace(self.buf[0][0], self.buf[-1][0], len(self.buf))

        # Interpolate the data to generate evenly temporally spaced samples
        # (the webcam delivers frames at slightly irregular intervals)
        interpolated = numpy.interp(even_times, *zip(*self.buf))

        # Perform the FFT
        fft = numpy.fft.rfft(interpolated)
        return zip(numpy.abs(fft), numpy.angle(fft))

    def bin_to_bpm(self, bin):
        """
        Convert an FFT bin number into a heart-rate in beats-per-minute for the
        current framerate. Bin numbers start from 1.
        """

        return (60.0 * bin * self.fps) / float(len(self.buf))

    def bpm_to_bin(self, bpm):
        """
        Convert a heart-rate in beats-per-minute into an FFT bin number for the
        current framerate. Bin numbers start from 1.
        """

        return int(float(len(self.buf) * bpm) / float(60.0 * self.fps))

    def get_bpm(self):
        """
        Get the current beats-per-minute, the phase and the band of FFT data
        within the allowed heart-rate range as a list of
        (bpm, (magnitude, phase)) tuples.
        """

        fft = self.get_fft()

        # Get the bin numbers of the bounds of the possible allowed
        # heart-rates in the FFT. Clamp the lower bound to bin 1: bin numbers
        # are 1-based and a value of 0 would make the fft[best_bin-1] lookups
        # below wrap around to the highest-frequency bin.
        min_bin = max(self.bpm_to_bin(self.min_bpm), 1)
        max_bin = self.bpm_to_bin(self.max_bpm)

        # Find the bin with the highest intensity (the heartbeat). Guard the
        # degenerate case of an empty/inverted range (tiny buffers) which
        # would make max() raise ValueError.
        if max_bin <= min_bin:
            best_bin = min_bin
        else:
            best_bin = max(range(min_bin, max_bin),
                           key=(lambda i: fft[i - 1][0]))
        heartrate = self.bin_to_bpm(best_bin)
        phase = fft[best_bin - 1][1]

        # Produce the FFT data in the format described above
        fft_data = zip((self.bin_to_bpm(b) for b in range(min_bin, max_bin + 1)),
                       fft[min_bin - 1:max_bin])

        return heartrate, phase, fft_data

    @property
    def buf_full(self):
        # True once the rolling window has reached its configured length
        return len(self.buf) >= self.buf_size

    @property
    def ready(self):
        # At least two samples are needed to estimate fps and run the FFT
        return len(self.buf) >= 2

    def add_sample(self, time, value):
        """
        Add a new colour sample, discarding the oldest if the window is full.
        """
        if self.buf_full:
            self.buf.pop(0)

        self.buf.append((time, value))

    def reset(self):
        """
        Reset the heartrate monitor, start from scratch again.
        """
        self.buf = []


class FaceTracker(object):
    """
    A motion tracker that can track a face (and forehead). Note: This class
    simply provides the interface but doesn't actually track the face as it
    moves.
    """

    def __init__(self, frame, face_position,
                 fh_x=0.5, fh_y=0.13,
                 fh_w=0.25, fh_h=0.15):
        """
        @param frame         The first frame containing the face
        @param face_position The position of the face in the frame
        @param fh_x          The x-position on the face of the center of the forehead
        @param fh_y          The y-position on the face of the center of the forehead
        @param fh_w          The width, relative to the face, of the forehead
        @param fh_h          The height, relative to the face, of the forehead
        """

        self.face_position = face_position

        self.fh_x = fh_x
        self.fh_y = fh_y
        self.fh_w = fh_w
        self.fh_h = fh_h

    def update(self, time, frame, face_position=None):
        """
        Add a new frame. Will override the face position if specified.
        """
        # Explicit None test: a falsy-but-valid rectangle must not be
        # silently discarded (the old `or` idiom would have dropped it).
        if face_position is not None:
            self.face_position = face_position

    def get_face(self):
        """Return the current (x, y, w, h) face rectangle."""
        return self.face_position

    def get_forehead(self):
        """
        Get the position of the forehead as tracked by the MotionTracker.

        Returns an (x, y, w, h) tuple of floats derived from the face
        rectangle and the relative forehead parameters.
        """
        x, y, w, h = self.get_face()

        # Move to the forehead centre within the face...
        x += w * self.fh_x
        y += h * self.fh_y
        # ...scale down to the forehead size...
        w *= self.fh_w
        h *= self.fh_h

        # ...and convert the centre point back to a top-left corner
        x -= (w / 2.0)
        y -= (h / 2.0)

        return (x, y, w, h)


class Annotator(object):
    """
    Can annotate various features (face, forehead, BPM, pulse, FFT) onto
    frames.
    """

    THICK = 3   # Thick line width
    THIN = 1    # Thin line width
    BORDER = 2  # Additional width for outlines

    # Colour (Fill, Outline)
    COLOUR_OK = ((0, 255, 0), (0, 0, 0))
    COLOUR_BUSY = ((0, 0, 255), (0, 0, 0))

    COLOUR_FACE = (0, 255, 255)
    COLOUR_FOREHEAD = (0, 255, 0)

    PULSE_SIZE = (9, 12)        # Size of the pluse-blob (normal, on pulse)
    PULSE_PHASE = numpy.pi / 4  # Phase during which pulse occurs

    SMALL_PULSE_SIZE = 6        # Size of the small pluse-blob

    HEAD_WIDTH_SCALE = 0.8      # Scale head width for appearence's sake

    FFT_HEIGHT = 0.4            # Height of the FFT on the image

    def __init__(self):
        # Setup fonts
        self.large_font = self._get_font(1, Annotator.THICK)
        self.large_font_outline = self._get_font(1, Annotator.THICK + Annotator.BORDER)

        self.small_font = self._get_font(0.5, Annotator.THIN)
        self.small_font_outline = self._get_font(0.5, Annotator.THIN + Annotator.BORDER)

        # Text colour
        self.colour = Annotator.COLOUR_BUSY

        # Placeholder rectangles until set_forehead/set_face are called
        self.forehead = (0, 0, 1, 1)
        self.face = (0, 0, 1, 1)

    def _get_font(self, size=1, weight=1, italic=0):
        return cv.InitFont(cv.CV_FONT_HERSHEY_SIMPLEX,
                           size, size, italic, weight)

    def set_busy(self, busy):
        # Busy (buffer still filling) is drawn in red, otherwise green
        self.colour = Annotator.COLOUR_OK if not busy else Annotator.COLOUR_BUSY

    def set_forehead(self, forehead):
        self.forehead = tuple(map(int, forehead))

    def set_face(self, face):
        self.face = tuple(map(int, face))

    @property
    def metrics(self):
        """
        Anchor point/size used by the text overlays: forehead x/w/h combined
        with the face's top y coordinate.
        """
        x, _, w, h = map(int, self.forehead)
        _, y, _, _ = map(int, self.face)
        return (x, y, w, h)

    def get_colour(self):
        return self.colour

    def draw_bpm(self, frame, bpm):
        """
        Draw the heart-rate (rounded to an integer) above the face.
        """
        x, y, w, h = self.metrics
        c = self.get_colour()

        # Outline first, fill on top
        cv.PutText(frame, "%0.0f" % bpm, (x, y), self.large_font_outline, c[1])
        cv.PutText(frame, "%0.0f" % bpm, (x, y), self.large_font, c[0])

    def draw_phase(self, frame, phase):
        """
        Draw a blob which pulses (grows) while the phase is within
        PULSE_PHASE of the start of a cycle.
        """
        x, y, w, h = self.metrics
        c = self.get_colour()

        # Offset the blob to the left of the BPM text
        x -= int(Annotator.PULSE_SIZE[1] * 1.5)
        y -= Annotator.PULSE_SIZE[1]

        if (phase % (2.0 * numpy.pi)) < Annotator.PULSE_PHASE:
            radius = Annotator.PULSE_SIZE[1]
        else:
            radius = Annotator.PULSE_SIZE[0]

        cv.Circle(frame, (x, y), radius + Annotator.BORDER, c[1], -1)
        cv.Circle(frame, (x, y), radius, c[0], -1)

    def draw_face(self, frame):
        """
        Draw an ellipse around the current face position.
        """
        x, y, w, h = self.face

        # Center of the face
        x += w / 2
        y += h / 2

        # Slightly narrow the elipse to fit most faces better
        w *= Annotator.HEAD_WIDTH_SCALE

        c = Annotator.COLOUR_FACE

        cv.Ellipse(frame, (int(x), int(y)), (int(w / 2), int(h / 2)),
                   0, 0, 360, c, Annotator.THIN)

    def draw_forehead(self, frame):
        """
        Draw a rectangle around the forehead sampling region.
        """
        x, y, w, h = self.forehead
        c = Annotator.COLOUR_FOREHEAD

        cv.Rectangle(frame, (int(x), int(y)), (int(x + w), int(y + h)),
                     c, Annotator.THIN)

    def draw_fft(self, frame, fft_data, min_bpm, max_bpm):
        """
        Draw the spectrum along the bottom of the frame and label the
        strongest bin with its BPM and a pulse-phase ring.
        @param fft_data List of (bpm, (magnitude, phase)) tuples
        @param min_bpm  BPM corresponding to the left edge of the plot
        @param max_bpm  BPM corresponding to the right edge of the plot
        """
        w = frame.width
        h = int(frame.height * Annotator.FFT_HEIGHT)
        x = 0
        y = frame.height

        max_magnitude = max(d[1][0] for d in fft_data)

        def get_position(i):
            # Map (bpm, magnitude) to pixel coordinates within the plot area
            point_x = int(w * (float(fft_data[i][0] - min_bpm) / float(max_bpm - min_bpm)))
            point_y = int(y - ((h * fft_data[i][1][0]) / max_magnitude))
            return point_x, point_y

        line = [get_position(i) for i in range(len(fft_data))]

        cv.PolyLine(frame, [line], False, self.get_colour()[0], 3)

        # Label the largest bin
        max_bin = max(range(len(fft_data)), key=(lambda i: fft_data[i][1][0]))

        x, y = get_position(max_bin)
        c = self.get_colour()
        text = "%0.1f" % fft_data[max_bin][0]

        cv.PutText(frame, text, (x, y), self.small_font_outline, c[1])
        cv.PutText(frame, text, (x, y), self.small_font, c[0])

        # Pulse ring: an arc whose sweep reflects the current phase
        r = Annotator.SMALL_PULSE_SIZE
        phase = int(((fft_data[max_bin][1][1] % (2 * numpy.pi)) / numpy.pi) * 180)
        cv.Ellipse(frame, (int(x - (r * 1.5)), int(y - r)), (int(r), int(r)),
                   0, 90, 90 - phase, c[1], Annotator.THIN + Annotator.BORDER)
        cv.Ellipse(frame, (int(x - (r * 1.5)), int(y - r)), (int(r), int(r)),
                   0, 90, 90 - phase, c[0], Annotator.THIN)


class Program(object):
    """
    Program to monitor heartrates using a webcam.
    """

    def __init__(self,
                 webcam=0,
                 sample_duration=10,
                 window_title="Heart Monitor"):
        """
        @param webcam          Capture-device index passed to Camera
        @param sample_duration Seconds of samples the HeartMonitor buffers
        @param window_title    Title of the display window
        """

        self.cam = Camera(webcam)
        self.face_detector = FaceDetector(*self.cam.get_size())
        self.face_tracker = None
        self.heart_monitor = HeartMonitor(sample_duration, fps=self.cam.get_fps())
        self.annotator = Annotator()
        self.window = window_title

        cv.NamedWindow(self.window)

        # OSD toggles (keys 1-4 in update())
        self.show_bpm = True
        self.show_face = True
        self.show_forehead = True
        self.show_fft = True

    def find_face(self, frame):
        """
        Try to find a face in the frame; on success start tracking it.
        """
        face = self.face_detector.get_best_face(frame)

        if face is not None:
            # Track the new face (face is ((x,y,w,h), certainty))
            self.face_tracker = FaceTracker(frame, face[0])

    def sample_frame(self, frame):
        """
        Return the average of the green channel over the tracked forehead.
        """
        # SetImageROI requires an integer rectangle, but get_forehead()
        # yields floats, so truncate each component first.
        x, y, w, h = self.face_tracker.get_forehead()
        cv.SetImageROI(frame, (int(x), int(y), int(w), int(h)))
        sample = cv.Avg(frame)[1]  # index 1 == green in BGR order
        cv.ResetImageROI(frame)

        return sample

    def update(self):
        """
        Mainloop body. Returns True unless termination requested.
        """

        frame = self.cam.get_frame()
        frame_time = time.time()

        if self.face_tracker is None:
            # No face known
            self.find_face(frame)
        else:
            # Track the face
            self.face_tracker.update(frame_time, frame)
            self.annotator.set_face(self.face_tracker.get_face())
            self.annotator.set_forehead(self.face_tracker.get_forehead())

            # Update the heart monitor
            self.heart_monitor.add_sample(frame_time, self.sample_frame(frame))
            self.annotator.set_busy(not self.heart_monitor.buf_full)

            if self.heart_monitor.ready:
                bpm, phase, fft_data = self.heart_monitor.get_bpm()

                # Draw the OSD
                if fft_data and self.show_fft:
                    self.annotator.draw_fft(frame, fft_data,
                                            self.heart_monitor.min_bpm,
                                            self.heart_monitor.max_bpm)

                if self.show_face:
                    self.annotator.draw_face(frame)

                if self.show_forehead:
                    self.annotator.draw_forehead(frame)

                if self.show_bpm:
                    self.annotator.draw_bpm(frame, bpm)
                    self.annotator.draw_phase(frame, phase)

        # Display the (possibly annotated) frame
        cv.ShowImage(self.window, frame)

        # Handle keypresses (mask to a byte; WaitKey may return -1 or carry
        # modifier bits in the high bytes)
        key = cv.WaitKey(10) & 255
        if key == 27:  # Escape
            # Exit
            return False
        elif key == ord("r"):
            # Reset the heart monitor and face tracker
            self.face_tracker = None
            self.heart_monitor.reset()
        elif key == ord(" "):
            # Re-find the face
            self.face_tracker = None
        elif key == ord("1"):
            self.show_face = not self.show_face
        elif key == ord("2"):
            self.show_forehead = not self.show_forehead
        elif key == ord("3"):
            self.show_fft = not self.show_fft
        elif key == ord("4"):
            self.show_bpm = not self.show_bpm

        return True

    def run(self):
        """
        Blocks running the mainloop until update() requests termination.
        """

        while self.update():
            pass

if __name__ == "__main__":
    # Launch the interactive heart-rate monitor with default settings.
    Program().run()