# Object tracking with keypoints example.
# Show the camera an object and then run the script. A set of keypoints will be extracted
# once and then tracked in the following frames. If you want a new set of keypoints re-run
# the script. NOTE: see the docs for arguments to tune find_keypoints and match_keypoints.
import sensor, time, image, utime
from pyb import UART
from pyb import LED

# --- uArm initialization ---------------------------------------------------
led = LED(3)  # blue LED: lit while the arm is being initialized
led.toggle()

# UART 3 of the OpenMV cam, wired to the uArm's secondary serial port.
uart = UART(3, 115200)

# Wait for the arm's b'ok\n' handshake: the arm firmware must first switch
# its communication from the main UART to the second port.
flag = uart.readline()
while flag != b'ok\n':
    flag = uart.readline()
    utime.sleep_ms(100)
    print(flag)

# Move the arm to its default pose. The G-code line is written in two pieces
# because of the limited write length per uart.write() call.
utime.sleep_ms(500)
uart.write("G0 X250 Y0 Z")
uart.write("160 F15000\r\n")
utime.sleep_ms(8000)  # give the arm time to finish the move

led.off()  # initialization finished

# --- camera setup ----------------------------------------------------------
sensor.reset()
sensor.set_contrast(3)
sensor.set_gainceiling(16)
sensor.set_framesize(sensor.VGA)
sensor.set_windowing((320, 240))        # central 320x240 window of the VGA frame
sensor.set_pixformat(sensor.GRAYSCALE)
sensor.skip_frames(time=200)
sensor.set_auto_gain(False, value=100)  # fixed gain: keypoint matching needs stable exposure


def draw_keypoints(img, kpts):
    # NOTE(review): drawing is commented out, so this only grabs a fresh frame
    # and pauses (presumably so the user can reposition the object) -- confirm intent.
    #print(kpts)
    #img.draw_keypoints(kpts)
    img = sensor.snapshot()
    time.sleep(1000)


kpts1 = None  # descriptor of the tracked object; extracted on the first frame
# NOTE: uncomment to load a keypoints descriptor from file
#kpts1 = image.load_descriptor("/desc.orb")
#img = sensor.snapshot()
#draw_keypoints(img, kpts1)

clock = time.clock()
while True:
    clock.tick()
    img = sensor.snapshot()
    if kpts1 is None:  # fixed: compare with 'is None', not '== None'
        # NOTE: By default find_keypoints returns multi-scale keypoints
        # extracted from an image pyramid.
        kpts1 = img.find_keypoints(max_keypoints=150, threshold=20, scale_factor=1.35)
        draw_keypoints(img, kpts1)
    else:
        # NOTE: When extracting keypoints to match the first descriptor, we use
        # normalized=True to extract keypoints from the first scale only, which
        # will match one of the scales in the first descriptor.
        kpts2 = img.find_keypoints(max_keypoints=150, threshold=10, normalized=True)
        if kpts2:
            match = image.match_descriptor(kpts1, kpts2, threshold=85)
            if match.count() > 10:
                # At least n "good matches": draw bounding rectangle and cross.
                img.draw_rectangle(match.rect())
                img.draw_cross(match.cx(), match.cy(), size=10)

                print(kpts2, "matched:%d dt:%d"%(match.count(), match.theta()))

                # Offset of the match center from the image center (160, 120),
                # scaled down by 20 to get small relative arm moves
                # (axes swapped: image x maps to arm Y and vice versa).
                coords = list(match.rect())
                delta_y = (coords[2]/2 + coords[0] - 160)/20
                delta_x = (coords[3]/2 + coords[1] - 120)/20

                # G2204 = relative move; command split because of the
                # limited write length per uart.write() call.
                final_coords = "G2204 X" + str(delta_x)
                uart.write(final_coords)
                uart.write(" Y" + str(delta_y) + " Z0")
                uart.write(" F1000\r\n")
                utime.sleep_ms(300)
104 | } 105 | sprintf( cmd, "G2202 N0 V%d F800\n", angle ); 106 | Serial2.write(cmd); 107 | timeout_flag = true; 108 | timeout_cnt = 0; 109 | delay(30); 110 | if( !is_fan_open ){ // 150 ){ 131 | angle = 150; 132 | toggle_dir = true; 133 | }else if( angle<30 ){ 134 | angle = 30; 135 | toggle_dir = false; 136 | } 137 | sprintf( cmd, "G2202 N0 V%d F500\n", angle ); 138 | Serial2.write(cmd); 139 | uarm_mode = WAIT_MODE; 140 | break; 141 | case FOLLOW_MODE: 142 | 143 | break; 144 | case WAIT_MODE: 145 | if(timer_tick_cnt>10 && timer_tick_cnt%12==0){ 146 | uarm_mode = SEEK_MODE; 147 | timer_tick_cnt += 1; 148 | } 149 | break; 150 | } 151 | 152 | 153 | } 154 | -------------------------------------------------------------------------------- /uArm_face_track_demo/README.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | **This demo includes uArm Swift Pro,mini fan seeed module and openMV Kit.If openMV detects human face,the uArm Swift Pro will follow the face of human and turn on the fan at the same time.If the openMV don't detect face,the uArm Swift Pro will enter the patrol mode to look for face,the fan will be closed in this mode.** 4 | 5 | ## How to install software 6 | 7 | ### 1.Install openMV firmware 8 | * Download [OpenMV IDE](https://openmv.io/pages/download) and install it. 9 | * Open main.py file in openMV_Kit_code folder using openMV IDE. 10 | * Connect the openMV and PC using microUSB wire. 11 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/pc_openmv.jpg) 12 | * Click the "Connect" button to connect the openMV with OpenMV IDE. 13 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/openmv_ide_connect.jpg) 14 | * Click the "Start" button to start run the demo code. 
15 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/openmv_ide_start.jpg) 16 | * Move the openMV to detect face. Make sure the image is clear and it can detect face, otherwise it is necessary to adjust the camera. 17 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/openmv_ide_image.jpg) 18 | * Move the main.py to the disk and replace the old one. 19 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/usb_drive.jpg) 20 | * Remove the microUSB wire. 21 | 22 | ### 2.Install Arduino mega2560 firmware 23 | * Connect Arduino mega2560 and PC using USB wire. 24 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/pc_arduino.jpg) 25 | * Open fan_uarm_face.ino. 26 | * Add flexiTimer2.Zip library from lib folder. 27 | * Click "Upload" button to upload firmware to Arduino mega2560. 28 | 29 | ### 3.Install uArm Swift Pro firmware 30 | * Connect uArm Swift Pro and PC using microUSB wire. 31 | * Upload uArmSwiftPro_2ndUART.hex using [XLoader](http://xloader.russemotto.com/XLoader.zip) 32 | 33 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/xloader.jpg) 34 | 35 | ## How to install hardware 36 | * Fix the openMV in uArm Swift Pro. 37 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/install_openmv.jpg) 38 | * Connect mini fan seeed module and the seeedinterface board. 39 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/install_fan.jpg) 40 | * Connect the seeedinterface board in uArm Swift Pro and fix the fan. 41 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/fix_fan.jpg) 42 | * Connect uArm Swift Pro with Arduino mega2560 using the TYPE-C wire and power on the uArm Swift Pro. 
43 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/connect_uarm.jpg) 44 | * Arduino mega2560 connect USB wire. 45 | ![](https://github.com/uArm-Developer/OpenMV-Examples/blob/master/uArm_face_track_demo/image/connect_arduino.jpg) 46 | * Reset the Arduino mega2560,and test the demo.If the openMV detects human face,the led of openMV will blink. 47 | 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /uArm_face_track_demo/image/connect_arduino.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/connect_arduino.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/connect_uarm.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/connect_uarm.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/fix_fan.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/fix_fan.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/install_fan.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/install_fan.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/install_openmv.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/install_openmv.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/openmv_ide_connect.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/openmv_ide_connect.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/openmv_ide_image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/openmv_ide_image.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/openmv_ide_start.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/openmv_ide_start.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/pc_arduino.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/pc_arduino.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/pc_openmv.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/pc_openmv.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/usb_drive.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/usb_drive.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/image/xloader.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/image/xloader.jpg -------------------------------------------------------------------------------- /uArm_face_track_demo/lib/flexitimer2.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uArm-Developer/OpenMV-Examples/34d5ad6f6b6ebac7a37e01f469b4b14009e8aae5/uArm_face_track_demo/lib/flexitimer2.zip -------------------------------------------------------------------------------- /uArm_face_track_demo/openMV_Kit_code/main.py: -------------------------------------------------------------------------------- 1 | # Face Detection Example 2 | # 3 | # This example shows off the built-in face detection feature of the OpenMV Cam. 4 | # 5 | # Face detection works by using the Haar Cascade feature detector on an image. A 6 | # Haar Cascade is a series of simple area contrasts checks. For the built-in 7 | # frontalface detector there are 25 stages of checks with each stage having 8 | # hundreds of checks a piece. Haar Cascades run fast because later stages are 9 | # only evaluated if previous stages pass. 
# Face Detection Example (modified by UFACTORY).
#
# Uses the OpenMV Cam's built-in Haar Cascade frontal-face detector. Each
# detected face bigger than 60x60 px has its center sent over UART as
# '#x,y#' so the uArm controller can track it; the LED toggles per report.

import sensor, time, image, pyb
from pyb import UART
from pyb import LED

# --- camera setup ---------------------------------------------------------
sensor.reset()
sensor.set_contrast(1)
sensor.set_gainceiling(16)
# HQVGA + GRAYSCALE are the best combination for face tracking.
sensor.set_framesize(sensor.HQVGA)
sensor.set_pixformat(sensor.GRAYSCALE)

# Load the Haar cascade with every stage enabled (fewer stages would be
# faster but less accurate).
face_cascade = image.HaarCascade("frontalface", stages=25)
print(face_cascade)

# UART 3 towards the Arduino/uArm side, 9600 baud.
uart = UART(3, 9600)
led = LED(1)

# Report the working frame size once at start-up.
img = sensor.snapshot()
frame_width = img.width()
frame_height = img.height()
print(frame_width)
print(frame_height)

while True:
    img = sensor.snapshot()

    # Lower scale_factor scales the image down more and finds smaller faces;
    # higher threshold raises the detection rate (and false positives).
    faces = img.find_features(face_cascade, threshold=0.3, scale_factor=1.25)

    for rect in faces:  # rect = [x, y, w, h]
        img.draw_rectangle(rect)
        # Only report faces larger than 60x60 px to filter out noise.
        if rect[2] * rect[3] > 60 * 60:
            center_x = rect[0] + rect[2] / 2
            center_y = rect[1] + rect[3] / 2
            msg = '#%.2f,%.2f#\r\n' % (center_x, center_y)
            uart.write(msg)
            led.toggle()
            pyb.delay(50)