├── model └── single_leg_gait.gif ├── image-sender ├── README.md ├── rpi_send_video.py └── imagezmq.py ├── .gitignore ├── control_quadruped.py ├── image-receiver └── receive_and_store_video.py ├── controllers ├── network_receiver.py ├── utils │ └── ip_helper.py ├── local_keyboard_controller.py ├── network_sender_keyboard.py └── object_tracker_controller.py ├── README.md └── gait_logic ├── quadruped.py ├── stair.py └── stairs.py /model/single_leg_gait.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JackDemeter/quadruped-robot/HEAD/model/single_leg_gait.gif -------------------------------------------------------------------------------- /image-sender/README.md: -------------------------------------------------------------------------------- 1 | # Raspberry Pi Streaming 2 | 3 | Just install all prerequisites and run `rpi_send_video.py`. -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore notebooks checkpoints 2 | .ipynb_checkpoints 3 | */.ipynb_checkpoints/* 4 | 5 | # Ignore video captures 6 | *.avi 7 | 8 | # Ignore Pi saves 9 | *.py.save 10 | *.pyc 11 | __pycache__/ -------------------------------------------------------------------------------- /image-sender/rpi_send_video.py: -------------------------------------------------------------------------------- 1 | # run this program on each RPi to send a labelled image stream 2 | import socket 3 | import time 4 | from imutils.video import VideoStream 5 | import imagezmq 6 | sender = imagezmq.ImageSender(connect_to='tcp://192.168.2.115:5555') 7 | 8 | rpi_name = socket.gethostname() # send RPi hostname with each image 9 | picam = VideoStream('/dev/video0').start() 10 | time.sleep(2.0) # allow camera sensor to warm up 11 | try: 12 | while True: # send images as stream until Ctrl-C 13 | image = picam.read() 14 | sender.send_image(rpi_name, image) 15 | 16 | except KeyboardInterrupt: 17 | picam.stop() 18 | -------------------------------------------------------------------------------- /control_quadruped.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | import argparse 3 | 4 | # from controllers.local_keyboard_controller import controller 5 | from gait_logic.quadruped import Quadruped 6 | 7 | def get_controller(controller_file): 8 | controller_lib = importlib.import_module(controller_file) 9 | return controller_lib.controller 10 | 11 | if __name__ == "__main__": 12 | parser = argparse.ArgumentParser() 13 | parser.add_argument('--controller', default='controllers.network_receiver') 14 | args = parser.parse_args() 15 | 16 | controller = get_controller(args.controller) 17 | 18 | r = Quadruped() 19 | r.calibrate() 20 | r.move(controller) -------------------------------------------------------------------------------- /image-receiver/receive_and_store_video.py: -------------------------------------------------------------------------------- 1 | # run this program on the Mac to display image streams from multiple RPis 2 | import cv2 3 | import imagezmq 4 | image_hub = imagezmq.ImageHub() 5 | vid_cod = cv2.VideoWriter_fourcc(*'XVID') 6 | output = cv2.VideoWriter("cam_video.avi", vid_cod, 5.0, (640,480)) 7 | count = 0 8 | while True: # show streamed images until Ctrl-C 9 | rpi_name, image = image_hub.recv_image() 10 | cv2.imshow(rpi_name, image) # 1 window for each RPi 11 | key = cv2.waitKey(1) 12 | image_hub.send_reply(b'OK') 13 | if key == ord('q'): 14 | break 15 | if rpi_name: 16 | if count %100 ==0: 17 | print('reading') 18 | count += 1 19 | output.write(image) 20 | 21 | 22 | output.release() 23 | 24 | -------------------------------------------------------------------------------- /controllers/network_receiver.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import socket 3 | 4 | from controllers.utils.ip_helper import create_socket_connection 5 | 6 | s = create_socket_connection() 7 | 8 | def controller(momentum): 9 | try: 10 | s.settimeout(0.00001) 11 | data, addr = s.recvfrom(1024) 12 | if data: 13 | momentum = np.frombuffer(data) 14 | except: 15 | pass 16 | return momentum 17 | 18 | if __name__ == "__main__": 19 | # Test network commands by printing out the momentum changes 20 | momentum = np.asarray([0,0,1,0],dtype=np.float32) 21 | lm = momentum 22 | while True: 23 | momentum = controller(momentum) 24 | if (lm != momentum).any(): 25 | print(momentum) 26 | lm=momentum -------------------------------------------------------------------------------- /controllers/utils/ip_helper.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | def get_ip(): 4 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 5 | s.settimeout(0) 6 | try: 7 | # doesn't even have to be reachable 8 | s.connect(('10.255.255.255', 1)) 9 | IP = s.getsockname()[0] 10 | except Exception: 11 | IP = '127.0.0.1' 12 | finally: 13 | s.close() 14 | return IP 15 | 16 | def create_socket_connection(start_port=5000): 17 | 18 | host=get_ip() # this computer's IP 19 | port = start_port 20 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 21 | while port < min(start_port+1000,9999): 22 | try: 23 | s.bind((host,port)) 24 | break 25 | except OSError as e: 26 | port+=1 27 | print(f"Connection Info - ip: {host}, port: {port}") 28 | return s -------------------------------------------------------------------------------- /controllers/local_keyboard_controller.py: -------------------------------------------------------------------------------- 1 | # Note that this library requires sudo privileges 2 | 3 | import keyboard 4 | 5 | def controller(momentum, accel=0.01, bound=4): 6 | """ 7 | Update the momentum of the robot based on keyboard presses. 8 | :param momentum: the existing momentum parameter 9 | :param accel: how quickly the robot starts walking in a given direction 10 | :param bound: the max/min magnitude that the robot can walk at 11 | :returns: updated momentum array 12 | """ 13 | if keyboard.is_pressed('w'): 14 | momentum[0] = min(momentum[0]+accel, bound) 15 | print('THIS') 16 | if keyboard.is_pressed('s'): 17 | momentum[0] = max(momentum[0]-accel, -bound) 18 | if keyboard.is_pressed('a'): 19 | momentum[1] = max(momentum[1]-accel, -bound) 20 | if keyboard.is_pressed('d'): 21 | momentum[1] = min(momentum[1]+accel, bound) 22 | return momentum 23 | 24 | if __name__ == "__main__": 25 | import numpy as np 26 | 27 | momentum = np.asarray([0,0,1],dtype=np.float32) 28 | while True: 29 | momentum = controller(momentum) 30 | print(momentum) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # quadruped-robot 2 | ![quadruped](https://user-images.githubusercontent.com/24850401/160302298-203e922e-81ea-4b7f-a4e0-efc720e2eb74.JPG) 3 | 4 | A quadruped robot for a university capstone project. The associated CAD files can be found on [GrabCad](https://grabcad.com/library/quadruped-robot-w-code-1) 5 | 6 | ## Installation 7 | To use this code simply download the repository onto the raspberry pi, install the requirements, set up the pi for camera and the servo library usage, and run `python3 control-quadruped`. This will start the walking motion and print out the pi's IP and port. Run the controller on your computer, setting the IP and port given by the pi and the robot will start taking momentum data from the controller. 8 | 9 | ### Computer vision controller 10 | The computer vision controller has a few extra steps that can seen here. 11 | Specific to the computer-vision controller you will also need to run `python3 image-sender/rpi_send_video.py` to pass camera data to you computer, in this case make sure to change the IP and Port to the ones used by the controller. 12 | 13 | ## Project Info 14 | ### Intro 15 | This project was created with the intention of developing a robot that can follow users as a proof of concept for a larger assistant quadruped. The associated code allows for keyboard based control or computer-vision-based control and was structured such that new controllers can easily be implemented by sending momentum packets to the raspberry pi. 16 | 17 | ### Robot Control 18 | The robot uses an inverse kinematic model to determine how to position the foot in the requested location. Some of the math for this can be seen in the model directory with the jupyter notebook 19 | 20 | #TODO: clean up jupyter notebook (and probably all other files) 21 | #TODO: add set up instructions for initial robot building 22 | #TODO: consistent file/folder naming 23 | #TODO: add wiki? and finish readme 24 | -------------------------------------------------------------------------------- /controllers/network_sender_keyboard.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import numpy as np 3 | import argparse 4 | import keyboard 5 | 6 | from utils.ip_helper import create_socket_connection 7 | 8 | def controller(pi_ip, pi_port, accel = 0.002, bound=4, return_to_zero=False): 9 | 10 | s = create_socket_connection() 11 | 12 | server = (pi_ip, pi_port) 13 | momentum = np.array([0.,0.,1.,0.]) # Control [x,z,y,quit] telemetry 14 | close = False 15 | while not close: 16 | moved = False 17 | if keyboard.is_pressed('w'): 18 | momentum[0] = min(momentum[0]+accel, bound) 19 | moved = True 20 | if keyboard.is_pressed('s'): 21 | momentum[0] = max(momentum[0]-accel, -bound) 22 | moved = True 23 | if keyboard.is_pressed('a'): 24 | momentum[1] = max(momentum[1]-accel, -bound) 25 | moved = True 26 | if keyboard.is_pressed('d'): 27 | momentum[1] = min(momentum[1]+accel, bound) 28 | moved = True 29 | if keyboard.is_pressed('p'): 30 | momentum[3] = 1 31 | close = True 32 | moved = True 33 | 34 | # Not controlling the robot will slowly come to a stop 35 | if return_to_zero and not moved: 36 | moved = True 37 | if momentum[0] > 0: 38 | momentum[0] = momentum[0]-accel 39 | elif momentum[0] < 0: 40 | momentum[0] = momentum[0]+accel 41 | if momentum[1] > 0: 42 | momentum[1] = momentum[1]-accel 43 | elif momentum[1] < 0: 44 | momentum[1] = momentum[1]+accel 45 | if moved: 46 | s.sendto(momentum.tobytes(), server) 47 | print(momentum) 48 | s.close() 49 | 50 | if __name__=='__main__': 51 | parser = argparse.ArgumentParser() 52 | parser.add_argument('pi_ip') 53 | parser.add_argument('pi_port', type=int) 54 | parser.add_argument('--accel', type=float, default=0.002) 55 | parser.add_argument('--bound', type=int, default=4) 56 | parser.add_argument('--return_to_zero', action='store_true') 57 | args = parser.parse_args() 58 | 59 | controller(args.pi_ip, args.pi_port, args.accel, args.bound, args.return_to_zero) -------------------------------------------------------------------------------- /gait_logic/quadruped.py: -------------------------------------------------------------------------------- 1 | from adafruit_servokit import ServoKit 2 | from enum import IntEnum 3 | import math 4 | import bezier 5 | import numpy as np 6 | 7 | class Motor(IntEnum): 8 | # identifies the corresponding pin location with the motor location 9 | FR_SHOULDER = 0 10 | FR_ELBOW = 1 11 | FR_HIP = 2 12 | FL_SHOULDER = 3 13 | FL_ELBOW = 4 14 | FL_HIP = 5 15 | BR_SHOULDER = 6 16 | BR_ELBOW = 7 17 | BL_SHOULDER = 8 18 | BL_ELBOW = 9 19 | 20 | class Quadruped: 21 | def __init__(self): 22 | self.kit = ServoKit(channels=16) 23 | self.upper_leg_length = 10 24 | self.lower_leg_length = 10.5 25 | for i in range(10): 26 | self.kit.servo[i].set_pulse_width_range(500,2500) 27 | 28 | def set_angle(self,motor_id, degrees): 29 | """ 30 | set the angle of a specific motor to a given angle 31 | :param motor_id: the motor id 32 | :param degrees: the angle to put the motor to 33 | :returns: void 34 | """ 35 | self.kit.servo[motor_id].angle = degrees 36 | 37 | def rad_to_degree(self,rad): 38 | """ 39 | Converts radians to degrees 40 | :param rad: radians 41 | :returns: the corresponding degrees as a float 42 | """ 43 | return rad*180/math.pi 44 | 45 | def calibrate(self): 46 | """ 47 | sets the robot into the default "middle position" use this for attaching legs in right location 48 | :returns: void 49 | """ 50 | self.set_angle(Motor.FR_SHOULDER, 60) 51 | self.set_angle(Motor.FR_ELBOW, 90) 52 | self.set_angle(Motor.FR_HIP, 90) 53 | self.set_angle(Motor.FL_SHOULDER, 120) 54 | self.set_angle(Motor.FL_ELBOW, 90) 55 | self.set_angle(Motor.FL_HIP, 90) 56 | self.set_angle(Motor.BR_SHOULDER, 60) 57 | self.set_angle(Motor.BR_ELBOW, 90) 58 | self.set_angle(Motor.BL_SHOULDER, 120) 59 | self.set_angle(Motor.BL_ELBOW, 90) 60 | 61 | def inverse_positioning(self, shoulder, elbow, x, y, z=0, hip=None, right=True): 62 | ''' 63 | Positions the end effector at a given position based on cartesian coordinates in 64 | centimeter units and with respect to the should motor of the 65 | :param shoulder: motor id used for the shoulder 66 | :param elbow: motor id used for the elbow 67 | :param x: cartesian x with respect to shoulder motor (forward/back) 68 | :param y: cartesian y with respect to shoulder motor (up/down) 69 | :param z: cartesian z with respect to shoulder motor (left/right) 70 | :param hip: motor id used for the hip 71 | :param right: a boolean that flips the logic for left and right side to properly map "forward direction" 72 | :return: a list containing the appropriate angle for the shoulder and elbow 73 | ''' 74 | L=2 75 | y_prime = -math.sqrt((z+L)**2 + y**2) 76 | thetaz = math.atan2(z+L,abs(y))-math.atan2(L,abs(y_prime)) 77 | 78 | elbow_offset = 20 79 | shoulder_offset = 10 80 | a1 = self.upper_leg_length 81 | a2 = self.lower_leg_length 82 | 83 | c2 = (x**2+y_prime**2-a1**2-a2**2)/(2*a1*a2) 84 | s2 = math.sqrt(1-c2**2) 85 | theta2 = math.atan2(s2,c2) 86 | c2 = math.cos(theta2) 87 | s2 = math.sin(theta2) 88 | 89 | c1 = (x*(a1+(a2*c2)) + y_prime*(a2*s2))/(x**2+y_prime**2) 90 | s1 = (y_prime*(a1+(a2*c2)) - x*(a2*s2))/(x**2+y_prime**2) 91 | theta1 = math.atan2(s1,c1) 92 | # generate positions with respect to robot motors 93 | theta_shoulder = -theta1 94 | theta_elbow = theta_shoulder - theta2 95 | theta_hip = 0 96 | if right: 97 | theta_shoulder = 180 - self.rad_to_degree(theta_shoulder) + shoulder_offset 98 | theta_elbow = 130 - self.rad_to_degree(theta_elbow) + elbow_offset 99 | if hip: 100 | theta_hip = 90 - self.rad_to_degree(thetaz) 101 | else: 102 | theta_shoulder = self.rad_to_degree(theta_shoulder) - shoulder_offset 103 | theta_elbow = 50 + self.rad_to_degree(theta_elbow) - elbow_offset 104 | if hip: 105 | theta_hip = 90 + self.rad_to_degree(thetaz) 106 | self.set_angle(shoulder, theta_shoulder) 107 | self.set_angle(elbow, theta_elbow) 108 | if hip: 109 | self.set_angle(hip, theta_hip) 110 | # print("theta shoulder:",theta_shoulder,"\ttheta_elbow:",theta_elbow) 111 | return [theta_shoulder, theta_elbow] 112 | 113 | def leg_position(self, leg_id, x, y, z=0): 114 | """ 115 | wrapper for inverse position that makes it easier to control each leg for making fixed paths 116 | :param led_id: string for the leg to be manipulated 117 | :param x: cartesian x with respect to shoulder motor (forward/back) 118 | :param y: cartesian y with respect to shoulder motor (up/down) 119 | :param z: cartesian z with respect to shoulder motor (left/right) 120 | """ 121 | if leg_id == 'FL': 122 | self.inverse_positioning(Motor.FL_SHOULDER, Motor.FL_ELBOW, x, y, z=z, hip=Motor.FL_HIP, right=False) 123 | if leg_id == 'FR': 124 | self.inverse_positioning(Motor.FR_SHOULDER, Motor.FR_ELBOW, x, y, z=z, hip=Motor.FR_HIP, right=True) 125 | if leg_id == 'BL': 126 | self.inverse_positioning(Motor.BL_SHOULDER, Motor.BL_ELBOW, x, y, right=False) 127 | if leg_id == 'BR': 128 | self.inverse_positioning(Motor.BR_SHOULDER, Motor.BR_ELBOW, x, y, right=True) 129 | 130 | def move(self, controller=None): 131 | """ 132 | Walks around based on the controller inputted momentum 133 | :param controller: the controller that is called to determine the robot momentum 134 | :returns: None, enters an infinite loop 135 | """ 136 | momentum = np.asarray([0,0,1,0],dtype=np.float32) 137 | index = 0 138 | 139 | # Generate footstep 140 | s_vals = np.linspace(0.0, 1.0, 20) 141 | step_nodes = np.asfortranarray([ 142 | [-1.0, -1.0, 1.0, 1.0], 143 | [-1.0, -1.0, 1.0, 1.0], 144 | [-15.0, -10, -10, -15.0], 145 | ]) 146 | curve = bezier.Curve(step_nodes, degree=3) 147 | step = curve.evaluate_multi(s_vals) 148 | slide_nodes = np.asfortranarray([ 149 | [1.0, -1.0], 150 | [1.0, -1.0], 151 | [-15.0, -15], 152 | ]) 153 | curve = bezier.Curve(slide_nodes, degree=1) 154 | slide = curve.evaluate_multi(s_vals) 155 | 156 | motion = np.concatenate((step,slide), axis=1) 157 | 158 | close = False 159 | while not close: 160 | momentum = controller(momentum) 161 | tragectory = motion * momentum[:3, None] 162 | if momentum[3]: 163 | close = True 164 | x,z,y = tragectory 165 | # 166 | i1 = index%40 167 | i2 = (index+20)%40 168 | # Apply movement based movement 169 | self.inverse_positioning(Motor.FR_SHOULDER,Motor.FR_ELBOW,x[i1],y[i1]-1,z=z[i1],hip=Motor.FR_HIP,right=True) 170 | self.inverse_positioning(Motor.BR_SHOULDER,Motor.BR_ELBOW,x[i2],y[i2]+2,right=True) 171 | self.inverse_positioning(Motor.FL_SHOULDER,Motor.FL_ELBOW,x[i2],y[i2]-1,z=-z[i2],hip=Motor.FL_HIP,right=False) 172 | self.inverse_positioning(Motor.BL_SHOULDER,Motor.BL_ELBOW,x[i1],y[i1]+2,right=False) 173 | index += 1 174 | 175 | -------------------------------------------------------------------------------- /gait_logic/stair.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | if __name__ == '__main__': 4 | from quadruped import Quadruped 5 | 6 | def stair(r): 7 | 8 | # Function for walking up stairs 9 | pause = 0.25 10 | 11 | # Initialize: 12 | r.leg_position('FL', -1, -15) 13 | r.leg_position('FR', 2, -16) 14 | r.leg_position('BL', -3, -15) 15 | r.leg_position('BR', 0, -15) 16 | time.sleep(3) 17 | 18 | #Ready to Pounce: 19 | r.leg_position('BL', 0, -9) 20 | r.leg_position('BR', 2, -8) 21 | 22 | #Front Paws up: 23 | r.leg_position('FL', -1, -15, z = -2) 24 | time.sleep(pause) 25 | r.leg_position('FR', 2, -6) 26 | time.sleep(pause) 27 | 28 | #Front steps: 29 | r.leg_position('FR', 10, -8) 30 | time.sleep(pause) 31 | r.leg_position('FR', 11, -13) 32 | time.sleep(pause) 33 | r.leg_position('BL', -1, -10) 34 | time.sleep(pause) 35 | r.leg_position('FL', -1, -15) 36 | time.sleep(pause) 37 | r.leg_position('FL', 3, -6) 38 | time.sleep(pause) 39 | r.leg_position('FL', 9, -12) 40 | time.sleep(pause) 41 | 42 | #Lean Forward & Lift Back Paws: 43 | r.leg_position('BL', -8, -15) 44 | r.leg_position('BR', -5, -15) 45 | time.sleep(1) 46 | 47 | r.leg_position('FL', 9, -15) 48 | r.leg_position('FR', 11, -16) 49 | time.sleep(pause) 50 | 51 | r.leg_position('FL', 5, -15) 52 | r.leg_position('FR', 7, -16) 53 | r.leg_position('BL', -12, -15) 54 | r.leg_position('BR', -9, -17) 55 | time.sleep(pause) 56 | # Bring back right forward: 57 | r.leg_position('BR', -9, -14) 58 | time.sleep(pause) 59 | r.leg_position('BR', -2, -14) 60 | time.sleep(pause) 61 | r.leg_position('BR', -2, -17) 62 | time.sleep(pause) 63 | 64 | # Shift weight 65 | r.leg_position('BL', -12, -16) 66 | r.leg_position('FR', 7, -13, z = 2) 67 | r.leg_position('FL', 5, -16, z = 2) 68 | time.sleep(pause) 69 | 70 | #Bring back left leg forward: 71 | r.leg_position('BL', -8, -12) 72 | time.sleep(pause) 73 | r.leg_position('BL', -2, -15) 74 | time.sleep(pause) 75 | r.leg_position('BL', -2, -17) 76 | time.sleep(pause) 77 | 78 | #Shift back to normal 79 | r.leg_position('FL', 5, -16) 80 | r.leg_position('FR', 8, -15) 81 | time.sleep(pause) 82 | 83 | # FR Step 2 up: 84 | [r.leg_position('FL', 5, -16), r.leg_position('FR', 8, -15), r.leg_position('BL', -2, -17), r.leg_position('BR', -2, -18)] 85 | time.sleep(pause) 86 | [r.leg_position('FL', 5, -16), r.leg_position('FR', 8, -9), r.leg_position('BL', -2, -17), r.leg_position('BR', -2, -18)] 87 | time.sleep(pause) 88 | [r.leg_position('FL', 5, -16), r.leg_position('FR', 14, -9), r.leg_position('BL', -2, -17), r.leg_position('BR', -2, -18)] 89 | time.sleep(pause) 90 | [r.leg_position('FL', 5, -16), r.leg_position('FR', 14, -12), r.leg_position('BL', -2, -17), r.leg_position('BR', -2, -18)] 91 | time.sleep(pause) 92 | 93 | # Slide forward: 94 | [r.leg_position('FL', 2, -16), r.leg_position('FR', 14, -12), r.leg_position('BL', -5, -17), r.leg_position('BR', -5, -18)] 95 | time.sleep(pause) 96 | [r.leg_position('BL', -5, -18), r.leg_position('BR', -5, -17)] 97 | 98 | # # 2nd Step up Front: 99 | 100 | r.leg_position('FL', 2, -7) 101 | time.sleep(pause) 102 | r.leg_position('FL', 11, -7) 103 | time.sleep(pause) 104 | r.leg_position('FL', 13, -14) 105 | time.sleep(pause) 106 | 107 | [r.leg_position('FL', 8, -14), r.leg_position('FR', 9, -12), r.leg_position('BL', -10, -15), r.leg_position('BR', -10, -16)] 108 | time.sleep(pause) 109 | [r.leg_position('FL', 8, -14, z =1), r.leg_position('FR', 9, -10, z =1), r.leg_position('BL', -10, -15), r.leg_position('BR', -10, -16)] 110 | time.sleep(pause) 111 | [r.leg_position('FL', 8, -14, z =1), r.leg_position('FR', 9, -10, z =1), r.leg_position('BL', -10, -14), r.leg_position('BR', -10, -16)] 112 | time.sleep(pause) 113 | [r.leg_position('FL', 8, -14, z =1), r.leg_position('FR', 9, -10, z =1), r.leg_position('BL', -6, -15), r.leg_position('BR', -10, -16)] 114 | time.sleep(pause) 115 | [r.leg_position('FL', 8, -14, z =1), r.leg_position('FR', 9, -10, z =1), r.leg_position('BL', -6, -18), r.leg_position('BR', -10, -16)] 116 | time.sleep(pause) 117 | 118 | 119 | [r.leg_position('FL', 8, -10, z =1), r.leg_position('FR', 9, -12, z =2), r.leg_position('BL', -6, -18), r.leg_position('BR', -10, -16)] 120 | time.sleep(pause) 121 | [r.leg_position('FL', 8, -10, z =1), r.leg_position('FR', 9, -12, z =2), r.leg_position('BL', -6, -18), r.leg_position('BR', -10, -14)] 122 | time.sleep(pause) 123 | [r.leg_position('FL', 8, -10, z =1), r.leg_position('FR', 9, -12, z =2), r.leg_position('BL', -6, -16), r.leg_position('BR', -3, -14)] 124 | time.sleep(pause) 125 | [r.leg_position('FL', 8, -10, z =1), r.leg_position('FR', 9, -12, z =2), r.leg_position('BL', -6, -14), r.leg_position('BR', -3, -17)] 126 | time.sleep(pause) 127 | 128 | r.leg_position('FR', 9, -3, z =2) 129 | time.sleep(pause) 130 | r.leg_position('FR', 16, -3, z = 2) 131 | time.sleep(pause) 132 | r.leg_position('FR', 16, -8, z = 1) 133 | time.sleep(pause) 134 | 135 | 136 | [r.leg_position('FL', 1, -16, z =2), r.leg_position('FR', 13, -11), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -17)] 137 | 138 | [r.leg_position('FL', 1, -16, z =2), r.leg_position('FR', 13, -10), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -16)] 139 | time.sleep(pause) 140 | [r.leg_position('FL', 1, -6, z =2), r.leg_position('FR', 13, -10), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -16)] 141 | time.sleep(pause) 142 | [r.leg_position('FL', 13, -6, z =2), r.leg_position('FR', 13, -11), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -16)] 143 | time.sleep(pause) 144 | [r.leg_position('FL', 13, -6), r.leg_position('FR', 13, -13, z = 2), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -16)] 145 | time.sleep(pause) 146 | [r.leg_position('FL', 13, -11), r.leg_position('FR', 13, -13, z = 2), r.leg_position('BL', -9, -16), r.leg_position('BR', -6, -17)] 147 | time.sleep(pause) 148 | [r.leg_position('FL', 13, -11), r.leg_position('FR', 13, -13, z = 2), r.leg_position('BL', -13, -14), r.leg_position('BR', -10, -17)] 149 | time.sleep(pause) 150 | [r.leg_position('FL', 13, -11), r.leg_position('FR', 13, -13, z = 2), r.leg_position('BL', -13, -14), r.leg_position('BR', -10, -17)] 151 | time.sleep(pause) 152 | r.leg_position('BR', -10, -12) 153 | time.sleep(pause) 154 | r.leg_position('BR', -5, -12) 155 | time.sleep(pause) 156 | r.leg_position('BR', -5, -17) 157 | time.sleep(pause) 158 | [r.leg_position('FL', 7, -14), r.leg_position('FR', 7, -8),r.leg_position('BL', -13, -14), r.leg_position('BR', -5, -16)] 159 | time.sleep(pause) 160 | [r.leg_position('FL', 7, -14), r.leg_position('FR', 7, -8),r.leg_position('BL', -5, -14), r.leg_position('BR', -5, -16)] 161 | time.sleep(pause) 162 | [r.leg_position('FL', 7, -14), r.leg_position('FR', 7, -8),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 163 | time.sleep(pause) 164 | [r.leg_position('FL', 7, -11), r.leg_position('FR', 7, -11),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 165 | time.sleep(pause) 166 | [r.leg_position('FL', 7, -8), r.leg_position('FR', 7, -8),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 167 | time.sleep(pause) 168 | [r.leg_position('FL', 4, -8), r.leg_position('FR', 4, -8),r.leg_position('BL', -12, -16), r.leg_position('BR', -12, -16)] 169 | time.sleep(pause) 170 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -8),r.leg_position('BL', -12, -16), r.leg_position('BR', -12, -16)] 171 | time.sleep(pause) 172 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -10, z=2),r.leg_position('BL', -12, -16), r.leg_position('BR', -10, -13)] 173 | time.sleep(pause) 174 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -10, z=2),r.leg_position('BL', -12, -16), r.leg_position('BR', -3, -11)] 175 | time.sleep(pause) 176 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -10, z=2),r.leg_position('BL', -12, -16), r.leg_position('BR', -2, -14)] 177 | time.sleep(pause) 178 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -8, z=2),r.leg_position('BL', -13, -13), r.leg_position('BR', -2, -14)] 179 | time.sleep(pause) 180 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -8, z=2),r.leg_position('BL', -3, -9), r.leg_position('BR', -2, -14)] 181 | time.sleep(pause) 182 | [r.leg_position('FL', -1, -8), r.leg_position('FR', -1, -8, z=2),r.leg_position('BL', 0, -14), r.leg_position('BR', -2, -14)] 183 | time.sleep(pause) 184 | [r.leg_position('FL', -3, -10), r.leg_position('FR', -3, -10, z=2),r.leg_position('BL', -2, -16), r.leg_position('BR', -2, -14)] 185 | time.sleep(pause) 186 | [r.leg_position('FL', -5, -12, z=-2), r.leg_position('FR', -5, -13, z=2),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 187 | time.sleep(pause) 188 | [r.leg_position('FL', -5, -12), r.leg_position('FR', -5, -13),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 189 | time.sleep(pause) 190 | 191 | # FR to 4th 192 | [r.leg_position('FL', -5, -12), r.leg_position('FR', -3, -9),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 193 | time.sleep(pause) 194 | [r.leg_position('FL', -5, -12), r.leg_position('FR', 9, -6),r.leg_position('BL', -5, -16), r.leg_position('BR', -5, -16)] 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | # Step 3 206 | #Lifting up FL 207 | # [r.leg_position('FL', 8, -10, z =1), r.leg_position('FR', 9, -6, z =2), r.leg_position('BL', -6, -10), r.leg_position('BR', -3, -10)] 208 | # time.sleep(pause) 209 | # Move body forward 210 | # [r.leg_position('FL', 1, -17, z =1), r.leg_position('FR', 9, -6, z = 2), r.leg_position('BL', -13, -13), r.leg_position('BR', -13, -13)] 211 | # time.sleep(pause) 212 | # r.leg_position('FR', 13, -5, z = 1) 213 | # time.sleep(pause) 214 | # r.leg_position('FR', 13, -14) 215 | # time.sleep(pause) 216 | # # Shift to right side 217 | # [r.leg_position('FL', 4, -14, z =1), r.leg_position('FR', 15, -10), r.leg_position('BL', -11, -17), r.leg_position('BR', -8, -15)] 218 | # time.sleep(pause) 219 | # #Lift up FL paw 220 | # [r.leg_position('FL', 4, -6), r.leg_position('FR', 15, -10), r.leg_position('BL', -11, -17), r.leg_position('BR', -8, -15)] 221 | # time.sleep(pause) 222 | # [r.leg_position('FL', 12, -6), r.leg_position('FR', 15, -10), r.leg_position('BL', -11, -17), r.leg_position('BR', -8, -15)] 223 | # time.sleep(pause) 224 | # [r.leg_position('FL', 13, -10), r.leg_position('FR', 15, -10), r.leg_position('BL', -11, -16), r.leg_position('BR', -8, -17)] 225 | # time.sleep(pause) 226 | # # Shift forward 227 | 228 | # [r.leg_position('FL', 8, -10), r.leg_position('FR', 11, -10, z = 1), r.leg_position('BL', -13, -13), r.leg_position('BR', -10, -11)] 229 | # time.sleep(pause) 230 | # # BR Lift 231 | # [r.leg_position('FL', 8, -10), r.leg_position('FR', 11, -10, z = 1), r.leg_position('BL', -13, -13), r.leg_position('BR', -3, -11)] 232 | # time.sleep(pause) 233 | # [r.leg_position('FL', 8, -10), r.leg_position('FR', 11, -10, z = 1), r.leg_position('BL', -13, -13), r.leg_position('BR', -3, -18)] 234 | # time.sleep(pause) 235 | 236 | 237 | #--------------------------------------------------------------- 238 | #Old----------------------------------------------------------- 239 | 240 | # r.leg_position('FR', 8, -15, z = 2) 241 | 242 | # r.leg_position('FL', 4, -6) 243 | # time.sleep(pause) 244 | # r.leg_position('FL', 9, -10) 245 | # time.sleep(pause) 246 | 247 | # r.leg_position('FL', 9, -10, z = 2) 248 | 249 | # r.leg_position('FR', 7, -6) 250 | # time.sleep(pause) 251 | # r.leg_position('FR', 12, -11) 252 | 253 | 254 | 255 | # r.leg_position('FR', 9, -10, z = -3) 256 | # r.leg_position('BL', 2, -6) 257 | # time.sleep(0.1) 258 | # r.leg_position('BL', -1, -17) 259 | # time.sleep(0.1) 260 | # r.leg_position('FR', 9, -10) 261 | # time.sleep(pause) 262 | # r.leg_position('FL', 7, -11, z = -3) 263 | # r.leg_position('BR', 2, -6) 264 | # time.sleep(0.1) 265 | # r.leg_position('BR', 2, -17) 266 | # time.sleep(0.1) 267 | # r.leg_position('FL', 7, -11) 268 | # time.sleep(pause) 269 | 270 | # #Front Paws up - Step 2: 271 | # r.leg_position('FL', 7, -6) 272 | # time.sleep(pause) 273 | # r.leg_position('FL', 9, -12) 274 | # time.sleep(pause) 275 | 276 | # r.leg_position('FR', 9, -6) 277 | # time.sleep(pause) 278 | # r.leg_position('FR', 10, -8) 279 | # time.sleep(pause) 280 | # r.leg_position('FR', 11, -13) 281 | # time.sleep(pause) 282 | 283 | 284 | if __name__ == '__main__': 285 | r = Quadruped() 286 | stair(r) 287 | 288 | 289 | 290 | -------------------------------------------------------------------------------- /gait_logic/stairs.py: -------------------------------------------------------------------------------- 1 | from adafruit_servokit import ServoKit 2 | from enum import IntEnum 3 | import time 4 | import math 5 | import bezier 6 | import numpy as np 7 | import curses 8 | import os 9 | 10 | class Motor(IntEnum): 11 | # may be useful for tuning specific motors 12 | FR_SHOULDER = 0 13 | FR_ELBOW = 1 14 | FR_HIP = 2 15 | FL_SHOULDER = 3 16 | FL_ELBOW = 4 17 | FL_HIP = 5 18 | BR_SHOULDER = 6 19 | BR_ELBOW = 7 20 | BL_SHOULDER = 8 21 | BL_ELBOW = 9 22 | 23 | class Quadruped: 24 | def __init__(self): 25 | self.kit = ServoKit(channels=16) 26 | self.upper_leg_length = 10 27 | self.lower_leg_length = 10.5 28 | for i in range(10): 29 | self.kit.servo[i].set_pulse_width_range(500,2500) 30 | 31 | def calibrate(self): 32 | # set the robot into the default "middle position" use this for attaching legs in right location 33 | self.kit.servo[Motor.FR_SHOULDER].angle = 60 34 | self.kit.servo[Motor.FR_ELBOW].angle = 90 35 | self.kit.servo[Motor.FR_HIP].angle = 90 36 | self.kit.servo[Motor.FL_SHOULDER].angle = 120 37 | self.kit.servo[Motor.FL_ELBOW].angle = 90 38 | self.kit.servo[Motor.FL_HIP].angle = 90 39 | self.kit.servo[Motor.BR_SHOULDER].angle = 60 40 | self.kit.servo[Motor.BR_ELBOW].angle = 90 41 | self.kit.servo[Motor.BL_SHOULDER].angle = 120 42 | self.kit.servo[Motor.BL_ELBOW].angle = 90 43 | 44 | def set_angle(self,leg_id, degrees): 45 | self.kit.servo[leg_id].angle = degrees 46 | 47 | def test_pos(self,shoulder,elbow,x,y,z=0,hip=None,right=True): 48 | L=2 49 | y_prime = -math.sqrt((z+L)**2 + y**2) 50 | thetaz = math.atan2(z+L,abs(y))-math.atan2(L,abs(y_prime)) 51 | print(thetaz/math.pi*180) 52 | 53 | elbow_offset = 20 54 | shoulder_offset = 10 55 | a1 = self.upper_leg_length 56 | a2 = self.lower_leg_length 57 | 58 | c2 = (x**2+y_prime**2-a1**2-a2**2)/(2*a1*a2) 59 | s2 = math.sqrt(1-c2**2) 60 | theta2 = math.atan2(s2,c2) 61 | c2 = math.cos(theta2) 62 | s2 = math.sin(theta2) 63 | 64 | c1 = (x*(a1+(a2*c2)) + y_prime*(a2*s2))/(x**2+y_prime**2) 65 | s1 = (y_prime*(a1+(a2*c2)) - x*(a2*s2))/(x**2+y_prime**2) 66 | theta1 = math.atan2(s1,c1) 67 | # generate positions with respect to robot motors 68 | theta_shoulder = -theta1 69 | theta_elbow = theta_shoulder - theta2 70 | theta_hip = 0 71 | if right: 72 | theta_shoulder = 180 - self.rad_to_degree(theta_shoulder) + shoulder_offset 73 | theta_elbow = 130 - self.rad_to_degree(theta_elbow) + elbow_offset 74 | if hip: 75 | theta_hip = 90 - self.rad_to_degree(thetaz) 76 | else: 77 | theta_shoulder = self.rad_to_degree(theta_shoulder) - shoulder_offset 78 | theta_elbow = 50 + self.rad_to_degree(theta_elbow) - elbow_offset 79 | if hip: 80 | theta_hip = 90 + self.rad_to_degree(thetaz) 81 | self.kit.servo[shoulder].angle = theta_shoulder 82 | self.kit.servo[elbow].angle = theta_elbow 83 | if hip: 84 | self.kit.servo[hip].angle = theta_hip 85 | # print("theta shoulder:",theta_shoulder,"\ttheta_elbow:",theta_elbow) 86 | return [theta_shoulder, theta_elbow] 87 | 88 | def test_step(self, cycles=1): 89 | # Generate foot tragectory 90 | step_nodes = np.asfortranarray([ 91 | [-4.0, -4.0, 0.0, 0.0], 92 | [-15.0, -10, -10, -15.0], 93 | ]) 94 | curve = bezier.Curve(step_nodes, degree=3) 95 | s_vals = np.linspace(0.0, 1.0, 20) 96 | step = curve.evaluate_multi(s_vals) 97 | 98 | slide_nodes = np.asfortranarray([ 99 | [0.0, -4.0], 100 | [-15.0, -15.0], 101 | ]) 102 | curve = bezier.Curve(slide_nodes, degree=1) 103 | s_vals = np.linspace(0.0, 1.0, 20) 104 | slide = curve.evaluate_multi(s_vals) 105 | 106 | foot_motion = step 107 | for x,y in zip(slide[0],slide[1]): 108 | new_x=np.append(foot_motion[0],x) 109 | new_y=np.append(foot_motion[1],y) 110 | foot_motion = [new_x,new_y] 111 | 112 | x = foot_motion[0] 113 | y = foot_motion[1] 114 | 115 | for cycle in range(cycles): 116 | for index in range(40): 117 | self.test_pos(x[index],y[index],Motor.FR_SHOULDER,Motor.FR_ELBOW,right=True) 118 | self.test_pos(x[(index+20)%40],y[(index+20)%40],Motor.BR_SHOULDER,Motor.BR_ELBOW,right=True) 119 | self.test_pos(x[(index+20)%40],y[(index+20)%40],Motor.FL_SHOULDER,Motor.FL_ELBOW,right=False) 120 | self.test_pos(x[index],y[index],Motor.BL_SHOULDER,Motor.BL_ELBOW,right=False) 121 | time.sleep(0.01) 122 | 123 | def rad_to_degree(self,rad): 124 | return rad*180/math.pi 125 | 126 | def test_turn(self,cycles=1): 127 | # Generate foot tragectory 128 | step_nodes = np.asfortranarray([ 129 | [-4.0, -4.0, 0.0, 0.0], 130 | [-15.0, -10, -10, -15.0], 131 | ]) 132 | curve = bezier.Curve(step_nodes, degree=3) 133 | s_vals = np.linspace(0.0, 1.0, 20) 134 | step = curve.evaluate_multi(s_vals) 135 | 136 | slide_nodes = np.asfortranarray([ 137 | [0.0, -4.0], 138 | [-15.0, -15.0], 139 | ]) 140 | curve = bezier.Curve(slide_nodes, degree=1) 141 | s_vals = np.linspace(0.0, 1.0, 20) 142 | slide = curve.evaluate_multi(s_vals) 143 | 144 | turn_motion = step 145 | for x,y,z in zip(slide[0],slide[1],slide[2]): 146 | new_x=np.append(turn_motion[0],x) 147 | new_y=np.append(turn_motion[1],y) 148 | turn_motion = [new_x,new_y] 149 | 150 | z = turn_motion[0] 151 | y = turn_motion[1] 152 | for cycle in range(cycles): 153 | for index in range(40): 154 | self.test_pos(Motor.FR_SHOULDER,Motor.FR_ELBOW,0,y[index],z=z[index],hip=Motor.FR_HIP,right=True) 155 | self.test_pos(Motor.BR_SHOULDER,Motor.BR_ELBOW,0,y[(index+20)%40],right=True) 156 | self.test_pos(Motor.FL_SHOULDER,Motor.FL_ELBOW,0,y[(index+20)%40],z=-z[(index+20)%40],hip=Motor.FL_HIP,right=False) 157 | self.test_pos(Motor.BL_SHOULDER,Motor.BL_ELBOW,0,y[(index)%40],right=False) 158 | 159 | 160 | def WASD(self): 161 | def main(win): 162 | win.nodelay(True) 163 | key="" 164 | win.clear() 165 | momentum = np.asarray([0,0,1],dtype=np.float32) 166 | string = "forward: " + str(momentum[0]) + "sideways: " + str(momentum[1]) 167 | win.addstr(string) 168 | key = None 169 | step_size = 1 170 | index = 1 171 | # Generate footstep 172 | s_vals = np.linspace(0.0, 1.0, 20) 173 | 174 | step_nodes = np.asfortranarray([ 175 | [-2.0, -2.0, 2.0, 2.0], 176 | [-2.0, -2.0, 2.0, 2.0], 177 | [-18.0, -6, -6, -18.0], 178 | ]) 179 | curve = bezier.Curve(step_nodes, degree=3) 180 | step = curve.evaluate_multi(s_vals) 181 | 182 | slide_nodes = np.asfortranarray([ 183 | [2.0, -2.0], 184 | [2.0, -2.0], 185 | [-18.0, -18], 186 | ]) 187 | curve = bezier.Curve(slide_nodes, degree=1) 188 | slide = curve.evaluate_multi(s_vals) 189 | 190 | motion = np.concatenate((step,slide), axis=1) 191 | x_range = 4 192 | z_range = 4 193 | while True: 194 | try: 195 | key = win.getkey() 196 | curses.flushinp() 197 | except: 198 | key = None 199 | win.clear() 200 | if key == 'w': 201 | if momentum[0] < x_range: 202 | momentum[0]+= step_size 203 | elif key == 's': 204 | if momentum[0] > -x_range: 205 | momentum[0]-= step_size 206 | if key == 'a': 207 | if momentum[1] > -z_range: 208 | momentum[1]-= step_size 209 | elif key == 'd': 210 | if momentum[1] < z_range: 211 | momentum[1]+= step_size 212 | 213 | string = "x: " + str(round(momentum[0],2)) + " y: " + str(round(momentum[1],2)) 214 | win.addstr(string) 215 | if key == os.linesep: 216 | break 217 | 218 | tragectory = motion * momentum[:, None] 219 | x,z,y = tragectory 220 | # 221 | i1 = index%40 222 | i2 = (index+20)%40 223 | # Apply movement based movement 224 | self.test_pos(Motor.FR_SHOULDER,Motor.FR_ELBOW,x[i1],y[i1],z=z[i1],hip=Motor.FR_HIP,right=True) 225 | self.test_pos(Motor.BR_SHOULDER,Motor.BR_ELBOW,x[i2]-2,y[i2],right=True) 226 | self.test_pos(Motor.FL_SHOULDER,Motor.FL_ELBOW,x[i2]-4,y[i2],z=-z[i2],hip=Motor.FL_HIP,right=False) 227 | self.test_pos(Motor.BL_SHOULDER,Motor.BL_ELBOW,x[i1]-3,y[i1],right=False) 228 | index += 1 229 | time.sleep(0.003) 230 | 231 | curses.wrapper(main) 232 | 233 | def stair(self): 234 | def main(win): 235 | win.nodelay(True) 236 | key="" 237 | win.clear() 238 | momentum = np.asarray([0,0,1],dtype=np.float32) 239 | string = "forward: " + str(momentum[0]) + "sideways: " + str(momentum[1]) 240 | win.addstr(string) 241 | key = None 242 | step_size = 1 243 | index = 1 244 | # Generate footstep 245 | s_vals = np.linspace(0.0, 1.0, 20) 246 | 247 | step_nodes = np.asfortranarray([ 248 | [-1.0, -1.0, 1.0, 1.0], 249 | [-1.0, -1.0, 1.0, 1.0], 250 | [-10.0, -3.0, -3.0, -10.0], 251 | ]) 252 | curve = bezier.Curve(step_nodes, degree=3) 253 | step = curve.evaluate_multi(s_vals) 254 | 255 | 256 | s_vals = np.linspace(0.0, 1.0, 60) 257 | slide_nodes = np.asfortranarray([ 258 | [1.0, -1.0], 259 | [1.0, -1.0], 260 | [-15.0, -15], 261 | ]) 262 | curve = bezier.Curve(slide_nodes, degree=1) 263 | slide = curve.evaluate_multi(s_vals) 264 | 265 | motion = np.concatenate((step,slide), axis=1) 266 | x_range = 4 267 | z_range = 4 268 | while True: 269 | try: 270 | key = win.getkey() 271 | curses.flushinp() 272 | except: 273 | key = None 274 | win.clear() 275 | if key == 'w': 276 | if momentum[0] < x_range: 277 | momentum[0]+= step_size 278 | elif key == 's': 279 | if momentum[0] > -x_range: 280 | momentum[0]-= step_size 281 | if key == 'a': 282 | if momentum[1] > -z_range: 283 | momentum[1]-= step_size 284 | elif key == 'd': 285 | if momentum[1] < z_range: 286 | momentum[1]+= step_size 287 | 288 | string = "x: " + str(round(momentum[0],2)) + " y: " + str(round(momentum[1],2)) 289 | win.addstr(string) 290 | if key == os.linesep: 291 | break 292 | 293 | tragectory = motion * momentum[:, None] 294 | x,z,y = tragectory 295 | # 296 | i1 = index%80 297 | i2 = (index+20)%80 298 | i3 = (index+40)%80 299 | i4 = (index+60)%80 300 | # Apply movement based movement 301 | self.test_pos(Motor.FR_SHOULDER,Motor.FR_ELBOW,x[i1],y[i1],z=z[i1],hip=Motor.FR_HIP,right=True) 302 | self.test_pos(Motor.BR_SHOULDER,Motor.BR_ELBOW,x[i2],y[i2]-1,right=True) 303 | self.test_pos(Motor.FL_SHOULDER,Motor.FL_ELBOW,x[i3],y[i3],z=-z[i3],hip=Motor.FL_HIP,right=False) 304 | self.test_pos(Motor.BL_SHOULDER,Motor.BL_ELBOW,x[i4],y[i4]-1,right=False) 305 | index += 1 306 | time.sleep(0.01) 307 | curses.wrapper(main) 308 | 309 | 310 | if __name__ == "__main__": 311 | r = Quadruped() 312 | r.calibrate() 313 | r.WASD() 314 | # r.stair() 315 | -------------------------------------------------------------------------------- /controllers/object_tracker_controller.py: -------------------------------------------------------------------------------- 1 | """ 2 | This program takes input from an imagezmq Image hub connection and attaches then sends a numpy momentum array 3 | to a targetted IP/Port. 4 | 5 | This is a modified version of object_tracker.py from the yolov4-deepsort-base project (https://github.com/theAIGuysCode/yolov4-deepsort) 6 | it should be placed in that repository to be used. 7 | """ 8 | 9 | 10 | import os 11 | import time 12 | import imagezmq 13 | # comment out below line to enable tensorflow logging outputs 14 | os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' 15 | import time 16 | import tensorflow as tf 17 | physical_devices = tf.config.experimental.list_physical_devices('GPU') 18 | if len(physical_devices) > 0: 19 | tf.config.experimental.set_memory_growth(physical_devices[0], True) 20 | from absl import app, flags, logging 21 | from absl.flags import FLAGS 22 | import core.utils as utils 23 | from core.yolov4 import filter_boxes 24 | from tensorflow.python.saved_model import tag_constants 25 | from core.config import cfg 26 | from PIL import Image 27 | import cv2 28 | import numpy as np 29 | import matplotlib.pyplot as plt 30 | from tensorflow.compat.v1 import ConfigProto 31 | from tensorflow.compat.v1 import InteractiveSession 32 | # deep sort imports 33 | from deep_sort import preprocessing, nn_matching 34 | from deep_sort.detection import Detection 35 | from deep_sort.tracker import Tracker 36 | from tools import generate_detections as gdet 37 | 38 | # Local imports 39 | from utils.ip_helper import create_socket_connection 40 | 41 | flags.DEFINE_string('framework', 'tf', '(tf, tflite, trt') 42 | flags.DEFINE_string('weights', './checkpoints/yolov4-416', 43 | 'path to weights file') # Change second argument to './checkpoints/yolov4-416' to use non-tiny model 44 | flags.DEFINE_integer('size', 416, 'resize images to') 45 | flags.DEFINE_boolean('tiny', False, 'yolo or yolo-tiny') # Change second argument to 'False' to use non-tiny model (approx. 100% speedup using tiny) 46 | flags.DEFINE_string('model', 'yolov4', 'yolov3 or yolov4') 47 | flags.DEFINE_float('iou', 0.45, 'iou threshold') 48 | flags.DEFINE_float('score', 0.50, 'score threshold') 49 | flags.DEFINE_boolean('dont_show', False, 'dont show video output') 50 | flags.DEFINE_boolean('info', False, 'show detailed info of tracked objects') 51 | flags.DEFINE_boolean('count', False, 'count objects being tracked on screen') 52 | 53 | flags.DEFINE_string('pi_ip', '192.168.2.135', "the ip of the quadruped's raspberry pi") # Set the default value to your raspberry pi for easier startup. 54 | flags.DEFINE_integer('pi_port', 5000, "the port of the quadruped's raspberry pi") 55 | flags.DEFINE_boolean('return_to_zero', False, 'slowly reduce momentum to zero if not controlling') 56 | 57 | def main(_argv): 58 | # Definition of the parameters 59 | max_cosine_distance = 0.4 60 | nn_budget = None 61 | nms_max_overlap = 1.0 62 | 63 | # initialize deep sort 64 | model_filename = 'model_data/mars-small128.pb' 65 | encoder = gdet.create_box_encoder(model_filename, batch_size=1) 66 | # calculate cosine distance metric 67 | metric = nn_matching.NearestNeighborDistanceMetric("cosine", max_cosine_distance, nn_budget) 68 | # initialize tracker 69 | tracker = Tracker(metric) 70 | 71 | # load configuration for object detector 72 | config = ConfigProto() 73 | config.gpu_options.allow_growth = True 74 | session = InteractiveSession(config=config) 75 | STRIDES, ANCHORS, NUM_CLASS, XYSCALE = utils.load_config(FLAGS) 76 | input_size = FLAGS.size 77 | 78 | # load tflite model if flag is set 79 | if FLAGS.framework == 'tflite': 80 | interpreter = tf.lite.Interpreter(model_path=FLAGS.weights) 81 | interpreter.allocate_tensors() 82 | input_details = interpreter.get_input_details() 83 | output_details = interpreter.get_output_details() 84 | print(input_details) 85 | print(output_details) 86 | # otherwise load standard tensorflow saved model 87 | else: 88 | saved_model_loaded = tf.saved_model.load(FLAGS.weights, tags=[tag_constants.SERVING]) 89 | infer = saved_model_loaded.signatures['serving_default'] 90 | 91 | image_hub = imagezmq.ImageHub() 92 | frame_num = 0 93 | 94 | # Start network connection 95 | s = create_socket_connection() 96 | 97 | server = (FLAGS.pi_ip, FLAGS.pi_port) 98 | print(server) 99 | momentum = np.array([0.,0.,1.,0.]) # Control [x,z,y,quit] telemetry 100 | close = False 101 | # while program is running 102 | while not close: 103 | try: 104 | device_name, image = image_hub.recv_image() 105 | image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) 106 | 107 | frame_num +=1 108 | print('Frame #: ', frame_num) 109 | frame_size = image.shape[:2] 110 | image_data = cv2.resize(image, (input_size, input_size)) 111 | image_data = image_data / 255. 112 | image_data = image_data[np.newaxis, ...].astype(np.float32) 113 | start_time = time.time() 114 | 115 | # run detections on tflite if flag is set 116 | if FLAGS.framework == 'tflite': 117 | interpreter.set_tensor(input_details[0]['index'], image_data) 118 | interpreter.invoke() 119 | pred = [interpreter.get_tensor(output_details[i]['index']) for i in range(len(output_details))] 120 | # run detections using yolov3 if flag is set 121 | if FLAGS.model == 'yolov3' and FLAGS.tiny == True: 122 | boxes, pred_conf = filter_boxes(pred[1], pred[0], score_threshold=0.25, 123 | input_shape=tf.constant([input_size, input_size])) 124 | else: 125 | boxes, pred_conf = filter_boxes(pred[0], pred[1], score_threshold=0.25, 126 | input_shape=tf.constant([input_size, input_size])) 127 | else: 128 | batch_data = tf.constant(image_data) 129 | pred_bbox = infer(batch_data) 130 | for key, value in pred_bbox.items(): 131 | boxes = value[:, :, 0:4] 132 | pred_conf = value[:, :, 4:] 133 | 134 | boxes, scores, classes, valid_detections = tf.image.combined_non_max_suppression( 135 | boxes=tf.reshape(boxes, (tf.shape(boxes)[0], -1, 1, 4)), 136 | scores=tf.reshape( 137 | pred_conf, (tf.shape(pred_conf)[0], -1, tf.shape(pred_conf)[-1])), 138 | max_output_size_per_class=50, 139 | max_total_size=50, 140 | iou_threshold=FLAGS.iou, 141 | score_threshold=FLAGS.score 142 | ) 143 | 144 | # convert data to numpy arrays and slice out unused elements 145 | num_objects = valid_detections.numpy()[0] 146 | norm_bboxes = boxes.numpy()[0] 147 | norm_bboxes = norm_bboxes[0:int(num_objects)] 148 | scores = scores.numpy()[0] 149 | scores = scores[0:int(num_objects)] 150 | classes = classes.numpy()[0] 151 | classes = classes[0:int(num_objects)] 152 | 153 | # format bounding boxes from normalized ymin, xmin, ymax, xmax ---> xmin, ymin, width, height 154 | original_h, original_w, _ = image.shape 155 | bboxes = utils.format_boxes(norm_bboxes, original_h, original_w) 156 | 157 | # store all predictions in one parameter for simplicity when calling functions 158 | pred_bbox = [bboxes, scores, classes, num_objects] 159 | 160 | # read in all class names from config 161 | class_names = utils.read_class_names(cfg.YOLO.CLASSES) 162 | 163 | # by default allow all classes in .names file 164 | # allowed_classes = list(class_names.values()) 165 | 166 | # custom allowed classes (uncomment line below to customize tracker for only people) 167 | allowed_classes = ['person'] 168 | 169 | # loop through objects and use class index to get class name, allow only classes in allowed_classes list 170 | names = [] 171 | deleted_indx = [] 172 | for i in range(num_objects): 173 | class_indx = int(classes[i]) 174 | class_name = class_names[class_indx] 175 | if class_name not in allowed_classes: 176 | deleted_indx.append(i) 177 | else: 178 | names.append(class_name) 179 | names = np.array(names) 180 | count = len(names) 181 | if FLAGS.count: 182 | cv2.putText(image, "Objects being tracked: {}".format(count), (5, 35), cv2.FONT_HERSHEY_COMPLEX_SMALL, 2, (0, 255, 0), 2) 183 | print("Objects being tracked: {}".format(count)) 184 | # delete detections that are not in allowed_classes 185 | bboxes = np.delete(bboxes, deleted_indx, axis=0) 186 | norm_bboxes = np.delete(norm_bboxes, deleted_indx, axis=0) 187 | scores = np.delete(scores, deleted_indx, axis=0) 188 | 189 | # encode yolo detections and feed to tracker 190 | features = encoder(image, bboxes) 191 | detections = [Detection(bbox, score, class_name, feature) for bbox, score, class_name, feature in zip(bboxes, scores, names, features)] 192 | 193 | #initialize color map 194 | cmap = plt.get_cmap('tab20b') 195 | colors = [cmap(i)[:3] for i in np.linspace(0, 1, 20)] 196 | 197 | # run non-maxima supression 198 | boxs = np.array([d.tlwh for d in detections]) 199 | scores = np.array([d.confidence for d in detections]) 200 | classes = np.array([d.class_name for d in detections]) 201 | indices = preprocessing.non_max_suppression(boxs, classes, nms_max_overlap, scores) 202 | detections = [detections[i] for i in indices] 203 | 204 | # Call the tracker 205 | tracker.predict() 206 | tracker.update(detections) 207 | 208 | # Track center of each tracked object 209 | centers = np.zeros((len(tracker.tracks), 2)) 210 | 211 | # update tracks 212 | for i,track in enumerate(tracker.tracks): 213 | if not track.is_confirmed() or track.time_since_update > 1: 214 | continue 215 | bbox = track.to_tlbr() 216 | class_name = track.get_class() 217 | 218 | # draw bbox on screen 219 | color = colors[int(track.track_id) % len(colors)] 220 | color = [i * 255 for i in color] 221 | cv2.rectangle(image, (int(bbox[0]), int(bbox[1])), (int(bbox[2]), int(bbox[3])), color, 2) 222 | cv2.rectangle(image, (int(bbox[0]), int(bbox[1]-30)), (int(bbox[0])+(len(class_name)+len(str(track.track_id)))*17, int(bbox[1])), color, -1) 223 | cv2.putText(image, class_name + "-" + str(track.track_id),(int(bbox[0]), int(bbox[1]-10)),0, 0.75, (255,255,255),2) 224 | 225 | # Grab bbox centers 226 | centers[i] = (int((bbox[0] + bbox[2])/2), int((bbox[2] + bbox[3])/2)) 227 | 228 | # if enable info flag then print details about each track 229 | if FLAGS.info: 230 | print("Tracker ID: {}, Class: {}, BBox Coords (xmin, ymin, xmax, ymax): {}".format(str(track.track_id), class_name, (int(bbox[0]), int(bbox[1]), int(bbox[2]), int(bbox[3])))) 231 | 232 | # calculate frames per second of running detections 233 | fps = 1.0 / (time.time() - start_time) 234 | print("FPS: %.2f" % fps) 235 | result = np.asarray(image) 236 | result = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) 237 | 238 | norm_detections = [(bbox, score) for bbox, score in zip(norm_bboxes, scores)] 239 | def sort_score(x): 240 | return x[1] 241 | detection = sorted(norm_detections, key = sort_score) 242 | if len(detection): 243 | frame = detection[0][0] 244 | 245 | norm_bbox = [frame[0]/original_w, 246 | frame[1]/original_h, 247 | frame[2]/original_w, 248 | frame[3]/original_h] 249 | 250 | K = 4 251 | # yaw control 252 | x_center = norm_bbox[0]+ norm_bbox[2]/2 253 | if x_center > 0.6 or x_center < 0.4: 254 | momentum[1] = K*(x_center-0.5) 255 | else: 256 | momentum[1] = 0 257 | # forward/backward control 258 | if norm_bbox[1] < 0.10: 259 | momentum[0] = -2 260 | elif norm_bbox[1] > 0.30: 261 | momentum[0] = 2 262 | else: 263 | momentum[0] = 0 264 | 265 | if not FLAGS.dont_show: 266 | cv2.imshow("Output Video", result) 267 | 268 | if cv2.waitKey(1) & 0xFF == ord('q'): 269 | momentum[3] = 1 270 | close = True 271 | 272 | print("Momentum:", momentum) 273 | 274 | image_hub.send_reply() 275 | s.sendto(momentum.tobytes(), server) 276 | 277 | except: 278 | time.sleep(1) 279 | print('Awaiting data') 280 | s.close() 281 | cv2.destroyAllWindows() 282 | 283 | if __name__ == '__main__': 284 | try: 285 | app.run(main) 286 | except SystemExit: 287 | pass 288 | -------------------------------------------------------------------------------- /image-sender/imagezmq.py: -------------------------------------------------------------------------------- 1 | """ imagezmq: Transport images via ZMQ. 2 | Copyright (c) 2019 by Jeff Bass. 3 | License: MIT 4 | """ 5 | 6 | import zmq 7 | import numpy as np 8 | 9 | class ImageSender(): 10 | """Opens a zmq socket and sends images 11 | 12 | Opens a zmq (REQ or PUB) socket on the image sending computer, often a 13 | Raspberry Pi, that will be sending OpenCV images and 14 | related text messages to the hub computer. Provides methods to 15 | send images or send jpg compressed images. 16 | 17 | Two kinds of ZMQ message patterns are possible in imagezmq: 18 | REQ/REP: an image is sent and the sender waits for a reply ("blocking"). 19 | PUB/SUB: an images is sent and no reply is sent or expected ("non-blocking"). 20 | 21 | There are advantabes and disadvantages for each message pattern. 22 | See the documentation for a full description of REQ/REP and PUB/SUB. 23 | The default is REQ/REP for the ImageSender class and the ImageHub class. 24 | 25 | Arguments: 26 | connect_to: the tcp address:port of the hub computer. 27 | REQ_REP: (optional) if True (the default), a REQ socket will be created 28 | if False, a PUB socket will be created 29 | """ 30 | 31 | def __init__(self, connect_to='tcp://127.0.0.1:5555', REQ_REP = True): 32 | """Initializes zmq socket for sending images to the hub. 33 | 34 | Expects an appropriate ZMQ socket at the connect_to tcp:port address: 35 | If REQ_REP is True (the default), then a REQ socket is created. It 36 | must connect to a matching REP socket on the ImageHub(). 37 | 38 | If REQ_REP = False, then a PUB socket is created. It must connect to 39 | a matching SUB socket on the ImageHub(). 40 | """ 41 | 42 | if REQ_REP == True: 43 | # REQ/REP mode, this is a blocking scenario 44 | self.init_reqrep(connect_to) 45 | else: 46 | #PUB/SUB mode, non-blocking scenario 47 | self.init_pubsub(connect_to) 48 | 49 | def init_reqrep(self, address): 50 | """ Creates and inits a socket in REQ/REP mode 51 | """ 52 | 53 | socketType = zmq.REQ 54 | self.zmq_context = SerializingContext() 55 | self.zmq_socket = self.zmq_context.socket(socketType) 56 | self.zmq_socket.connect(address) 57 | 58 | # Assign corresponding send methods for REQ/REP mode 59 | self.send_image = self.send_image_reqrep 60 | self.send_jpg = self.send_jpg_reqrep 61 | 62 | def init_pubsub(self, address): 63 | """Creates and inits a socket in PUB/SUB mode 64 | """ 65 | 66 | socketType = zmq.PUB 67 | self.zmq_context = SerializingContext() 68 | self.zmq_socket = self.zmq_context.socket(socketType) 69 | self.zmq_socket.bind(address) 70 | 71 | # Assign corresponding send methods for PUB/SUB mode 72 | self.send_image = self.send_image_pubsub 73 | self.send_jpg = self.send_jpg_pubsub 74 | 75 | def send_image(self, msg, image): 76 | """ This is a placeholder. This method will be set to either a REQ/REP 77 | or PUB/SUB sending method, depending on REQ_REP option value. 78 | 79 | Arguments: 80 | msg: text message or image name. 81 | image: OpenCV image to send to hub. 82 | 83 | Returns: 84 | A text reply from hub in REQ/REP mode or nothing in PUB/SUB mode. 85 | 86 | """ 87 | pass 88 | 89 | def send_image_reqrep(self, msg, image): 90 | """Sends OpenCV image and msg to hub computer in REQ/REP mode 91 | 92 | Arguments: 93 | msg: text message or image name. 94 | image: OpenCV image to send to hub. 95 | 96 | Returns: 97 | A text reply from hub. 98 | """ 99 | 100 | if image.flags['C_CONTIGUOUS']: 101 | # if image is already contiguous in memory just send it 102 | self.zmq_socket.send_array(image, msg, copy=False) 103 | else: 104 | # else make it contiguous before sending 105 | image = np.ascontiguousarray(image) 106 | self.zmq_socket.send_array(image, msg, copy=False) 107 | hub_reply = self.zmq_socket.recv() # receive the reply message 108 | return hub_reply 109 | 110 | def send_image_pubsub(self, msg, image): 111 | """Sends OpenCV image and msg hub computer in PUB/SUB mode. If 112 | there is no hub computer subscribed to this socket, then image and msg 113 | are discarded. 114 | 115 | Arguments: 116 | msg: text message or image name. 117 | image: OpenCV image to send to hub. 118 | 119 | Returns: 120 | Nothing; there is no reply from hub computer in PUB/SUB mode 121 | """ 122 | 123 | if image.flags['C_CONTIGUOUS']: 124 | # if image is already contiguous in memory just send it 125 | self.zmq_socket.send_array(image, msg, copy=False) 126 | else: 127 | # else make it contiguous before sending 128 | image = np.ascontiguousarray(image) 129 | self.zmq_socket.send_array(image, msg, copy=False) 130 | 131 | def send_jpg(self, msg, jpg_buffer): 132 | """This is a placeholder. This method will be set to either a REQ/REP 133 | or PUB/SUB sending method, depending on REQ_REP option value. 134 | 135 | Arguments: 136 | msg: image name or message text. 137 | jpg_buffer: bytestring containing the jpg image to send to hub. 138 | 139 | Returns: 140 | A text reply from hub in REQ/REP mode or nothing in PUB/SUB mode. 141 | """ 142 | pass 143 | 144 | def send_jpg_reqrep(self, msg, jpg_buffer): 145 | """Sends msg text and jpg buffer to hub computer in REQ/REP mode. 146 | 147 | Arguments: 148 | msg: image name or message text. 149 | jpg_buffer: bytestring containing the jpg image to send to hub. 150 | 151 | Returns: 152 | A text reply from hub. 153 | """ 154 | 155 | self.zmq_socket.send_jpg(msg, jpg_buffer, copy=False) 156 | hub_reply = self.zmq_socket.recv() # receive the reply message 157 | return hub_reply 158 | 159 | def send_jpg_pubsub(self, msg, jpg_buffer): 160 | """Sends msg text and jpg buffer to hub computer in PUB/SUB mode. If 161 | there is no hub computer subscribed to this socket, then image and msg 162 | are discarded. 163 | 164 | Arguments: 165 | msg: image name or message text. 166 | jpg_buffer: bytestring containing the jpg image to send to hub. 167 | 168 | Returns: 169 | Nothing; there is no reply from the hub computer in PUB/SUB mode. 170 | """ 171 | 172 | self.zmq_socket.send_jpg(msg, jpg_buffer, copy=False) 173 | 174 | def close(self): 175 | """Closes the ZMQ socket and the ZMQ context. 176 | """ 177 | 178 | self.zmq_socket.close() 179 | self.zmq_context.term() 180 | 181 | def __enter__(self): 182 | """Enables use of ImageSender in with statement. 183 | 184 | Returns: 185 | self. 186 | """ 187 | 188 | return self 189 | 190 | def __exit__(self, exc_type, exc_val, exc_tb): 191 | """Enables use of ImageSender in with statement. 192 | """ 193 | 194 | self.close() 195 | 196 | 197 | class ImageHub(): 198 | """Opens a zmq socket and receives images 199 | 200 | Opens a zmq (REP or SUB) socket on the hub computer, for example, 201 | a Mac, that will be receiving and displaying or processing OpenCV images 202 | and related text messages. Provides methods to receive images or receive 203 | jpg compressed images. 204 | 205 | Two kinds of ZMQ message patterns are possible in imagezmq: 206 | REQ/REP: an image is sent and the sender waits for a reply ("blocking"). 207 | PUB/SUB: an images is sent and no reply is sent or expected ("non-blocking"). 208 | 209 | There are advantabes and disadvantages for each message pattern. 210 | See the documentation for a full description of REQ/REP and PUB/SUB. 211 | The default is REQ/REP for the ImageSender class and the ImageHub class. 212 | 213 | Arguments: 214 | open_port: (optional) the socket to open for receiving REQ requests or 215 | socket to connect to for SUB requests. 216 | REQ_REP: (optional) if True (the default), a REP socket will be created 217 | if False, a SUB socket will be created 218 | """ 219 | 220 | def __init__(self, open_port='tcp://*:5555', REQ_REP = True): 221 | """Initializes zmq socket to receive images and text. 222 | 223 | Expects an appropriate ZMQ socket at the senders tcp:port address: 224 | If REQ_REP is True (the default), then a REP socket is created. It 225 | must connect to a matching REQ socket on the ImageSender(). 226 | 227 | If REQ_REP = False, then a SUB socket is created. It must connect to 228 | a matching PUB socket on the ImageSender(). 229 | 230 | """ 231 | self.REQ_REP = REQ_REP 232 | if REQ_REP ==True: 233 | #Init REP socket for blocking mode 234 | self.init_reqrep(open_port) 235 | else: 236 | #Connect to PUB socket for non-blocking mode 237 | self.init_pubsub(open_port) 238 | 239 | def init_reqrep(self, address): 240 | """ Initializes Hub in REQ/REP mode 241 | """ 242 | socketType = zmq.REP 243 | self.zmq_context = SerializingContext() 244 | self.zmq_socket = self.zmq_context.socket(socketType) 245 | self.zmq_socket.bind(address) 246 | 247 | def init_pubsub(self, address): 248 | """ Initialize Hub in PUB/SUB mode 249 | """ 250 | socketType = zmq.SUB 251 | self.zmq_context = SerializingContext() 252 | self.zmq_socket = self.zmq_context.socket(socketType) 253 | self.zmq_socket.setsockopt(zmq.SUBSCRIBE, b'') 254 | self.zmq_socket.connect(address) 255 | 256 | def connect(self, open_port): 257 | """In PUB/SUB mode, the hub can connect to multiple senders at the same 258 | time. 259 | Use this method to connect (and subscribe) to additional senders. 260 | 261 | Arguments: 262 | open_port: the PUB socket to connect to. 263 | """ 264 | 265 | if self.REQ_REP == False: 266 | #This makes sense only in PUB/SUB mode 267 | self.zmq_socket.setsockopt(zmq.SUBSCRIBE, b'') 268 | self.zmq_socket.connect(open_port) 269 | self.zmq_socket.subscribe(b'') 270 | return 271 | 272 | def recv_image(self, copy=False): 273 | """Receives OpenCV image and text msg. 274 | 275 | Arguments: 276 | copy: (optional) zmq copy flag. 277 | 278 | Returns: 279 | msg: text msg, often the image name. 280 | image: OpenCV image. 281 | """ 282 | 283 | msg, image = self.zmq_socket.recv_array(copy=False) 284 | return msg, image 285 | 286 | def recv_jpg(self, copy=False): 287 | """Receives text msg, jpg buffer. 288 | 289 | Arguments: 290 | copy: (optional) zmq copy flag 291 | Returns: 292 | msg: text message, often image name 293 | jpg_buffer: bytestring jpg compressed image 294 | """ 295 | 296 | msg, jpg_buffer = self.zmq_socket.recv_jpg(copy=False) 297 | return msg, jpg_buffer 298 | 299 | def send_reply(self, reply_message=b'OK'): 300 | """Sends the zmq REP reply message. 301 | 302 | Arguments: 303 | reply_message: reply message text, often just string 'OK' 304 | """ 305 | self.zmq_socket.send(reply_message) 306 | 307 | def close(self): 308 | """Closes the ZMQ socket and the ZMQ context. 309 | """ 310 | 311 | self.zmq_socket.close() 312 | self.zmq_context.term() 313 | 314 | def __enter__(self): 315 | """Enables use of ImageHub in with statement. 316 | 317 | Returns: 318 | self. 319 | """ 320 | 321 | return self 322 | 323 | def __exit__(self, exc_type, exc_val, exc_tb): 324 | """Enables use of ImageHub in with statement. 325 | """ 326 | 327 | self.close() 328 | 329 | 330 | class SerializingSocket(zmq.Socket): 331 | """Numpy array serialization methods. 332 | 333 | Modelled on PyZMQ serialization examples. 334 | 335 | Used for sending / receiving OpenCV images, which are Numpy arrays. 336 | Also used for sending / receiving jpg compressed OpenCV images. 337 | """ 338 | 339 | def send_array(self, A, msg='NoName', flags=0, copy=True, track=False): 340 | """Sends a numpy array with metadata and text message. 341 | 342 | Sends a numpy array with the metadata necessary for reconstructing 343 | the array (dtype,shape). Also sends a text msg, often the array or 344 | image name. 345 | 346 | Arguments: 347 | A: numpy array or OpenCV image. 348 | msg: (optional) array name, image name or text message. 349 | flags: (optional) zmq flags. 350 | copy: (optional) zmq copy flag. 351 | track: (optional) zmq track flag. 352 | """ 353 | 354 | md = dict( 355 | msg=msg, 356 | dtype=str(A.dtype), 357 | shape=A.shape, 358 | ) 359 | self.send_json(md, flags | zmq.SNDMORE) 360 | return self.send(A, flags, copy=copy, track=track) 361 | 362 | def send_jpg(self, 363 | msg='NoName', 364 | jpg_buffer=b'00', 365 | flags=0, 366 | copy=True, 367 | track=False): 368 | """Send a jpg buffer with a text message. 369 | 370 | Sends a jpg bytestring of an OpenCV image. 371 | Also sends text msg, often the image name. 372 | 373 | Arguments: 374 | msg: image name or text message. 375 | jpg_buffer: jpg buffer of compressed image to be sent. 376 | flags: (optional) zmq flags. 377 | copy: (optional) zmq copy flag. 378 | track: (optional) zmq track flag. 379 | """ 380 | 381 | md = dict(msg=msg, ) 382 | self.send_json(md, flags | zmq.SNDMORE) 383 | return self.send(jpg_buffer, flags, copy=copy, track=track) 384 | 385 | def recv_array(self, flags=0, copy=True, track=False): 386 | """Receives a numpy array with metadata and text message. 387 | 388 | Receives a numpy array with the metadata necessary 389 | for reconstructing the array (dtype,shape). 390 | Returns the array and a text msg, often the array or image name. 391 | 392 | Arguments: 393 | flags: (optional) zmq flags. 394 | copy: (optional) zmq copy flag. 395 | track: (optional) zmq track flag. 396 | 397 | Returns: 398 | msg: image name or text message. 399 | A: numpy array or OpenCV image reconstructed with dtype and shape. 400 | """ 401 | 402 | md = self.recv_json(flags=flags) 403 | msg = self.recv(flags=flags, copy=copy, track=track) 404 | A = np.frombuffer(msg, dtype=md['dtype']) 405 | return (md['msg'], A.reshape(md['shape'])) 406 | 407 | def recv_jpg(self, flags=0, copy=True, track=False): 408 | """Receives a jpg buffer and a text msg. 409 | 410 | Receives a jpg bytestring of an OpenCV image. 411 | Also receives a text msg, often the image name. 412 | 413 | Arguments: 414 | flags: (optional) zmq flags. 415 | copy: (optional) zmq copy flag. 416 | track: (optional) zmq track flag. 417 | 418 | Returns: 419 | msg: image name or text message. 420 | jpg_buffer: bytestring, containing jpg image. 421 | """ 422 | 423 | md = self.recv_json(flags=flags) # metadata text 424 | jpg_buffer = self.recv(flags=flags, copy=copy, track=track) 425 | return (md['msg'], jpg_buffer) 426 | 427 | 428 | class SerializingContext(zmq.Context): 429 | _socket_class = SerializingSocket --------------------------------------------------------------------------------