├── spot_wrapper ├── __init__.py ├── utils │ ├── __init__.py │ ├── headless.py │ └── utils.py ├── home_robot.py ├── monitor_nav_pose.py ├── stand.py ├── sit.py ├── selfright.py ├── roll_over.py ├── view_arm_proprioception.py ├── headless_hijack.py ├── draw_square.py ├── headless_estop.py ├── view_camera.py ├── headless_pt_teleop.py ├── keyboard_teleop.py ├── estop.py └── spot.py ├── requirements.txt ├── .gitignore ├── data ├── depth_transforms.txt ├── transforms_example.txt └── image_sources.txt ├── setup.py ├── README.md └── generate_executables.py /spot_wrapper/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /spot_wrapper/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | bosdyn-client 2 | bosdyn-api 3 | opencv-python 4 | six 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | spot_wrapper.egg-info/ 2 | __pycache__ 3 | spot_wrapper/home.txt 4 | -------------------------------------------------------------------------------- /spot_wrapper/home_robot.py: -------------------------------------------------------------------------------- 1 | from spot_wrapper.spot import Spot 2 | 3 | if __name__ == "__main__": 4 | spot = Spot("NavPoseMonitor") 5 | spot.home_robot() 6 | -------------------------------------------------------------------------------- /data/depth_transforms.txt: -------------------------------------------------------------------------------- 1 | SpotCamIds.FRONTLEFT_DEPTH 2 | x: 0.41619532493078804 3 | y: 0.03740343144695029 4 | z: 0.023127331893806183 5 | W: 0.5178 X: 0.1422 Y: 0.8133 Z: -0.2241 6 | 7 | SpotCamIds.FRONTRIGHT_DEPTH 8 | x: 0.4164822634134684 9 | y: -0.03614789234067159 10 | z: 0.023188565383785213 11 | W: 0.5184 X: -0.1495 Y: 0.8101 Z: 0.2294 -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | setuptools.setup( 4 | name="spot_wrapper", 5 | version="0.1", 6 | author="Naoki Yokoyama", 7 | author_email="naokiyokoyama@github", 8 | description="Python wrapper for Boston Dynamics Spot Robot", 9 | url="https://github.com/naokiyokoyama/bd_spot_wrapper", 10 | packages=setuptools.find_packages(), 11 | ) 12 | -------------------------------------------------------------------------------- /spot_wrapper/monitor_nav_pose.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import numpy as np 4 | from spot_wrapper.spot import Spot 5 | 6 | 7 | def main(spot: Spot): 8 | while True: 9 | x, y, yaw = spot.get_xy_yaw() 10 | spot.loginfo(f"x: {x}, y: {y}, yaw: {np.rad2deg(yaw)}") 11 | time.sleep(1 / 30.0) 12 | 13 | 14 | if __name__ == "__main__": 15 | spot = Spot("NavPoseMonitor") 16 | main(spot) 17 | -------------------------------------------------------------------------------- /spot_wrapper/stand.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from spot_wrapper.spot import Spot 4 | 5 | 6 | def main(spot: Spot): 7 | """Make Spot 
stand""" 8 | spot.power_on() 9 | spot.blocking_stand() 10 | 11 | # Wait 3 seconds to before powering down... 12 | time.sleep(3) 13 | spot.power_off() 14 | 15 | 16 | if __name__ == "__main__": 17 | spot = Spot("BasicStandingClient") 18 | with spot.get_lease() as lease: 19 | main(spot) 20 | -------------------------------------------------------------------------------- /spot_wrapper/sit.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from spot_wrapper.spot import Spot 4 | 5 | 6 | def main(spot: Spot): 7 | """Make Spot stand""" 8 | spot.power_on() 9 | spot.sit() 10 | 11 | # Wait 5 seconds to before powering down... 12 | while True: 13 | pass 14 | time.sleep(5) 15 | spot.power_off() 16 | 17 | 18 | if __name__ == "__main__": 19 | spot = Spot("RolloverClient") 20 | with spot.get_lease() as lease: 21 | main(spot) 22 | -------------------------------------------------------------------------------- /spot_wrapper/selfright.py: -------------------------------------------------------------------------------- 1 | from spot_wrapper.spot import Spot 2 | 3 | 4 | def main(spot: Spot): 5 | """Make Spot stand""" 6 | spot.power_on() 7 | spot.blocking_selfright() 8 | 9 | # Wait 3 seconds to before powering down... 10 | while True: 11 | pass 12 | time.sleep(3) 13 | spot.power_off() 14 | 15 | 16 | if __name__ == "__main__": 17 | spot = Spot("BasicSelfRightClient") 18 | with spot.get_lease() as lease: 19 | main(spot) 20 | -------------------------------------------------------------------------------- /spot_wrapper/roll_over.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from spot_wrapper.spot import Spot 4 | 5 | 6 | def main(spot: Spot): 7 | """Make Spot stand""" 8 | spot.power_on() 9 | spot.roll_over() 10 | 11 | # Wait 5 seconds to before powering down... 12 | while True: 13 | pass 14 | time.sleep(5) 15 | spot.power_off() 16 | 17 | 18 | if __name__ == "__main__": 19 | spot = Spot("RolloverClient") 20 | with spot.get_lease() as lease: 21 | main(spot) 22 | -------------------------------------------------------------------------------- /spot_wrapper/view_arm_proprioception.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import numpy as np 4 | from spot_wrapper.spot import Spot 5 | 6 | 7 | def main(spot: Spot): 8 | while True: 9 | arm_prop = spot.get_arm_proprioception() 10 | current_joint_positions = np.array( 11 | [v.position.value for v in arm_prop.values()] 12 | ) 13 | spot.loginfo(", ".join([str(i) for i in np.rad2deg(current_joint_positions)])) 14 | spot.loginfo([v.name for v in arm_prop.values()]) 15 | time.sleep(1 / 30) 16 | 17 | 18 | if __name__ == "__main__": 19 | spot = Spot("ArmJointControl") 20 | main(spot) 21 | -------------------------------------------------------------------------------- /spot_wrapper/headless_hijack.py: -------------------------------------------------------------------------------- 1 | """ 2 | Hijacking the robot using a presentation tool ("pt"). 3 | Useful for stopping the robot without it sitting down on to rough pavement. 
4 | """ 5 | from spot_wrapper.spot import Spot 6 | from spot_wrapper.utils.headless import KeyboardListener 7 | from spot_wrapper.utils.utils import say 8 | 9 | DEBUG = False 10 | 11 | 12 | class KEY_ID: 13 | r"""Keyboard id codes.""" 14 | ENTER = 28 15 | 16 | 17 | class SpotHeadlessHijack(KeyboardListener): 18 | name = "HeadlessHijack" 19 | debug = DEBUG 20 | 21 | def __init__(self): 22 | if not self.debug: 23 | self.spot = Spot(self.name) 24 | super().__init__() 25 | 26 | def process_pressed_key(self, pressed_key): 27 | if pressed_key == KEY_ID.ENTER: 28 | say("Hijacking lease!.") 29 | if self.debug: 30 | return 31 | with self.spot.get_lease(hijack=True): 32 | self.spot.power_on() 33 | self.spot.blocking_stand() 34 | while True: 35 | pass 36 | 37 | 38 | if __name__ == "__main__": 39 | SpotHeadlessHijack() 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Simple Python API for Spot 2 | 3 | ## Installation 4 | 5 | Create the conda env: 6 | 7 | ```bash 8 | conda create -n spot_env -y python=3.6 9 | conda activate spot_env 10 | ``` 11 | Install requirements 12 | ```bash 13 | pip install -r requirements.txt 14 | ``` 15 | Install this package 16 | ```bash 17 | # Make sure you are in the root of this repo 18 | pip install -e . 19 | ``` 20 | 21 | ## Quickstart 22 | Ensure that you are connected to the robot's WiFi. 23 | 24 | The following script allows you to move the robot without having to use tablet (which prompts you to enter a password once a day): 25 | ``` 26 | python -m spot_wrapper.keyboard_teleop 27 | ``` 28 | If you get an error about the e-stop, you just need to make sure that you run this script in another terminal: 29 | ``` 30 | python -m spot_wrapper.estop 31 | ``` 32 | 33 | Read through the `spot_wrapper/keyboard_teleop.py` to see most of what this repo offers in terms of actuating the Spot and its arm. 
34 | 35 | To receive/monitor data (vision/proprioception) from the robot, you can use these scripts: 36 | ``` 37 | python -m spot_wrapper.view_camera 38 | python -m spot_wrapper.view_arm_proprioception 39 | python -m spot_wrapper.monitor_nav_pose 40 | ``` 41 | -------------------------------------------------------------------------------- /spot_wrapper/draw_square.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from spot_wrapper.spot import Spot 3 | 4 | SQUARE_CENTER = np.array([0.5, 0.0, 0.6]) 5 | SQUARE_SIDE = 0.4 6 | GRIPPER_WAYPOINTS = [ 7 | SQUARE_CENTER, 8 | SQUARE_CENTER + np.array([0.0, 0.0, SQUARE_SIDE / 2]), 9 | SQUARE_CENTER + np.array([0.0, SQUARE_SIDE / 2, SQUARE_SIDE / 2]), 10 | SQUARE_CENTER + np.array([0.0, -SQUARE_SIDE / 2, SQUARE_SIDE / 2]), 11 | SQUARE_CENTER + np.array([0.0, -SQUARE_SIDE / 2, -SQUARE_SIDE / 2]), 12 | SQUARE_CENTER + np.array([0.0, SQUARE_SIDE / 2, -SQUARE_SIDE / 2]), 13 | SQUARE_CENTER, 14 | ] 15 | 16 | 17 | def main(spot: Spot): 18 | spot.power_on() 19 | spot.blocking_stand() 20 | 21 | # Open the gripper 22 | spot.open_gripper() 23 | 24 | # Move arm to initial configuration 25 | try: 26 | for point in GRIPPER_WAYPOINTS: 27 | spot.loginfo("TRAVELING TO WAYPOINT") 28 | cmd_id = spot.move_gripper_to_point(point, [0.0, 0.0, 0.0]) 29 | spot.block_until_arm_arrives(cmd_id, timeout_sec=10) 30 | spot.loginfo("REACHED WAYPOINT") 31 | finally: 32 | spot.power_off() 33 | 34 | 35 | if __name__ == "__main__": 36 | spot = Spot("DrawSquare") 37 | with spot.get_lease() as lease: 38 | main(spot) 39 | -------------------------------------------------------------------------------- /generate_executables.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path as osp 3 | import sys 4 | 5 | this_dir = osp.dirname(osp.abspath(__file__)) 6 | base_dir = osp.join(this_dir, "spot_wrapper") 7 | bin_dir = osp.join(os.environ["CONDA_PREFIX"], "bin") 8 | 9 | orig_to_alias = { 10 | "estop": "spot_estop", 11 | "headless_estop": "spot_headless_estop", 12 | "home_robot": "spot_reset_home", 13 | "keyboard_teleop": "spot_keyboard_teleop", 14 | "monitor_nav_pose": "spot_monitor_nav_pose", 15 | "roll_over": "spot_roll_over", 16 | "selfright": "spot_selfright", 17 | "sit": "spot_sit", 18 | "stand": "spot_stand", 19 | "view_arm_proprioception": "spot_view_arm_proprioception", 20 | "view_camera": "spot_view_camera", 21 | } 22 | 23 | 24 | print("Generating executables...") 25 | for orig, alias in orig_to_alias.items(): 26 | exe_path = osp.join(bin_dir, alias) 27 | data = f"#!/usr/bin/env bash \n{sys.executable} -m spot_wrapper.{orig} $@\n" 28 | with open(exe_path, "w") as f: 29 | f.write(data) 30 | os.chmod(exe_path, 33277) 31 | print("Added:", alias) 32 | print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") 33 | print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") 34 | print("THESE EXECUTABLES ARE ONLY VISIBLE TO THE CURRENT CONDA ENV!!") 35 | print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") 36 | print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!") 37 | -------------------------------------------------------------------------------- /spot_wrapper/headless_estop.py: -------------------------------------------------------------------------------- 1 | """ 2 | Controlling the robot using a presentation tool ("pt"). 3 | Program starts idle. 
In idle mode, 4 | - Up releases the estop 5 | - Down activates the estop 6 | - Enter activates teleop mode 7 | 8 | In teleop mode, 9 | - Up and down will start moving the robot forward or backwards. 10 | - Tab and Enter will start turning the robot left or right (respectively). 11 | - Pressing any of these keys when the robot is moving will halt it. 12 | - Pressing up and down simultaneously will make the robot dock. 13 | - Double-pressing up and down simultaneously will estop the robot and the program idles. 14 | """ 15 | 16 | from bosdyn.client.estop import EstopClient 17 | 18 | from spot_wrapper.estop import EstopNoGui 19 | from spot_wrapper.spot import Spot 20 | from spot_wrapper.utils.headless import KeyboardListener 21 | from spot_wrapper.utils.utils import say 22 | 23 | DEBUG = False 24 | 25 | 26 | class KEY_ID: 27 | r"""Keyboard id codes.""" 28 | UP = 103 29 | DOWN = 108 30 | 31 | 32 | class SpotHeadlessEstop(KeyboardListener): 33 | name = "HeadlessEstop" 34 | debug = DEBUG 35 | 36 | def __init__(self): 37 | if not self.debug: 38 | self.spot = Spot(self.name) 39 | estop_client = self.spot.robot.ensure_client( 40 | EstopClient.default_service_name 41 | ) 42 | self.estop_nogui = EstopNoGui(estop_client, 5, "Estop NoGUI") 43 | super().__init__() 44 | 45 | def process_pressed_key(self, pressed_key): 46 | if pressed_key == KEY_ID.UP: 47 | say("Releasing E-Stop.") 48 | self.release_estop() 49 | elif pressed_key == KEY_ID.DOWN: 50 | say("Activating E-Stop!") 51 | self.estop() 52 | 53 | def estop(self): 54 | if self.debug: 55 | return 56 | self.estop_nogui.settle_then_cut() 57 | 58 | def release_estop(self): 59 | if self.debug: 60 | return 61 | self.estop_nogui.allow() 62 | 63 | 64 | if __name__ == "__main__": 65 | SpotHeadlessEstop() 66 | -------------------------------------------------------------------------------- /data/transforms_example.txt: -------------------------------------------------------------------------------- 1 | {'gpe': parent_frame_name: "odom" 2 | parent_tform_child { 3 | position { 4 | x: -1.0544086694717407 5 | y: -1.139175534248352 6 | z: -4.46881103515625 7 | } 8 | rotation { 9 | x: -0.002153972629457712 10 | y: 0.010553546249866486 11 | z: -2.2733371224603616e-05 12 | w: 0.9999419450759888 13 | } 14 | } 15 | , 'flat_body': parent_frame_name: "body" 16 | parent_tform_child { 17 | position { 18 | } 19 | rotation { 20 | x: 0.001049818005412817 21 | y: 0.0017311159754171968 22 | z: 1.8363418803346576e-06 23 | w: 0.9999979734420776 24 | } 25 | } 26 | , 'hand': parent_frame_name: "body" 27 | parent_tform_child { 28 | position { 29 | x: 0.5526498556137085 30 | y: 0.002247427823022008 31 | z: 0.26357635855674744 32 | } 33 | rotation { 34 | x: -0.0009961266769096255 35 | y: 0.0019155505578964949 36 | z: 0.005823923274874687 37 | w: 0.9999806880950928 38 | } 39 | } 40 | , 'odom': parent_frame_name: "body" 41 | parent_tform_child { 42 | position { 43 | x: 1.564515471458435 44 | y: 0.07693791389465332 45 | z: 4.223989009857178 46 | } 47 | rotation { 48 | x: 0.00032411710708402097 49 | y: 0.0019984564278274775 50 | z: -0.3749612271785736 51 | w: 0.9270382523536682 52 | } 53 | } 54 | , 'vision': parent_frame_name: "body" 55 | parent_tform_child { 56 | position { 57 | x: 1.4845632314682007 58 | y: -0.024931997060775757 59 | z: -0.1606001853942871 60 | } 61 | rotation { 62 | x: -0.0016495820600539446 63 | y: 0.0011737799504771829 64 | z: -0.9973124861717224 65 | w: 0.07323848456144333 66 | } 67 | } 68 | , 'link_wr1': parent_frame_name: "body" 69 | parent_tform_child { 70 | 
position { 71 | x: 0.35709452629089355 72 | y: -2.9751361580565572e-05 73 | z: 0.26432785391807556 74 | } 75 | rotation { 76 | x: -0.0009961266769096255 77 | y: 0.0019155505578964949 78 | z: 0.005823923274874687 79 | w: 0.9999806880950928 80 | } 81 | } 82 | , 'body': parent_tform_child { 83 | position { 84 | } 85 | rotation { 86 | w: 1.0 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /spot_wrapper/view_camera.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import time 3 | from collections import deque 4 | 5 | import cv2 6 | import numpy as np 7 | from spot_wrapper.spot import ( 8 | Spot, 9 | SpotCamIds, 10 | draw_crosshair, 11 | image_response_to_cv2, 12 | scale_depth_img, 13 | ) 14 | from spot_wrapper.utils.utils import color_bbox, resize_to_tallest 15 | 16 | MAX_HAND_DEPTH = 3.0 17 | MAX_HEAD_DEPTH = 10.0 18 | DETECT_LARGEST_WHITE_OBJECT = False 19 | 20 | 21 | def main(spot: Spot): 22 | parser = argparse.ArgumentParser() 23 | parser.add_argument("-d", "--no-display", action="store_true") 24 | parser.add_argument("-q", "--quality", type=int) 25 | args = parser.parse_args() 26 | window_name = "Spot Camera Viewer" 27 | time_buffer = deque(maxlen=10) 28 | sources = [ 29 | SpotCamIds.FRONTRIGHT_DEPTH, 30 | SpotCamIds.FRONTLEFT_DEPTH, 31 | SpotCamIds.HAND_DEPTH, 32 | SpotCamIds.HAND_COLOR, 33 | ] 34 | try: 35 | while True: 36 | start_time = time.time() 37 | 38 | # Get Spot camera image 39 | image_responses = spot.get_image_responses(sources, quality=args.quality) 40 | imgs = [] 41 | for image_response, source in zip(image_responses, sources): 42 | img = image_response_to_cv2(image_response, reorient=True) 43 | if "depth" in source: 44 | max_depth = MAX_HAND_DEPTH if "hand" in source else MAX_HEAD_DEPTH 45 | img = scale_depth_img(img, max_depth=max_depth, as_img=True) 46 | elif source is SpotCamIds.HAND_COLOR: 47 | img = draw_crosshair(img) 48 | if DETECT_LARGEST_WHITE_OBJECT: 49 | x, y, w, h = color_bbox(img, just_get_bbox=True) 50 | cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2) 51 | 52 | imgs.append(img) 53 | 54 | # Make sure all imgs are same height 55 | img = resize_to_tallest(imgs, hstack=True) 56 | 57 | if not args.no_display: 58 | cv2.imshow(window_name, img) 59 | cv2.waitKey(1) 60 | 61 | time_buffer.append(time.time() - start_time) 62 | print("Avg FPS:", 1 / np.mean(time_buffer)) 63 | finally: 64 | if not args.no_display: 65 | cv2.destroyWindow(window_name) 66 | 67 | 68 | if __name__ == "__main__": 69 | spot = Spot("ViewCamera") 70 | # We don't need a lease because we're passively observing images (no motor ctrl) 71 | main(spot) 72 | -------------------------------------------------------------------------------- /spot_wrapper/utils/headless.py: -------------------------------------------------------------------------------- 1 | """ 2 | Controlling the robot using a presentation tool ("pt"). 3 | Up and down will control the linear velocity, or the angular velocity, depending 4 | on the mode. Mode is controlled with Tab. Enter will hijack or return the robot's lease. 5 | """ 6 | import re 7 | import signal 8 | import struct 9 | import threading 10 | import time 11 | 12 | """ 13 | To run without sudo, you need to run this command: 14 | sudo gpasswd -a $USER input 15 | 16 | and then reboot the computer. 
17 | """ 18 | 19 | 20 | class KeyboardListener(object): 21 | silent = False 22 | 23 | def __init__(self): 24 | self.done = False 25 | signal.signal(signal.SIGINT, self.cleanup) 26 | 27 | with open("/proc/bus/input/devices") as f: 28 | devices_file_contents = f.read() 29 | 30 | for handlers in re.findall( 31 | r"""H: Handlers=([^\n]+)""", devices_file_contents, re.DOTALL 32 | ): 33 | dev_event_file = "/dev/input/event" + re.search( 34 | r"event(\d+)", handlers 35 | ).group(1) 36 | if "kbd" in handlers: 37 | t = threading.Thread( 38 | target=self.read_events, kwargs={"dev_event_file": dev_event_file} 39 | ) 40 | t.daemon = True 41 | t.start() 42 | 43 | while not self.done: # Wait for Ctrl+C 44 | time.sleep(0.5) 45 | 46 | def cleanup(self, signum, frame): 47 | self.done = True 48 | 49 | def read_events(self, dev_event_file): 50 | print("Listening for kbd events on dev_event_file=" + str(dev_event_file)) 51 | try: 52 | of = open(dev_event_file, "rb") 53 | while True: 54 | pressed_key = self.listen(of, dev_event_file) 55 | self.process_pressed_key(pressed_key) 56 | except IOError as e: 57 | if e.strerror == "Permission denied": 58 | print( 59 | f"You don't have read permission on ({dev_event_file}). Are you " 60 | f"root? Running the following should fix this issue:\n" 61 | f"sudo gpasswd -a $USER input" 62 | ) 63 | return 64 | 65 | def listen(self, of, dev_event_file): 66 | event_bin_format = "llHHI" # See kernel documentation for 'struct input_event' 67 | # For details, read section 5 of this document: 68 | # https://www.kernel.org/doc/Documentation/input/input.txt 69 | data = of.read(struct.calcsize(event_bin_format)) 70 | seconds, microseconds, e_type, code, value = struct.unpack( 71 | event_bin_format, data 72 | ) 73 | if e_type == 0x1: # 0x1 == EV_KEY means key press or release. 
74 | d = "RELEASE" if value == 0 else "PRESS" 75 | if not self.silent: 76 | print( 77 | f"Got key {d} from {dev_event_file}: " 78 | f"t={seconds + microseconds / 1000000}us type={e_type} code={code}" 79 | ) 80 | if d == "PRESS": 81 | return code 82 | return None 83 | 84 | def process_pressed_key(self, pressed_key): 85 | pass 86 | 87 | 88 | if __name__ == "__main__": 89 | a = KeyboardListener() 90 | -------------------------------------------------------------------------------- /spot_wrapper/utils/utils.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | 3 | import cv2 4 | import numpy as np 5 | 6 | 7 | def say(text): 8 | try: 9 | text = text.replace("_", " ") 10 | text = f'"{text}"' 11 | cmd = f"/usr/bin/festival -b '(voice_cmu_us_slt_arctic_hts)' '(SayText {text})'" 12 | subprocess.Popen(cmd, shell=True) 13 | except: 14 | pass 15 | print(f'Saying: "{text}"') 16 | 17 | 18 | def resize_to_tallest(imgs, widest=False, hstack=False): 19 | dim_idx = 1 if widest else 0 20 | tallest = max([i.shape[dim_idx] for i in imgs]) 21 | for idx, i in enumerate(imgs): 22 | curr_dim = i.shape[dim_idx] 23 | other_dim = i.shape[1 - dim_idx] 24 | if curr_dim != tallest: 25 | new_dim = int(other_dim * (tallest / curr_dim)) 26 | new_dims = (new_dim, tallest) if widest else (tallest, new_dim) 27 | imgs[idx] = cv2.resize(i, new_dims) 28 | if hstack: 29 | return np.hstack(imgs) 30 | return imgs 31 | 32 | 33 | def inflate_erode(mask, size=50): 34 | mask_copy = mask.copy() 35 | mask_copy = cv2.blur(mask_copy, (size, size)) 36 | mask_copy[mask_copy > 0] = 255 37 | mask_copy = cv2.blur(mask_copy, (size, size)) 38 | mask_copy[mask_copy < 255] = 0 39 | 40 | return mask_copy 41 | 42 | 43 | def erode_inflate(mask, size=20): 44 | mask_copy = mask.copy() 45 | mask_copy = cv2.blur(mask_copy, (size, size)) 46 | mask_copy[mask_copy < 255] = 0 47 | mask_copy = cv2.blur(mask_copy, (size, size)) 48 | mask_copy[mask_copy > 0] = 255 49 | 50 | return mask_copy 51 | 52 | 53 | def contour_mask(mask): 54 | cnt, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE) 55 | new_mask = np.zeros(mask.shape, dtype=np.uint8) 56 | max_area = 0 57 | max_index = 0 58 | for idx, c in enumerate(cnt): 59 | area = cv2.contourArea(c) 60 | if area > max_area: 61 | max_area = area 62 | max_index = idx 63 | cv2.drawContours(new_mask, cnt, max_index, 255, cv2.FILLED) 64 | 65 | return new_mask 66 | 67 | 68 | def color_bbox(img, just_get_bbox=False): 69 | """Makes a bbox around a white object""" 70 | # Filter out non-white 71 | sensitivity = 80 72 | upper_white = np.array([255, 255, 255]) 73 | lower_white = upper_white - sensitivity 74 | color_mask = cv2.inRange(img, lower_white, upper_white) 75 | 76 | # Filter out little bits of white 77 | color_mask = inflate_erode(color_mask) 78 | color_mask = erode_inflate(color_mask) 79 | 80 | # Only use largest contour 81 | color_mask = contour_mask(color_mask) 82 | 83 | # Calculate bbox 84 | x, y, w, h = cv2.boundingRect(color_mask) 85 | 86 | if just_get_bbox: 87 | return x, y, w, h 88 | 89 | height, width = color_mask.shape 90 | cx = (x + w / 2.0) / width 91 | cy = (y + h / 2.0) / height 92 | 93 | # Create bbox mask 94 | bbox_mask = np.zeros([height, width, 1], dtype=np.float32) 95 | bbox_mask[y : y + h, x : x + w] = 1.0 96 | 97 | # Determine if bbox intersects with central crosshair 98 | crosshair_in_bbox = x < width // 2 < x + w and y < height // 2 < y + h 99 | 100 | return bbox_mask, cx, cy, crosshair_in_bbox 101 | 
-------------------------------------------------------------------------------- /spot_wrapper/headless_pt_teleop.py: -------------------------------------------------------------------------------- 1 | """ 2 | Controlling the robot using a presentation tool ("pt"). 3 | Program starts idle. In idle mode, 4 | - Up releases the estop 5 | - Down activates the estop 6 | - Enter activates teleop mode 7 | 8 | In teleop mode, 9 | - Up and down will start moving the robot forward or backwards. 10 | - Tab and Enter will start turning the robot left or right (respectively). 11 | - Pressing any of these keys when the robot is moving will halt it. 12 | - Pressing up and down simultaneously will make the robot dock. 13 | - Double-pressing up and down simultaneously will estop the robot and the program idles. 14 | """ 15 | import os 16 | import time 17 | 18 | import numpy as np 19 | 20 | from spot_wrapper.headless_estop import SpotHeadlessEstop 21 | 22 | UPDATE_PERIOD = 0.2 23 | LINEAR_VEL = 1.0 24 | ANGULAR_VEL = np.deg2rad(50) 25 | DOCK_ID = int(os.environ.get("SPOT_DOCK_ID", 520)) 26 | DEBUG = False 27 | 28 | 29 | class FSM_ID: 30 | r"""Finite state machine IDs.""" 31 | IDLE = 0 # robot is NOT in teleop, just listening for estop or teleop activation 32 | HALTED = 1 # robot in teleop, but not moving 33 | FORWARD = 2 # robot in teleop, moving forward 34 | BACK = 3 # robot in teleop, moving backwards 35 | LEFT = 4 # robot in teleop, turning left 36 | RIGHT = 5 # robot in teleop, turning right 37 | 38 | 39 | class KEY_ID: 40 | r"""Keyboard id codes.""" 41 | UP = 103 42 | DOWN = 108 43 | TAB = 15 44 | ENTER = 28 45 | UP_DOWN = 123123123 # virtual key representing both up and down pressed 46 | 47 | 48 | class SpotHeadlessTeleop(SpotHeadlessEstop): 49 | silent = True 50 | name = "HeadlessTeleop" 51 | debug = DEBUG 52 | 53 | def __init__(self): 54 | self.fsm_state = FSM_ID.IDLE 55 | self.lease = None 56 | self.last_up = 0 57 | self.last_down = 0 58 | self.last_up_and_down = 0 59 | super().__init__() 60 | 61 | def process_pressed_key(self, pressed_key): 62 | if pressed_key not in [KEY_ID.UP, KEY_ID.DOWN, KEY_ID.ENTER, KEY_ID.TAB]: 63 | return 64 | 65 | # Handlers for when both up and down are pressed at the same time 66 | if pressed_key in [KEY_ID.UP, KEY_ID.DOWN]: 67 | if pressed_key == KEY_ID.DOWN: 68 | self.last_down = time.time() 69 | elif pressed_key == KEY_ID.UP: 70 | self.last_up = time.time() 71 | if abs(self.last_up - self.last_down) < 0.1: 72 | # Both keys have been pressed 73 | self.last_up, self.last_down = 0, 1 74 | pressed_key = KEY_ID.UP_DOWN 75 | # Double click of both keys detected 76 | double_click = time.time() - self.last_up_and_down < 0.4 77 | self.last_up_and_down = time.time() 78 | if double_click and self.fsm_state != FSM_ID.IDLE: 79 | self.estop() 80 | if not self.debug: 81 | self.lease.return_lease() 82 | self.fsm_state = FSM_ID.IDLE 83 | print("Activating E-Stop! Entering IDLE mode.") 84 | if ( 85 | self.last_up_and_down > 0 86 | and time.time() - self.last_up_and_down > 0.4 87 | and self.fsm_state != FSM_ID.IDLE 88 | ): 89 | self.last_up_and_down = 0 90 | print("Halting and docking robot...") 91 | self.halt_robot() 92 | try: 93 | if not self.debug: 94 | self.spot.dock(DOCK_ID) 95 | self.spot.home_robot() 96 | self.lease.return_lease() 97 | self.fsm_state = FSM_ID.IDLE 98 | print("Docking successful! 
Entering IDLE mode.") 99 | except: 100 | print("Dock was not found!") 101 | return 102 | 103 | if self.fsm_state == FSM_ID.IDLE: 104 | if pressed_key == KEY_ID.UP: 105 | print("Releasing E-Stop.") 106 | self.release_estop() 107 | elif pressed_key == KEY_ID.DOWN: 108 | print("Activating E-Stop!") 109 | self.estop() 110 | elif pressed_key == KEY_ID.ENTER: 111 | print("Hijacking robot! Entering teleop mode.") 112 | self.fsm_state = FSM_ID.HALTED 113 | self.hijack_robot() 114 | else: 115 | if self.fsm_state != FSM_ID.HALTED: 116 | # Halt the robot if any key is pressed, and it's not idle 117 | print("Halting robot.") 118 | self.halt_robot() 119 | self.fsm_state = FSM_ID.HALTED 120 | elif pressed_key == KEY_ID.UP: 121 | print("Moving forwards.") 122 | self.move_forward() 123 | self.fsm_state = FSM_ID.FORWARD 124 | elif pressed_key == KEY_ID.DOWN: 125 | print("Moving backwards.") 126 | self.move_backwards() 127 | self.fsm_state = FSM_ID.BACK 128 | elif pressed_key == KEY_ID.TAB: 129 | print("Turning left.") 130 | self.turn_left() 131 | self.fsm_state = FSM_ID.LEFT 132 | elif pressed_key == KEY_ID.ENTER: 133 | print("Turning right.") 134 | self.turn_right() 135 | self.fsm_state = FSM_ID.RIGHT 136 | 137 | def hijack_robot(self): 138 | if self.debug: 139 | return 140 | self.lease = self.spot.get_lease(hijack=True) 141 | self.spot.power_on() 142 | self.spot.blocking_stand() 143 | 144 | def halt_robot(self, x_vel=0.0, ang_vel=0.0): 145 | if self.debug: 146 | return 147 | self.spot.set_base_velocity(x_vel, 0.0, ang_vel, vel_time=UPDATE_PERIOD * 2) 148 | 149 | def move_forward(self): 150 | self.halt_robot(x_vel=LINEAR_VEL) 151 | 152 | def move_backwards(self): 153 | self.halt_robot(x_vel=-LINEAR_VEL) 154 | 155 | def turn_left(self): 156 | self.halt_robot(ang_vel=ANGULAR_VEL) 157 | 158 | def turn_right(self): 159 | self.halt_robot(ang_vel=-ANGULAR_VEL) 160 | 161 | 162 | if __name__ == "__main__": 163 | SpotHeadlessTeleop() 164 | -------------------------------------------------------------------------------- /spot_wrapper/keyboard_teleop.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import curses 3 | import os 4 | import signal 5 | import time 6 | 7 | import numpy as np 8 | from spot_wrapper.spot import Spot, SpotCamIds 9 | 10 | MOVE_INCREMENT = 0.02 11 | TILT_INCREMENT = 5.0 12 | BASE_ANGULAR_VEL = np.deg2rad(50) 13 | BASE_LIN_VEL = 0.75 14 | DOCK_ID = int(os.environ.get("SPOT_DOCK_ID", 520)) 15 | UPDATE_PERIOD = 0.2 16 | 17 | # Where the gripper goes to upon initialization 18 | INITIAL_POINT = np.array([0.5, 0.0, 0.35]) 19 | INITIAL_RPY = np.deg2rad([0.0, 45.0, 0.0]) 20 | KEY2GRIPPERMOVEMENT = { 21 | "w": np.array([0.0, 0.0, MOVE_INCREMENT, 0.0, 0.0, 0.0]), # move up 22 | "s": np.array([0.0, 0.0, -MOVE_INCREMENT, 0.0, 0.0, 0.0]), # move down 23 | "a": np.array([0.0, MOVE_INCREMENT, 0.0, 0.0, 0.0, 0.0]), # move left 24 | "d": np.array([0.0, -MOVE_INCREMENT, 0.0, 0.0, 0.0, 0.0]), # move right 25 | "q": np.array([MOVE_INCREMENT, 0.0, 0.0, 0.0, 0.0, 0.0]), # move forward 26 | "e": np.array([-MOVE_INCREMENT, 0.0, 0.0, 0.0, 0.0, 0.0]), # move backward 27 | "i": np.deg2rad([0.0, 0.0, 0.0, 0.0, -TILT_INCREMENT, 0.0]), # pitch up 28 | "k": np.deg2rad([0.0, 0.0, 0.0, 0.0, TILT_INCREMENT, 0.0]), # pitch down 29 | "j": np.deg2rad([0.0, 0.0, 0.0, 0.0, 0.0, TILT_INCREMENT]), # pan left 30 | "l": np.deg2rad([0.0, 0.0, 0.0, 0.0, 0.0, -TILT_INCREMENT]), # pan right 31 | } 32 | KEY2BASEMOVEMENT = { 33 | "q": [0.0, 0.0, BASE_ANGULAR_VEL], # turn left 34 | 
"e": [0.0, 0.0, -BASE_ANGULAR_VEL], # turn right 35 | "w": [BASE_LIN_VEL, 0.0, 0.0], # go forward 36 | "s": [-BASE_LIN_VEL, 0.0, 0.0], # go backward 37 | "a": [0.0, BASE_LIN_VEL, 0.0], # strafe left 38 | "d": [0.0, -BASE_LIN_VEL, 0.0], # strafe right 39 | } 40 | INSTRUCTIONS = ( 41 | "Use 'wasdqe' for translating gripper, 'ijkl' for rotating.\n" 42 | "Use 'g' to grasp whatever is at the center of the gripper image.\n" 43 | "Press 't' to toggle between controlling the arm or the base\n" 44 | "('wasdqe' will control base).\n" 45 | "Press 'z' to quit.\n" 46 | ) 47 | 48 | 49 | def move_to_initial(spot): 50 | point = INITIAL_POINT 51 | rpy = INITIAL_RPY 52 | cmd_id = spot.move_gripper_to_point(point, rpy) 53 | spot.block_until_arm_arrives(cmd_id, timeout_sec=1.5) 54 | cement_arm_joints(spot) 55 | 56 | return point, rpy 57 | 58 | 59 | def cement_arm_joints(spot): 60 | arm_proprioception = spot.get_arm_proprioception() 61 | current_positions = np.array( 62 | [v.position.value for v in arm_proprioception.values()] 63 | ) 64 | spot.set_arm_joint_positions(positions=current_positions, travel_time=UPDATE_PERIOD) 65 | 66 | 67 | def raise_error(sig, frame): 68 | raise RuntimeError 69 | 70 | 71 | def main(spot: Spot, disable_oa=False): 72 | """Uses IK to move the arm by setting hand poses""" 73 | spot.power_on() 74 | spot.blocking_stand() 75 | 76 | # Open the gripper 77 | spot.open_gripper() 78 | 79 | # Move arm to initial configuration 80 | point, rpy = move_to_initial(spot) 81 | control_arm = False 82 | 83 | # Start in-terminal GUI 84 | stdscr = curses.initscr() 85 | stdscr.nodelay(True) 86 | curses.noecho() 87 | signal.signal(signal.SIGINT, raise_error) 88 | stdscr.addstr(INSTRUCTIONS) 89 | last_execution = time.time() 90 | try: 91 | while True: 92 | point_rpy = np.concatenate([point, rpy]) 93 | pressed_key = stdscr.getch() 94 | 95 | key_not_applicable = False 96 | 97 | # Don't update if no key was pressed or we updated too recently 98 | if pressed_key == -1 or time.time() - last_execution < UPDATE_PERIOD: 99 | continue 100 | 101 | pressed_key = chr(pressed_key) 102 | 103 | if pressed_key == "z": 104 | # Quit 105 | break 106 | elif pressed_key == "t": 107 | # Toggle between controlling arm or base 108 | control_arm = not control_arm 109 | if not control_arm: 110 | cement_arm_joints(spot) 111 | spot.loginfo(f"control_arm: {control_arm}") 112 | time.sleep(0.2) # Wait before we starting listening again 113 | elif pressed_key == "g": 114 | # Grab whatever object is at the center of hand RGB camera image 115 | image_responses = spot.get_image_responses([SpotCamIds.HAND_COLOR]) 116 | hand_image_response = image_responses[0] # only expecting one image 117 | spot.grasp_point_in_image(hand_image_response) 118 | # Retract arm back to initial configuration 119 | point, rpy = move_to_initial(spot) 120 | elif pressed_key == "r": 121 | # Open gripper 122 | spot.open_gripper() 123 | elif pressed_key == "n": 124 | try: 125 | spot.dock(DOCK_ID) 126 | spot.home_robot() 127 | except: 128 | print("Dock was not found!") 129 | elif pressed_key == "b": 130 | point, rpy = move_to_initial(spot) 131 | elif pressed_key == "v": 132 | spot.spot_lease.dont_return_lease = True 133 | break 134 | else: 135 | # Tele-operate either the gripper pose or the base 136 | if control_arm: 137 | if pressed_key in KEY2GRIPPERMOVEMENT: 138 | # Move gripper 139 | point_rpy += KEY2GRIPPERMOVEMENT[pressed_key] 140 | point, rpy = point_rpy[:3], point_rpy[3:] 141 | cmd_id = spot.move_gripper_to_point(point, rpy) 142 | print("Gripper destination: ", 
point, rpy) 143 | spot.block_until_arm_arrives( 144 | cmd_id, timeout_sec=UPDATE_PERIOD * 0.5 145 | ) 146 | elif pressed_key in KEY2BASEMOVEMENT: 147 | # Move base 148 | x_vel, y_vel, ang_vel = KEY2BASEMOVEMENT[pressed_key] 149 | spot.set_base_velocity( 150 | x_vel=x_vel, 151 | y_vel=y_vel, 152 | ang_vel=ang_vel, 153 | vel_time=UPDATE_PERIOD * 2, 154 | disable_obstacle_avoidance=disable_oa, 155 | ) 156 | else: 157 | key_not_applicable = True 158 | 159 | if not key_not_applicable: 160 | last_execution = time.time() 161 | 162 | finally: 163 | curses.echo() 164 | stdscr.nodelay(False) 165 | curses.endwin() 166 | 167 | 168 | if __name__ == "__main__": 169 | parser = argparse.ArgumentParser() 170 | parser.add_argument("--disable-oa", "-d", action="store_true") 171 | args = parser.parse_args() 172 | if args.disable_oa: 173 | print("OBSTACLE AVOIDANCE HAS BEEN DISABLED!!!") 174 | spot = Spot("ArmKeyboardTeleop") 175 | with spot.get_lease(hijack=True) as lease: 176 | main(spot, args.disable_oa) 177 | -------------------------------------------------------------------------------- /data/image_sources.txt: -------------------------------------------------------------------------------- 1 | [name: "back_depth" 2 | cols: 424 3 | rows: 240 4 | depth_scale: 999.0 5 | pinhole { 6 | intrinsics { 7 | focal_length { 8 | x: 213.1068572998047 9 | y: 213.1068572998047 10 | } 11 | principal_point { 12 | x: 210.05804443359375 13 | y: 121.17723083496094 14 | } 15 | } 16 | } 17 | image_type: IMAGE_TYPE_DEPTH 18 | , name: "back_depth_in_visual_frame" 19 | cols: 640 20 | rows: 480 21 | pinhole { 22 | intrinsics { 23 | focal_length { 24 | x: 257.3891296386719 25 | y: 257.0436096191406 26 | } 27 | principal_point { 28 | x: 316.3995056152344 29 | y: 235.3182830810547 30 | } 31 | } 32 | } 33 | image_type: IMAGE_TYPE_DEPTH 34 | , name: "back_fisheye_image" 35 | cols: 640 36 | rows: 480 37 | pinhole { 38 | intrinsics { 39 | focal_length { 40 | x: 257.3891296386719 41 | y: 257.0436096191406 42 | } 43 | principal_point { 44 | x: 316.3995056152344 45 | y: 235.3182830810547 46 | } 47 | } 48 | } 49 | image_type: IMAGE_TYPE_VISUAL 50 | , name: "frontleft_depth" 51 | cols: 424 52 | rows: 240 53 | depth_scale: 999.0 54 | pinhole { 55 | intrinsics { 56 | focal_length { 57 | x: 214.35023498535156 58 | y: 214.35023498535156 59 | } 60 | principal_point { 61 | x: 206.67208862304688 62 | y: 120.26058959960938 63 | } 64 | } 65 | } 66 | image_type: IMAGE_TYPE_DEPTH 67 | , name: "frontleft_depth_in_visual_frame" 68 | cols: 640 69 | rows: 480 70 | pinhole { 71 | intrinsics { 72 | focal_length { 73 | x: 258.2867126464844 74 | y: 257.8219909667969 75 | } 76 | principal_point { 77 | x: 314.52911376953125 78 | y: 243.18246459960938 79 | } 80 | } 81 | } 82 | image_type: IMAGE_TYPE_DEPTH 83 | , name: "frontleft_fisheye_image" 84 | cols: 640 85 | rows: 480 86 | pinhole { 87 | intrinsics { 88 | focal_length { 89 | x: 258.2867126464844 90 | y: 257.8219909667969 91 | } 92 | principal_point { 93 | x: 314.52911376953125 94 | y: 243.18246459960938 95 | } 96 | } 97 | } 98 | image_type: IMAGE_TYPE_VISUAL 99 | , name: "frontright_depth" 100 | cols: 424 101 | rows: 240 102 | depth_scale: 999.0 103 | pinhole { 104 | intrinsics { 105 | focal_length { 106 | x: 215.2163543701172 107 | y: 215.2163543701172 108 | } 109 | principal_point { 110 | x: 213.766357421875 111 | y: 121.92640686035156 112 | } 113 | } 114 | } 115 | image_type: IMAGE_TYPE_DEPTH 116 | , name: "frontright_depth_in_visual_frame" 117 | cols: 640 118 | rows: 480 119 | pinhole { 120 | intrinsics 
{ 121 | focal_length { 122 | x: 257.6131591796875 123 | y: 257.16253662109375 124 | } 125 | principal_point { 126 | x: 317.3861083984375 127 | y: 244.52821350097656 128 | } 129 | } 130 | } 131 | image_type: IMAGE_TYPE_DEPTH 132 | , name: "frontright_fisheye_image" 133 | cols: 640 134 | rows: 480 135 | pinhole { 136 | intrinsics { 137 | focal_length { 138 | x: 257.6131591796875 139 | y: 257.16253662109375 140 | } 141 | principal_point { 142 | x: 317.3861083984375 143 | y: 244.52821350097656 144 | } 145 | } 146 | } 147 | image_type: IMAGE_TYPE_VISUAL 148 | , name: "hand_color_image" 149 | cols: 640 150 | rows: 480 151 | pinhole { 152 | intrinsics { 153 | focal_length { 154 | x: 552.0291012161067 155 | y: 552.0291012161067 156 | } 157 | principal_point { 158 | x: 320.0 159 | y: 240.0 160 | } 161 | } 162 | } 163 | image_type: IMAGE_TYPE_VISUAL 164 | , name: "hand_color_in_hand_depth_frame" 165 | cols: 640 166 | rows: 480 167 | pinhole { 168 | intrinsics { 169 | focal_length { 170 | x: 552.0291012161067 171 | y: 552.0291012161067 172 | } 173 | principal_point { 174 | x: 320.0 175 | y: 240.0 176 | } 177 | } 178 | } 179 | image_type: IMAGE_TYPE_VISUAL 180 | , name: "hand_depth" 181 | cols: 224 182 | rows: 171 183 | depth_scale: 1000.0 184 | pinhole { 185 | intrinsics { 186 | focal_length { 187 | x: 210.4172821044922 188 | y: 210.4172821044922 189 | } 190 | principal_point { 191 | x: 105.02783203125 192 | y: 87.0968246459961 193 | } 194 | } 195 | } 196 | image_type: IMAGE_TYPE_DEPTH 197 | , name: "hand_depth_in_hand_color_frame" 198 | cols: 224 199 | rows: 171 200 | depth_scale: 1000.0 201 | pinhole { 202 | intrinsics { 203 | focal_length { 204 | x: 210.4172821044922 205 | y: 210.4172821044922 206 | } 207 | principal_point { 208 | x: 105.02783203125 209 | y: 87.0968246459961 210 | } 211 | } 212 | } 213 | image_type: IMAGE_TYPE_DEPTH 214 | , name: "hand_image" 215 | cols: 224 216 | rows: 171 217 | depth_scale: 1000.0 218 | pinhole { 219 | intrinsics { 220 | focal_length { 221 | x: 210.4172821044922 222 | y: 210.4172821044922 223 | } 224 | principal_point { 225 | x: 105.02783203125 226 | y: 87.0968246459961 227 | } 228 | } 229 | } 230 | image_type: IMAGE_TYPE_VISUAL 231 | , name: "left_depth" 232 | cols: 424 233 | rows: 240 234 | depth_scale: 999.0 235 | pinhole { 236 | intrinsics { 237 | focal_length { 238 | x: 223.71405029296875 239 | y: 223.71405029296875 240 | } 241 | principal_point { 242 | x: 226.58502197265625 243 | y: 123.37451171875 244 | } 245 | } 246 | } 247 | image_type: IMAGE_TYPE_DEPTH 248 | , name: "left_depth_in_visual_frame" 249 | cols: 640 250 | rows: 480 251 | pinhole { 252 | intrinsics { 253 | focal_length { 254 | x: 257.03656005859375 255 | y: 256.6549987792969 256 | } 257 | principal_point { 258 | x: 317.0937805175781 259 | y: 242.4779815673828 260 | } 261 | } 262 | } 263 | image_type: IMAGE_TYPE_DEPTH 264 | , name: "left_fisheye_image" 265 | cols: 640 266 | rows: 480 267 | pinhole { 268 | intrinsics { 269 | focal_length { 270 | x: 257.03656005859375 271 | y: 256.6549987792969 272 | } 273 | principal_point { 274 | x: 317.0937805175781 275 | y: 242.4779815673828 276 | } 277 | } 278 | } 279 | image_type: IMAGE_TYPE_VISUAL 280 | , name: "right_depth" 281 | cols: 424 282 | rows: 240 283 | depth_scale: 999.0 284 | pinhole { 285 | intrinsics { 286 | focal_length { 287 | x: 221.22935485839844 288 | y: 221.22935485839844 289 | } 290 | principal_point { 291 | x: 228.81666564941406 292 | y: 118.76554870605469 293 | } 294 | } 295 | } 296 | image_type: IMAGE_TYPE_DEPTH 297 | , name: 
"right_depth_in_visual_frame" 298 | cols: 640 299 | rows: 480 300 | pinhole { 301 | intrinsics { 302 | focal_length { 303 | x: 258.142822265625 304 | y: 257.6732177734375 305 | } 306 | principal_point { 307 | x: 317.4607849121094 308 | y: 248.84344482421875 309 | } 310 | } 311 | } 312 | image_type: IMAGE_TYPE_DEPTH 313 | , name: "right_fisheye_image" 314 | cols: 640 315 | rows: 480 316 | pinhole { 317 | intrinsics { 318 | focal_length { 319 | x: 258.142822265625 320 | y: 257.6732177734375 321 | } 322 | principal_point { 323 | x: 317.4607849121094 324 | y: 248.84344482421875 325 | } 326 | } 327 | } 328 | image_type: IMAGE_TYPE_VISUAL 329 | ] -------------------------------------------------------------------------------- /spot_wrapper/estop.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2021 Boston Dynamics, Inc. All rights reserved. 2 | # 3 | # Downloading, reproducing, distributing or otherwise using the SDK Software 4 | # is subject to the terms and conditions of the Boston Dynamics Software 5 | # Development Kit License (20191101-BDSDK-SL). 6 | 7 | """Provides a programmatic estop to stop the robot.""" 8 | from __future__ import print_function 9 | import argparse 10 | import sys 11 | import os 12 | import signal 13 | import time 14 | import curses 15 | import logging 16 | 17 | from bosdyn.client.estop import EstopEndpoint, EstopKeepAlive, EstopClient 18 | from bosdyn.client.robot_state import RobotStateClient 19 | import bosdyn.client.util 20 | 21 | # Get Spot password and IP address 22 | env_err_msg = ( 23 | "\n{var_name} not found as an environment variable!\n" 24 | "Please run:\n" 25 | "echo 'export {var_name}=' >> ~/.bashrc\nor for MacOS,\n" 26 | "echo 'export {var_name}=' >> ~/.bash_profile\n" 27 | "Then:\nsource ~/.bashrc\nor\nsource ~/.bash_profile" 28 | ) 29 | 30 | try: 31 | SPOT_ADMIN_PW = os.environ["SPOT_ADMIN_PW"] 32 | except KeyError: 33 | raise RuntimeError(env_err_msg.format(var_name="SPOT_ADMIN_PW")) 34 | try: 35 | SPOT_IP = os.environ["SPOT_IP"] 36 | except KeyError: 37 | raise RuntimeError(env_err_msg.format(var_name="SPOT_IP")) 38 | 39 | 40 | class EstopNoGui(): 41 | """Provides a software estop without a GUI. 42 | 43 | To use this estop, create an instance of the EstopNoGui class and use the stop() and allow() 44 | functions programmatically. 45 | """ 46 | 47 | def __init__(self, client, timeout_sec, name=None): 48 | # Force server to set up a single endpoint system 49 | ep = EstopEndpoint(client, name, timeout_sec) 50 | ep.force_simple_setup() 51 | 52 | # Begin periodic check-in between keep-alive and robot 53 | self.estop_keep_alive = EstopKeepAlive(ep) 54 | 55 | # Release the estop 56 | self.estop_keep_alive.allow() 57 | 58 | def __enter__(self): 59 | pass 60 | 61 | def __exit__(self, exc_type, exc_val, exc_tb): 62 | """Cleanly shut down estop on exit.""" 63 | self.estop_keep_alive.end_periodic_check_in() 64 | 65 | def stop(self): 66 | self.estop_keep_alive.stop() 67 | 68 | def allow(self): 69 | self.estop_keep_alive.allow() 70 | 71 | def settle_then_cut(self): 72 | self.estop_keep_alive.settle_then_cut() 73 | 74 | 75 | def main(argv): 76 | """If this file is the main, create an instance of EstopNoGui and wait for user to terminate. 77 | 78 | This has little practical use, because calling the function this way does not give the user 79 | any way to trigger an estop from the terminal. 
80 | """ 81 | parser = argparse.ArgumentParser() 82 | bosdyn.client.util.add_common_arguments(parser) 83 | parser.add_argument('-t', '--timeout', type=float, default=5, 84 | help='Timeout in seconds') 85 | options = parser.parse_args(argv) 86 | 87 | # Set username to admin and read PW from os variables 88 | options.username = "admin" 89 | options.password = SPOT_ADMIN_PW 90 | 91 | # Create robot object 92 | sdk = bosdyn.client.create_standard_sdk('estop_nogui') 93 | robot = sdk.create_robot(options.hostname) 94 | robot.authenticate(options.username, options.password) 95 | 96 | # Create estop client for the robot 97 | estop_client = robot.ensure_client(EstopClient.default_service_name) 98 | 99 | # Create nogui estop 100 | estop_nogui = EstopNoGui(estop_client, options.timeout, "Estop NoGUI") 101 | 102 | # Create robot state client for the robot 103 | state_client = robot.ensure_client(RobotStateClient.default_service_name) 104 | 105 | # Initialize curses screen display 106 | stdscr = curses.initscr() 107 | 108 | def cleanup_example(msg): 109 | """Shut down curses and exit the program.""" 110 | print('Exiting') 111 | # pylint: disable=unused-argument 112 | estop_nogui.estop_keep_alive.shutdown() 113 | 114 | # Clean up and close curses 115 | stdscr.keypad(False) 116 | curses.echo() 117 | stdscr.nodelay(False) 118 | curses.endwin() 119 | print(msg) 120 | 121 | def clean_exit(msg=''): 122 | cleanup_example(msg) 123 | exit(0) 124 | 125 | def sigint_handler(sig, frame): 126 | """Exit the application on interrupt.""" 127 | clean_exit() 128 | 129 | def run_example(): 130 | """Run the actual example with the curses screen display""" 131 | # Set up curses screen display to monitor for stop request 132 | curses.noecho() 133 | stdscr.keypad(True) 134 | stdscr.nodelay(True) 135 | curses.start_color() 136 | curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK) 137 | curses.init_pair(2, curses.COLOR_YELLOW, curses.COLOR_BLACK) 138 | curses.init_pair(3, curses.COLOR_RED, curses.COLOR_BLACK) 139 | # If terminal cannot handle colors, do not proceed 140 | if not curses.has_colors(): 141 | return 142 | 143 | # Curses eats Ctrl-C keyboard input, but keep a SIGINT handler around for 144 | # explicit kill signals outside of the program. 145 | signal.signal(signal.SIGINT, sigint_handler) 146 | 147 | # Clear screen 148 | stdscr.clear() 149 | 150 | # Display usage instructions in terminal 151 | stdscr.addstr('Estop w/o GUI running.\n') 152 | stdscr.addstr('\n') 153 | stdscr.addstr('[q] or [Ctrl-C]: Quit\n', curses.color_pair(2)) 154 | stdscr.addstr('[SPACE]: Trigger estop\n', curses.color_pair(2)) 155 | stdscr.addstr('[r]: Release estop\n', curses.color_pair(2)) 156 | stdscr.addstr('[s]: Settle then cut estop\n', curses.color_pair(2)) 157 | 158 | # Monitor estop until user exits 159 | while True: 160 | # Retrieve user input (non-blocking) 161 | c = stdscr.getch() 162 | 163 | try: 164 | if c == ord(' '): 165 | estop_nogui.stop() 166 | if c == ord('r'): 167 | estop_nogui.allow() 168 | if c == ord('q') or c == 3: 169 | clean_exit('Exit on user input') 170 | if c == ord('s'): 171 | estop_nogui.settle_then_cut() 172 | # If the user attempts to toggle estop without valid endpoint 173 | except bosdyn.client.estop.EndpointUnknownError: 174 | clean_exit("This estop endpoint no longer valid. 
Exiting...") 175 | 176 | # Check if robot is estopped by any estops 177 | estop_status = 'NOT_STOPPED\n' 178 | estop_status_color = curses.color_pair(1) 179 | state = state_client.get_robot_state() 180 | estop_states = state.estop_states 181 | for estop_state in estop_states: 182 | state_str = estop_state.State.Name(estop_state.state) 183 | if state_str == 'STATE_ESTOPPED': 184 | estop_status = 'STOPPED\n' 185 | estop_status_color = curses.color_pair(3) 186 | break 187 | elif state_str == 'STATE_UNKNOWN': 188 | estop_status = 'ERROR\n' 189 | estop_status_color = curses.color_pair(3) 190 | elif state_str == 'STATE_NOT_ESTOPPED': 191 | pass 192 | else: 193 | # Unknown estop status 194 | clean_exit() 195 | 196 | # Display current estop status 197 | if not estop_nogui.estop_keep_alive.status_queue.empty(): 198 | latest_status = estop_nogui.estop_keep_alive.status_queue.get()[ 199 | 1].strip() 200 | if latest_status != '': 201 | # If you lose this estop endpoint, report it to user 202 | stdscr.addstr(7, 0, latest_status, curses.color_pair(3)) 203 | stdscr.addstr(6, 0, estop_status, estop_status_color) 204 | 205 | # Slow down loop 206 | time.sleep(0.5) 207 | 208 | # Run all curses code in a try so we can cleanly exit if something goes wrong 209 | try: 210 | run_example() 211 | except Exception as e: 212 | cleanup_example(e) 213 | raise e 214 | 215 | 216 | if __name__ == '__main__': 217 | # Open terminal interface and hold estop until user exits with SIGINT 218 | if len(sys.argv) == 1: 219 | sys.argv.append(SPOT_IP) 220 | if not main(sys.argv[1:]): 221 | sys.exit(1) 222 | -------------------------------------------------------------------------------- /spot_wrapper/spot.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2021 Boston Dynamics, Inc. All rights reserved. 2 | # 3 | # Downloading, reproducing, distributing or otherwise using the SDK Software 4 | # is subject to the terms and conditions of the Boston Dynamics Software 5 | # Development Kit License (20191101-BDSDK-SL). 
6 | 7 | """ Easy-to-use wrapper for properly controlling Spot """ 8 | import os 9 | import os.path as osp 10 | import time 11 | from collections import OrderedDict 12 | 13 | import bosdyn.client 14 | import bosdyn.client.lease 15 | import bosdyn.client.util 16 | import cv2 17 | import numpy as np 18 | from bosdyn import geometry 19 | from bosdyn.api import ( 20 | arm_command_pb2, 21 | basic_command_pb2, 22 | geometry_pb2, 23 | image_pb2, 24 | manipulation_api_pb2, 25 | robot_command_pb2, 26 | synchronized_command_pb2, 27 | trajectory_pb2, 28 | ) 29 | from bosdyn.api.docking import docking_pb2 30 | from bosdyn.api.geometry_pb2 import SE2Velocity, SE2VelocityLimit, Vec2 31 | from bosdyn.api.spot import robot_command_pb2 as spot_command_pb2 32 | from bosdyn.client import math_helpers 33 | from bosdyn.client.docking import DockingClient, blocking_dock_robot, blocking_undock 34 | from bosdyn.client.frame_helpers import ( 35 | BODY_FRAME_NAME, 36 | GRAV_ALIGNED_BODY_FRAME_NAME, 37 | VISION_FRAME_NAME, 38 | get_a_tform_b, 39 | get_vision_tform_body, 40 | ) 41 | from bosdyn.client.image import ImageClient, build_image_request 42 | from bosdyn.client.manipulation_api_client import ManipulationApiClient 43 | from bosdyn.client.robot_command import ( 44 | RobotCommandBuilder, 45 | RobotCommandClient, 46 | block_until_arm_arrives, 47 | blocking_selfright, 48 | blocking_stand, 49 | ) 50 | from bosdyn.client.robot_state import RobotStateClient 51 | from google.protobuf import wrappers_pb2 52 | 53 | # Get Spot password and IP address 54 | env_err_msg = ( 55 | "\n{var_name} not found as an environment variable!\n" 56 | "Please run:\n" 57 | "echo 'export {var_name}=' >> ~/.bashrc\nor for MacOS,\n" 58 | "echo 'export {var_name}=' >> ~/.bash_profile\n" 59 | "Then:\nsource ~/.bashrc\nor\nsource ~/.bash_profile" 60 | ) 61 | 62 | ARM_6DOF_NAMES = [ 63 | "arm0.sh0", 64 | "arm0.sh1", 65 | "arm0.el0", 66 | "arm0.el1", 67 | "arm0.wr0", 68 | "arm0.wr1", 69 | ] 70 | 71 | HOME_TXT = osp.join(osp.dirname(osp.abspath(__file__)), "home.txt") 72 | 73 | 74 | class SpotCamIds: 75 | r"""Enumeration of types of cameras.""" 76 | 77 | BACK_DEPTH = "back_depth" 78 | BACK_DEPTH_IN_VISUAL_FRAME = "back_depth_in_visual_frame" 79 | BACK_FISHEYE = "back_fisheye_image" 80 | FRONTLEFT_DEPTH = "frontleft_depth" 81 | FRONTLEFT_DEPTH_IN_VISUAL_FRAME = "frontleft_depth_in_visual_frame" 82 | FRONTLEFT_FISHEYE = "frontleft_fisheye_image" 83 | FRONTRIGHT_DEPTH = "frontright_depth" 84 | FRONTRIGHT_DEPTH_IN_VISUAL_FRAME = "frontright_depth_in_visual_frame" 85 | FRONTRIGHT_FISHEYE = "frontright_fisheye_image" 86 | HAND_COLOR = "hand_color_image" 87 | HAND_COLOR_IN_HAND_DEPTH_FRAME = "hand_color_in_hand_depth_frame" 88 | HAND_DEPTH = "hand_depth" 89 | HAND_DEPTH_IN_HAND_COLOR_FRAME = "hand_depth_in_hand_color_frame" 90 | HAND = "hand_image" 91 | LEFT_DEPTH = "left_depth" 92 | LEFT_DEPTH_IN_VISUAL_FRAME = "left_depth_in_visual_frame" 93 | LEFT_FISHEYE = "left_fisheye_image" 94 | RIGHT_DEPTH = "right_depth" 95 | RIGHT_DEPTH_IN_VISUAL_FRAME = "right_depth_in_visual_frame" 96 | RIGHT_FISHEYE = "right_fisheye_image" 97 | 98 | 99 | # CamIds that need to be rotated by 270 degrees in order to appear upright 100 | SHOULD_ROTATE = [ 101 | SpotCamIds.FRONTLEFT_DEPTH, 102 | SpotCamIds.FRONTRIGHT_DEPTH, 103 | SpotCamIds.HAND_DEPTH, 104 | SpotCamIds.HAND, 105 | ] 106 | 107 | 108 | class Spot: 109 | def __init__(self, client_name_prefix): 110 | try: 111 | spot_admin_pw = os.environ["SPOT_ADMIN_PW"] 112 | except KeyError: 113 | raise 
RuntimeError(env_err_msg.format(var_name="SPOT_ADMIN_PW")) 114 | try: 115 | spot_ip = os.environ["SPOT_IP"] 116 | except KeyError: 117 | raise RuntimeError(env_err_msg.format(var_name="SPOT_IP")) 118 | 119 | bosdyn.client.util.setup_logging() 120 | sdk = bosdyn.client.create_standard_sdk(client_name_prefix) 121 | robot = sdk.create_robot(spot_ip) 122 | robot.authenticate("admin", spot_admin_pw) 123 | robot.time_sync.wait_for_sync() 124 | self.robot = robot 125 | self.command_client = None 126 | self.spot_lease = None 127 | 128 | # Get clients 129 | self.command_client = robot.ensure_client( 130 | RobotCommandClient.default_service_name 131 | ) 132 | self.image_client = robot.ensure_client(ImageClient.default_service_name) 133 | self.manipulation_api_client = robot.ensure_client( 134 | ManipulationApiClient.default_service_name 135 | ) 136 | self.robot_state_client = robot.ensure_client( 137 | RobotStateClient.default_service_name 138 | ) 139 | 140 | # Used to re-center origin of global frame 141 | if osp.isfile(HOME_TXT): 142 | with open(HOME_TXT) as f: 143 | data = f.read() 144 | self.global_T_home = np.array([float(d) for d in data.split(", ")[:9]]) 145 | self.global_T_home = self.global_T_home.reshape([3, 3]) 146 | self.robot_recenter_yaw = float(data.split(", ")[-1]) 147 | else: 148 | self.global_T_home = None 149 | self.robot_recenter_yaw = None 150 | 151 | # Print the battery charge level of the robot 152 | self.loginfo(f"Current battery charge: {self.get_battery_charge()}%") 153 | 154 | def get_lease(self, hijack=False): 155 | # Make sure a lease for this client isn't already active 156 | assert self.spot_lease is None 157 | self.spot_lease = SpotLease(self, hijack=hijack) 158 | return self.spot_lease 159 | 160 | def get_cmd_feedback(self, cmd_id): 161 | return self.command_client.robot_command_feedback(cmd_id) 162 | 163 | def is_estopped(self): 164 | return self.robot.is_estopped() 165 | 166 | def power_on(self, timeout_sec=20): 167 | self.robot.power_on(timeout_sec=timeout_sec) 168 | assert self.robot.is_powered_on(), "Robot power on failed." 169 | self.loginfo("Robot powered on.") 170 | 171 | def power_off(self, cut_immediately=False, timeout_sec=20): 172 | self.loginfo("Powering robot off...") 173 | self.robot.power_off(cut_immediately=cut_immediately, timeout_sec=timeout_sec) 174 | assert not self.robot.is_powered_on(), "Robot power off failed." 
175 | self.loginfo("Robot safely powered off.") 176 | 177 | def blocking_stand(self, timeout_sec=10): 178 | self.loginfo("Commanding robot to stand (blocking)...") 179 | blocking_stand(self.command_client, timeout_sec=timeout_sec) 180 | self.loginfo("Robot standing.") 181 | 182 | def stand(self, timeout_sec=10): 183 | stand_command = RobotCommandBuilder.synchro_stand_command() 184 | cmd_id = self.command_client.robot_command(stand_command, timeout=timeout_sec) 185 | return cmd_id 186 | 187 | def blocking_selfright(self, timeout_sec=20): 188 | self.loginfo("Commanding robot to self-right (blocking)...") 189 | blocking_selfright(self.command_client, timeout_sec=timeout_sec) 190 | self.loginfo("Robot has self-righted.") 191 | 192 | def loginfo(self, *args, **kwargs): 193 | self.robot.logger.info(*args, **kwargs) 194 | 195 | def open_gripper(self): 196 | """Does not block, be careful!""" 197 | gripper_command = RobotCommandBuilder.claw_gripper_open_command() 198 | self.command_client.robot_command(gripper_command) 199 | 200 | def close_gripper(self): 201 | """Does not block, be careful!""" 202 | gripper_command = RobotCommandBuilder.claw_gripper_close_command() 203 | self.command_client.robot_command(gripper_command) 204 | 205 | def move_gripper_to_point(self, point, rotation): 206 | """ 207 | Moves EE to a point relative to body frame 208 | :param point: XYZ location 209 | :param rotation: Euler roll-pitch-yaw or WXYZ quaternion 210 | :return: cmd_id 211 | """ 212 | if len(rotation) == 3: # roll pitch yaw Euler angles 213 | roll, pitch, yaw = rotation 214 | quat = geometry.EulerZXY(yaw=yaw, roll=roll, pitch=pitch).to_quaternion() 215 | elif len(rotation) == 4: # w, x, y, z quaternion 216 | w, x, y, z = rotation 217 | quat = math_helpers.Quat(w=w, x=x, y=y, z=z) 218 | else: 219 | raise RuntimeError( 220 | "rotation needs to have length 3 (euler) or 4 (quaternion)," 221 | f"got {len(rotation)}" 222 | ) 223 | 224 | hand_pose = math_helpers.SE3Pose(*point, quat) 225 | hand_trajectory = trajectory_pb2.SE3Trajectory( 226 | points=[trajectory_pb2.SE3TrajectoryPoint(pose=hand_pose.to_proto())] 227 | ) 228 | arm_cartesian_command = arm_command_pb2.ArmCartesianCommand.Request( 229 | pose_trajectory_in_task=hand_trajectory, 230 | root_frame_name=GRAV_ALIGNED_BODY_FRAME_NAME, 231 | ) 232 | 233 | # Pack everything up in protos. 
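# The cartesian request gets wrapped as ArmCartesianCommand -> ArmCommand ->
# SynchronizedCommand -> RobotCommand before being sent to the command client.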
234 | arm_command = arm_command_pb2.ArmCommand.Request( 235 | arm_cartesian_command=arm_cartesian_command 236 | ) 237 | synchronized_command = synchronized_command_pb2.SynchronizedCommand.Request( 238 | arm_command=arm_command 239 | ) 240 | command = robot_command_pb2.RobotCommand( 241 | synchronized_command=synchronized_command 242 | ) 243 | cmd_id = self.command_client.robot_command(command) 244 | 245 | return cmd_id 246 | 247 | def block_until_arm_arrives(self, cmd_id, timeout_sec=5): 248 | block_until_arm_arrives(self.command_client, cmd_id, timeout_sec=timeout_sec) 249 | 250 | def get_image_responses(self, sources, quality=None): 251 | """Retrieve images from Spot's cameras 252 | 253 | :param sources: list containing camera uuids 254 | :param quality: either an int or a list specifying what quality each source 255 | should return its image with 256 | :return: list containing bosdyn image response objects 257 | """ 258 | if quality is not None: 259 | if isinstance(quality, int): 260 | quality = [quality] * len(sources) 261 | else: 262 | assert len(quality) == len(sources) 263 | else: 264 | quality = [100] * len(sources) 265 | 266 | p_fmts = [ 267 | pixel_format_string_to_enum("PIXEL_FORMAT_RGB_U8") 268 | if "fisheye" in src 269 | else None 270 | for src in sources 271 | ] 272 | 273 | sources = [ 274 | build_image_request(src, q, pixel_format=fmt) 275 | for src, q, fmt in zip(sources, quality, p_fmts) 276 | ] 277 | image_responses = self.image_client.get_image(sources) 278 | 279 | return image_responses 280 | 281 | def grasp_point_in_image( 282 | self, 283 | image_response, 284 | pixel_xy=None, 285 | timeout=10, 286 | data_edge_timeout=2, 287 | top_down_grasp=False, 288 | horizontal_grasp=False, 289 | ): 290 | # If pixel location not provided, select the center pixel 291 | if pixel_xy is None: 292 | height = image_response.shot.image.rows 293 | width = image_response.shot.image.cols 294 | pixel_xy = [width // 2, height // 2] 295 | 296 | pick_vec = geometry_pb2.Vec2(x=pixel_xy[0], y=pixel_xy[1]) 297 | grasp = manipulation_api_pb2.PickObjectInImage( 298 | pixel_xy=pick_vec, 299 | transforms_snapshot_for_camera=image_response.shot.transforms_snapshot, 300 | frame_name_image_sensor=image_response.shot.frame_name_image_sensor, 301 | camera_model=image_response.source.pinhole, 302 | walk_gaze_mode=1, # PICK_NO_AUTO_WALK_OR_GAZE 303 | ) 304 | if top_down_grasp or horizontal_grasp: 305 | if top_down_grasp: 306 | # Add a constraint that requests that the x-axis of the gripper is 307 | # pointing in the negative-z direction in the vision frame. 308 | 309 | # The axis on the gripper is the x-axis. 310 | axis_on_gripper_ewrt_gripper = geometry_pb2.Vec3(x=1, y=0, z=0) 311 | 312 | # The axis in the vision frame is the negative z-axis 313 | axis_to_align_with_ewrt_vo = geometry_pb2.Vec3(x=0, y=0, z=-1) 314 | 315 | else: 316 | # Add a constraint that requests that the y-axis of the gripper is 317 | # pointing in the positive-z direction in the vision frame. That means 318 | # that the gripper is constrained to be rolled 90 degrees and pointed 319 | # at the horizon. 320 | 321 | # The axis on the gripper is the y-axis. 322 | axis_on_gripper_ewrt_gripper = geometry_pb2.Vec3(x=0, y=1, z=0) 323 | 324 | # The axis in the vision frame is the positive z-axis 325 | axis_to_align_with_ewrt_vo = geometry_pb2.Vec3(x=0, y=0, z=1) 326 | 327 | grasp.grasp_params.grasp_params_frame_name = VISION_FRAME_NAME 328 | # Add the vector constraint to our proto. 
329 | constraint = grasp.grasp_params.allowable_orientation.add() 330 | constraint.vector_alignment_with_tolerance.axis_on_gripper_ewrt_gripper.CopyFrom( 331 | axis_on_gripper_ewrt_gripper 332 | ) 333 | constraint.vector_alignment_with_tolerance.axis_to_align_with_ewrt_frame.CopyFrom( 334 | axis_to_align_with_ewrt_vo 335 | ) 336 | 337 | # Take anything within 30 degrees for top-down or horizontal grasps. 338 | constraint.vector_alignment_with_tolerance.threshold_radians = np.deg2rad( 339 | 30 * 2 340 | ) 341 | 342 | # Ask the robot to pick up the object 343 | grasp_request = manipulation_api_pb2.ManipulationApiRequest( 344 | pick_object_in_image=grasp 345 | ) 346 | # Send the request 347 | cmd_response = self.manipulation_api_client.manipulation_api_command( 348 | manipulation_api_request=grasp_request 349 | ) 350 | 351 | # Get feedback from the robot (WILL BLOCK TILL COMPLETION) 352 | start_time = time.time() 353 | success = False 354 | while time.time() < start_time + timeout: 355 | feedback_request = manipulation_api_pb2.ManipulationApiFeedbackRequest( 356 | manipulation_cmd_id=cmd_response.manipulation_cmd_id 357 | ) 358 | 359 | # Send the request 360 | response = self.manipulation_api_client.manipulation_api_feedback_command( 361 | manipulation_api_feedback_request=feedback_request 362 | ) 363 | 364 | print( 365 | "Current grasp_point_in_image state: ", 366 | manipulation_api_pb2.ManipulationFeedbackState.Name( 367 | response.current_state 368 | ), 369 | ) 370 | 371 | if ( 372 | response.current_state 373 | == manipulation_api_pb2.MANIP_STATE_GRASP_PLANNING_WAITING_DATA_AT_EDGE 374 | ) and time.time() > start_time + data_edge_timeout: 375 | break 376 | elif ( 377 | response.current_state 378 | == manipulation_api_pb2.MANIP_STATE_GRASP_SUCCEEDED 379 | ): 380 | success = True 381 | break 382 | elif response.current_state in [ 383 | manipulation_api_pb2.MANIP_STATE_GRASP_FAILED, 384 | manipulation_api_pb2.MANIP_STATE_GRASP_PLANNING_NO_SOLUTION, 385 | ]: 386 | break 387 | 388 | time.sleep(0.25) 389 | return success 390 | 391 | def grasp_hand_depth(self, *args, **kwargs): 392 | image_responses = self.get_image_responses( 393 | # [SpotCamIds.HAND_DEPTH_IN_HAND_COLOR_FRAME] 394 | [SpotCamIds.HAND_COLOR] 395 | ) 396 | hand_image_response = image_responses[0] # only expecting one image 397 | return self.grasp_point_in_image(hand_image_response, *args, **kwargs) 398 | 399 | def set_base_velocity( 400 | self, 401 | x_vel, 402 | y_vel, 403 | ang_vel, 404 | vel_time, 405 | disable_obstacle_avoidance=False, 406 | return_cmd=False, 407 | ): 408 | body_tform_goal = math_helpers.SE2Velocity(x=x_vel, y=y_vel, angular=ang_vel) 409 | params = spot_command_pb2.MobilityParams( 410 | obstacle_params=spot_command_pb2.ObstacleParams( 411 | disable_vision_body_obstacle_avoidance=disable_obstacle_avoidance, 412 | disable_vision_foot_obstacle_avoidance=False, 413 | disable_vision_foot_constraint_avoidance=False, 414 | obstacle_avoidance_padding=0.05, # in meters 415 | ) 416 | ) 417 | command = RobotCommandBuilder.synchro_velocity_command( 418 | v_x=body_tform_goal.linear_velocity_x, 419 | v_y=body_tform_goal.linear_velocity_y, 420 | v_rot=body_tform_goal.angular_velocity, 421 | params=params, 422 | ) 423 | 424 | if return_cmd: 425 | return command 426 | 427 | cmd_id = self.command_client.robot_command( 428 | command, end_time_secs=time.time() + vel_time 429 | ) 430 | 431 | return cmd_id 432 | 433 | def set_base_position( 434 | self, 435 | x_pos, 436 | y_pos, 437 | yaw, 438 | end_time, 439 | relative=False, 440 | 
max_fwd_vel=2, 441 | max_hor_vel=2, 442 | max_ang_vel=np.pi / 2, 443 | disable_obstacle_avoidance=False, 444 | blocking=False, 445 | ): 446 | vel_limit = SE2VelocityLimit( 447 | max_vel=SE2Velocity( 448 | linear=Vec2(x=max_fwd_vel, y=max_hor_vel), angular=max_ang_vel 449 | ), 450 | min_vel=SE2Velocity( 451 | linear=Vec2(x=-max_fwd_vel, y=-max_hor_vel), angular=-max_ang_vel 452 | ), 453 | ) 454 | params = spot_command_pb2.MobilityParams( 455 | vel_limit=vel_limit, 456 | obstacle_params=spot_command_pb2.ObstacleParams( 457 | disable_vision_body_obstacle_avoidance=disable_obstacle_avoidance, 458 | disable_vision_foot_obstacle_avoidance=False, 459 | disable_vision_foot_constraint_avoidance=False, 460 | obstacle_avoidance_padding=0.05, # in meters 461 | ), 462 | ) 463 | curr_x, curr_y, curr_yaw = self.get_xy_yaw(use_boot_origin=True) 464 | coors = np.array([x_pos, y_pos, 1.0]) 465 | if relative: 466 | local_T_global = self._get_local_T_global(curr_x, curr_y, curr_yaw) 467 | x, y, w = local_T_global.dot(coors) 468 | global_x_pos, global_y_pos = x / w, y / w 469 | global_yaw = wrap_heading(curr_yaw + yaw) 470 | else: 471 | global_x_pos, global_y_pos, global_yaw = self.xy_yaw_home_to_global( 472 | x_pos, y_pos, yaw 473 | ) 474 | robot_cmd = RobotCommandBuilder.synchro_se2_trajectory_point_command( 475 | goal_x=global_x_pos, 476 | goal_y=global_y_pos, 477 | goal_heading=global_yaw, 478 | frame_name=VISION_FRAME_NAME, 479 | params=params, 480 | ) 481 | cmd_id = self.command_client.robot_command( 482 | robot_cmd, end_time_secs=time.time() + end_time 483 | ) 484 | 485 | if blocking: 486 | cmd_status = None 487 | while cmd_status != 1: 488 | time.sleep(0.1) 489 | feedback_resp = self.get_cmd_feedback(cmd_id) 490 | cmd_status = ( 491 | feedback_resp.feedback.synchronized_feedback 492 | ).mobility_command_feedback.se2_trajectory_feedback.status 493 | return None 494 | 495 | return cmd_id 496 | 497 | def get_robot_state(self): 498 | return self.robot_state_client.get_robot_state() 499 | 500 | def get_battery_charge(self): 501 | state = self.get_robot_state() 502 | return state.power_state.locomotion_charge_percentage.value 503 | 504 | def roll_over(self, roll_over_left=True): 505 | if roll_over_left: 506 | dir_hint = basic_command_pb2.BatteryChangePoseCommand.Request.HINT_LEFT 507 | else: 508 | dir_hint = basic_command_pb2.BatteryChangePoseCommand.Request.HINT_RIGHT 509 | cmd = RobotCommandBuilder.battery_change_pose_command(dir_hint=dir_hint) 510 | self.command_client.robot_command(cmd) 511 | 512 | def sit(self): 513 | cmd = RobotCommandBuilder.synchro_sit_command() 514 | self.command_client.robot_command(cmd) 515 | 516 | def get_arm_proprioception(self, robot_state=None): 517 | """Return state of each of the 6 joints of the arm""" 518 | if robot_state is None: 519 | robot_state = self.robot_state_client.get_robot_state() 520 | arm_joint_states = OrderedDict( 521 | { 522 | i.name[len("arm0.") :]: i 523 | for i in robot_state.kinematic_state.joint_states 524 | if i.name in ARM_6DOF_NAMES 525 | } 526 | ) 527 | 528 | return arm_joint_states 529 | 530 | def get_proprioception(self, robot_state=None): 531 | """Return state of each of the 6 joints of the arm""" 532 | if robot_state is None: 533 | robot_state = self.robot_state_client.get_robot_state() 534 | joint_states = OrderedDict( 535 | {i.name: i for i in robot_state.kinematic_state.joint_states} 536 | ) 537 | 538 | return joint_states 539 | 540 | def set_arm_joint_positions( 541 | self, positions, travel_time=1.0, max_vel=2.5, max_acc=15, 
return_cmd=False 542 | ): 543 | """ 544 | Takes in 6 joint targets and moves each arm joint to the corresponding target. 545 | Ordering: sh0, sh1, el0, el1, wr0, wr1 546 | :param positions: np.array or list of radians 547 | :param travel_time: how long execution should take 548 | :param max_vel: max allowable velocity 549 | :param max_acc: max allowable acceleration 550 | :return: cmd_id 551 | """ 552 | sh0, sh1, el0, el1, wr0, wr1 = positions 553 | traj_point = RobotCommandBuilder.create_arm_joint_trajectory_point( 554 | sh0, sh1, el0, el1, wr0, wr1, travel_time 555 | ) 556 | arm_joint_traj = arm_command_pb2.ArmJointTrajectory( 557 | points=[traj_point], 558 | maximum_velocity=wrappers_pb2.DoubleValue(value=max_vel), 559 | maximum_acceleration=wrappers_pb2.DoubleValue(value=max_acc), 560 | ) 561 | command = make_robot_command(arm_joint_traj) 562 | 563 | if return_cmd: 564 | return command 565 | 566 | cmd_id = self.command_client.robot_command(command) 567 | 568 | return cmd_id 569 | 570 | def set_base_vel_and_arm_pos( 571 | self, 572 | x_vel, 573 | y_vel, 574 | ang_vel, 575 | arm_positions, 576 | travel_time, 577 | disable_obstacle_avoidance=False, 578 | ): 579 | base_cmd = self.set_base_velocity( 580 | x_vel, 581 | y_vel, 582 | ang_vel, 583 | vel_time=travel_time, 584 | disable_obstacle_avoidance=disable_obstacle_avoidance, 585 | return_cmd=True, 586 | ) 587 | arm_cmd = self.set_arm_joint_positions( 588 | arm_positions, travel_time=travel_time, return_cmd=True 589 | ) 590 | synchro_command = RobotCommandBuilder.build_synchro_command(base_cmd, arm_cmd) 591 | cmd_id = self.command_client.robot_command( 592 | synchro_command, end_time_secs=time.time() + travel_time 593 | ) 594 | return cmd_id 595 | 596 | def get_xy_yaw(self, use_boot_origin=False, robot_state=None): 597 | """ 598 | Returns the relative x and y distance from start, as well as relative heading 599 | """ 600 | if robot_state is None: 601 | robot_state = self.robot_state_client.get_robot_state() 602 | tf_snapshot = robot_state.kinematic_state.transforms_snapshot 603 | robot_tform = get_vision_tform_body(tf_snapshot) 604 | yaw = math_helpers.quat_to_eulerZYX(robot_tform.rotation)[0] 605 | if self.global_T_home is None or use_boot_origin: 606 | return robot_tform.x, robot_tform.y, yaw 607 | return self.xy_yaw_global_to_home(robot_tform.x, robot_tform.y, yaw) 608 | 609 | def get_transform( 610 | self, 611 | from_frame: str = BODY_FRAME_NAME, 612 | to_frame: str = VISION_FRAME_NAME, 613 | tf_snapshot=None, 614 | ): 615 | if tf_snapshot is None: 616 | robot_state = self.robot_state_client.get_robot_state() 617 | tf_snapshot = robot_state.kinematic_state.transforms_snapshot 618 | tform = get_a_tform_b(tf_snapshot, to_frame, from_frame) 619 | return tform.to_matrix() 620 | 621 | def xy_yaw_global_to_home(self, x, y, yaw): 622 | if self.global_T_home is None: 623 | return x, y, yaw 624 | x, y, w = self.global_T_home.dot(np.array([x, y, 1.0])) 625 | x, y = x / w, y / w 626 | 627 | return x, y, wrap_heading(yaw - self.robot_recenter_yaw) 628 | 629 | def xy_yaw_home_to_global(self, x, y, yaw): 630 | if self.global_T_home is None: 631 | return x, y, yaw 632 | local_T_global = np.linalg.inv(self.global_T_home) 633 | x, y, w = local_T_global.dot(np.array([x, y, 1.0])) 634 | x, y = x / w, y / w 635 | 636 | return x, y, wrap_heading(self.robot_recenter_yaw - yaw) 637 | 638 | def _get_local_T_global(self, x=None, y=None, yaw=None): 639 | if x is None: 640 | x, y, yaw = self.get_xy_yaw(use_boot_origin=True) 641 | # Create offset transformation 
matrix 642 | local_T_global = np.array( 643 | [ 644 | [np.cos(yaw), -np.sin(yaw), x], 645 | [np.sin(yaw), np.cos(yaw), y], 646 | [0.0, 0.0, 1.0], 647 | ] 648 | ) 649 | return local_T_global 650 | 651 | def home_robot(self): 652 | x, y, yaw = self.get_xy_yaw(use_boot_origin=True) 653 | local_T_global = self._get_local_T_global() 654 | self.global_T_home = np.linalg.inv(local_T_global) 655 | self.robot_recenter_yaw = yaw 656 | 657 | as_string = list(self.global_T_home.flatten()) + [yaw] 658 | as_string = f"{as_string}"[1:-1] # [1:-1] removes brackets 659 | with open(HOME_TXT, "w") as f: 660 | f.write(as_string) 661 | self.loginfo(f"Wrote:\n{as_string}\nto: {HOME_TXT}") 662 | 663 | def get_base_transform_to(self, child_frame): 664 | kin_state = self.robot_state_client.get_robot_state().kinematic_state 665 | kin_state = kin_state.transforms_snapshot.child_to_parent_edge_map.get( 666 | child_frame 667 | ).parent_tform_child 668 | return kin_state.position, kin_state.rotation 669 | 670 | def dock(self, dock_id, home_robot=False): 671 | blocking_dock_robot(self.robot, dock_id) 672 | if home_robot: 673 | self.home_robot() 674 | 675 | def undock(self): 676 | blocking_undock(self.robot) 677 | 678 | @property 679 | def is_docked(self): 680 | docking_client = self.robot.ensure_client(DockingClient.default_service_name) 681 | dock_state = docking_client.get_docking_state() 682 | return dock_state.status == docking_pb2.DockState.DOCK_STATUS_DOCKED 683 | 684 | 685 | class SpotLease: 686 | """ 687 | A class that supports execution with Python's "with" statement for safe return of 688 | the lease and settle-then-estop upon exit. Grants control of the Spot's motors. 689 | """ 690 | 691 | def __init__(self, spot, hijack=False): 692 | self.lease_client = spot.robot.ensure_client( 693 | bosdyn.client.lease.LeaseClient.default_service_name 694 | ) 695 | if hijack: 696 | self.lease = self.lease_client.take() 697 | else: 698 | self.lease = self.lease_client.acquire() 699 | self.lease_keep_alive = bosdyn.client.lease.LeaseKeepAlive(self.lease_client) 700 | self.spot = spot 701 | self.dont_return_lease = False 702 | 703 | def __enter__(self): 704 | return self.lease 705 | 706 | def __exit__(self, *args, **kwargs): 707 | self.return_lease() 708 | 709 | def return_lease(self): 710 | # Exit the LeaseKeepAlive object 711 | self.lease_keep_alive.shutdown() 712 | # Return the lease 713 | if not self.dont_return_lease: 714 | self.lease_client.return_lease(self.lease) 715 | self.spot.loginfo("Returned the lease.") 716 | # Clear lease from Spot object 717 | self.spot.spot_lease = None 718 | 719 | 720 | def make_robot_command(arm_joint_traj): 721 | """Helper function to create a RobotCommand from an ArmJointTrajectory. 
722 | The returned command will be a SynchronizedCommand with an ArmJointMoveCommand 723 | filled out to follow the passed in trajectory.""" 724 | 725 | joint_move_command = arm_command_pb2.ArmJointMoveCommand.Request( 726 | trajectory=arm_joint_traj 727 | ) 728 | arm_command = arm_command_pb2.ArmCommand.Request( 729 | arm_joint_move_command=joint_move_command 730 | ) 731 | sync_arm = synchronized_command_pb2.SynchronizedCommand.Request( 732 | arm_command=arm_command 733 | ) 734 | arm_sync_robot_cmd = robot_command_pb2.RobotCommand(synchronized_command=sync_arm) 735 | return RobotCommandBuilder.build_synchro_command(arm_sync_robot_cmd) 736 | 737 | 738 | def image_response_to_cv2(image_response, reorient=True): 739 | if image_response.shot.image.pixel_format == image_pb2.Image.PIXEL_FORMAT_DEPTH_U16: 740 | dtype = np.uint16 741 | else: 742 | dtype = np.uint8 743 | # img = np.fromstring(image_response.shot.image.data, dtype=dtype) 744 | img = np.frombuffer(image_response.shot.image.data, dtype=dtype) 745 | if image_response.shot.image.format == image_pb2.Image.FORMAT_RAW: 746 | img = img.reshape( 747 | image_response.shot.image.rows, image_response.shot.image.cols 748 | ) 749 | else: 750 | img = cv2.imdecode(img, -1) 751 | 752 | if reorient and image_response.source.name in SHOULD_ROTATE: 753 | img = np.rot90(img, k=3) 754 | 755 | return img 756 | 757 | 758 | def scale_depth_img(img, min_depth=0.0, max_depth=10.0, as_img=False): 759 | min_depth, max_depth = min_depth * 1000, max_depth * 1000 760 | img_copy = np.clip(img.astype(np.float32), a_min=min_depth, a_max=max_depth) 761 | img_copy = (img_copy - min_depth) / (max_depth - min_depth) 762 | if as_img: 763 | img_copy = cv2.cvtColor((255.0 * img_copy).astype(np.uint8), cv2.COLOR_GRAY2BGR) 764 | 765 | return img_copy 766 | 767 | 768 | def draw_crosshair(img): 769 | height, width = img.shape[:2] 770 | cx, cy = width // 2, height // 2 771 | img = cv2.circle( 772 | img, 773 | center=(cx, cy), 774 | radius=5, 775 | color=(0, 0, 255), 776 | thickness=1, 777 | ) 778 | 779 | return img 780 | 781 | 782 | def wrap_heading(heading): 783 | """Ensures input heading is between -180 an 180; can be float or np.ndarray""" 784 | return (heading + np.pi) % (2 * np.pi) - np.pi 785 | 786 | 787 | def pixel_format_string_to_enum(enum_string): 788 | return dict(image_pb2.Image.PixelFormat.items()).get(enum_string) 789 | --------------------------------------------------------------------------------
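A minimal end-to-end usage sketch for the mobility side of the Spot class above (illustrative only, not part of the repository). It assumes the SPOT_IP and SPOT_ADMIN_PW environment variables required by Spot.__init__ are exported and that an e-stop endpoint is registered separately; the client name "UsageSketch" and the motion targets are arbitrary examples:

# Illustrative usage sketch (not part of the repository). Assumes SPOT_IP and
# SPOT_ADMIN_PW are set, and that e-stop is handled by a separate client.
import time
from spot_wrapper.spot import Spot

spot = Spot("UsageSketch")  # client name prefix passed to the SDK

# get_lease() returns a SpotLease; using it as a context manager returns the
# lease on exit (hijack=True would take the lease instead of acquiring it).
with spot.get_lease(hijack=False):
    spot.power_on()
    spot.blocking_stand()

    # Drive forward at 0.3 m/s for 2 seconds, then command an absolute
    # (x, y, yaw) pose in the home frame and block until the trajectory settles.
    spot.set_base_velocity(x_vel=0.3, y_vel=0.0, ang_vel=0.0, vel_time=2.0)
    time.sleep(2.0)
    spot.set_base_position(x_pos=1.0, y_pos=0.0, yaw=0.0, end_time=10.0, blocking=True)

    spot.sit()
    spot.power_off()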
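The arm and grasping helpers compose the same way; a sketch under the same assumptions, where the joint targets (ordering sh0, sh1, el0, el1, wr0, wr1 per the set_arm_joint_positions docstring) and the top-down grasp are arbitrary examples:

# Illustrative arm sketch (not part of the repository). Joint values are
# arbitrary; ordering follows set_arm_joint_positions: sh0, sh1, el0, el1,
# wr0, wr1, in radians.
import numpy as np
from spot_wrapper.spot import Spot

spot = Spot("ArmSketch")
with spot.get_lease():
    spot.power_on()
    spot.blocking_stand()

    # Move the arm to a candidate pose over ~1.5 s and wait for it to arrive.
    cmd_id = spot.set_arm_joint_positions(
        np.deg2rad([0, -120, 120, 0, 75, 0]), travel_time=1.5
    )
    spot.block_until_arm_arrives(cmd_id, timeout_sec=3)

    spot.open_gripper()
    # Grab whatever is at the center of the hand camera, constraining the
    # approach to top-down; returns True on MANIP_STATE_GRASP_SUCCEEDED.
    success = spot.grasp_hand_depth(top_down_grasp=True)
    spot.loginfo(f"Grasp succeeded: {success}")

    spot.power_off()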
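The image path (get_image_responses plus the module-level image_response_to_cv2, scale_depth_img, and draw_crosshair helpers) can be exercised without a lease; a sketch using the SpotCamIds.HAND_COLOR source already referenced by grasp_hand_depth (any other source names would be assumptions):

# Illustrative camera sketch (not part of the repository). Image queries
# generally do not require a lease.
import cv2
from spot_wrapper.spot import (
    Spot,
    SpotCamIds,
    image_response_to_cv2,
    scale_depth_img,
    draw_crosshair,
)

spot = Spot("CameraSketch")

responses = spot.get_image_responses([SpotCamIds.HAND_COLOR], quality=75)
rgb = image_response_to_cv2(responses[0])  # decoded; rotated if the source is in SHOULD_ROTATE
rgb = draw_crosshair(rgb)                  # mark the center pixel

# Depth responses decode to uint16 millimeters; scale_depth_img clips and
# normalizes them to [0, 1] between min_depth and max_depth (meters),
# optionally returning a displayable 8-bit BGR image:
# depth_vis = scale_depth_img(image_response_to_cv2(depth_response),
#                             min_depth=0.0, max_depth=10.0, as_img=True)

cv2.imshow("hand_color", rgb)
cv2.waitKey(0)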
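The "home" re-centering used by home_robot(), _get_local_T_global(), and xy_yaw_global_to_home() is a plain 2D homogeneous transform; the following standalone sketch (no robot required, example poses chosen arbitrarily) mirrors that code to show how a boot-frame pose is re-expressed relative to the pose at which home_robot() was called:

# Standalone sketch of the 2D re-centering math in spot.py; no robot required.
import numpy as np

def wrap_heading(heading):
    # Same helper as in spot.py: wraps an angle to [-pi, pi).
    return (heading + np.pi) % (2 * np.pi) - np.pi

# Pose of the robot in the boot ("vision") frame when home_robot() is called;
# this pose becomes the origin of the home frame.
home_x, home_y, home_yaw = 2.0, 1.0, np.deg2rad(90)
local_T_global = np.array(
    [
        [np.cos(home_yaw), -np.sin(home_yaw), home_x],
        [np.sin(home_yaw), np.cos(home_yaw), home_y],
        [0.0, 0.0, 1.0],
    ]
)
global_T_home = np.linalg.inv(local_T_global)

# A later boot-frame pose, re-expressed relative to home (as in
# xy_yaw_global_to_home): 1 m ahead of the home pose, turned 45 degrees.
x, y, yaw = 2.0, 2.0, np.deg2rad(135)
hx, hy, hw = global_T_home.dot([x, y, 1.0])
hx, hy = hx / hw, hy / hw
hyaw = wrap_heading(yaw - home_yaw)
print(hx, hy, np.rad2deg(hyaw))  # ~ (1.0, 0.0, 45.0)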