61 | * Artificial Intelligence for Robotics [[Packt]](https://india.packtpub.com/in/hardware-and-creative/artificial-intelligence-robotics?utm_source=github&utm_medium=repository&utm_campaign=9781788835442) [[Amazon]](https://www.amazon.com/dp/1788835441)
62 |
63 | * Python Robotics Projects [[Packt]](https://india.packtpub.com/in/hardware-and-creative/python-robotics-projects?utm_source=github&utm_medium=repository&utm_campaign=9781788832922) [[Amazon]](https://www.amazon.com/dp/1788832922)
64 |
65 | ## Get to Know the Author
66 | **Danny Staple** builds robots and gadgets as a hobbyist, makes videos about his work with robots, and attends community events such as PiWars and Arduino Day. He has been a professional Python programmer since 2009, later moving into DevOps, and has been a software engineer since 2000. He has worked with embedded systems, including embedded Linux systems, throughout the majority of his career. He has been a mentor at a local CoderDojo, where he has taught coding with Python. He has run Lego Robotics clubs with Mindstorms. He has also developed Bounce!, a visual programming language targeted at teaching code using the NodeMCU IoT platform.
67 | The robots he has built with his children include TankBot, SkittleBot (now the Pi Wars robot), ArmBot, and SpiderBot.
68 |
69 |
70 |
71 | ### Suggestions and Feedback
72 | [Click here](https://docs.google.com/forms/d/e/1FAIpQLSdy7dATC6QmEL81FIUuymZ0Wy9vH1jHkvpY57OiMeKGqib_Ow/viewform) if you have any feedback or suggestions.
73 | ### Download a free PDF
74 |
75 | If you have already purchased a print or Kindle version of this book, you can get a DRM-free PDF version at no cost. Simply click on the link to claim your free PDF.
76 | https://packt.link/free-ebook/9781789340747
--------------------------------------------------------------------------------
/chapter10/circle_pan_tilt_behavior.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 | import math
3 |
4 | from robot import Robot
5 |
6 | class CirclePanTiltBehavior(object):
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 | self.current_time = 0
10 | self.frames_per_circle = 50
11 | self.radians_per_frame = (2 * math.pi) / self.frames_per_circle
12 | self.radius = 30
13 |
14 | def run(self):
15 | while True:
16 | frame = self.current_time % self.frames_per_circle
17 | frame_in_radians = frame * self.radians_per_frame
18 | self.robot.set_pan(self.radius * math.cos(frame_in_radians))
19 | self.robot.set_tilt(self.radius * math.sin(frame_in_radians))
20 | sleep(0.05)
21 | self.current_time += 1
22 |
23 |
24 | bot = Robot()
25 | behavior = CirclePanTiltBehavior(bot)
26 | behavior.run()
27 |
--------------------------------------------------------------------------------
/chapter10/readme.md:
--------------------------------------------------------------------------------
1 | # Servo Motor Setup
2 |
3 | The Raspi_MotorHAT library installed and tested in chapter 7 is still used throughout this chapter.
4 |
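A quick way to confirm that library is available before wiring up the servos (a suggested check, not from the book - it simply imports the PWM driver that this chapter's scripts rely on):

    pi@myrobot:~ $ python -c "from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM"

If this prints nothing, the library is installed; an ImportError means the chapter 7 setup needs revisiting.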
--------------------------------------------------------------------------------
/chapter10/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 |
4 | import atexit
5 |
6 | #import leds_led_shim
7 | import leds_8_apa102c
8 | from servos import Servos
9 |
10 | class Robot(object):
11 | def __init__(self, motorhat_addr=0x6f):
12 | # Setup the motorhat with the passed in address
13 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
14 |
15 | # get local variable for each motor
16 | self.left_motor = self._mh.getMotor(1)
17 | self.right_motor = self._mh.getMotor(2)
18 |
19 | # ensure the motors get stopped when the code exits
20 | atexit.register(self.stop_all)
21 |
22 | # Setup the line sensors
23 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
24 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
25 |
26 | # Setup the Leds
27 | self.leds = leds_8_apa102c.Leds()
28 |
29 | # Set up servo motors for pan and tilt.
30 | self.servos = Servos(addr=motorhat_addr)
31 |
32 | def stop_all(self):
33 | self.stop_motors()
34 |
35 | # Clear any sensor handlers
36 | self.left_line_sensor.when_line = None
37 | self.left_line_sensor.when_no_line = None
38 | self.right_line_sensor.when_line = None
39 | self.right_line_sensor.when_no_line = None
40 |
41 | # Clear the display
42 | self.leds.clear()
43 | self.leds.show()
44 |
45 | # Reset the servos
46 | self.servos.stop_all()
47 |
48 | def convert_speed(self, speed):
49 | mode = Raspi_MotorHAT.RELEASE
50 | if speed > 0:
51 | mode = Raspi_MotorHAT.FORWARD
52 | elif speed < 0:
53 | mode = Raspi_MotorHAT.BACKWARD
54 | output_speed = (abs(speed) * 255) / 100
55 | return mode, int(output_speed)
56 |
57 | def set_left(self, speed):
58 | mode, output_speed = self.convert_speed(speed)
59 | self.left_motor.setSpeed(output_speed)
60 | self.left_motor.run(mode)
61 |
62 | def set_right(self, speed):
63 | mode, output_speed = self.convert_speed(speed)
64 | self.right_motor.setSpeed(output_speed)
65 | self.right_motor.run(mode)
66 |
67 | def stop_motors(self):
68 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
69 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
70 |
71 | def set_pan(self, angle):
72 | self.servos.set_servo_angle(1, angle)
73 |
74 | def set_tilt(self, angle):
75 | self.servos.set_servo_angle(0, angle)
76 |
--------------------------------------------------------------------------------
/chapter10/servo_type_position.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 | import atexit
3 |
4 | pwm = PWM(0x6f)
5 | # This sets the timebase for it all
6 | pwm_frequency = 60
7 | pwm.setPWMFreq(pwm_frequency)
8 |
9 | # Frequency is 1/period, but working in ms, we can use 1000
10 | period_in_ms = 1000.0 / pwm_frequency
11 | # The chip has 4096 steps in each period.
12 | pulse_steps = 4096.0
13 | # Mid point of the servo pulse length in milliseconds.
14 | servo_mid_point_ms = 1.5
15 | # What a deflection of 90 degrees is in pulse length in milliseconds
16 | deflect_90_in_ms = 0.9
17 | # Steps for every millisecond.
18 | steps_per_ms = pulse_steps / period_in_ms
19 | # Steps for a degree
20 | steps_per_degree = (deflect_90_in_ms * steps_per_ms) / 90.0
21 | # Mid point of the servo in steps
22 | servo_mid_point_steps = servo_mid_point_ms * steps_per_ms
23 |
24 | def convert_degrees_to_pwm(position):
25 | return int(servo_mid_point_steps + (position * steps_per_degree))
26 |
27 | def stop():
28 | # Set pin off flag
29 | pwm.setPWM(0, 0, 4096)
30 |
31 | atexit.register(stop)
32 |
33 | import time
34 |
35 | while True:
36 |     position = int(input("Type your position in degrees (90 to -90, 0 is middle): "))
37 | end_step = convert_degrees_to_pwm(position)
38 | pwm.setPWM(0, 0, end_step)
39 |
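As a quick sanity check of the arithmetic above, here is a standalone sketch (same 60 Hz frequency and 0.9 ms per 90 degrees calibration) showing where the step values land:

    # Worked example of the servo pulse arithmetic (60 Hz, 0.9 ms per 90 degrees).
    pwm_frequency = 60
    period_in_ms = 1000.0 / pwm_frequency            # ~16.67 ms per PWM cycle
    pulse_steps = 4096.0                             # steps per cycle on the PWM chip
    steps_per_ms = pulse_steps / period_in_ms        # ~245.8 steps per millisecond
    steps_per_degree = (0.9 * steps_per_ms) / 90.0   # ~2.46 steps per degree
    servo_mid_point_steps = 1.5 * steps_per_ms       # ~368.6 steps for the 1.5 ms centre pulse

    for angle in (-90, 0, 90):
        print(angle, int(servo_mid_point_steps + angle * steps_per_degree))  # 147, 368, 589

These end-stop values land close to the servo_min (150) and servo_max (600) step counts used in test_servo_with_driving.py.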
--------------------------------------------------------------------------------
/chapter10/servos.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 |
3 | class Servos(object):
4 | def __init__(self, addr=0x6f, deflect_90_in_ms = 0.9):
5 | """addr: The i2c address of the PWM chip.
6 | deflect_90_in_ms: set this to calibrate the servo motors.
7 | it is what a deflection of 90 degrees is
8 | in terms of a pulse length in milliseconds."""
9 | self._pwm = PWM(addr)
10 | # This sets the timebase for it all
11 | pwm_frequency = 60
12 | self._pwm.setPWMFreq(pwm_frequency)
13 |
14 |         # Frequency is 1/period, but working in ms, we can use 1000
15 | period_in_ms = 1000.0 / pwm_frequency
16 | # The chip has 4096 steps in each period.
17 | pulse_steps = 4096.0
18 | # Mid point of the servo pulse length in milliseconds.
19 | servo_mid_point_ms = 1.5
20 | # Steps for every millisecond.
21 | steps_per_ms = pulse_steps / period_in_ms
22 | # Steps for a degree
23 | self.steps_per_degree = (deflect_90_in_ms * steps_per_ms) / 90.0
24 | # Mid point of the servo in steps
25 | self.servo_mid_point_steps = servo_mid_point_ms * steps_per_ms
26 |         # Start with the servos turned off
27 | self.stop_all()
28 |
29 | def stop_all(self):
30 | # 0 in start is nothing, 4096 sets the OFF bit.
31 | self._pwm.setPWM(0, 0, 4096)
32 | self._pwm.setPWM(1, 0, 4096)
33 | self._pwm.setPWM(14, 0, 4096)
34 | self._pwm.setPWM(15, 0, 4096)
35 |
36 | def _convert_degrees_to_pwm(self, position):
37 | return int(self.servo_mid_point_steps + (position * self.steps_per_degree))
38 |
39 | def set_servo_angle(self, channel, angle):
40 |         """angle: The angle in degrees from the center. -90 to 90"""
41 | # Validate
42 | if angle > 90 or angle < -90:
43 | raise ValueError("Angle outside of range")
44 | # Then set the position
45 | off_step = self._convert_degrees_to_pwm(angle)
46 | self._pwm.setPWM(channel, 0, off_step)
47 |
--------------------------------------------------------------------------------
/chapter10/test_servo_with_driving.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 | from robot import Robot
3 | import atexit
4 | import time
5 |
6 | r = Robot()
7 | pwm = PWM(0x6f)
8 |
9 | servo_min = 150 # Servo minimum position.
10 | servo_max = 600 # Maximum position
11 | servo_mid = 225 + 150 # Middle position
12 |
13 | def stop():
14 | pwm.setPWM(0, 0, 0)
15 |
16 | atexit.register(stop)
17 | pwm.setPWMFreq(60) # Set frequency to 60 Hz
18 |
19 | while (True):
20 |     r.set_left(100)  # set_left/set_right take speeds from -100 to 100
21 |     r.set_right(100)
22 | print("min")
23 | pwm.setPWM(0, 0, servo_min)
24 | time.sleep(1)
25 | print("max")
26 | pwm.setPWM(0, 0, servo_max)
27 | time.sleep(1)
28 | r.stop_motors()
29 | print("mid")
30 | pwm.setPWM(0, 0, servo_mid)
31 | time.sleep(1)
--------------------------------------------------------------------------------
/chapter11/avoid_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, NoDistanceRead
2 | from time import sleep
3 |
4 |
5 | class ObstacleAvoidingBehavior(object):
6 | """Better obstacle avoiding"""
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 |
10 | # Calculations for the LEDs
11 | led_half = int(self.robot.leds.count/2)
12 | self.max_distance = 100
13 | self.leds_per_distance = led_half / float(self.max_distance)
14 | # print("Leds per distance", self.leds_per_distance)
15 | self.sense_colour = (255, 0, 0)
16 |
17 | def distance_to_led_bar(self, distance):
18 |         # Invert so closer means more LEDs.
19 | inverted = self.max_distance - min(distance, self.max_distance)
20 | led_bar = int(round(inverted * self.leds_per_distance))
21 | return led_bar
22 |
23 | def display_state(self, left_distance, right_distance):
24 | # Clear first
25 | self.robot.leds.clear()
26 | # Right side
27 | led_bar = self.distance_to_led_bar(right_distance)
28 | self.robot.leds.set_range(range(led_bar), self.sense_colour)
29 | # Left side
30 | led_bar = self.distance_to_led_bar(left_distance)
31 | # Bit trickier - must go from below the leds count, to the leds count.
32 | start = self.robot.leds.count - led_bar
33 | self.robot.leds.set_range(range(start, self.robot.leds.count), self.sense_colour)
34 |
35 | # Now show this display
36 | self.robot.leds.show()
37 |
38 | def get_speeds(self, nearest_distance):
39 | if nearest_distance > 100:
40 | nearest_speed = 100
41 | furthest_speed = 100
42 | delay = 100
43 | elif nearest_distance > 50:
44 | nearest_speed = 100
45 | furthest_speed = 80
46 | delay = 100
47 | elif nearest_distance > 20:
48 | nearest_speed = 100
49 | furthest_speed = 60
50 | delay = 100
51 | elif nearest_distance > 10:
52 | nearest_speed = -40
53 | furthest_speed = -100
54 | delay = 100
55 |         else:  # collision
56 | nearest_speed = -100
57 | furthest_speed = -100
58 | delay = 250
59 | return nearest_speed, furthest_speed, delay
60 |
61 | def run(self):
62 | self.robot.set_pan(0)
63 | self.robot.set_tilt(0)
64 | while True:
65 | # Get the sensor readings
66 | try:
67 | left_distance = self.robot.left_distance_sensor.get_distance()
68 | except NoDistanceRead:
69 | left_distance = 100
70 | try:
71 | right_distance = self.robot.right_distance_sensor.get_distance()
72 | except NoDistanceRead:
73 | right_distance = 100
74 | # Display this
75 | self.display_state(left_distance, right_distance)
76 |
77 | # Get speeds for motors from distances
78 | nearest_speed, furthest_speed, delay = self.get_speeds(min(left_distance, right_distance))
79 |             print("Distances: l", left_distance, "r", right_distance, "Speeds: n", nearest_speed, "f", furthest_speed,
80 |                   "Delay:", delay)
81 |
82 | # Send this to the motors
83 | if left_distance < right_distance:
84 | self.robot.set_left(nearest_speed)
85 | self.robot.set_right(furthest_speed)
86 | else:
87 | self.robot.set_right(nearest_speed)
88 | self.robot.set_left(furthest_speed)
89 |
90 | # Wait our delay time
91 | sleep(delay * 0.001)
92 |
93 | bot = Robot()
94 | behavior = ObstacleAvoidingBehavior(bot)
95 | behavior.run()
96 |
--------------------------------------------------------------------------------
/chapter11/distance_sensor_hcsr04.py:
--------------------------------------------------------------------------------
1 | """Object for the HC-SR04 distance sensor type."""
2 | import time # import the whole thing, we need more than just sleep
3 | from gpiozero import DigitalInputDevice, DigitalOutputDevice
4 |
5 | # This is an exception; we will raise it when get_distance fails to make a measurement
6 | class NoDistanceRead(Exception):
7 | """The system was unable to make a measurement"""
8 |     pass  # We aren't doing anything special, but Python syntax requires us to be explicit about this.
9 |
10 | class DistanceSensor(DigitalInputDevice):
11 | """Represents a distance sensor."""
12 | def __init__(self, trigger_pin, echo_pin):
13 | # Setup devices, an input device and an output device, with pin numbers for the sensor.
14 | super(DistanceSensor, self).__init__(echo_pin)
15 | self._trigger = DigitalOutputDevice(trigger_pin)
16 | self._trigger.value = False
17 |
18 | def get_distance(self):
19 | """Method to get the distance measurement"""
20 | # Timeout - we'll use this to stop it getting stuck
21 | time_out = time.time() + 2
22 |
23 | # This off-on-off pulse tells the device to make a measurement
24 | self._trigger.value = True
25 | time.sleep(0.00001) # This is the 10 microseconds
26 | self._trigger.value = False
27 |
28 | # Wait for the pin state to stop being 0, going from low to high
29 | # When it rises, this is the real pulse start. Assign it once - it may already have changed!
30 | pulse_start = time.time()
31 | while self.pin.state == 0:
32 | pulse_start = time.time()
33 | # We ran out of time
34 | if pulse_start > time_out:
35 | raise NoDistanceRead("Timed Out")
36 |
37 | # Wait for the echo pin to stop being 1, going from high, to low, the end of the pulse.
38 | pulse_end = time.time()
39 | while self.pin.state == 1:
40 | pulse_end = time.time()
41 | # Pulse end not received
42 | if pulse_end > time_out:
43 | raise NoDistanceRead("Timed Out")
44 |
45 | # The duration is the time between the start and end of pulse in seconds.
46 | pulse_duration = pulse_end - pulse_start
47 | # Speed of sound in centimeters per second - 34300 cm/s. However, the pulse has travelled TWICE
48 | # the distance, so we get half of this. (34300 / 2) = 17150.
49 | distance = pulse_duration * 17150
50 | # Round it to 2 decimal places, any finer doesn't really make sense.
51 | distance = round(distance, 2)
52 | return distance
53 |
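As a quick check of the 17150 constant above (half of the ~34300 cm/s speed of sound, because the ping travels out and back):

    # Example: an echo pulse lasting 2 ms corresponds to an object roughly 34 cm away.
    pulse_duration = 0.002               # seconds
    distance = pulse_duration * 17150    # (34300 cm/s) / 2 for the out-and-back path
    print(round(distance, 2))            # 34.3 (cm)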
--------------------------------------------------------------------------------
/chapter11/menu_server.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, render_template
2 | from robot_modes import RobotModes
3 |
4 | # A Flask App contains all its routes.
5 | app = Flask(__name__)
6 | # Prepare our robot modes for use
7 | mode_manager = RobotModes()
8 |
9 | def render_menu(message=None):
10 | """Render the menu screen, with an optional status message"""
11 | return render_template('menu.html', menu=mode_manager.menu_config, message=message)
12 |
13 | # These are the Flask routes - the different places we can go to in our browser.
14 |
15 | @app.route("/")
16 | def index():
17 | return render_menu()
18 |
19 | @app.route("/run/<mode_name>")
20 | def run(mode_name):
21 | # Use our robot app to run something with this mode_name
22 | mode_manager.run(mode_name)
23 | return render_menu(message="%s running" % mode_name)
24 |
25 | @app.route("/stop")
26 | def stop():
27 | # Tell our system to stop the mode it's in.
28 | mode_manager.stop()
29 | return render_menu(message='Stopped')
30 |
31 | # Start the app running
32 | app.run(host="0.0.0.0", debug=True)
33 |
--------------------------------------------------------------------------------
/chapter11/readme.md:
--------------------------------------------------------------------------------
1 | # Distance Sensors Setup
2 |
3 | The distance sensor code uses GPIOZero. Be sure to install the version mentioned in chapter 8.
4 |
5 | # Menu Server Setup
6 |
7 | You will need to install Flask for this step.
8 |
9 | pi@myrobot:~ $ pip install flask
10 |
11 | # Additional Troubleshooting Notes
12 |
13 | The book has a number of troubleshooting notes. Please check those first. If the sensors are still not working, try the following additional troubleshooting steps:
14 |
15 | * Not all logic level shifters are bidirectional - please ensure the type you have used states "bidirectional" in its description.
16 | * Some logic level shifters will need a 3.3V (LV) power connection from the low-voltage rail on the breadboard.
17 |
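To start the menu server from the chapter11 directory and use it from a browser (Flask serves on its default port, 5000):

    pi@myrobot:~ $ python menu_server.py

Then browse to http://<your robot's IP address>:5000/ to see the menu.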
--------------------------------------------------------------------------------
/chapter11/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import RPi.GPIO as GPIO
4 |
5 | import atexit
6 |
7 | #import leds_led_shim
8 | import leds_8_apa102c
9 | from servos import Servos
10 | from distance_sensor_hcsr04 import DistanceSensor, NoDistanceRead
11 |
12 | class Robot(object):
13 | def __init__(self, motorhat_addr=0x6f, drive_enabled=True):
14 | # Setup the motorhat with the passed in address
15 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
16 |
17 | # get local variable for each motor
18 | self.left_motor = self._mh.getMotor(1)
19 | self.right_motor = self._mh.getMotor(2)
20 | self.drive_enabled = drive_enabled
21 |
22 | # ensure the motors get stopped when the code exits
23 | atexit.register(self.stop_all)
24 |
25 | # Setup the line sensors
26 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
27 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
28 |
29 | # Setup The Distance Sensors
30 | self.left_distance_sensor = DistanceSensor(17, 27)
31 | self.right_distance_sensor = DistanceSensor(5, 6)
32 |
33 | # Setup the Leds
34 | self.leds = leds_8_apa102c.Leds()
35 |
36 | # Set up servo motors for pan and tilt.
37 | self.servos = Servos(addr=motorhat_addr)
38 |
39 | def stop_all(self):
40 | self.stop_motors()
41 |
42 | # Clear any sensor handlers
43 | self.left_line_sensor.when_line = None
44 | self.left_line_sensor.when_no_line = None
45 | self.right_line_sensor.when_line = None
46 | self.right_line_sensor.when_no_line = None
47 |
48 | # Clear the display
49 | self.leds.clear()
50 | self.leds.show()
51 |
52 | # Reset the servos
53 | self.servos.stop_all()
54 |
55 | def convert_speed(self, speed):
56 | mode = Raspi_MotorHAT.RELEASE
57 | if speed > 0:
58 | mode = Raspi_MotorHAT.FORWARD
59 | elif speed < 0:
60 | mode = Raspi_MotorHAT.BACKWARD
61 | output_speed = (abs(speed) * 255) / 100
62 | return mode, int(output_speed)
63 |
64 | def set_left(self, speed):
65 | if not self.drive_enabled:
66 | return
67 | mode, output_speed = self.convert_speed(speed)
68 | self.left_motor.setSpeed(output_speed)
69 | self.left_motor.run(mode)
70 |
71 | def set_right(self, speed):
72 | if not self.drive_enabled:
73 | return
74 | mode, output_speed = self.convert_speed(speed)
75 | self.right_motor.setSpeed(output_speed)
76 | self.right_motor.run(mode)
77 |
78 | def stop_motors(self):
79 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
80 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
81 |
82 | def set_pan(self, angle):
83 | self.servos.set_servo_angle(1, angle)
84 |
85 | def set_tilt(self, angle):
86 | self.servos.set_servo_angle(0, angle)
87 |
--------------------------------------------------------------------------------
/chapter11/robot_modes.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 |
4 | class RobotModes(object):
5 | """Our robot behaviors and tests as running modes"""
6 |
7 | # Mode config goes from a "mode_name" to a script to run. Configured for look up
8 | # We could convert the path segment into a python script by adding '.py'
9 | # but this is a bad idea for at least 2 reasons:
10 | # * We may want to switch the script it really runs (avoid_behavior.py for simple_avoid_behavior.py)
11 | # * It's not a great security idea to let this run anything but the scripts we specify here.
12 | mode_config = {
13 | "avoid_behavior": "avoid_behavior.py",
14 | "circle_head": "circle_pan_tilt_behavior.py",
15 | "test_leds": "leds_test.py",
16 | "test_hcsr04": "test_hcsr04.py",
17 | "stop_at_line": "stop_at_line.py",
18 | "line_following": "line_following_behavior.py",
19 | "behavior_line": "behavior_line.py",
20 | "behavior_path": "behavior_path.py"
21 | }
22 |
23 | # Menu config is a list of mode_names and text to display. Ordered as we'd like our menu.
24 | menu_config = [
25 | {"mode_name": "avoid_behavior", "text": "Avoid Behavior"},
26 | {"mode_name": "circle_head", "text": "Circle Head"},
27 | {"mode_name": "test_leds", "text": "Test LEDS"},
28 | {"mode_name": "test_hcsr04", "text": "Test HC-SR04"},
29 | {"mode_name": "stop_at_line", "text": "Stop At Line"},
30 | {"mode_name": "line_following", "text": "Line Following"},
31 | {"mode_name": "behavior_line", "text": "Drive In A Line"},
32 | {"mode_name": "behavior_path", "text": "Drive a Path"}
33 | ]
34 |
35 | def __init__(self):
36 | self.current_process = None
37 |
38 | def is_running(self):
39 | """Check if there is a process running. Returncode is only set when a process finishes"""
40 | return self.current_process and self.current_process.returncode is None
41 |
42 | def run(self, mode_name):
43 | """Run the mode as a subprocess, but not if we still have one running"""
44 | while self.is_running():
45 | self.stop()
46 | script = self.mode_config[mode_name]
47 | self.current_process = subprocess.Popen(["python", script])
48 | return True
49 |
50 | def stop(self):
51 | """Stop a process"""
52 | if self.is_running():
53 | # Sending the signal sigint is (on Linux) similar to pressing ctrl-c.
54 | # The behavior will do the same clean up.
55 | self.current_process.send_signal(subprocess.signal.SIGINT)
56 | self.current_process = None
57 |
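A minimal sketch of using this class directly, outside the Flask menu server (assuming the chapter's scripts are in the current directory):

    import time
    from robot_modes import RobotModes

    modes = RobotModes()
    modes.run("test_leds")   # launches leds_test.py as a subprocess, per mode_config
    time.sleep(5)
    modes.stop()             # sends SIGINT, equivalent to pressing Ctrl+C in that script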
--------------------------------------------------------------------------------
/chapter11/simple_avoid_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, NoDistanceRead
2 | from time import sleep
3 |
4 |
5 | class ObstacleAvoidingBehavior(object):
6 | """Simple obstacle avoiding"""
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 |
10 | # Calculations for the LEDs
11 | led_half = int(self.robot.leds.count/2)
12 | self.max_distance = 100
13 | self.leds_per_distance = led_half / float(self.max_distance)
14 | # print("Leds per distance", self.leds_per_distance)
15 | self.sense_colour = (255, 0, 0)
16 |
17 | def distance_to_led_bar(self, distance):
18 |         # Invert so closer means more LEDs.
19 | inverted = self.max_distance - min(distance, self.max_distance)
20 | led_bar = int(round(inverted * self.leds_per_distance))
21 | return led_bar
22 |
23 | def display_state(self, left_distance, right_distance):
24 | # Clear first
25 | self.robot.leds.clear()
26 | # Right side
27 | led_bar = self.distance_to_led_bar(right_distance)
28 | self.robot.leds.set_range(range(led_bar), self.sense_colour)
29 | # Left side
30 | led_bar = self.distance_to_led_bar(left_distance)
31 | # Bit trickier - must go from below the leds count, to the leds count.
32 | start = self.robot.leds.count - led_bar
33 | self.robot.leds.set_range(range(start, self.robot.leds.count), self.sense_colour)
34 |
35 | # Now show this display
36 | self.robot.leds.show()
37 |
38 | def get_motor_speed(self, distance):
39 |         """This method chooses a speed for a motor based on the distance from its sensor"""
40 | if distance < 20:
41 | return -100
42 | else:
43 | return 100
44 |
45 | def run(self):
46 | self.robot.set_pan(0)
47 | self.robot.set_tilt(0)
48 | while True:
49 | # Get the sensor readings
50 | try:
51 | left_distance = self.robot.left_distance_sensor.get_distance()
52 | except NoDistanceRead:
53 | left_distance = 100
54 | try:
55 | right_distance = self.robot.right_distance_sensor.get_distance()
56 | except NoDistanceRead:
57 | right_distance = 100
58 | # Display this
59 | self.display_state(left_distance, right_distance)
60 |
61 | # Get speeds for motors from distances
62 | print("Distances: l", left_distance, "r", right_distance)
63 | self.robot.set_left(self.get_motor_speed(left_distance))
64 | self.robot.set_right(self.get_motor_speed(right_distance))
65 |
66 | # Wait a little
67 | sleep(0.1)
68 |
69 | bot = Robot()
70 | behavior = ObstacleAvoidingBehavior(bot)
71 | behavior.run()
72 |
--------------------------------------------------------------------------------
/chapter11/templates/menu.html:
--------------------------------------------------------------------------------
1 | <html>
2 | <head>
3 |     <title>My Robot Menu</title>
4 | </head>
5 | <body>
6 |     <h1>My Robot Menu</h1>
7 |     {% if message %}
8 |         <p>{{ message }}</p>
9 |     {% endif %}
10 |     <ul>
11 |     {% for item in menu %}
12 |         <li><a href="/run/{{ item.mode_name }}">{{ item.text }}</a></li>
13 |     {% endfor %}
14 |     </ul>
15 |     <a href="/stop">Stop</a>
16 | </body>
17 | </html>
--------------------------------------------------------------------------------
/chapter11/test_hcsr04.py:
--------------------------------------------------------------------------------
1 | import time
2 | from gpiozero import DigitalInputDevice, DigitalOutputDevice
3 |
4 | # Setup devices, an input device and an output device, with pin numbers for the sensors.
5 | print("Prepare GPIO pins")
6 |
7 | # Left sensor
8 | left_trigger = DigitalOutputDevice(17)
9 | left_echo = DigitalInputDevice(27)
10 |
11 | left_trigger.value = False
12 |
13 | # Right sensor
14 | right_trigger = DigitalOutputDevice(5)
15 | right_echo = DigitalInputDevice(6)
16 |
17 | right_trigger.value = False
18 |
19 | # wait a little, to iron out spurious responses.
20 | print("Warm up time")
21 | time.sleep(0.5)
22 |
23 | def make_measurement(trig_device, echo_device):
24 | """Function to get the distance measurement"""
25 | # Timeout - we'll use this to stop it getting stuck
26 | time_out = time.time() + 1
27 |
28 | # This off-on-off pulse tells the device to make a measurement
29 | trig_device.value = True
30 | time.sleep(0.00001) # This is the 10 microseconds
31 | trig_device.value = False
32 |
33 | # Here, we wait for the pin state to stop being 0, that is, to go from low to high
34 | # When it rises, this is the real pulse start. Assign it once - it may already have changed!
35 | pulse_start = time.time()
36 | while echo_device.pin.state == 0:
37 | pulse_start = time.time()
38 | # We ran out of time here.
39 | if pulse_start > time_out:
40 |             print("timed out - missed pulse start")
41 | return 100
42 |
43 | # Now we wait for the echo_device pin to stop being 1, going from high, to low, the end of the pulse.
44 | pulse_end = time.time()
45 | while echo_device.pin.state == 1:
46 | pulse_end = time.time()
47 | if pulse_end > time_out:
48 |             print("timed out - pulse end too long")
49 | return 100
50 |
51 | # The duration is the time between the start and end of pulse in seconds.
52 | pulse_duration = pulse_end - pulse_start
53 | # This number is the speed of sound in centimeters per second - 34300 cm/s. However, the pulse has travelled TWICE
54 | # the distance, so we get half of this. (34300 / 2) = 17150.
55 | distance = pulse_duration * 17150
56 | # Round it to 2 decimal places, any finer doesn't really make sense.
57 | distance = round(distance, 2)
58 | return distance
59 |
60 | while True:
61 | # Make our measurements and print them
62 | left_distance = make_measurement(left_trigger, left_echo)
63 | right_distance = make_measurement(right_trigger, right_echo)
64 |     print("Left:", left_distance, "cm", "Right:", right_distance, "cm")
65 | # Sleep a bit before making another.
66 | time.sleep(0.5)
67 |
--------------------------------------------------------------------------------
/chapter12/drive_distance_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 |
5 |
6 | def drive_distance(bot, distance, speed=80):
7 | # Use left as "primary" motor, the right is keeping up
8 | set_primary = bot.set_left
9 | primary_encoder = bot.left_encoder
10 | set_secondary = bot.set_right
11 | secondary_encoder = bot.right_encoder
12 |
13 | controller = PIController(proportional_constant=5, integral_constant=0.2)
14 |
15 | # start the motors, and start the loop
16 | set_primary(speed)
17 | set_secondary(speed)
18 | while primary_encoder.pulse_count < distance or secondary_encoder.pulse_count < distance:
19 | # Sleep a bit before calculating
20 | time.sleep(0.05)
21 | # How far off are we?
22 | error = primary_encoder.pulse_count - secondary_encoder.pulse_count
23 | adjustment = controller.get_value(error)
24 | # How fast should the motor move to get there?
25 | set_secondary(speed + adjustment)
26 | # Some debug
27 | print("Primary c:{} ({} mm)\tSecondary c:{} ({} mm) e:{} adjustment: {:.2f}".format(
28 | primary_encoder.pulse_count, primary_encoder.distance_in_mm(),
29 | secondary_encoder.pulse_count, secondary_encoder.distance_in_mm(),
30 | error,
31 | adjustment
32 | ))
33 |
34 |
35 | bot = Robot()
36 | distance_to_drive = 1000 # in mm - this is a meter
37 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
38 | drive_distance(bot, distance_in_ticks)
39 |
--------------------------------------------------------------------------------
/chapter12/drive_square_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 | import math
5 |
6 |
7 | def drive_distances(bot, left_distance, right_distance, speed=80):
8 | # We always want the "primary" to be the longest distance, therefore the faster motor
9 | if abs(left_distance) >= abs(right_distance):
10 | print("Left is primary")
11 | set_primary = bot.set_left
12 | primary_encoder = bot.left_encoder
13 | set_secondary = bot.set_right
14 | secondary_encoder = bot.right_encoder
15 | primary_distance = left_distance
16 | secondary_distance = right_distance
17 | else:
18 | print("right is primary")
19 | set_primary = bot.set_right
20 | primary_encoder = bot.right_encoder
21 | set_secondary = bot.set_left
22 | secondary_encoder = bot.left_encoder
23 | primary_distance = right_distance
24 | secondary_distance = left_distance
25 | primary_to_secondary_ratio = secondary_distance / (primary_distance * 1.0)
26 | secondary_speed = speed * primary_to_secondary_ratio
27 | print("Targets - primary: %d, secondary: %d, ratio: %.2f" % (primary_distance, secondary_distance, primary_to_secondary_ratio))
28 |
29 | primary_encoder.reset()
30 | secondary_encoder.reset()
31 |
32 | controller = PIController(proportional_constant=5, integral_constant=0.2)
33 |
34 | # Ensure that the encoder knows which way it is going
35 | primary_encoder.set_direction(math.copysign(1, speed))
36 | secondary_encoder.set_direction(math.copysign(1, secondary_speed))
37 |
38 | # start the motors, and start the loop
39 | set_primary(speed)
40 | set_secondary(secondary_speed)
41 | while abs(primary_encoder.pulse_count) < abs(primary_distance) or abs(secondary_encoder.pulse_count) < abs(secondary_distance):
42 | # And sleep a bit before calculating
43 | time.sleep(0.05)
44 |
45 | # How far off are we?
46 | secondary_target = primary_encoder.pulse_count * primary_to_secondary_ratio
47 | error = secondary_target - secondary_encoder.pulse_count
48 |
49 | # How fast should the motor move to get there?
50 | adjustment = controller.get_value(error)
51 |
52 | set_secondary(secondary_speed + adjustment)
53 | secondary_encoder.set_direction(math.copysign(1, secondary_speed+adjustment))
54 | # Some debug
55 |         print("Primary c:{:.2f} ({:.2f} mm)\tSecondary c:{:.2f} ({:.2f} mm) t:{:.2f} e:{:.2f} s:{:.2f} adjustment: {:.2f}".format(
56 |             primary_encoder.pulse_count,
57 |             primary_encoder.distance_in_mm(),
58 |             secondary_encoder.pulse_count,
59 |             secondary_encoder.distance_in_mm(),
60 |             secondary_target,
61 |             error,
62 |             secondary_speed,
63 |             adjustment
64 |         ))
65 |
66 | # Stop the primary if we need to
67 | if abs(primary_encoder.pulse_count) >= abs(primary_distance):
68 |             print("primary stop")
69 | set_primary(0)
70 | secondary_speed = 0
71 |
72 | def drive_arc(bot, turn_in_degrees, radius, speed=80):
73 | """ Turn is based on change in heading. """
74 | # Get the bot width in ticks
75 | half_width_ticks = EncoderCounter.mm_to_ticks(bot.wheel_distance_mm/2.0)
76 | if turn_in_degrees < 0:
77 | left_radius = radius - half_width_ticks
78 | right_radius = radius + half_width_ticks
79 | else:
80 | left_radius = radius + half_width_ticks
81 | right_radius = radius - half_width_ticks
82 |     print("Arc left radius {:.2f}, right_radius {:.2f}".format(left_radius, right_radius))
83 | radians = math.radians(abs(turn_in_degrees))
84 | left_distance = int(left_radius * radians)
85 | right_distance = int(right_radius * radians)
86 |     print("Arc left distance {}, right_distance {}".format(left_distance, right_distance))
87 | drive_distances(bot, left_distance, right_distance, speed=speed)
88 |
89 | bot = Robot()
90 |
91 | distance_to_drive = 300 # in mm
92 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
93 | radius = bot.wheel_distance_mm + 100 # in mm
94 | radius_in_ticks = EncoderCounter.mm_to_ticks(radius)
95 |
96 | for n in range(4):
97 | drive_distances(bot, distance_in_ticks, distance_in_ticks)
98 | drive_arc(bot, 90, radius_in_ticks, speed=50)
99 |
--------------------------------------------------------------------------------
/chapter12/encoder_counter.py:
--------------------------------------------------------------------------------
1 | from gpiozero import DigitalInputDevice
2 | import math
3 |
4 | class EncoderCounter(DigitalInputDevice):
5 | ticks_to_mm_const = None # you must set this up before using distance methods
6 | def __init__(self, pin):
7 | super(EncoderCounter, self).__init__(pin)
8 | self.pin.when_changed = self.when_changed
9 | self.pulse_count = 0
10 | self.direction = 1
11 |
12 | def when_changed(self):
13 | self.pulse_count += self.direction
14 |
15 | def set_direction(self, direction):
16 | """ This should be -1 or 1. """
17 | assert abs(direction)==1, "Direction %s should be 1 or -1" % direction
18 | self.direction = direction
19 |
20 | def reset(self):
21 | self.pulse_count = 0
22 |
23 | def distance_in_mm(self):
24 | return int(self.pulse_count * EncoderCounter.ticks_to_mm_const)
25 |
26 | @staticmethod
27 | def mm_to_ticks(mm):
28 | return mm / EncoderCounter.ticks_to_mm_const
29 |
30 | @staticmethod
31 | def set_constants(wheel_diameter_mm, ticks_per_revolution):
32 | EncoderCounter.ticks_to_mm_const = (math.pi / ticks_per_revolution) * wheel_diameter_mm
33 |
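For example, with the wheel geometry used in chapter12/robot.py (69 mm wheel diameter, 40 ticks per revolution):

    import math

    wheel_diameter_mm = 69.0
    ticks_per_revolution = 40.0
    ticks_to_mm_const = (math.pi / ticks_per_revolution) * wheel_diameter_mm
    print(round(ticks_to_mm_const, 2))       # ~5.42 mm of travel per encoder tick
    print(round(1000 / ticks_to_mm_const))   # ~185 ticks to cover one metre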
--------------------------------------------------------------------------------
/chapter12/pid_controller.py:
--------------------------------------------------------------------------------
1 | class PIController(object):
2 | def __init__(self, proportional_constant=0, integral_constant=0):
3 | self.proportional_constant = proportional_constant
4 | self.integral_constant = integral_constant
5 |
6 | # Running sums
7 | self.integral_sum = 0
8 |
9 | def handle_proportional(self, error):
10 | return self.proportional_constant * error
11 |
12 | def handle_integral(self, error):
13 | self.integral_sum += error
14 | return self.integral_constant * self.integral_sum
15 |
16 | def get_value(self, error):
17 | p = self.handle_proportional(error)
18 | i = self.handle_integral(error)
19 | return p + i
20 |
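A short worked example of how the proportional and integral terms combine, using the constants from straight_line_drive.py (proportional_constant=4, integral_constant=0.2):

    from pid_controller import PIController

    pid = PIController(proportional_constant=4, integral_constant=0.2)
    print(round(pid.get_value(3), 2))  # 12.6 = 4*3 + 0.2*3
    print(round(pid.get_value(3), 2))  # 13.2 = 4*3 + 0.2*6 - the integral sum keeps growing
    print(round(pid.get_value(0), 2))  # 1.2  = 4*0 + 0.2*6 - and it persists after the error clears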
--------------------------------------------------------------------------------
/chapter12/readme.md:
--------------------------------------------------------------------------------
1 | # Encoder Sensors
2 |
3 | The code in this chapter requires the setup steps from chapters 7 and 8 to have been completed.
4 |
--------------------------------------------------------------------------------
/chapter12/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import RPi.GPIO as GPIO
4 |
5 | import atexit
6 |
7 | #import leds_led_shim
8 | import leds_8_apa102c
9 | from servos import Servos
10 | from distance_sensor_hcsr04 import DistanceSensor, NoDistanceRead
11 | from encoder_counter import EncoderCounter
12 |
13 | class Robot(object):
14 | wheel_diameter_mm = 69.0
15 | ticks_per_revolution = 40.0
16 | wheel_distance_mm = 131.0
17 |
18 | def __init__(self, motorhat_addr=0x6f, drive_enabled=True):
19 | # Setup the motorhat with the passed in address
20 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
21 |
22 | # get local variable for each motor
23 | self.left_motor = self._mh.getMotor(1)
24 | self.right_motor = self._mh.getMotor(2)
25 | self.drive_enabled = drive_enabled
26 |
27 | # ensure the motors get stopped when the code exits
28 | atexit.register(self.stop_all)
29 |
30 | # Setup the line sensors
31 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
32 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
33 |
34 | # Setup The Distance Sensors
35 | self.left_distance_sensor = DistanceSensor(17, 27)
36 | self.right_distance_sensor = DistanceSensor(5, 6)
37 |
38 | # Setup the Encoders
39 | EncoderCounter.set_constants(self.wheel_diameter_mm, self.ticks_per_revolution)
40 | self.left_encoder = EncoderCounter(4)
41 | self.right_encoder = EncoderCounter(26)
42 |
43 | # Setup the Leds
44 | self.leds = leds_8_apa102c.Leds()
45 |
46 | # Set up servo motors for pan and tilt.
47 | self.servos = Servos(addr=motorhat_addr)
48 |
49 | def stop_all(self):
50 | self.stop_motors()
51 |
52 | # Clear any sensor handlers
53 | self.left_line_sensor.when_line = None
54 | self.left_line_sensor.when_no_line = None
55 | self.right_line_sensor.when_line = None
56 | self.right_line_sensor.when_no_line = None
57 |
58 | # Clear the display
59 | self.leds.clear()
60 | self.leds.show()
61 |
62 | # Reset the servos
63 | self.servos.stop_all()
64 |
65 | def convert_speed(self, speed):
66 | mode = Raspi_MotorHAT.RELEASE
67 | if speed > 0:
68 | mode = Raspi_MotorHAT.FORWARD
69 | elif speed < 0:
70 | mode = Raspi_MotorHAT.BACKWARD
71 | output_speed = (abs(speed) * 255) / 100
72 | return mode, int(output_speed)
73 |
74 | def set_left(self, speed):
75 | if not self.drive_enabled:
76 | return
77 | mode, output_speed = self.convert_speed(speed)
78 | self.left_motor.setSpeed(output_speed)
79 | self.left_motor.run(mode)
80 |
81 | def set_right(self, speed):
82 | if not self.drive_enabled:
83 | return
84 | mode, output_speed = self.convert_speed(speed)
85 | self.right_motor.setSpeed(output_speed)
86 | self.right_motor.run(mode)
87 |
88 | def stop_motors(self):
89 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
90 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
91 |
92 | def set_pan(self, angle):
93 | self.servos.set_servo_angle(1, angle)
94 |
95 | def set_tilt(self, angle):
96 | self.servos.set_servo_angle(0, angle)
97 |
--------------------------------------------------------------------------------
/chapter12/robot_modes.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 |
4 | class RobotModes(object):
5 | """Our robot behaviors and tests as running modes"""
6 |
7 | # Mode config goes from a "mode_name" to a script to run. Configured for look up
8 | # We could convert the path segment into a python script by adding '.py'
9 | # but this is a bad idea for at least 2 reasons:
10 | # * We may want to switch the script it really runs (avoid_behavior.py for simple_avoid_behavior.py)
11 | # * It's not a great security idea to let this run anything but the scripts we specify here.
12 | mode_config = {
13 | "avoid_behavior": "avoid_behavior.py",
14 | "circle_head": "circle_pan_tilt_behavior.py",
15 | "test_leds": "leds_test.py",
16 | "test_hcsr04": "test_hcsr04.py",
17 | "stop_at_line": "stop_at_line.py",
18 | "line_following": "line_following_behavior.py",
19 | "behavior_line": "straight_line_drive.py",
20 | "behavior_path": "drive_square.py"
21 | }
22 |
23 | # Menu config is a list of mode_names and text to display. Ordered as we'd like our menu.
24 | menu_config = [
25 | {"mode_name": "avoid_behavior", "text": "Avoid Behavior"},
26 | {"mode_name": "circle_head", "text": "Circle Head"},
27 | {"mode_name": "test_leds", "text": "Test LEDS"},
28 | {"mode_name": "test_hcsr04", "text": "Test HC-SR04"},
29 | {"mode_name": "stop_at_line", "text": "Stop At Line"},
30 | {"mode_name": "line_following", "text": "Line Following"},
31 | {"mode_name": "behavior_line", "text": "Drive In A Line"},
32 | {"mode_name": "behavior_path", "text": "Drive a Square Path"}
33 | ]
34 |
35 | def __init__(self):
36 | self.current_process = None
37 |
38 | def is_running(self):
39 | """Check if there is a process running. Returncode is only set when a process finishes"""
40 | return self.current_process and self.current_process.returncode is None
41 |
42 | def run(self, mode_name):
43 | """Run the mode as a subprocess, but not if we still have one running"""
44 | while self.is_running():
45 | self.stop()
46 | script = self.mode_config[mode_name]
47 | self.current_process = subprocess.Popen(["python", script])
48 | return True
49 |
50 | def stop(self):
51 | """Stop a process"""
52 | if self.is_running():
53 | # Sending the signal sigint is (on Linux) similar to pressing ctrl-c.
54 | # The behavior will do the same clean up.
55 | self.current_process.send_signal(subprocess.signal.SIGINT)
56 | self.current_process = None
57 |
--------------------------------------------------------------------------------
/chapter12/straight_line_drive.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from pid_controller import PIController
3 | import time
4 |
5 | bot = Robot()
6 | stop_at_time = time.time() + 60
7 |
8 | speed = 80
9 | bot.set_left(speed)
10 | bot.set_right(speed)
11 |
12 | pid = PIController(proportional_constant=4, integral_constant=0.2)
13 | while time.time() < stop_at_time:
14 | time.sleep(0.02)
15 | # Calculate the error
16 | left = bot.left_encoder.pulse_count
17 | right = bot.right_encoder.pulse_count
18 | error = left - right
19 | # Get the speed
20 | adjustment = pid.get_value(error)
21 | right_speed = int(speed + adjustment)
22 |     print("left", left,
23 |           "right", right,
24 |           "right_speed:", right_speed,
25 |           "error:", error,
26 |           "adjustment: %.2f" % adjustment)
27 |     # Apply it to the right motor
28 | bot.set_right(right_speed)
29 |
--------------------------------------------------------------------------------
/chapter12/test_distance_travelled.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | import time
3 | import math
4 |
5 | wheel_diameter_mm = 70.0
6 | ticks_per_revolution = 40.0
7 | ticks_to_mm_const = (math.pi / ticks_per_revolution) * wheel_diameter_mm
8 |
9 | def ticks_to_mm(ticks):
10 | return int(ticks_to_mm_const * ticks)
11 |
12 | bot = Robot()
13 | stop_at_time = time.time() + 1
14 |
15 | bot.set_left(90)
16 | bot.set_right(90)
17 |
18 | while time.time() < stop_at_time:
19 |     print("Left:", ticks_to_mm(bot.left_encoder.pulse_count),
20 |           "Right:", ticks_to_mm(bot.right_encoder.pulse_count))
21 | time.sleep(0.05)
22 |
--------------------------------------------------------------------------------
/chapter12/test_encoders.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | import time
3 |
4 | from gpiozero import DigitalInputDevice
5 |
6 |
7 | class EncoderCounter(object):
8 | def __init__(self, pin_number):
9 | self.pulse_count = 0
10 | self.device = DigitalInputDevice(pin=pin_number)
11 | self.device.pin.when_changed = self.when_changed
12 |
13 | def when_changed(self):
14 | self.pulse_count += 1
15 |
16 |
17 | bot = Robot()
18 | left_encoder = EncoderCounter(4)
19 | right_encoder = EncoderCounter(26)
20 | stop_at_time = time.time() + 1
21 |
22 | bot.set_left(90)
23 | bot.set_right(90)
24 |
25 | while time.time() < stop_at_time:
26 |     print("Left:", left_encoder.pulse_count, "Right:", right_encoder.pulse_count)
27 | time.sleep(0.05)
28 |
--------------------------------------------------------------------------------
/chapter13/color_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class ColorTrackingBehavior(object):
15 | """Behavior to find and get close to a colored object"""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | # Tuning values
19 | self.low_range = (25, 70, 25)
20 | self.high_range = (80, 255, 255)
21 | self.correct_radius = 120
22 | self.center = 160
23 | # Current state
24 | self.running = False
25 |
26 | def process_control(self):
27 | instruction = get_control_instruction()
28 | if instruction == "start":
29 | self.running = True
30 | elif instruction == "stop":
31 | self.running = False
32 | elif instruction == "exit":
33 | print("Stopping")
34 | exit()
35 |
36 | def find_object(self, original_frame):
37 | """Find the largest enclosing circle for all contours in a masked image.
38 | Returns: the masked image, the object coordinates, the object radius"""
39 | frame_hsv = cv2.cvtColor(original_frame, cv2.COLOR_BGR2HSV)
40 | masked = cv2.inRange(frame_hsv, self.low_range, self.high_range)
41 |
42 | # Find the contours of the image (outline points)
43 | contour_image = np.copy(masked)
44 | contours, _ = cv2.findContours(contour_image, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
45 | # Find enclosing circles
46 | circles = [cv2.minEnclosingCircle(cnt) for cnt in contours]
47 | # Filter for the largest one
48 | largest = (0, 0), 0
49 | for (x, y), radius in circles:
50 | if radius > largest[1]:
51 | largest = (int(x), int(y)), int(radius)
52 | return masked, largest[0], largest[1]
53 |
54 | def make_display(self, frame, processed):
55 | """Create display output, and put it on the queue"""
56 | # Make a dualscreen view - two images of the same scale concatenated
57 | display_frame = np.concatenate((frame, processed), axis=1)
58 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
59 | put_output_image(encoded_bytes)
60 |
61 | def process_frame(self, frame):
62 | # Find the largest enclosing circle
63 | masked, coordinates, radius = self.find_object(frame)
64 | # Now back to 3 channels for display
65 | processed = cv2.cvtColor(masked, cv2.COLOR_GRAY2BGR)
66 | # Draw our circle on the original frame, then display this
67 | cv2.circle(frame, coordinates, radius, [255, 0, 0])
68 | self.make_display(frame, processed)
69 | # Yield the object details
70 | return coordinates, radius
71 |
72 | def run(self):
73 | # Set pan and tilt to middle, then clear it.
74 | self.robot.set_pan(0)
75 | self.robot.set_tilt(0)
76 | # start camera
77 | camera = pi_camera_stream.setup_camera()
78 | # speed pid - based on the radius we get.
79 | speed_pid = PIController(proportional_constant=0.8,
80 | integral_constant=0.1, windup_limit=100)
81 | # direction pid - how far from the middle X is.
82 | direction_pid = PIController(proportional_constant=0.25,
83 | integral_constant=0.1, windup_limit=400)
84 | # warm up and servo move time
85 | time.sleep(0.1)
86 | # Servo's will be in place - stop them for now.
87 | self.robot.servos.stop_all()
88 | print("Setup Complete")
89 | # Main loop
90 | for frame in pi_camera_stream.start_stream(camera):
91 | (x, y), radius = self.process_frame(frame)
92 | self.process_control()
93 | if self.running and radius > 20:
94 | # The size is the first error
95 | radius_error = self.correct_radius - radius
96 | speed_value = speed_pid.get_value(radius_error)
97 | # And the second error is the based on the center coordinate.
98 | direction_error = self.center - x
99 | direction_value = direction_pid.get_value(direction_error)
100 | print("radius: %d, radius_error: %d, speed_value: %.2f, direction_error: %d, direction_value: %.2f" %
101 | (radius, radius_error, speed_value, direction_error, direction_value))
102 | # Now produce left and right motor speeds
103 | self.robot.set_left(speed_value - direction_value)
104 | self.robot.set_right(speed_value + direction_value)
105 | else:
106 | self.robot.stop_motors()
107 | if not self.running:
108 | speed_pid.reset()
109 | direction_pid.reset()
110 |
111 | print("Setting up")
112 | behavior = ColorTrackingBehavior(Robot())
113 | process = start_server_process('color_track_behavior.html')
114 | behavior.run()
115 |
--------------------------------------------------------------------------------
/chapter13/control_image_behavior.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from image_app_core import start_server_process, get_control_instruction, put_output_image
4 | import pi_camera_stream
5 |
6 | def controlled_image_server_behavior():
7 | # Setup the camera
8 | camera = pi_camera_stream.setup_camera()
9 | # allow the camera to warmup
10 | time.sleep(0.1)
11 | # Send frames from camera to server
12 | for frame in pi_camera_stream.start_stream(camera):
13 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
14 | put_output_image(encoded_bytes)
15 | # Check any control instructions
16 | instruction = get_control_instruction()
17 | if instruction == "exit":
18 | print("Stopping")
19 | return
20 |
21 | process = start_server_process('control_image_behavior.html')
22 | controlled_image_server_behavior()
23 |
--------------------------------------------------------------------------------
/chapter13/face_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class FaceTrackBehavior(object):
15 | """Behavior to find and point at a face."""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | cascade_path = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml"
19 | self.cascade = cv2.CascadeClassifier(cascade_path)
20 | # Tuning values
21 | self.center_x = 160
22 | self.center_y = 120
23 | self.min_size = 20
24 | self.pan_pid = PIController(proportional_constant=0.1, integral_constant=0.03)
25 | self.tilt_pid = PIController(proportional_constant=-0.1, integral_constant=-0.03)
26 | # Current state
27 | self.running = False
28 |
29 | def process_control(self):
30 | instruction = get_control_instruction()
31 | if instruction == "start":
32 | self.running = True
33 | elif instruction == "stop":
34 | self.running = False
35 | self.pan_pid.reset()
36 | self.tilt_pid.reset()
37 | self.robot.servos.stop_all()
38 | elif instruction == "exit":
39 | print("Stopping")
40 | exit()
41 |
42 | def find_object(self, original_frame):
43 | """Search the frame for an object. Return the rectangle of the largest by w * h"""
44 | # Make it greyscale to reduce the data used
45 | gray_img = cv2.cvtColor(original_frame, cv2.COLOR_BGR2GRAY)
46 | # Detect all the objects
47 | objects = self.cascade.detectMultiScale(gray_img)
48 | largest = 0, (0, 0, 0, 0) # area, x, y, w, h
49 | for (x, y, w, h) in objects:
50 | item_area = w * h
51 | if item_area > largest[0]:
52 | largest = item_area, (x, y, w, h)
53 | return largest[1]
54 |
55 | def make_display(self, display_frame):
56 | """Create display output, and put it on the queue"""
57 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
58 | put_output_image(encoded_bytes)
59 |
60 | def process_frame(self, frame):
61 | # Find the largest matching object
62 | (x, y, w, h) = self.find_object(frame)
63 | # Draw a rect on the original frame, then display this
64 |         cv2.rectangle(frame, (x, y), (x + w, y + h), [255, 0, 0])
65 | self.make_display(frame)
66 | # Yield the object details
67 | return x, y, w, h
68 |
69 | def run(self):
70 | # start camera
71 | camera = pi_camera_stream.setup_camera()
72 | # warm up time
73 | time.sleep(0.1)
74 | print("Setup Complete")
75 | # Main loop
76 | for frame in pi_camera_stream.start_stream(camera):
77 | (x, y, w, h) = self.process_frame(frame)
78 | self.process_control()
79 | if self.running and h > self.min_size:
80 | # Pan
81 | pan_error = self.center_x - (x + (w / 2))
82 | pan_value = self.pan_pid.get_value(pan_error)
83 | self.robot.set_pan(int(pan_value))
84 | # Tilt
85 | tilt_error = self.center_y - (y + (h / 2))
86 | tilt_value = self.tilt_pid.get_value(tilt_error)
87 | self.robot.set_tilt(int(tilt_value))
88 | print("x: %d, y: %d, pan_error: %d, tilt_error: %d, pan_value: %.2f, tilt_value: %.2f" %
89 | (x, y, pan_error, tilt_error, pan_value, tilt_value))
90 |
91 | print("Setting up")
92 | behavior = FaceTrackBehavior(Robot())
93 | process = start_server_process('color_track_behavior.html')
94 | behavior.run()
95 |
--------------------------------------------------------------------------------
/chapter13/image_app_core.py:
--------------------------------------------------------------------------------
1 | """The flask/webserver part is slightly independent of the behavior,
2 | allowing the user to "tune in" to see, but should not stop the
3 | robot running"""
4 | import time
5 | from multiprocessing import Process, Queue
6 |
7 | from flask import Flask, render_template, Response
8 |
9 |
10 | app = Flask(__name__)
11 | control_queue = Queue()
12 | display_queue = Queue(maxsize=2)
13 | display_template = 'image_server'
14 |
15 | @app.route('/')
16 | def index():
17 | return render_template(display_template)
18 |
19 | def frame_generator():
20 | """This is our main video feed"""
21 | while True:
22 | # at most 20 fps
23 | time.sleep(0.05)
24 | # Get (wait until we have data)
25 | encoded_bytes = display_queue.get()
26 | # Need to turn this into http multipart data.
27 | yield (b'--frame\r\n'
28 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
29 |
30 | @app.route('/display')
31 | def display():
32 | return Response(frame_generator(),
33 | mimetype='multipart/x-mixed-replace; boundary=frame')
34 |
35 | @app.route('/control/<control_name>')
36 | def control(control_name):
37 | control_queue.put(control_name)
38 | return Response('queued')
39 |
40 | def start_server_process(template_name):
41 | """Start the process, call .terminate to close it"""
42 | global display_template
43 | display_template = template_name
44 | # app.debug=True
45 | # app.use_reloader = False
46 | server = Process(target=app.run, kwargs={"host": "0.0.0.0", "port": 5001})
47 | server.daemon = True
48 | server.start()
49 | return server
50 |
51 | def put_output_image(encoded_bytes):
52 | """Queue an output image"""
53 | if display_queue.empty():
54 | display_queue.put(encoded_bytes)
55 |
56 | def get_control_instruction():
57 | """Get control instructions from the web app, if any"""
58 | if control_queue.empty():
59 | # nothing
60 | return None
61 | else:
62 | return control_queue.get()
63 |
--------------------------------------------------------------------------------
/chapter13/image_server.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from flask import Flask, render_template, Response
4 | import pi_camera_stream
5 |
6 | app = Flask(__name__)
7 |
8 | @app.route('/')
9 | def index():
10 | return render_template('image_server.html')
11 |
12 | def frame_generator():
13 | """This is our main video feed"""
14 | camera = pi_camera_stream.setup_camera()
15 |
16 | # allow the camera to warmup
17 | time.sleep(0.1)
18 |
19 | for frame in pi_camera_stream.start_stream(camera):
20 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
21 | # Need to turn this into http multipart data.
22 | yield (b'--frame\r\n'
23 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
24 |
25 | @app.route('/display')
26 | def display():
27 | return Response(frame_generator(),
28 | mimetype='multipart/x-mixed-replace; boundary=frame')
29 |
30 | app.run(host="0.0.0.0", debug=True, port=5001)
31 |
--------------------------------------------------------------------------------
/chapter13/pi_camera_stream.py:
--------------------------------------------------------------------------------
1 | from picamera.array import PiRGBArray
2 | from picamera import PiCamera
3 | import numpy as np
4 | import cv2
5 |
6 | size = (320, 240)
7 | encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
8 |
9 | def setup_camera():
10 | camera = PiCamera()
11 | camera.resolution = size
12 | camera.rotation = 180
13 | return camera
14 |
15 | def start_stream(camera):
16 | image_storage = PiRGBArray(camera, size=size)
17 |
18 | cam_stream = camera.capture_continuous(image_storage, format="bgr", use_video_port=True)
19 | for raw_frame in cam_stream:
20 | yield raw_frame.array
21 | image_storage.truncate(0)
22 |
23 | def get_encoded_bytes_for_frame(frame):
24 | result, encoded_image = cv2.imencode('.jpg', frame, encode_param)
25 | return encoded_image.tostring()
26 |
--------------------------------------------------------------------------------
/chapter13/pid_controller.py:
--------------------------------------------------------------------------------
1 | class PIController(object):
2 | def __init__(self, proportional_constant=0, integral_constant=0, windup_limit=None):
3 | self.proportional_constant = proportional_constant
4 | self.integral_constant = integral_constant
5 | self.windup_limit = windup_limit
6 | # Running sums
7 | self.integral_sum = 0
8 |
9 | def reset(self):
10 | self.integral_sum = 0
11 |
12 | def handle_proportional(self, error):
13 | return self.proportional_constant * error
14 |
15 | def handle_integral(self, error):
16 | """Integral will change if
17 | * There is no windup limit
18 | * We are below the windup limit
19 | * or the sign of the error would reduce the sum"""
20 | if self.windup_limit is None or \
21 | (abs(self.integral_sum) < self.windup_limit) or \
22 | ((error > 0) != (self.integral_sum > 0)):
23 | self.integral_sum += error
24 | return self.integral_constant * self.integral_sum
25 |
26 | def get_value(self, error):
27 | p = self.handle_proportional(error)
28 | i = self.handle_integral(error)
29 | return p + i
30 |
--------------------------------------------------------------------------------
/chapter13/readme.md:
--------------------------------------------------------------------------------
1 | # Setup
2 |
3 | You will need a Raspberry Pi Camera, which has been enabled with `raspi-config`.
4 | On Raspbian, install OpenCV using:
5 |
6 | pi@myrobot:~ $ sudo apt install opencv-python opencv-data
7 |
8 | You will also require picamera and numpy. Install with:
9 |
10 | pi@myrobot:~ $ sudo pip install picamera[array] numpy
11 |
12 |
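13 | To check that OpenCV and picamera can both be imported before running the behaviors, the following one-liner is a minimal sanity check (a sketch; it only prints the OpenCV version):
14 |
15 | pi@myrobot:~ $ python -c "import cv2, picamera; print(cv2.__version__)"
16 |
17 | If this prints a version number without errors, the vision scripts in this chapter should be able to start.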
--------------------------------------------------------------------------------
/chapter13/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import RPi.GPIO as GPIO
4 |
5 | import atexit
6 |
7 | #import leds_led_shim
8 | import leds_8_apa102c
9 | from servos import Servos
10 | from distance_sensor_hcsr04 import DistanceSensor, NoDistanceRead
11 | from encoder_counter import EncoderCounter
12 |
13 | class Robot(object):
14 | wheel_diameter_mm = 69.0
15 | ticks_per_revolution = 40.0
16 | wheel_distance_mm = 131.0
17 |
18 | def __init__(self, motorhat_addr=0x6f, drive_enabled=True):
19 | # Setup the motorhat with the passed in address
20 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
21 |
22 | # get local variable for each motor
23 | self.left_motor = self._mh.getMotor(1)
24 | self.right_motor = self._mh.getMotor(2)
25 | self.drive_enabled = drive_enabled
26 |
27 | # ensure the motors get stopped when the code exits
28 | atexit.register(self.stop_all)
29 |
30 | # Setup the line sensors
31 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
32 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
33 |
34 | # Setup The Distance Sensors
35 | self.left_distance_sensor = DistanceSensor(17, 27)
36 | self.right_distance_sensor = DistanceSensor(5, 6)
37 |
38 | # Setup the Encoders
39 | EncoderCounter.set_constants(self.wheel_diameter_mm, self.ticks_per_revolution)
40 | self.left_encoder = EncoderCounter(4)
41 | self.right_encoder = EncoderCounter(26)
42 |
43 | # Setup the Leds
44 | self.leds = leds_8_apa102c.Leds()
45 |
46 | # Set up servo motors for pan and tilt.
47 | self.servos = Servos(addr=motorhat_addr)
48 |
49 | def stop_all(self):
50 | self.stop_motors()
51 |
52 | # Clear any sensor handlers
53 | self.left_line_sensor.when_line = None
54 | self.left_line_sensor.when_no_line = None
55 | self.right_line_sensor.when_line = None
56 | self.right_line_sensor.when_no_line = None
57 |
58 | # Clear the display
59 | self.leds.clear()
60 | self.leds.show()
61 |
62 | # Reset the servos
63 | self.servos.stop_all()
64 |
65 | def convert_speed(self, speed):
66 | mode = Raspi_MotorHAT.RELEASE
67 | if speed > 0:
68 | mode = Raspi_MotorHAT.FORWARD
69 | elif speed < 0:
70 | mode = Raspi_MotorHAT.BACKWARD
71 | output_speed = (abs(speed) * 255) / 100
72 | return mode, int(output_speed)
73 |
74 | def set_left(self, speed):
75 | if not self.drive_enabled:
76 | return
77 | mode, output_speed = self.convert_speed(speed)
78 | self.left_motor.setSpeed(output_speed)
79 | self.left_motor.run(mode)
80 |
81 | def set_right(self, speed):
82 | if not self.drive_enabled:
83 | return
84 | mode, output_speed = self.convert_speed(speed)
85 | self.right_motor.setSpeed(output_speed)
86 | self.right_motor.run(mode)
87 |
88 | def stop_motors(self):
89 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
90 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
91 |
92 | def set_pan(self, angle):
93 | self.servos.set_servo_angle(1, angle)
94 |
95 | def set_tilt(self, angle):
96 | self.servos.set_servo_angle(0, angle)
97 |
--------------------------------------------------------------------------------
/chapter13/servos.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 |
3 | class Servos(object):
4 | def __init__(self, addr=0x6f, deflect_90_in_ms = 0.9):
5 | """addr: The i2c address of the PWM chip.
6 | deflect_90_in_ms: set this to calibrate the servo motors.
7 | it is what a deflection of 90 degrees is
8 | in terms of a pulse length in milliseconds."""
9 | self._pwm = PWM(addr)
10 | # This sets the timebase for it all
11 | pwm_frequency = 60
12 | self._pwm.setPWMFreq(pwm_frequency)
13 |
14 |         # Frequency is 1/period, but working in ms, we can use 1000
15 | period_in_ms = 1000.0 / pwm_frequency
16 | # The chip has 4096 steps in each period.
17 | pulse_steps = 4096.0
18 | # Mid point of the servo pulse length in milliseconds.
19 | servo_mid_point_ms = 1.5
20 | # Steps for every millisecond.
21 | steps_per_ms = pulse_steps / period_in_ms
22 | # Steps for a degree
23 | self.steps_per_degree = (deflect_90_in_ms * steps_per_ms) / 90.0
24 | # Mid point of the servo in steps
25 | self.servo_mid_point_steps = servo_mid_point_ms * steps_per_ms
26 |         # Prepare with the servos turned off
27 | self.stop_all()
28 |
29 | def stop_all(self):
30 | # 0 in start is nothing, 4096 sets the OFF bit.
31 | self._pwm.setPWM(0, 0, 4096)
32 | self._pwm.setPWM(1, 0, 4096)
33 | self._pwm.setPWM(14, 0, 4096)
34 | self._pwm.setPWM(15, 0, 4096)
35 |
36 | def _convert_degrees_to_pwm(self, position):
37 | return int(self.servo_mid_point_steps + (position * self.steps_per_degree))
38 |
39 | def set_servo_angle(self, channel, angle):
40 |         """angle: The angle in degrees from the center. -90 to 90"""
41 | # Validate
42 | if angle > 90 or angle < -90:
43 | raise ValueError("Angle outside of range")
44 | # Then set the position
45 | off_step = self._convert_degrees_to_pwm(angle)
46 | self._pwm.setPWM(channel, 0, off_step)
47 |
--------------------------------------------------------------------------------
/chapter13/templates/color_track_behavior.html:
--------------------------------------------------------------------------------
1 | <html>
2 |     <head>
3 |         <title>Robot Image Server</title>
4 |     </head>
5 |     <body>
6 |         <h1>Robot Image Server</h1>
7 |         <img src="/display"><br>
8 |
9 |         <a href="/control/start">Start</a> <a href="/control/stop">Stop</a>
10 |         <a href="/control/exit">Exit</a>
11 |     </body>
12 | </html>
13 |
--------------------------------------------------------------------------------
/chapter13/templates/control_image_behavior.html:
--------------------------------------------------------------------------------
1 | <html>
2 |     <head>
3 |         <title>Robot Image Server</title>
4 |     </head>
5 |     <body>
6 |         <h1>Robot Image Server</h1>
7 |         <img src="/display"><br>
8 |
9 |         <a href="/control/exit">Exit</a>
10 |     </body>
11 | </html>
12 |
--------------------------------------------------------------------------------
/chapter13/templates/image_server.html:
--------------------------------------------------------------------------------
1 | <html>
2 |     <head>
3 |         <title>Robot Image Server</title>
4 |     </head>
5 |     <body>
6 |         <h1>Robot Image Server</h1>
7 |         <img src="/display">
8 |     </body>
9 | </html>
10 |
--------------------------------------------------------------------------------
/chapter13/templates/menu.html:
--------------------------------------------------------------------------------
1 | <html>
2 |     <head>
3 |         <title>My Robot Menu</title>
4 |     </head>
5 |     <body>
6 |         <h1>My Robot Menu</h1>
7 |         {% if message %}
8 |             <p>{{ message }}</p>
9 |         {% endif %}
10 |         <ul>
11 |             {% for item in menu %}
12 |             <li><a href="/run/{{ item.mode_name }}">{{ item.text }}</a></li>
13 |             {% endfor %}
14 |         </ul>
15 |     </body>
16 | </html>
17 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/README.md:
--------------------------------------------------------------------------------
1 | ## My Robot
2 |
3 | Send commands to your robot
4 |
5 | ## Description
6 |
7 | Send commands to the robot you have been building
8 |
9 | ## Examples
10 |
11 | - "Robot follow line"
12 | - "Robot avoid walls"
13 | - "Robot chase green"
14 | - "Robot stop"
15 | - "Robot stop at line"
16 | - "Robot drive straight"
17 | - "Robot test led"
18 | - "Robot make a square"
19 | - "Robot track faces"
20 | - "Robot what is your ip"
21 | - "What is the robots ip"
22 |
23 | ## Credits
24 |
25 | Danny Staple
26 |
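27 | ## Configuration
28 |
29 | The skill reads a `base_url` setting (see `__init__.py`) that points at the robot's Flask menu server. The exact value depends on your network; a hypothetical example, assuming the menu server is running on the robot at Flask's default port, would be:
30 |
31 | base_url: http://myrobot.local:5000
32 |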
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | from adapt.intent import IntentBuilder
3 | from mycroft import MycroftSkill, intent_handler
4 | from mycroft.util.log import LOG
5 | import requests
6 |
7 |
8 | class MyRobot(MycroftSkill):
9 | def __init__(self):
10 | MycroftSkill.__init__(self)
11 | self.settings.load_skill_settings_from_file()
12 | self.base_url = self.settings.get("base_url")
13 |
14 | def handle_start_behavior(self, mode_name):
15 | try:
16 | requests.get(self.base_url + "/run/" + mode_name)
17 | self.speak_dialog('Robot')
18 | self.speak_dialog('Starting')
19 |         except Exception:
20 | self.speak_dialog("UnableToReach")
21 | LOG.exception("Can't reach the robot")
22 |
23 | @intent_handler(IntentBuilder("").require("robot").require("DriveForward"))
24 | def handle_drive_forward(self, message):
25 | self.handle_start_behavior("behavior_line")
26 |
27 | @intent_handler(IntentBuilder("").require("robot").require("FollowLine"))
28 | def handle_follow_line(self, message):
29 | self.handle_start_behavior("line_following")
30 |
31 | def create_skill():
32 | return MyRobot()
33 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/dialog/en-us/Robot.dialog:
--------------------------------------------------------------------------------
1 | The Robot
2 | Robot
3 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/dialog/en-us/Starting.dialog:
--------------------------------------------------------------------------------
1 | Starting
2 | is Starting
3 | will Start
4 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/dialog/en-us/UnableToReach.dialog:
--------------------------------------------------------------------------------
1 | Sorry I cannot reach the robot.
2 | The robot does not seem to be reachable right now.
3 | Is the robot turned on?
4 | Have I got the right address for the robot?
5 | Is the flask menu service running on the robot?
6 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/vocab/en-us/DriveForward.voc:
--------------------------------------------------------------------------------
1 | drive forward
2 | drive in a line
3 | drive
4 | go forward
5 | move forward
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/vocab/en-us/FollowLine.voc:
--------------------------------------------------------------------------------
1 | follow a line
2 | follow the line
3 | follow lines
4 | line following
5 | line following behavior
6 |
--------------------------------------------------------------------------------
/chapter14/my-robot-skill/vocab/en-us/robot.voc:
--------------------------------------------------------------------------------
1 | robot
2 | my robot
3 | ask robot to
4 |
--------------------------------------------------------------------------------
/chapter14/readme.md:
--------------------------------------------------------------------------------
1 | # Requirements for Voice Assistant
2 |
3 | This requires the Mycroft system to be installed on the Raspberry Pi.
4 |
5 | This skill requires the requests library. Install it with:
6 |
7 | pi@myrobot:~ $ pip install requests
8 |
9 | The menu service from chapter 11 will need to be running on the robot.
--------------------------------------------------------------------------------
/chapter15/0_starting_point/avoid_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, NoDistanceRead
2 | from time import sleep
3 |
4 |
5 | class ObstacleAvoidingBehavior(object):
6 | """Better obstacle avoiding"""
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 |
10 | # Calculations for the LEDs
11 | led_half = int(self.robot.leds.count/2)
12 | self.max_distance = 100
13 | self.leds_per_distance = led_half / float(self.max_distance)
14 | # print("Leds per distance", self.leds_per_distance)
15 | self.sense_colour = (255, 0, 0)
16 |
17 | def distance_to_led_bar(self, distance):
18 |         # Invert so closer means more LEDs.
19 | inverted = self.max_distance - min(distance, self.max_distance)
20 | led_bar = int(round(inverted * self.leds_per_distance))
21 | return led_bar
22 |
23 | def display_state(self, left_distance, right_distance):
24 | # Clear first
25 | self.robot.leds.clear()
26 | # Right side
27 | led_bar = self.distance_to_led_bar(right_distance)
28 | self.robot.leds.set_range(range(led_bar), self.sense_colour)
29 | # Left side
30 | led_bar = self.distance_to_led_bar(left_distance)
31 | # Bit trickier - must go from below the leds count, to the leds count.
32 | start = self.robot.leds.count - led_bar
33 | self.robot.leds.set_range(range(start, self.robot.leds.count), self.sense_colour)
34 |
35 | # Now show this display
36 | self.robot.leds.show()
37 |
38 | def get_speeds(self, nearest_distance):
39 | if nearest_distance > 100:
40 | nearest_speed = 100
41 | furthest_speed = 100
42 | delay = 100
43 | elif nearest_distance > 50:
44 | nearest_speed = 100
45 | furthest_speed = 80
46 | delay = 100
47 | elif nearest_distance > 20:
48 | nearest_speed = 100
49 | furthest_speed = 60
50 | delay = 100
51 | elif nearest_distance > 10:
52 | nearest_speed = -40
53 | furthest_speed = -100
54 | delay = 100
55 |         else:  # collision
56 | nearest_speed = -100
57 | furthest_speed = -100
58 | delay = 250
59 | return nearest_speed, furthest_speed, delay
60 |
61 | def run(self):
62 | self.robot.set_pan(0)
63 | self.robot.set_tilt(0)
64 | while True:
65 | # Get the sensor readings
66 | try:
67 | left_distance = self.robot.left_distance_sensor.get_distance()
68 | except NoDistanceRead:
69 | left_distance = 100
70 | try:
71 | right_distance = self.robot.right_distance_sensor.get_distance()
72 | except NoDistanceRead:
73 | right_distance = 100
74 | # Display this
75 | self.display_state(left_distance, right_distance)
76 |
77 | # Get speeds for motors from distances
78 | nearest_speed, furthest_speed, delay = self.get_speeds(min(left_distance, right_distance))
79 | print("Distances: l", left_distance, "r", right_distance, "Speeds: n", nearest_speed, "f", furthest_speed,
80 |               "Delay:", delay)
81 |
82 | # Send this to the motors
83 | if left_distance < right_distance:
84 | self.robot.set_left(nearest_speed)
85 | self.robot.set_right(furthest_speed)
86 | else:
87 | self.robot.set_right(nearest_speed)
88 | self.robot.set_left(furthest_speed)
89 |
90 | # Wait our delay time
91 | sleep(delay * 0.001)
92 |
93 | bot = Robot()
94 | behavior = ObstacleAvoidingBehavior(bot)
95 | behavior.run()
96 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/behavior_line.py:
--------------------------------------------------------------------------------
1 | import robot
2 | from time import sleep
3 |
4 | r = robot.Robot()
5 | r.set_left(80)
6 | r.set_right(80)
7 | sleep(1)
8 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/behavior_path.py:
--------------------------------------------------------------------------------
1 | import robot
2 | from time import sleep
3 |
4 | def straight(bot, seconds):
5 | bot.set_left(80)
6 | bot.set_right(70)
7 | sleep(seconds)
8 |
9 | def turn_left(bot, seconds):
10 | bot.set_left(20)
11 | bot.set_right(80)
12 | sleep(seconds)
13 |
14 | def turn_right(bot, seconds):
15 | bot.set_left(80)
16 | bot.set_right(20)
17 | sleep(seconds)
18 |
19 | def spin_left(bot, seconds):
20 | bot.set_left(-80)
21 | bot.set_right(80)
22 | sleep(seconds)
23 |
24 | bot = robot.Robot()
25 | straight(bot, 1)
26 | turn_right(bot, 0.6)
27 | straight(bot, 1)
28 | turn_left(bot, 0.6)
29 | straight(bot, 1)
30 | turn_left(bot, 0.6)
31 | straight(bot, 1)
32 | spin_left(bot, 1)
33 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/circle_pan_tilt_behavior.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 | import math
3 |
4 | from robot import Robot
5 |
6 | class CirclePanTiltBehavior(object):
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 | self.current_time = 0
10 | self.frames_per_circle = 50
11 | self.radians_per_frame = (2 * math.pi) / self.frames_per_circle
12 | self.radius = 30
13 |
14 | def run(self):
15 | while True:
16 | frame = self.current_time % self.frames_per_circle
17 | frame_in_radians = frame * self.radians_per_frame
18 | self.robot.set_pan(self.radius * math.cos(frame_in_radians))
19 | self.robot.set_tilt(self.radius * math.sin(frame_in_radians))
20 | sleep(0.05)
21 | self.current_time += 1
22 |
23 |
24 | bot = Robot()
25 | behavior = CirclePanTiltBehavior(bot)
26 | behavior.run()
27 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/color_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class ColorTrackingBehavior(object):
15 | """Behavior to find and get close to a colored object"""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | # Tuning values
19 | self.low_range = (25, 70, 25)
20 | self.high_range = (80, 255, 255)
21 | self.correct_radius = 120
22 | self.center = 160
23 | # Current state
24 | self.running = False
25 |
26 | def process_control(self):
27 | instruction = get_control_instruction()
28 | if instruction == "start":
29 | self.running = True
30 | elif instruction == "stop":
31 | self.running = False
32 | elif instruction == "exit":
33 | print("Stopping")
34 | exit()
35 |
36 | def find_object(self, original_frame):
37 | """Find the largest enclosing circle for all contours in a masked image.
38 | Returns: the masked image, the object coordinates, the object radius"""
39 | frame_hsv = cv2.cvtColor(original_frame, cv2.COLOR_BGR2HSV)
40 | masked = cv2.inRange(frame_hsv, self.low_range, self.high_range)
41 |
42 | # Find the contours of the image (outline points)
43 | contour_image = np.copy(masked)
44 | contours, _ = cv2.findContours(contour_image, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
45 | # Find enclosing circles
46 | circles = [cv2.minEnclosingCircle(cnt) for cnt in contours]
47 | # Filter for the largest one
48 | largest = (0, 0), 0
49 | for (x, y), radius in circles:
50 | if radius > largest[1]:
51 | largest = (int(x), int(y)), int(radius)
52 | return masked, largest[0], largest[1]
53 |
54 | def make_display(self, frame, processed):
55 | """Create display output, and put it on the queue"""
56 | # Make a dualscreen view - two images of the same scale concatenated
57 | display_frame = np.concatenate((frame, processed), axis=1)
58 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
59 | put_output_image(encoded_bytes)
60 |
61 | def process_frame(self, frame):
62 | # Find the largest enclosing circle
63 | masked, coordinates, radius = self.find_object(frame)
64 | # Now back to 3 channels for display
65 | processed = cv2.cvtColor(masked, cv2.COLOR_GRAY2BGR)
66 | # Draw our circle on the original frame, then display this
67 | cv2.circle(frame, coordinates, radius, [255, 0, 0])
68 | self.make_display(frame, processed)
69 |         # Return the object details
70 | return coordinates, radius
71 |
72 | def run(self):
73 | # Set pan and tilt to middle, then clear it.
74 | self.robot.set_pan(0)
75 | self.robot.set_tilt(0)
76 | # start camera
77 | camera = pi_camera_stream.setup_camera()
78 | # speed pid - based on the radius we get.
79 | speed_pid = PIController(proportional_constant=0.8,
80 | integral_constant=0.1, windup_limit=100)
81 | # direction pid - how far from the middle X is.
82 | direction_pid = PIController(proportional_constant=0.25,
83 | integral_constant=0.1, windup_limit=400)
84 | # warm up and servo move time
85 | time.sleep(0.1)
86 | # Servo's will be in place - stop them for now.
87 | self.robot.servos.stop_all()
88 | print("Setup Complete")
89 | # Main loop
90 | for frame in pi_camera_stream.start_stream(camera):
91 | (x, y), radius = self.process_frame(frame)
92 | self.process_control()
93 | if self.running and radius > 20:
94 | # The size is the first error
95 | radius_error = self.correct_radius - radius
96 | speed_value = speed_pid.get_value(radius_error)
97 | # And the second error is the based on the center coordinate.
98 | direction_error = self.center - x
99 | direction_value = direction_pid.get_value(direction_error)
100 | print("radius: %d, radius_error: %d, speed_value: %.2f, direction_error: %d, direction_value: %.2f" %
101 | (radius, radius_error, speed_value, direction_error, direction_value))
102 | # Now produce left and right motor speeds
103 | self.robot.set_left(speed_value - direction_value)
104 | self.robot.set_right(speed_value + direction_value)
105 | else:
106 | self.robot.stop_motors()
107 | if not self.running:
108 | speed_pid.reset()
109 | direction_pid.reset()
110 |
111 | print("Setting up")
112 | behavior = ColorTrackingBehavior(Robot())
113 | process = start_server_process('color_track_behavior.html')
114 | behavior.run()
115 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/control_image_behavior.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from image_app_core import start_server_process, get_control_instruction, put_output_image
4 | import pi_camera_stream
5 |
6 | def controlled_image_server_behavior():
7 | # Setup the camera
8 | camera = pi_camera_stream.setup_camera()
9 | # allow the camera to warmup
10 | time.sleep(0.1)
11 | # Send frames from camera to server
12 | for frame in pi_camera_stream.start_stream(camera):
13 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
14 | put_output_image(encoded_bytes)
15 | # Check any control instructions
16 | instruction = get_control_instruction()
17 | if instruction == "exit":
18 | print("Stopping")
19 | return
20 |
21 | process = start_server_process('control_image_behavior.html')
22 | controlled_image_server_behavior()
23 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/distance_sensor_hcsr04.py:
--------------------------------------------------------------------------------
1 | """Object for the HC-SR04 distance sensor type."""
2 | import time # import the whole thing, we need more than just sleep
3 | from gpiozero import DigitalInputDevice, DigitalOutputDevice
4 |
5 | # This is an exception; we raise it when get_distance fails to make a measurement
6 | class NoDistanceRead(Exception):
7 | """The system was unable to make a measurement"""
8 |     pass  # We aren't doing anything special, but Python syntax requires a body here.
9 |
10 | class DistanceSensor(DigitalInputDevice):
11 | """Represents a distance sensor."""
12 | def __init__(self, trigger_pin, echo_pin):
13 | # Setup devices, an input device and an output device, with pin numbers for the sensor.
14 | super(DistanceSensor, self).__init__(echo_pin)
15 | self._trigger = DigitalOutputDevice(trigger_pin)
16 | self._trigger.value = False
17 |
18 | def get_distance(self):
19 | """Method to get the distance measurement"""
20 | # Timeout - we'll use this to stop it getting stuck
21 | time_out = time.time() + 2
22 |
23 | # This off-on-off pulse tells the device to make a measurement
24 | self._trigger.value = True
25 | time.sleep(0.00001) # This is the 10 microseconds
26 | self._trigger.value = False
27 |
28 | # Wait for the pin state to stop being 0, going from low to high
29 | # When it rises, this is the real pulse start. Assign it once - it may already have changed!
30 | pulse_start = time.time()
31 | while self.pin.state == 0:
32 | pulse_start = time.time()
33 | # We ran out of time
34 | if pulse_start > time_out:
35 | raise NoDistanceRead("Timed Out")
36 |
37 | # Wait for the echo pin to stop being 1, going from high, to low, the end of the pulse.
38 | pulse_end = time.time()
39 | while self.pin.state == 1:
40 | pulse_end = time.time()
41 | # Pulse end not received
42 | if pulse_end > time_out:
43 | raise NoDistanceRead("Timed Out")
44 |
45 | # The duration is the time between the start and end of pulse in seconds.
46 | pulse_duration = pulse_end - pulse_start
47 | # Speed of sound in centimeters per second - 34300 cm/s. However, the pulse has travelled TWICE
48 | # the distance, so we get half of this. (34300 / 2) = 17150.
49 | distance = pulse_duration * 17150
50 | # Round it to 2 decimal places, any finer doesn't really make sense.
51 | distance = round(distance, 2)
52 | return distance
53 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/drive_distance_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 |
5 |
6 | def drive_distance(bot, distance, speed=80):
7 | # Use left as "primary" motor, the right is keeping up
8 | set_primary = bot.set_left
9 | primary_encoder = bot.left_encoder
10 | set_secondary = bot.set_right
11 | secondary_encoder = bot.right_encoder
12 |
13 | controller = PIController(proportional_constant=5, integral_constant=0.2)
14 |
15 | # start the motors, and start the loop
16 | set_primary(speed)
17 | set_secondary(speed)
18 | while primary_encoder.pulse_count < distance or secondary_encoder.pulse_count < distance:
19 | # Sleep a bit before calculating
20 | time.sleep(0.05)
21 | # How far off are we?
22 | error = primary_encoder.pulse_count - secondary_encoder.pulse_count
23 | adjustment = controller.get_value(error)
24 | # How fast should the motor move to get there?
25 | set_secondary(speed + adjustment)
26 | # Some debug
27 | print("Primary c:{} ({} mm)\tSecondary c:{} ({} mm) e:{} adjustment: {:.2f}".format(
28 | primary_encoder.pulse_count, primary_encoder.distance_in_mm(),
29 | secondary_encoder.pulse_count, secondary_encoder.distance_in_mm(),
30 | error,
31 | adjustment
32 | ))
33 |
34 |
35 | bot = Robot()
36 | distance_to_drive = 1000 # in mm - this is a meter
37 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
38 | drive_distance(bot, distance_in_ticks)
39 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/drive_square_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 | import math
5 |
6 |
7 | def drive_distances(bot, left_distance, right_distance, speed=80):
8 | # We always want the "primary" to be the longest distance, therefore the faster motor
9 | if abs(left_distance) >= abs(right_distance):
10 | print("Left is primary")
11 | set_primary = bot.set_left
12 | primary_encoder = bot.left_encoder
13 | set_secondary = bot.set_right
14 | secondary_encoder = bot.right_encoder
15 | primary_distance = left_distance
16 | secondary_distance = right_distance
17 | else:
18 | print("right is primary")
19 | set_primary = bot.set_right
20 | primary_encoder = bot.right_encoder
21 | set_secondary = bot.set_left
22 | secondary_encoder = bot.left_encoder
23 | primary_distance = right_distance
24 | secondary_distance = left_distance
25 | primary_to_secondary_ratio = secondary_distance / (primary_distance * 1.0)
26 | secondary_speed = speed * primary_to_secondary_ratio
27 | print("Targets - primary: %d, secondary: %d, ratio: %.2f" % (primary_distance, secondary_distance, primary_to_secondary_ratio))
28 |
29 | primary_encoder.reset()
30 | secondary_encoder.reset()
31 |
32 | controller = PIController(proportional_constant=5, integral_constant=0.2)
33 |
34 | # Ensure that the encoder knows which way it is going
35 | primary_encoder.set_direction(math.copysign(1, speed))
36 | secondary_encoder.set_direction(math.copysign(1, secondary_speed))
37 |
38 | # start the motors, and start the loop
39 | set_primary(speed)
40 | set_secondary(secondary_speed)
41 | while abs(primary_encoder.pulse_count) < abs(primary_distance) or abs(secondary_encoder.pulse_count) < abs(secondary_distance):
42 | # And sleep a bit before calculating
43 | time.sleep(0.05)
44 |
45 | # How far off are we?
46 | secondary_target = primary_encoder.pulse_count * primary_to_secondary_ratio
47 | error = secondary_target - secondary_encoder.pulse_count
48 |
49 | # How fast should the motor move to get there?
50 | adjustment = controller.get_value(error)
51 |
52 | set_secondary(secondary_speed + adjustment)
53 | secondary_encoder.set_direction(math.copysign(1, secondary_speed+adjustment))
54 | # Some debug
55 |         print("Primary c:{:.2f} ({:.2f} mm)\tSecondary c:{:.2f} ({:.2f} mm) t:{:.2f} e:{:.2f} s:{:.2f} adjustment: {:.2f}".format(
56 |             primary_encoder.pulse_count,
57 |             primary_encoder.distance_in_mm(),
58 |             secondary_encoder.pulse_count,
59 |             secondary_encoder.distance_in_mm(),
60 |             secondary_target,
61 |             error,
62 |             secondary_speed,
63 |             adjustment
64 |         ))
65 |
66 | # Stop the primary if we need to
67 | if abs(primary_encoder.pulse_count) >= abs(primary_distance):
68 |             print("primary stop")
69 | set_primary(0)
70 | secondary_speed = 0
71 |
72 | def drive_arc(bot, turn_in_degrees, radius, speed=80):
73 | """ Turn is based on change in heading. """
74 | # Get the bot width in ticks
75 | half_width_ticks = EncoderCounter.mm_to_ticks(bot.wheel_distance_mm/2.0)
76 | if turn_in_degrees < 0:
77 | left_radius = radius - half_width_ticks
78 | right_radius = radius + half_width_ticks
79 | else:
80 | left_radius = radius + half_width_ticks
81 | right_radius = radius - half_width_ticks
82 |     print("Arc left radius {:.2f}, right_radius {:.2f}".format(left_radius, right_radius))
83 | radians = math.radians(abs(turn_in_degrees))
84 | left_distance = int(left_radius * radians)
85 | right_distance = int(right_radius * radians)
86 |     print("Arc left distance {}, right_distance {}".format(left_distance, right_distance))
87 | drive_distances(bot, left_distance, right_distance, speed=speed)
88 |
89 | bot = Robot()
90 |
91 | distance_to_drive = 300 # in mm
92 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
93 | radius = bot.wheel_distance_mm + 100 # in mm
94 | radius_in_ticks = EncoderCounter.mm_to_ticks(radius)
95 |
96 | for n in range(4):
97 | drive_distances(bot, distance_in_ticks, distance_in_ticks)
98 | drive_arc(bot, 90, radius_in_ticks, speed=50)
99 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/encoder_counter.py:
--------------------------------------------------------------------------------
1 | from gpiozero import DigitalInputDevice
2 | import math
3 |
4 | class EncoderCounter(DigitalInputDevice):
5 | ticks_to_mm_const = None # you must set this up before using distance methods
6 | def __init__(self, pin):
7 | super(EncoderCounter, self).__init__(pin)
8 | self.pin.when_changed = self.when_changed
9 | self.pulse_count = 0
10 | self.direction = 1
11 |
12 | def when_changed(self):
13 | self.pulse_count += self.direction
14 |
15 | def set_direction(self, direction):
16 | """ This should be -1 or 1. """
17 | assert abs(direction)==1, "Direction %s should be 1 or -1" % direction
18 | self.direction = direction
19 |
20 | def reset(self):
21 | self.pulse_count = 0
22 |
23 | def distance_in_mm(self):
24 | return int(self.pulse_count * EncoderCounter.ticks_to_mm_const)
25 |
26 | @staticmethod
27 | def mm_to_ticks(mm):
28 | return mm / EncoderCounter.ticks_to_mm_const
29 |
30 | @staticmethod
31 | def set_constants(wheel_diameter_mm, ticks_per_revolution):
32 | EncoderCounter.ticks_to_mm_const = (math.pi / ticks_per_revolution) * wheel_diameter_mm
33 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/face_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class FaceTrackBehavior(object):
15 | """Behavior to find and point at a face."""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | cascade_path = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml"
19 | self.cascade = cv2.CascadeClassifier(cascade_path)
20 | # Tuning values
21 | self.center_x = 160
22 | self.center_y = 120
23 | self.min_size = 20
24 | self.pan_pid = PIController(proportional_constant=0.1, integral_constant=0.03)
25 | self.tilt_pid = PIController(proportional_constant=-0.1, integral_constant=-0.03)
26 | # Current state
27 | self.running = False
28 |
29 | def process_control(self):
30 | instruction = get_control_instruction()
31 | if instruction == "start":
32 | self.running = True
33 | elif instruction == "stop":
34 | self.running = False
35 | self.pan_pid.reset()
36 | self.tilt_pid.reset()
37 | self.robot.servos.stop_all()
38 | elif instruction == "exit":
39 | print("Stopping")
40 | exit()
41 |
42 | def find_object(self, original_frame):
43 | """Search the frame for an object. Return the rectangle of the largest by w * h"""
44 | # Make it greyscale to reduce the data used
45 | gray_img = cv2.cvtColor(original_frame, cv2.COLOR_BGR2GRAY)
46 | # Detect all the objects
47 | objects = self.cascade.detectMultiScale(gray_img)
48 | largest = 0, (0, 0, 0, 0) # area, x, y, w, h
49 | for (x, y, w, h) in objects:
50 | item_area = w * h
51 | if item_area > largest[0]:
52 | largest = item_area, (x, y, w, h)
53 | return largest[1]
54 |
55 | def make_display(self, display_frame):
56 | """Create display output, and put it on the queue"""
57 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
58 | put_output_image(encoded_bytes)
59 |
60 | def process_frame(self, frame):
61 | # Find the largest matching object
62 | (x, y, w, h) = self.find_object(frame)
63 | # Draw a rect on the original frame, then display this
64 |         cv2.rectangle(frame, (x, y), (x + w, y + h), [255, 0, 0])
65 |         self.make_display(frame)
66 |         # Return the object details
67 |         return x, y, w, h
68 |
69 | def run(self):
70 | # start camera
71 | camera = pi_camera_stream.setup_camera()
72 | # warm up time
73 | time.sleep(0.1)
74 | print("Setup Complete")
75 | # Main loop
76 | for frame in pi_camera_stream.start_stream(camera):
77 | (x, y, w, h) = self.process_frame(frame)
78 | self.process_control()
79 | if self.running and h > self.min_size:
80 | # Pan
81 | pan_error = self.center_x - (x + (w / 2))
82 | pan_value = self.pan_pid.get_value(pan_error)
83 | self.robot.set_pan(int(pan_value))
84 | # Tilt
85 | tilt_error = self.center_y - (y + (h / 2))
86 | tilt_value = self.tilt_pid.get_value(tilt_error)
87 | self.robot.set_tilt(int(tilt_value))
88 | print("x: %d, y: %d, pan_error: %d, tilt_error: %d, pan_value: %.2f, tilt_value: %.2f" %
89 | (x, y, pan_error, tilt_error, pan_value, tilt_value))
90 |
91 | print("Setting up")
92 | behavior = FaceTrackBehavior(Robot())
93 | process = start_server_process('color_track_behavior.html')
94 | behavior.run()
95 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/following_rainbows.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | import colorsys
3 | from time import sleep
4 |
5 | class FollowingRainbows:
6 | # Note - this is the robot ON the line.
7 | def __init__(self, the_robot, forward_speed=40, cornering=0):
8 | self.robot = the_robot
9 | self.forward_speed = forward_speed
10 | self.cornering = cornering
11 | self.left_index = 0
12 | self.left_brightness = 0
13 | self.right_index = 0
14 | self.right_brightness = 0
15 | self.main_index = 0
16 |
17 | led_qtr = int(self.robot.leds.count/4)
18 | self.right_indicator = range(0, led_qtr)
19 | self.left_indicator = range(self.robot.leds.count - led_qtr, self.robot.leds.count)
20 |
21 |
22 | def when_left_crosses_line(self):
23 | self.robot.set_left(self.cornering)
24 | self.left_brightness = 1.0
25 |
26 | def when_right_crosses_line(self):
27 | self.robot.set_right(self.cornering)
28 | self.right_brightness = 1.0
29 |
30 | def when_left_off_line(self):
31 | self.robot.set_left(self.forward_speed)
32 |
33 | def when_right_off_line(self):
34 | self.robot.set_right(self.forward_speed)
35 |
36 | def hsv_to_rgb(self, h, s, v):
37 | return [int(component*255) for component in colorsys.hsv_to_rgb(h, s, v)]
38 |
39 | def make_display(self):
40 | # main rainbow
41 | half_leds = int(self.robot.leds.count/2)
42 | qtr_leds = int(self.robot.leds.count/4)
43 | for n in range(0, half_leds):
44 | offset = (240/half_leds) * n
45 | ih = (self.main_index + offset) % 360
46 | ch = self.hsv_to_rgb(ih / 360.0, 1.0, 0.6)
47 |             rgb = ch  # hsv_to_rgb already returns 0-255 values
48 | self.robot.leds.set_one(qtr_leds + n, rgb)
49 | self.main_index += 5
50 |         # Left and right
51 | for n in range(0, qtr_leds):
52 | offset = (60/7.0) * n
53 | lh = (self.left_index + offset) % 360
54 | ch = self.hsv_to_rgb(lh / 360.0, 1.0, self.left_brightness)
55 |             rgb = ch  # hsv_to_rgb already returns 0-255 values
56 | self.robot.leds.set_one(n, rgb)
57 | rh = (self.right_index + offset) % 360
58 | ch = self.hsv_to_rgb(rh / 360.0, 1.0, self.right_brightness)
59 |             rgb = ch  # hsv_to_rgb already returns 0-255 values
60 | self.robot.leds.set_one(self.robot.leds.count-1-n, rgb)
61 | self.left_index += 5
62 | self.right_index -= 5
63 | if self.left_brightness >= 0.1:
64 | self.left_brightness -= 0.1
65 | if self.right_brightness >= 0.1:
66 | self.right_brightness -= 0.1
67 | self.robot.leds.show()
68 |
69 | def run(self):
70 | # Setup conditions
71 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
72 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
73 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
74 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
75 | # Start driving
76 | self.robot.set_left(self.forward_speed)
77 | self.robot.set_right(self.forward_speed)
78 | while True:
79 | sleep(0.01)
80 | self.make_display()
81 |
82 |
83 | bot = Robot()
84 | behavior = FollowingRainbows(bot)
85 | behavior.run()
86 |
87 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/image_app_core.py:
--------------------------------------------------------------------------------
1 | """The flask/webserver part is slightly independent of the behavior,
2 | allowing the user to "tune in" to see, but should not stop the
3 | robot running"""
4 | import time
5 | from multiprocessing import Process, Queue
6 |
7 | from flask import Flask, render_template, Response
8 |
9 |
10 | app = Flask(__name__)
11 | control_queue = Queue()
12 | display_queue = Queue(maxsize=2)
13 | display_template = 'image_server'
14 |
15 | @app.route('/')
16 | def index():
17 | return render_template(display_template)
18 |
19 | def frame_generator():
20 | """This is our main video feed"""
21 | while True:
22 | # at most 20 fps
23 | time.sleep(0.05)
24 | # Get (wait until we have data)
25 | encoded_bytes = display_queue.get()
26 | # Need to turn this into http multipart data.
27 | yield (b'--frame\r\n'
28 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
29 |
30 | @app.route('/display')
31 | def display():
32 | return Response(frame_generator(),
33 | mimetype='multipart/x-mixed-replace; boundary=frame')
34 |
35 | @app.route('/control/<control_name>')
36 | def control(control_name):
37 | control_queue.put(control_name)
38 | return Response('queued')
39 |
40 | def start_server_process(template_name):
41 | """Start the process, call .terminate to close it"""
42 | global display_template
43 | display_template = template_name
44 | # app.debug=True
45 | # app.use_reloader = False
46 | server = Process(target=app.run, kwargs={"host": "0.0.0.0", "port": 5001})
47 | server.daemon = True
48 | server.start()
49 | return server
50 |
51 | def put_output_image(encoded_bytes):
52 | """Queue an output image"""
53 | if display_queue.empty():
54 | display_queue.put(encoded_bytes)
55 |
56 | def get_control_instruction():
57 | """Get control instructions from the web app, if any"""
58 | if control_queue.empty():
59 | # nothing
60 | return None
61 | else:
62 | return control_queue.get()
63 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/image_server.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from flask import Flask, render_template, Response
4 | import pi_camera_stream
5 |
6 | app = Flask(__name__)
7 |
8 | @app.route('/')
9 | def index():
10 | return render_template('image_server.html')
11 |
12 | def frame_generator():
13 | """This is our main video feed"""
14 | camera = pi_camera_stream.setup_camera()
15 |
16 | # allow the camera to warmup
17 | time.sleep(0.1)
18 |
19 | for frame in pi_camera_stream.start_stream(camera):
20 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
21 | # Need to turn this into http multipart data.
22 | yield (b'--frame\r\n'
23 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
24 |
25 | @app.route('/display')
26 | def display():
27 | return Response(frame_generator(),
28 | mimetype='multipart/x-mixed-replace; boundary=frame')
29 |
30 | app.run(host="0.0.0.0", debug=True, port=5001)
31 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/leds_8_apa102c.py:
--------------------------------------------------------------------------------
1 | import spidev
2 |
3 | class Leds(object):
4 | def __init__(self):
5 | # MOSI - default output, (Master Out, Slave in) is 10. Clock is 11.
6 | self.device = spidev.SpiDev()
7 | self.device.open(0, 0)
8 | self.device.max_speed_hz = 15000
9 | self.colors = [(0,0,0)] * self.count
10 |
11 | @property
12 | def count(self):
13 | return 8
14 |
15 | def set_one(self, led_number, color):
16 | assert(len(color) == 3)
17 | self.colors[led_number] = color
18 |
19 | def set_range(self, a_range, color):
20 | for led_number in a_range:
21 | self.colors[led_number] = color
22 |
23 | def set_all(self, color):
24 | self.colors = [color] * self.count
25 |
26 | def clear(self):
27 | self.set_all((0, 0, 0))
28 |
29 | def show(self):
30 | # Create the wake up header
31 | data = [0] * 4
32 | for color in self.colors:
33 | data.append(0xe1)
34 | data.extend(color)
35 | data.extend([0]* 4)
36 | # send it
37 | self.device.xfer(data)
38 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/leds_led_shim.py:
--------------------------------------------------------------------------------
1 | import ledshim
2 |
3 | class Leds(object):
4 | def __init__(self):
5 | self.count = 24
6 |
7 | def set_one(self, led_number, color):
8 | ledshim.set_pixel(led_number, *color)
9 |
10 | def set_range(self, a_range, color):
11 | ledshim.set_multiple_pixels(a_range, color)
12 |
13 | def set_all(self, color):
14 | ledshim.set_all(*color)
15 |
16 | def clear(self):
17 | ledshim.clear()
18 |
19 | def show(self):
20 | ledshim.show()
--------------------------------------------------------------------------------
/chapter15/0_starting_point/leds_test.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | bot = Robot()
5 | red = (255, 0, 0)
6 | blue = (0, 0, 255)
7 |
8 | while True:
9 | print("red")
10 | bot.leds.set_all(red)
11 | bot.leds.show()
12 | sleep(0.5)
13 | print("blue")
14 | bot.leds.set_all(blue)
15 | bot.leds.show()
16 | sleep(0.5)
17 |
18 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/line_following_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | cross_line_color = (255, 0, 0)
5 | off_line_color = (0, 0, 255)
6 |
7 | class LineFollowingBehavior:
8 | # Note - this is the robot ON the line.
9 | def __init__(self, the_robot, forward_speed=30, cornering=-30):
10 | self.robot = the_robot
11 | self.forward_speed = forward_speed
12 | self.cornering = cornering
13 |
14 | led_qtr = int(self.robot.leds.count/4)
15 | self.right_indicator = range(0, led_qtr)
16 | self.left_indicator = range(self.robot.leds.count - led_qtr, self.robot.leds.count)
17 |
18 | self.robot.leds.show()
19 |
20 | def when_left_crosses_line(self):
21 | self.robot.set_left(self.cornering)
22 | self.robot.leds.set_range(self.left_indicator, cross_line_color)
23 | self.robot.leds.show()
24 |
25 | def when_right_crosses_line(self):
26 | self.robot.set_right(self.cornering)
27 | self.robot.leds.set_range(self.right_indicator, cross_line_color)
28 | self.robot.leds.show()
29 |
30 | def when_left_off_line(self):
31 | self.robot.set_left(self.forward_speed)
32 | self.robot.leds.set_range(self.left_indicator, off_line_color)
33 | self.robot.leds.show()
34 |
35 | def when_right_off_line(self):
36 | self.robot.set_right(self.forward_speed)
37 | self.robot.leds.set_range(self.right_indicator, off_line_color)
38 | self.robot.leds.show()
39 |
40 | def run(self):
41 | # Setup conditions
42 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
43 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
44 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
45 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
46 | # Start driving
47 | self.robot.set_left(self.forward_speed)
48 | self.robot.set_right(self.forward_speed)
49 | while True:
50 | sleep(0.02)
51 |
52 |
53 | bot = Robot()
54 | behavior = LineFollowingBehavior(bot)
55 | behavior.run()
56 |
57 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/menu_server.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, render_template
2 | from robot_modes import RobotModes
3 |
4 | # A Flask App contains all its routes.
5 | app = Flask(__name__)
6 | # Prepare our robot modes for use
7 | mode_manager = RobotModes()
8 |
9 | def render_menu(message=None):
10 | """Render the menu screen, with an optional status message"""
11 | return render_template('menu.html', menu=mode_manager.menu_config, message=message)
12 |
13 | # These are the Flask routes - the different places we can go to in our browser.
14 |
15 | @app.route("/")
16 | def index():
17 | return render_menu()
18 |
19 | @app.route("/run/<mode_name>")
20 | def run(mode_name):
21 | # Use our robot app to run something with this mode_name
22 | mode_manager.run(mode_name)
23 | return render_menu(message="%s running" % mode_name)
24 |
25 | @app.route("/stop")
26 | def stop():
27 | # Tell our system to stop the mode it's in.
28 | mode_manager.stop()
29 | return render_menu(message='Stopped')
30 |
31 | # Start the app running
32 | app.run(host="0.0.0.0", debug=True)
33 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/pi_camera_stream.py:
--------------------------------------------------------------------------------
1 | from picamera.array import PiRGBArray
2 | from picamera import PiCamera
3 | import numpy as np
4 | import cv2
5 |
6 | size = (320, 240)
7 | encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
8 |
9 | def setup_camera():
10 | camera = PiCamera()
11 | camera.resolution = size
12 | camera.rotation = 180
13 | return camera
14 |
15 | def start_stream(camera):
16 | image_storage = PiRGBArray(camera, size=size)
17 |
18 | cam_stream = camera.capture_continuous(image_storage, format="bgr", use_video_port=True)
19 | for raw_frame in cam_stream:
20 | yield raw_frame.array
21 | image_storage.truncate(0)
22 |
23 | def get_encoded_bytes_for_frame(frame):
24 | result, encoded_image = cv2.imencode('.jpg', frame, encode_param)
25 | return encoded_image.tostring()
26 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/pid_controller.py:
--------------------------------------------------------------------------------
1 | class PIController(object):
2 | def __init__(self, proportional_constant=0, integral_constant=0, windup_limit=None):
3 | self.proportional_constant = proportional_constant
4 | self.integral_constant = integral_constant
5 | self.windup_limit = windup_limit
6 | # Running sums
7 | self.integral_sum = 0
8 |
9 | def reset(self):
10 | self.integral_sum = 0
11 |
12 | def handle_proportional(self, error):
13 | return self.proportional_constant * error
14 |
15 | def handle_integral(self, error):
16 | """Integral will change if
17 | * There is no windup limit
18 | * We are below the windup limit
19 | * or the sign of the error would reduce the sum"""
20 | if self.windup_limit is None or \
21 | (abs(self.integral_sum) < self.windup_limit) or \
22 | ((error > 0) != (self.integral_sum > 0)):
23 | self.integral_sum += error
24 | return self.integral_constant * self.integral_sum
25 |
26 | def get_value(self, error):
27 | p = self.handle_proportional(error)
28 | i = self.handle_integral(error)
29 | return p + i
30 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import RPi.GPIO as GPIO
4 |
5 | import atexit
6 |
7 | #import leds_led_shim
8 | import leds_8_apa102c
9 | from servos import Servos
10 | from distance_sensor_hcsr04 import DistanceSensor, NoDistanceRead
11 | from encoder_counter import EncoderCounter
12 |
13 | class Robot(object):
14 | wheel_diameter_mm = 69.0
15 | ticks_per_revolution = 40.0
16 | wheel_distance_mm = 131.0
17 |
18 | def __init__(self, motorhat_addr=0x6f, drive_enabled=True):
19 | # Setup the motorhat with the passed in address
20 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
21 |
22 | # get local variable for each motor
23 | self.left_motor = self._mh.getMotor(1)
24 | self.right_motor = self._mh.getMotor(2)
25 | self.drive_enabled = drive_enabled
26 |
27 | # ensure the motors get stopped when the code exits
28 | atexit.register(self.stop_all)
29 |
30 | # Setup the line sensors
31 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
32 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
33 |
34 | # Setup The Distance Sensors
35 | self.left_distance_sensor = DistanceSensor(17, 27)
36 | self.right_distance_sensor = DistanceSensor(5, 6)
37 |
38 | # Setup the Encoders
39 | EncoderCounter.set_constants(self.wheel_diameter_mm, self.ticks_per_revolution)
40 | self.left_encoder = EncoderCounter(4)
41 | self.right_encoder = EncoderCounter(26)
42 |
43 | # Setup the Leds
44 | self.leds = leds_8_apa102c.Leds()
45 |
46 | # Set up servo motors for pan and tilt.
47 | self.servos = Servos(addr=motorhat_addr)
48 |
49 | def stop_all(self):
50 | self.stop_motors()
51 |
52 | # Clear any sensor handlers
53 | self.left_line_sensor.when_line = None
54 | self.left_line_sensor.when_no_line = None
55 | self.right_line_sensor.when_line = None
56 | self.right_line_sensor.when_no_line = None
57 |
58 | # Clear the display
59 | self.leds.clear()
60 | self.leds.show()
61 |
62 | # Reset the servos
63 | self.servos.stop_all()
64 |
65 | def convert_speed(self, speed):
66 | mode = Raspi_MotorHAT.RELEASE
67 | if speed > 0:
68 | mode = Raspi_MotorHAT.FORWARD
69 | elif speed < 0:
70 | mode = Raspi_MotorHAT.BACKWARD
71 | output_speed = (abs(speed) * 255) / 100
72 | return mode, int(output_speed)
73 |
74 | def set_left(self, speed):
75 | if not self.drive_enabled:
76 | return
77 | mode, output_speed = self.convert_speed(speed)
78 | self.left_motor.setSpeed(output_speed)
79 | self.left_motor.run(mode)
80 |
81 | def set_right(self, speed):
82 | if not self.drive_enabled:
83 | return
84 | mode, output_speed = self.convert_speed(speed)
85 | self.right_motor.setSpeed(output_speed)
86 | self.right_motor.run(mode)
87 |
88 | def stop_motors(self):
89 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
90 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
91 |
92 | def set_pan(self, angle):
93 | self.servos.set_servo_angle(1, angle)
94 |
95 | def set_tilt(self, angle):
96 | self.servos.set_servo_angle(0, angle)
97 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/robot_modes.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 |
4 | class RobotModes(object):
5 | """Our robot behaviors and tests as running modes"""
6 |
7 | # Mode config goes from a "mode_name" to a script to run. Configured for look up
8 | # We could convert the path segment into a python script by adding '.py'
9 | # but this is a bad idea for at least 2 reasons:
10 |     # * We may want to switch the script it really runs (for example, swap avoid_behavior.py for simple_avoid_behavior.py)
11 | # * It's not a great security idea to let this run anything but the scripts we specify here.
12 | mode_config = {
13 | "avoid_behavior": "avoid_behavior.py",
14 | "circle_head": "circle_pan_tilt_behavior.py",
15 | "test_leds": "leds_test.py",
16 | "test_hcsr04": "test_hcsr04.py",
17 | "stop_at_line": "stop_at_line.py",
18 | "line_following": "line_following_behavior.py",
19 | "behavior_line": "straight_line_drive.py",
20 | "behavior_path": "drive_square.py"
21 | }
22 |
23 | # Menu config is a list of mode_names and text to display. Ordered as we'd like our menu.
24 | menu_config = [
25 | {"mode_name": "avoid_behavior", "text": "Avoid Behavior"},
26 | {"mode_name": "circle_head", "text": "Circle Head"},
27 | {"mode_name": "test_leds", "text": "Test LEDS"},
28 | {"mode_name": "test_hcsr04", "text": "Test HC-SR04"},
29 | {"mode_name": "stop_at_line", "text": "Stop At Line"},
30 | {"mode_name": "line_following", "text": "Line Following"},
31 | {"mode_name": "behavior_line", "text": "Drive In A Line"},
32 | {"mode_name": "behavior_path", "text": "Drive a Square Path"}
33 | ]
34 |
35 | def __init__(self):
36 | self.current_process = None
37 |
38 | def is_running(self):
39 | """Check if there is a process running. Returncode is only set when a process finishes"""
40 | return self.current_process and self.current_process.returncode is None
41 |
42 | def run(self, mode_name):
43 |         """Run the mode as a subprocess, stopping any mode that is still running first"""
44 | while self.is_running():
45 | self.stop()
46 | script = self.mode_config[mode_name]
47 | self.current_process = subprocess.Popen(["python", script])
48 | return True
49 |
50 | def stop(self):
51 | """Stop a process"""
52 | if self.is_running():
53 | # Sending the signal sigint is (on Linux) similar to pressing ctrl-c.
54 | # The behavior will do the same clean up.
55 | self.current_process.send_signal(subprocess.signal.SIGINT)
56 | self.current_process = None
57 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/servos.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 |
3 | class Servos(object):
4 | def __init__(self, addr=0x6f, deflect_90_in_ms = 0.9):
5 | """addr: The i2c address of the PWM chip.
6 | deflect_90_in_ms: set this to calibrate the servo motors.
7 | it is what a deflection of 90 degrees is
8 | in terms of a pulse length in milliseconds."""
9 | self._pwm = PWM(addr)
10 | # This sets the timebase for it all
11 | pwm_frequency = 60
12 | self._pwm.setPWMFreq(pwm_frequency)
13 |
14 |         # Period is 1/frequency; since we are working in milliseconds, that is 1000 / frequency.
15 | period_in_ms = 1000.0 / pwm_frequency
16 | # The chip has 4096 steps in each period.
17 | pulse_steps = 4096.0
18 | # Mid point of the servo pulse length in milliseconds.
19 | servo_mid_point_ms = 1.5
20 | # Steps for every millisecond.
21 | steps_per_ms = pulse_steps / period_in_ms
22 | # Steps for a degree
23 | self.steps_per_degree = (deflect_90_in_ms * steps_per_ms) / 90.0
24 | # Mid point of the servo in steps
25 | self.servo_mid_point_steps = servo_mid_point_ms * steps_per_ms
26 |         # Start with the servos turned off
27 | self.stop_all()
28 |
29 | def stop_all(self):
30 | # 0 in start is nothing, 4096 sets the OFF bit.
31 | self._pwm.setPWM(0, 0, 4096)
32 | self._pwm.setPWM(1, 0, 4096)
33 | self._pwm.setPWM(14, 0, 4096)
34 | self._pwm.setPWM(15, 0, 4096)
35 |
36 | def _convert_degrees_to_pwm(self, position):
37 | return int(self.servo_mid_point_steps + (position * self.steps_per_degree))
38 |
39 | def set_servo_angle(self, channel, angle):
40 |         """angle: The angle in degrees from the center. -90 to 90"""
41 | # Validate
42 | if angle > 90 or angle < -90:
43 | raise ValueError("Angle outside of range")
44 | # Then set the position
45 | off_step = self._convert_degrees_to_pwm(angle)
46 | self._pwm.setPWM(channel, 0, off_step)
47 |
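# A worked example of the arithmetic above (purely illustrative, using the
# defaults pwm_frequency=60 and deflect_90_in_ms=0.9):
#   period_in_ms          = 1000 / 60           ~= 16.67 ms
#   steps_per_ms          = 4096 / 16.67        ~= 245.8 steps
#   steps_per_degree      = (0.9 * 245.8) / 90  ~= 2.46 steps
#   servo_mid_point_steps = 1.5 * 245.8         ~= 369 steps
# so set_servo_angle(channel, 45) writes an off step of roughly 369 + 45 * 2.46 ~= 479.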
--------------------------------------------------------------------------------
/chapter15/0_starting_point/simple_avoid_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, NoDistanceRead
2 | from time import sleep
3 |
4 |
5 | class ObstacleAvoidingBehavior(object):
6 | """Simple obstacle avoiding"""
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 |
10 | # Calculations for the LEDs
11 | led_half = int(self.robot.leds.count/2)
12 | self.max_distance = 100
13 | self.leds_per_distance = led_half / float(self.max_distance)
14 | # print("Leds per distance", self.leds_per_distance)
15 | self.sense_colour = (255, 0, 0)
16 |
17 | def distance_to_led_bar(self, distance):
18 |         # Invert so closer means more LEDs.
19 | inverted = self.max_distance - min(distance, self.max_distance)
20 | led_bar = int(round(inverted * self.leds_per_distance))
21 | return led_bar
22 |
23 | def display_state(self, left_distance, right_distance):
24 | # Clear first
25 | self.robot.leds.clear()
26 | # Right side
27 | led_bar = self.distance_to_led_bar(right_distance)
28 | self.robot.leds.set_range(range(led_bar), self.sense_colour)
29 | # Left side
30 | led_bar = self.distance_to_led_bar(left_distance)
31 |         # A bit trickier - the left bar runs from (count - led_bar) up to the LED count.
32 | start = self.robot.leds.count - led_bar
33 | self.robot.leds.set_range(range(start, self.robot.leds.count), self.sense_colour)
34 |
35 | # Now show this display
36 | self.robot.leds.show()
37 |
38 | def get_motor_speed(self, distance):
39 |         """This method chooses a speed for a motor based on the distance from its sensor"""
40 | if distance < 20:
41 | return -100
42 | else:
43 | return 100
44 |
45 | def run(self):
46 | self.robot.set_pan(0)
47 | self.robot.set_tilt(0)
48 | while True:
49 | # Get the sensor readings
50 | try:
51 | left_distance = self.robot.left_distance_sensor.get_distance()
52 | except NoDistanceRead:
53 | left_distance = 100
54 | try:
55 | right_distance = self.robot.right_distance_sensor.get_distance()
56 | except NoDistanceRead:
57 | right_distance = 100
58 | # Display this
59 | self.display_state(left_distance, right_distance)
60 |
61 | # Get speeds for motors from distances
62 | print("Distances: l", left_distance, "r", right_distance)
63 | self.robot.set_left(self.get_motor_speed(left_distance))
64 | self.robot.set_right(self.get_motor_speed(right_distance))
65 |
66 | # Wait a little
67 | sleep(0.1)
68 |
69 | bot = Robot()
70 | behavior = ObstacleAvoidingBehavior(bot)
71 | behavior.run()
72 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/stop_at_line.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 | from gpiozero import LineSensor
4 |
5 | r = Robot()
6 |
7 | lsensor = LineSensor(23, pull_up=True)
8 | rsensor = LineSensor(16, pull_up=True)
9 |
10 | lsensor.when_line = r.stop_motors
11 | rsensor.when_line = r.stop_motors
12 | r.set_left(60)
13 | r.set_right(60)
14 | while True:
15 | sleep(0.02)
16 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/straight_line_drive.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from pid_controller import PIController
3 | import time
4 |
5 | bot = Robot()
6 | stop_at_time = time.time() + 60
7 |
8 | speed = 80
9 | bot.set_left(speed)
10 | bot.set_right(speed)
11 |
12 | pid = PIController(proportional_constant=4, integral_constant=0.2)
13 | while time.time() < stop_at_time:
14 | time.sleep(0.02)
15 | # Calculate the error
16 | left = bot.left_encoder.pulse_count
17 | right = bot.right_encoder.pulse_count
18 | error = left - right
19 | # Get the speed
20 | adjustment = pid.get_value(error)
21 | right_speed = int(speed + adjustment)
22 |     print("left {} right {} "
23 |           "right_speed: {} "
24 |           "error: {} "
25 |           "adjustment: {:.2f}".format(
26 |               left, right, right_speed, error, adjustment))
27 |     # Apply it to the right motor
28 | bot.set_right(right_speed)
29 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/templates/color_track_behavior.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Robot Image Server
5 |
6 |
7 | Robot Image Server
8 |
9 | Start Stop
10 | Exit
11 |
12 |
13 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/templates/control_image_behavior.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Robot Image Server
5 |
6 |
7 | Robot Image Server
8 |
9 | Exit
10 |
11 |
12 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/templates/image_server.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Robot Image Server
4 |
5 |
6 | Robot Image Server
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/chapter15/0_starting_point/templates/menu.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | My Robot Menu
4 |
5 |
6 | My Robot Menu
7 | {% if message %}
8 | {{ message }}
9 | {% endif %}
10 |
16 |
17 |
--------------------------------------------------------------------------------
/chapter15/full_system/avoid_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, NoDistanceRead
2 | from time import sleep
3 |
4 |
5 | class ObstacleAvoidingBehavior(object):
6 | """Better obstacle avoiding"""
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 |
10 | # Calculations for the LEDs
11 | led_half = int(self.robot.leds.count/2)
12 | self.max_distance = 100
13 | self.leds_per_distance = led_half / float(self.max_distance)
14 | # print("Leds per distance", self.leds_per_distance)
15 | self.sense_colour = (255, 0, 0)
16 |
17 | def distance_to_led_bar(self, distance):
18 |         # Invert so closer means more LEDs.
19 | inverted = self.max_distance - min(distance, self.max_distance)
20 | led_bar = int(round(inverted * self.leds_per_distance))
21 | return led_bar
22 |
23 | def display_state(self, left_distance, right_distance):
24 | # Clear first
25 | self.robot.leds.clear()
26 | # Right side
27 | led_bar = self.distance_to_led_bar(right_distance)
28 | self.robot.leds.set_range(range(led_bar), self.sense_colour)
29 | # Left side
30 | led_bar = self.distance_to_led_bar(left_distance)
31 |         # A bit trickier - the left bar runs from (count - led_bar) up to the LED count.
32 | start = self.robot.leds.count - led_bar
33 | self.robot.leds.set_range(range(start, self.robot.leds.count), self.sense_colour)
34 |
35 | # Now show this display
36 | self.robot.leds.show()
37 |
38 | def get_speeds(self, nearest_distance):
39 | if nearest_distance > 100:
40 | nearest_speed = 100
41 | furthest_speed = 100
42 | delay = 100
43 | elif nearest_distance > 50:
44 | nearest_speed = 100
45 | furthest_speed = 80
46 | delay = 100
47 | elif nearest_distance > 20:
48 | nearest_speed = 100
49 | furthest_speed = 60
50 | delay = 100
51 | elif nearest_distance > 10:
52 | nearest_speed = -40
53 | furthest_speed = -100
54 | delay = 100
55 |         else: # collision
56 | nearest_speed = -100
57 | furthest_speed = -100
58 | delay = 250
59 | return nearest_speed, furthest_speed, delay
60 |
61 | def run(self):
62 | self.robot.set_pan(0)
63 | self.robot.set_tilt(0)
64 | while True:
65 | # Get the sensor readings
66 | try:
67 | left_distance = self.robot.left_distance_sensor.get_distance()
68 | except NoDistanceRead:
69 | left_distance = 100
70 | try:
71 | right_distance = self.robot.right_distance_sensor.get_distance()
72 | except NoDistanceRead:
73 | right_distance = 100
74 | # Display this
75 | self.display_state(left_distance, right_distance)
76 |
77 | # Get speeds for motors from distances
78 | nearest_speed, furthest_speed, delay = self.get_speeds(min(left_distance, right_distance))
79 | print("Distances: l", left_distance, "r", right_distance, "Speeds: n", nearest_speed, "f", furthest_speed,
80 | "Delays: l", delay)
81 |
82 | # Send this to the motors
83 | if left_distance < right_distance:
84 | self.robot.set_left(nearest_speed)
85 | self.robot.set_right(furthest_speed)
86 | else:
87 | self.robot.set_right(nearest_speed)
88 | self.robot.set_left(furthest_speed)
89 |
90 | # Wait our delay time
91 | sleep(delay * 0.001)
92 |
93 | bot = Robot()
94 | behavior = ObstacleAvoidingBehavior(bot)
95 | behavior.run()
96 |
--------------------------------------------------------------------------------
/chapter15/full_system/circle_pan_tilt_behavior.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 | import math
3 |
4 | from robot import Robot
5 |
6 | class CirclePanTiltBehavior(object):
7 | def __init__(self, the_robot):
8 | self.robot = the_robot
9 | self.current_time = 0
10 | self.frames_per_circle = 50
11 | self.radians_per_frame = (2 * math.pi) / self.frames_per_circle
12 | self.radius = 30
13 |
14 | def run(self):
15 | while True:
16 | frame = self.current_time % self.frames_per_circle
17 | frame_in_radians = frame * self.radians_per_frame
18 | self.robot.set_pan(self.radius * math.cos(frame_in_radians))
19 | self.robot.set_tilt(self.radius * math.sin(frame_in_radians))
20 | sleep(0.05)
21 | self.current_time += 1
22 |
23 |
24 | bot = Robot()
25 | behavior = CirclePanTiltBehavior(bot)
26 | behavior.run()
27 |
--------------------------------------------------------------------------------
/chapter15/full_system/color_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class ColorTrackingBehavior(object):
15 | """Behavior to find and get close to a colored object"""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | # Tuning values
19 | self.low_range = (25, 70, 25)
20 | self.high_range = (80, 255, 255)
21 | self.correct_radius = 120
22 | self.center = 160
23 | # Current state
24 | self.running = False
25 |
26 | def process_control(self):
27 | instruction = get_control_instruction()
28 | if instruction == "start":
29 | self.running = True
30 | elif instruction == "stop":
31 | self.running = False
32 | elif instruction == "exit":
33 | print("Stopping")
34 | exit()
35 |
36 | def find_object(self, original_frame):
37 | """Find the largest enclosing circle for all contours in a masked image.
38 | Returns: the masked image, the object coordinates, the object radius"""
39 | frame_hsv = cv2.cvtColor(original_frame, cv2.COLOR_BGR2HSV)
40 | masked = cv2.inRange(frame_hsv, self.low_range, self.high_range)
41 |
42 | # Find the contours of the image (outline points)
43 | contour_image = np.copy(masked)
44 | contours, _ = cv2.findContours(contour_image, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
45 | # Find enclosing circles
46 | circles = [cv2.minEnclosingCircle(cnt) for cnt in contours]
47 | # Filter for the largest one
48 | largest = (0, 0), 0
49 | for (x, y), radius in circles:
50 | if radius > largest[1]:
51 | largest = (int(x), int(y)), int(radius)
52 | return masked, largest[0], largest[1]
53 |
54 | def make_display(self, frame, processed):
55 | """Create display output, and put it on the queue"""
56 | # Make a dualscreen view - two images of the same scale concatenated
57 | display_frame = np.concatenate((frame, processed), axis=1)
58 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
59 | put_output_image(encoded_bytes)
60 |
61 | def process_frame(self, frame):
62 | # Find the largest enclosing circle
63 | masked, coordinates, radius = self.find_object(frame)
64 | # Now back to 3 channels for display
65 | processed = cv2.cvtColor(masked, cv2.COLOR_GRAY2BGR)
66 | # Draw our circle on the original frame, then display this
67 | cv2.circle(frame, coordinates, radius, [255, 0, 0])
68 | self.make_display(frame, processed)
69 | # Yield the object details
70 | return coordinates, radius
71 |
72 | def run(self):
73 | # Set pan and tilt to middle, then clear it.
74 | self.robot.set_pan(0)
75 | self.robot.set_tilt(0)
76 | # start camera
77 | camera = pi_camera_stream.setup_camera()
78 | # speed pid - based on the radius we get.
79 | speed_pid = PIController(proportional_constant=0.8,
80 | integral_constant=0.1, windup_limit=100)
81 | # direction pid - how far from the middle X is.
82 | direction_pid = PIController(proportional_constant=0.25,
83 | integral_constant=0.1, windup_limit=400)
84 | # warm up and servo move time
85 | time.sleep(0.1)
86 | # Servo's will be in place - stop them for now.
87 | self.robot.servos.stop_all()
88 | print("Setup Complete")
89 | # Main loop
90 | for frame in pi_camera_stream.start_stream(camera):
91 | (x, y), radius = self.process_frame(frame)
92 | self.process_control()
93 | if self.running and radius > 20:
94 | # The size is the first error
95 | radius_error = self.correct_radius - radius
96 | speed_value = speed_pid.get_value(radius_error)
97 | # And the second error is the based on the center coordinate.
98 | direction_error = self.center - x
99 | direction_value = direction_pid.get_value(direction_error)
100 | print("radius: %d, radius_error: %d, speed_value: %.2f, direction_error: %d, direction_value: %.2f" %
101 | (radius, radius_error, speed_value, direction_error, direction_value))
102 | # Now produce left and right motor speeds
103 | self.robot.set_left(speed_value - direction_value)
104 | self.robot.set_right(speed_value + direction_value)
105 | else:
106 | self.robot.stop_motors()
107 | if not self.running:
108 | speed_pid.reset()
109 | direction_pid.reset()
110 |
111 | print("Setting up")
112 | behavior = ColorTrackingBehavior(Robot())
113 | process = start_server_process('color_track_behavior.html')
114 | behavior.run()
115 |
--------------------------------------------------------------------------------
/chapter15/full_system/distance_sensor_hcsr04.py:
--------------------------------------------------------------------------------
1 | """Object for the HC-SR04 distance sensor type."""
2 | import time # import the whole thing, we need more than just sleep
3 | from gpiozero import DigitalInputDevice, DigitalOutputDevice
4 |
5 | # This is an exception, we will send this when get distance fails to make a measurement
6 | class NoDistanceRead(Exception):
7 | """The system was unable to make a measurement"""
8 |     pass # We aren't doing anything special, but Python syntax requires the class body not to be empty.
9 |
10 | class DistanceSensor(DigitalInputDevice):
11 | """Represents a distance sensor."""
12 | def __init__(self, trigger_pin, echo_pin):
13 | # Setup devices, an input device and an output device, with pin numbers for the sensor.
14 | super(DistanceSensor, self).__init__(echo_pin)
15 | self._trigger = DigitalOutputDevice(trigger_pin)
16 | self._trigger.value = False
17 |
18 | def get_distance(self):
19 | """Method to get the distance measurement"""
20 | # Timeout - we'll use this to stop it getting stuck
21 | time_out = time.time() + 2
22 |
23 | # This off-on-off pulse tells the device to make a measurement
24 | self._trigger.value = True
25 | time.sleep(0.00001) # This is the 10 microseconds
26 | self._trigger.value = False
27 |
28 | # Wait for the pin state to stop being 0, going from low to high
29 | # When it rises, this is the real pulse start. Assign it once - it may already have changed!
30 | pulse_start = time.time()
31 | while self.pin.state == 0:
32 | pulse_start = time.time()
33 | # We ran out of time
34 | if pulse_start > time_out:
35 | raise NoDistanceRead("Timed Out")
36 |
37 | # Wait for the echo pin to stop being 1, going from high, to low, the end of the pulse.
38 | pulse_end = time.time()
39 | while self.pin.state == 1:
40 | pulse_end = time.time()
41 | # Pulse end not received
42 | if pulse_end > time_out:
43 | raise NoDistanceRead("Timed Out")
44 |
45 | # The duration is the time between the start and end of pulse in seconds.
46 | pulse_duration = pulse_end - pulse_start
47 | # Speed of sound in centimeters per second - 34300 cm/s. However, the pulse has travelled TWICE
48 | # the distance, so we get half of this. (34300 / 2) = 17150.
49 | distance = pulse_duration * 17150
50 | # Round it to 2 decimal places, any finer doesn't really make sense.
51 | distance = round(distance, 2)
52 | return distance
53 |
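# A worked example of the conversion above: an echo pulse lasting 2 ms
# (0.002 s) gives 0.002 * 17150 = 34.3 cm between the sensor and the obstacle.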
--------------------------------------------------------------------------------
/chapter15/full_system/drive_distance_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 |
5 |
6 | def drive_distance(bot, distance, speed=80):
7 | # Use left as "primary" motor, the right is keeping up
8 | set_primary = bot.set_left
9 | primary_encoder = bot.left_encoder
10 | set_secondary = bot.set_right
11 | secondary_encoder = bot.right_encoder
12 |
13 | controller = PIController(proportional_constant=5, integral_constant=0.2)
14 |
15 | # start the motors, and start the loop
16 | set_primary(speed)
17 | set_secondary(speed)
18 | while primary_encoder.pulse_count < distance or secondary_encoder.pulse_count < distance:
19 | # Sleep a bit before calculating
20 | time.sleep(0.05)
21 | # How far off are we?
22 | error = primary_encoder.pulse_count - secondary_encoder.pulse_count
23 | adjustment = controller.get_value(error)
24 | # How fast should the motor move to get there?
25 | set_secondary(speed + adjustment)
26 | # Some debug
27 | print("Primary c:{} ({} mm)\tSecondary c:{} ({} mm) e:{} adjustment: {:.2f}".format(
28 | primary_encoder.pulse_count, primary_encoder.distance_in_mm(),
29 | secondary_encoder.pulse_count, secondary_encoder.distance_in_mm(),
30 | error,
31 | adjustment
32 | ))
33 |
34 |
35 | bot = Robot()
36 | distance_to_drive = 1000 # in mm - this is a meter
37 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
38 | drive_distance(bot, distance_in_ticks)
39 |
--------------------------------------------------------------------------------
/chapter15/full_system/drive_square_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot, EncoderCounter
2 | from pid_controller import PIController
3 | import time
4 | import math
5 |
6 |
7 | def drive_distances(bot, left_distance, right_distance, speed=80):
8 | # We always want the "primary" to be the longest distance, therefore the faster motor
9 | if abs(left_distance) >= abs(right_distance):
10 | print("Left is primary")
11 | set_primary = bot.set_left
12 | primary_encoder = bot.left_encoder
13 | set_secondary = bot.set_right
14 | secondary_encoder = bot.right_encoder
15 | primary_distance = left_distance
16 | secondary_distance = right_distance
17 | else:
18 | print("right is primary")
19 | set_primary = bot.set_right
20 | primary_encoder = bot.right_encoder
21 | set_secondary = bot.set_left
22 | secondary_encoder = bot.left_encoder
23 | primary_distance = right_distance
24 | secondary_distance = left_distance
25 | primary_to_secondary_ratio = secondary_distance / (primary_distance * 1.0)
26 | secondary_speed = speed * primary_to_secondary_ratio
27 | print("Targets - primary: %d, secondary: %d, ratio: %.2f" % (primary_distance, secondary_distance, primary_to_secondary_ratio))
28 |
29 | primary_encoder.reset()
30 | secondary_encoder.reset()
31 |
32 | controller = PIController(proportional_constant=5, integral_constant=0.2)
33 |
34 | # Ensure that the encoder knows which way it is going
35 | primary_encoder.set_direction(math.copysign(1, speed))
36 | secondary_encoder.set_direction(math.copysign(1, secondary_speed))
37 |
38 | # start the motors, and start the loop
39 | set_primary(speed)
40 | set_secondary(secondary_speed)
41 | while abs(primary_encoder.pulse_count) < abs(primary_distance) or abs(secondary_encoder.pulse_count) < abs(secondary_distance):
42 | # And sleep a bit before calculating
43 | time.sleep(0.05)
44 |
45 | # How far off are we?
46 | secondary_target = primary_encoder.pulse_count * primary_to_secondary_ratio
47 | error = secondary_target - secondary_encoder.pulse_count
48 |
49 | # How fast should the motor move to get there?
50 | adjustment = controller.get_value(error)
51 |
52 | set_secondary(secondary_speed + adjustment)
53 | secondary_encoder.set_direction(math.copysign(1, secondary_speed+adjustment))
54 | # Some debug
55 |         print("Primary c:{:.2f} ({:.2f} mm)\tSecondary c:{:.2f} ({:.2f} mm) t:{:.2f} e:{:.2f} s:{:.2f} adjustment: {:.2f}".format(
56 |             primary_encoder.pulse_count,
57 |             primary_encoder.distance_in_mm(),
58 |             secondary_encoder.pulse_count,
59 |             secondary_encoder.distance_in_mm(),
60 |             secondary_target,
61 |             error,
62 |             secondary_speed,
63 |             adjustment
64 |         ))
65 |
66 | # Stop the primary if we need to
67 | if abs(primary_encoder.pulse_count) >= abs(primary_distance):
68 |             print("primary stop")
69 | set_primary(0)
70 | secondary_speed = 0
71 |
72 | def drive_arc(bot, turn_in_degrees, radius, speed=80):
73 | """ Turn is based on change in heading. """
74 | # Get the bot width in ticks
75 | half_width_ticks = EncoderCounter.mm_to_ticks(bot.wheel_distance_mm/2.0)
76 | if turn_in_degrees < 0:
77 | left_radius = radius - half_width_ticks
78 | right_radius = radius + half_width_ticks
79 | else:
80 | left_radius = radius + half_width_ticks
81 | right_radius = radius - half_width_ticks
82 |     print("Arc left radius {:.2f}, right_radius {:.2f}".format(left_radius, right_radius))
83 | radians = math.radians(abs(turn_in_degrees))
84 | left_distance = int(left_radius * radians)
85 | right_distance = int(right_radius * radians)
86 |     print("Arc left distance {}, right_distance {}".format(left_distance, right_distance))
87 | drive_distances(bot, left_distance, right_distance, speed=speed)
88 |
89 | bot = Robot()
90 |
91 | distance_to_drive = 300 # in mm
92 | distance_in_ticks = EncoderCounter.mm_to_ticks(distance_to_drive)
93 | radius = bot.wheel_distance_mm + 100 # in mm
94 | radius_in_ticks = EncoderCounter.mm_to_ticks(radius)
95 |
96 | for n in range(4):
97 | drive_distances(bot, distance_in_ticks, distance_in_ticks)
98 | drive_arc(bot, 90, radius_in_ticks, speed=50)
99 |
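# Rough numbers for the 90 degree arc above (illustrative, assuming the Robot
# class constants wheel_diameter_mm=69, ticks_per_revolution=40 and
# wheel_distance_mm=131, i.e. about 5.42 mm per tick):
#   radius = 231 mm ~= 42.6 ticks, half the wheel base ~= 12.1 ticks,
#   so the left/right arc radii are ~54.7 and ~30.5 ticks and, over pi/2 radians,
#   the left/right distances come out at roughly 85 and 47 ticks (~460 mm and ~255 mm).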
--------------------------------------------------------------------------------
/chapter15/full_system/encoder_counter.py:
--------------------------------------------------------------------------------
1 | from gpiozero import DigitalInputDevice
2 | import math
3 |
4 | class EncoderCounter(DigitalInputDevice):
5 | ticks_to_mm_const = None # you must set this up before using distance methods
6 | def __init__(self, pin):
7 | super(EncoderCounter, self).__init__(pin)
8 | self.pin.when_changed = self.when_changed
9 | self.pulse_count = 0
10 | self.direction = 1
11 |
12 | def when_changed(self):
13 | self.pulse_count += self.direction
14 |
15 | def set_direction(self, direction):
16 | """ This should be -1 or 1. """
17 | assert abs(direction)==1, "Direction %s should be 1 or -1" % direction
18 | self.direction = direction
19 |
20 | def reset(self):
21 | self.pulse_count = 0
22 |
23 | def distance_in_mm(self):
24 | return int(self.pulse_count * EncoderCounter.ticks_to_mm_const)
25 |
26 | @staticmethod
27 | def mm_to_ticks(mm):
28 | return mm / EncoderCounter.ticks_to_mm_const
29 |
30 | @staticmethod
31 | def set_constants(wheel_diameter_mm, ticks_per_revolution):
32 | EncoderCounter.ticks_to_mm_const = (math.pi / ticks_per_revolution) * wheel_diameter_mm
33 |
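# A worked example with the values the Robot class uses (wheel_diameter_mm=69,
# ticks_per_revolution=40): ticks_to_mm_const = (pi / 40) * 69 ~= 5.42 mm per tick,
# so mm_to_ticks(1000) ~= 184.5 ticks and distance_in_mm() after 40 ticks ~= 216 mm.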
--------------------------------------------------------------------------------
/chapter15/full_system/face_track_behavior.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import time
3 |
4 | from image_app_core import start_server_process, get_control_instruction, put_output_image
5 |
6 | import cv2
7 | import numpy as np
8 |
9 | import pi_camera_stream
10 | from pid_controller import PIController
11 | from robot import Robot
12 |
13 |
14 | class FaceTrackBehavior(object):
15 | """Behavior to find and point at a face."""
16 | def __init__(self, robot):
17 | self.robot = robot
18 | cascade_path = "/usr/share/opencv/haarcascades/haarcascade_frontalface_default.xml"
19 | self.cascade = cv2.CascadeClassifier(cascade_path)
20 | # Tuning values
21 | self.center_x = 160
22 | self.center_y = 120
23 | self.min_size = 20
24 | self.pan_pid = PIController(proportional_constant=0.1, integral_constant=0.03)
25 | self.tilt_pid = PIController(proportional_constant=-0.1, integral_constant=-0.03)
26 | # Current state
27 | self.running = False
28 |
29 | def process_control(self):
30 | instruction = get_control_instruction()
31 | if instruction == "start":
32 | self.running = True
33 | elif instruction == "stop":
34 | self.running = False
35 | self.pan_pid.reset()
36 | self.tilt_pid.reset()
37 | self.robot.servos.stop_all()
38 | elif instruction == "exit":
39 | print("Stopping")
40 | exit()
41 |
42 | def find_object(self, original_frame):
43 | """Search the frame for an object. Return the rectangle of the largest by w * h"""
44 | # Make it greyscale to reduce the data used
45 | gray_img = cv2.cvtColor(original_frame, cv2.COLOR_BGR2GRAY)
46 | # Detect all the objects
47 | objects = self.cascade.detectMultiScale(gray_img)
48 | largest = 0, (0, 0, 0, 0) # area, x, y, w, h
49 | for (x, y, w, h) in objects:
50 | item_area = w * h
51 | if item_area > largest[0]:
52 | largest = item_area, (x, y, w, h)
53 | return largest[1]
54 |
55 | def make_display(self, display_frame):
56 | """Create display output, and put it on the queue"""
57 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(display_frame)
58 | put_output_image(encoded_bytes)
59 |
60 | def process_frame(self, frame):
61 | # Find the largest matching object
62 | (x, y, w, h) = self.find_object(frame)
63 | # Draw a rect on the original frame, then display this
64 |         cv2.rectangle(frame, (x, y), (x + w, y + h), [255, 0, 0])
65 | self.make_display(frame)
66 | # Yield the object details
67 | return x, y, w, h
68 |
69 | def run(self):
70 | # start camera
71 | camera = pi_camera_stream.setup_camera()
72 | # warm up time
73 | time.sleep(0.1)
74 | print("Setup Complete")
75 | # Main loop
76 | for frame in pi_camera_stream.start_stream(camera):
77 | (x, y, w, h) = self.process_frame(frame)
78 | self.process_control()
79 | if self.running and h > self.min_size:
80 | # Pan
81 | pan_error = self.center_x - (x + (w / 2))
82 | pan_value = self.pan_pid.get_value(pan_error)
83 | self.robot.set_pan(int(pan_value))
84 | # Tilt
85 | tilt_error = self.center_y - (y + (h / 2))
86 | tilt_value = self.tilt_pid.get_value(tilt_error)
87 | self.robot.set_tilt(int(tilt_value))
88 | print("x: %d, y: %d, pan_error: %d, tilt_error: %d, pan_value: %.2f, tilt_value: %.2f" %
89 | (x, y, pan_error, tilt_error, pan_value, tilt_value))
90 |
91 | print("Setting up")
92 | behavior = FaceTrackBehavior(Robot())
93 | process = start_server_process('color_track_behavior.html')
94 | behavior.run()
95 |
--------------------------------------------------------------------------------
/chapter15/full_system/image_app_core.py:
--------------------------------------------------------------------------------
1 | """The flask/webserver part is slightly independent of the behavior,
2 | allowing the user to "tune in" to see, but should not stop the
3 | robot running"""
4 | import time
5 | from multiprocessing import Process, Queue
6 |
7 | from flask import (Flask, render_template,
8 | Response, redirect, request)
9 |
10 |
11 | app = Flask(__name__)
12 | control_queue = Queue()
13 | display_queue = Queue(maxsize=2)
14 | display_template = 'image_server'
15 |
16 | @app.route('/')
17 | def index():
18 | return render_template(display_template)
19 |
20 | @app.after_request
21 | def add_header(response):
22 | response.headers['Cache-Control'] = "no-cache, no-store, must-revalidate"
23 | return response
24 |
25 | def frame_generator():
26 | """This is our main video feed"""
27 | while True:
28 | # at most 20 fps
29 | time.sleep(0.05)
30 | # Get (wait until we have data)
31 | encoded_bytes = display_queue.get()
32 | # Need to turn this into http multipart data.
33 | yield (b'--frame\r\n'
34 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
35 |
36 | @app.route('/display')
37 | def display():
38 | return Response(frame_generator(),
39 | mimetype='multipart/x-mixed-replace; boundary=frame')
40 |
41 | @app.route('/exit')
42 | def handle_exit():
43 | control_queue.put('exit')
44 | menu_server = request.url_root.replace('5001', '5000')
45 | return redirect(menu_server, code=302)
46 |
47 | @app.route('/control/<control_name>')
48 | def control(control_name):
49 | control_queue.put(control_name)
50 | return Response('queued')
51 |
52 | def start_server_process(template_name):
53 | """Start the process, call .terminate to close it"""
54 | global display_template
55 | display_template = template_name
56 | server = Process(target=app.run, kwargs={"host": "0.0.0.0", "port": 5001})
57 | server.daemon = True
58 | server.start()
59 | print("App should be running")
60 | print("Process info name:", server.name, " alive:", server.is_alive())
61 | return server
62 |
63 | def put_output_image(encoded_bytes):
64 | """Queue an output image"""
65 | if display_queue.empty():
66 | display_queue.put(encoded_bytes)
67 |
68 | def get_control_instruction():
69 | """Get control instructions from the web app, if any"""
70 | if control_queue.empty():
71 | return None
72 | else:
73 | return control_queue.get()
74 |
--------------------------------------------------------------------------------
/chapter15/full_system/image_server.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from flask import Flask, render_template, Response
4 | import pi_camera_stream
5 | import os
6 |
7 | app = Flask(__name__)
8 |
9 | @app.route('/')
10 | def index():
11 | return render_template('image_server.html')
12 |
13 | def frame_generator():
14 | """This is our main video feed"""
15 | camera = pi_camera_stream.setup_camera()
16 |
17 | # allow the camera to warmup
18 | time.sleep(0.1)
19 |
20 | for frame in pi_camera_stream.start_stream(camera):
21 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
22 | # Need to turn this into http multipart data.
23 | yield (b'--frame\r\n'
24 | b'Content-Type: image/jpeg\r\n\r\n' + encoded_bytes + b'\r\n')
25 |
26 | @app.route('/display')
27 | def display():
28 | return Response(frame_generator(),
29 | mimetype='multipart/x-mixed-replace; boundary=frame')
30 |
31 | print(repr(os.environ))
32 | print("App started")
33 | app.run(host="0.0.0.0", debug=True, port=5001)
34 | print("App finished")
--------------------------------------------------------------------------------
/chapter15/full_system/leds_8_apa102c.py:
--------------------------------------------------------------------------------
1 | import spidev
2 |
3 | class Leds(object):
4 | def __init__(self):
5 | # MOSI - default output, (Master Out, Slave in) is 10. Clock is 11.
6 | self.device = spidev.SpiDev()
7 | self.device.open(0, 0)
8 | self.device.max_speed_hz = 15000
9 | self.colors = [(0,0,0)] * self.count
10 |
11 | @property
12 | def count(self):
13 | return 8
14 |
15 | def set_one(self, led_number, color):
16 | assert(len(color) == 3)
17 | self.colors[led_number] = color
18 |
19 | def set_range(self, a_range, color):
20 | for led_number in a_range:
21 | self.colors[led_number] = color
22 |
23 | def set_all(self, color):
24 | self.colors = [color] * self.count
25 |
26 | def clear(self):
27 | self.set_all((0, 0, 0))
28 |
29 | def show(self):
30 | # Create the wake up header
31 | data = [0] * 4
32 | for color in self.colors:
33 | data.append(0xe1)
34 | data.extend(color)
35 | data.extend([0]* 4)
36 | # send it
37 | self.device.xfer(data)
38 |
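# Notes on the APA102C framing used in show() above (for reference, based on the
# chip's data format rather than anything stated in this file):
#   * the four leading zero bytes form the start frame,
#   * 0xe1 is a per-LED header byte - three 1 bits plus a 5-bit global
#     brightness of 1 (out of 31) - followed by the three colour bytes,
#   * the trailing zero bytes supply the extra clock pulses needed to latch
#     the data on a short strip like this 8-LED one.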
--------------------------------------------------------------------------------
/chapter15/full_system/leds_led_shim.py:
--------------------------------------------------------------------------------
1 | import ledshim
2 |
3 | class Leds(object):
4 | def __init__(self):
5 | self.count = 24
6 |
7 | def set_one(self, led_number, color):
8 | ledshim.set_pixel(led_number, *color)
9 |
10 | def set_range(self, a_range, color):
11 | ledshim.set_multiple_pixels(a_range, color)
12 |
13 | def set_all(self, color):
14 | ledshim.set_all(*color)
15 |
16 | def clear(self):
17 | ledshim.clear()
18 |
19 | def show(self):
20 | ledshim.show()
--------------------------------------------------------------------------------
/chapter15/full_system/leds_test.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | bot = Robot()
5 | red = (255, 0, 0)
6 | blue = (0, 0, 255)
7 |
8 | while True:
9 | print("red")
10 | bot.leds.set_all(red)
11 | bot.leds.show()
12 | sleep(0.5)
13 | print("blue")
14 | bot.leds.set_all(blue)
15 | bot.leds.show()
16 | sleep(0.5)
17 |
18 |
--------------------------------------------------------------------------------
/chapter15/full_system/line_following_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | cross_line_color = (255, 0, 0)
5 | off_line_color = (0, 0, 255)
6 |
7 | class LineFollowingBehavior:
8 | # Note - this is the robot ON the line.
9 | def __init__(self, the_robot, forward_speed=30, cornering=-30):
10 | self.robot = the_robot
11 | self.forward_speed = forward_speed
12 | self.cornering = cornering
13 |
14 | led_qtr = int(self.robot.leds.count/4)
15 | self.right_indicator = range(0, led_qtr)
16 | self.left_indicator = range(self.robot.leds.count - led_qtr, self.robot.leds.count)
17 |
18 | self.robot.leds.show()
19 |
20 | def when_left_crosses_line(self):
21 | self.robot.set_left(self.cornering)
22 | self.robot.leds.set_range(self.left_indicator, cross_line_color)
23 | self.robot.leds.show()
24 |
25 | def when_right_crosses_line(self):
26 | self.robot.set_right(self.cornering)
27 | self.robot.leds.set_range(self.right_indicator, cross_line_color)
28 | self.robot.leds.show()
29 |
30 | def when_left_off_line(self):
31 | self.robot.set_left(self.forward_speed)
32 | self.robot.leds.set_range(self.left_indicator, off_line_color)
33 | self.robot.leds.show()
34 |
35 | def when_right_off_line(self):
36 | self.robot.set_right(self.forward_speed)
37 | self.robot.leds.set_range(self.right_indicator, off_line_color)
38 | self.robot.leds.show()
39 |
40 | def run(self):
41 | # Setup conditions
42 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
43 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
44 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
45 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
46 | # Start driving
47 | self.robot.set_left(self.forward_speed)
48 | self.robot.set_right(self.forward_speed)
49 | while True:
50 | sleep(0.02)
51 |
52 |
53 | bot = Robot()
54 | behavior = LineFollowingBehavior(bot)
55 | behavior.run()
56 |
57 |
--------------------------------------------------------------------------------
/chapter15/full_system/manual_drive.py:
--------------------------------------------------------------------------------
1 | import time
2 | from robot import Robot
3 | from image_app_core import start_server_process, get_control_instruction, put_output_image
4 | import pi_camera_stream
5 |
6 | class ManualDriveBehavior(object):
7 | def __init__(self, robot):
8 | self.robot = robot
9 | self.timeout = time.time() + 1
10 |
11 | def process_control(self):
12 | instruction = get_control_instruction()
13 | while instruction:
14 | self.timeout = time.time() + 1
15 | parts = instruction.split('/')
16 | if parts[0] == "set_left":
17 | self.robot.set_left(int(parts[1]))
18 | elif parts[0] == "set_right":
19 | self.robot.set_right(int(parts[1]))
20 | elif parts[0] == "exit":
21 |                 print("Stopping")
22 | exit()
23 | instruction = get_control_instruction()
24 |
25 | def make_display(self, frame):
26 | """Create display output, and put it on the queue"""
27 | encoded_bytes = pi_camera_stream.get_encoded_bytes_for_frame(frame)
28 | put_output_image(encoded_bytes)
29 |
30 | def run(self):
31 | # Set pan and tilt to middle, then clear it.
32 | self.robot.set_pan(0)
33 | self.robot.set_tilt(0)
34 | # start camera
35 | camera = pi_camera_stream.setup_camera()
36 | # warm up and servo move time
37 | time.sleep(0.1)
38 | # Servo's will be in place - stop them for now.
39 | self.robot.servos.stop_all()
40 | print("Setup Complete")
41 | # Main loop
42 | for frame in pi_camera_stream.start_stream(camera):
43 | self.make_display(frame)
44 | self.process_control()
45 | # Auto stop
46 | if time.time() > self.timeout:
47 | self.robot.stop_motors()
48 |
49 | print("Setting up")
50 | behavior = ManualDriveBehavior(Robot())
51 | process = start_server_process('manual_drive.html')
52 | behavior.run()
53 |
--------------------------------------------------------------------------------
/chapter15/full_system/menu_server.py:
--------------------------------------------------------------------------------
1 | import time
2 | from flask import Flask, render_template, redirect, request
3 | from robot_modes import RobotModes
4 | from leds_8_apa102c import Leds
5 |
6 | # A Flask App contains all its routes.
7 | app = Flask(__name__)
8 | # Prepare our robot modes for use
9 | mode_manager = RobotModes()
10 |
11 | leds = Leds()
12 | leds.set_one(0, [0, 255, 0])
13 | leds.show()
14 |
15 | def render_menu(message=None):
16 | """Render the menu screen, with an optional status message"""
17 | return render_template('menu.html', menu=mode_manager.menu_config, message=message)
18 |
19 | # These are the Flask routes - the different places we can go to in our browser.
20 |
21 | @app.after_request
22 | def add_header(response):
23 | response.headers['Cache-Control'] = "no-cache, no-store, must-revalidate"
24 | return response
25 |
26 | @app.route("/")
27 | def index():
28 | return render_menu()
29 |
30 | @app.route("/run/<mode_name>")
31 | def run(mode_name):
32 | global leds
33 | if leds:
34 | leds.clear()
35 | leds.show()
36 | leds = None
37 |
38 | # Use our robot app to run something with this mode_name
39 | mode_manager.run(mode_name)
40 | if mode_manager.should_redirect(mode_name):
41 | # Give the other process time to start
42 | time.sleep(3)
43 | # If it's not broken
44 | if mode_manager.is_running():
45 | # Now redirect
46 | new_url = request.url_root.replace('5000', '5001')
47 | return redirect(new_url)
48 | else:
49 | return render_menu(message="%s dead." % mode_name)
50 | return render_menu(message="%s running" % mode_name)
51 |
52 | @app.route("/stop")
53 | def stop():
54 | # Tell our system to stop the mode it's in.
55 | mode_manager.stop()
56 | return render_menu(message='Stopped')
57 |
58 | # Start the app running
59 | # if you enable debug, disable the reloader here.
60 | app.run(host="0.0.0.0")
61 |
62 |
--------------------------------------------------------------------------------
/chapter15/full_system/menu_server.unit:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Robot Menu Web Service
3 | After=network.target
4 |
5 | [Install]
6 | WantedBy=multi-user.target
7 |
8 | [Service]
9 | WorkingDirectory=/home/pi/full_system
10 | ExecStart=/usr/bin/env python menu_server.py
11 | User=pi
12 |
--------------------------------------------------------------------------------
/chapter15/full_system/pi_camera_stream.py:
--------------------------------------------------------------------------------
1 | from picamera.array import PiRGBArray
2 | from picamera import PiCamera
3 | import numpy as np
4 | import cv2
5 |
6 | size = (320, 240)
7 | encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]
8 |
9 | def setup_camera():
10 | camera = PiCamera()
11 | camera.resolution = size
12 | camera.rotation = 180
13 | return camera
14 |
15 | def start_stream(camera):
16 | image_storage = PiRGBArray(camera, size=size)
17 |
18 | cam_stream = camera.capture_continuous(image_storage, format="bgr", use_video_port=True)
19 | for raw_frame in cam_stream:
20 | yield raw_frame.array
21 | image_storage.truncate(0)
22 |
23 | def get_encoded_bytes_for_frame(frame):
24 | result, encoded_image = cv2.imencode('.jpg', frame, encode_param)
25 | return encoded_image.tostring()
26 |
--------------------------------------------------------------------------------
/chapter15/full_system/pid_controller.py:
--------------------------------------------------------------------------------
1 | class PIController(object):
2 | def __init__(self, proportional_constant=0, integral_constant=0, windup_limit=None):
3 | self.proportional_constant = proportional_constant
4 | self.integral_constant = integral_constant
5 | self.windup_limit = windup_limit
6 | # Running sums
7 | self.integral_sum = 0
8 |
9 | def reset(self):
10 | self.integral_sum = 0
11 |
12 | def handle_proportional(self, error):
13 | return self.proportional_constant * error
14 |
15 | def handle_integral(self, error):
16 | """Integral will change if
17 | * There is no windup limit
18 | * We are below the windup limit
19 | * or the sign of the error would reduce the sum"""
20 | if self.windup_limit is None or \
21 | (abs(self.integral_sum) < self.windup_limit) or \
22 | ((error > 0) != (self.integral_sum > 0)):
23 | self.integral_sum += error
24 | return self.integral_constant * self.integral_sum
25 |
26 | def get_value(self, error):
27 | p = self.handle_proportional(error)
28 | i = self.handle_integral(error)
29 | return p + i
30 |
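# A quick illustrative sketch of how the two terms combine; the constants and
# error values below are arbitrary examples, not values from the behaviors.
if __name__ == "__main__":
    pid = PIController(proportional_constant=4, integral_constant=0.2)
    for error in (10, 6, 2):
        # proportional part is 4 * error; integral part is 0.2 * running error sum
        print("error %d -> correction %.1f" % (error, pid.get_value(error)))
    # prints roughly 42.0, 27.2 and 11.6 as the integral term accumulates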
--------------------------------------------------------------------------------
/chapter15/full_system/requirements.txt:
--------------------------------------------------------------------------------
1 | flask
2 |
--------------------------------------------------------------------------------
/chapter15/full_system/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import RPi.GPIO as GPIO
4 |
5 | import atexit
6 |
7 | #import leds_led_shim
8 | import leds_8_apa102c
9 | from servos import Servos
10 | from distance_sensor_hcsr04 import DistanceSensor, NoDistanceRead
11 | from encoder_counter import EncoderCounter
12 |
13 | class Robot(object):
14 | wheel_diameter_mm = 69.0
15 | ticks_per_revolution = 40.0
16 | wheel_distance_mm = 131.0
17 |
18 | def __init__(self, motorhat_addr=0x6f, drive_enabled=True):
19 | # Setup the motorhat with the passed in address
20 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
21 |
22 | # get local variable for each motor
23 | self.left_motor = self._mh.getMotor(1)
24 | self.right_motor = self._mh.getMotor(2)
25 | self.drive_enabled = drive_enabled
26 |
27 | # ensure the motors get stopped when the code exits
28 | atexit.register(self.stop_all)
29 |
30 | # Setup the line sensors
31 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
32 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
33 |
34 | # Setup The Distance Sensors
35 | self.left_distance_sensor = DistanceSensor(17, 27)
36 | self.right_distance_sensor = DistanceSensor(5, 6)
37 |
38 | # Setup the Encoders
39 | EncoderCounter.set_constants(self.wheel_diameter_mm, self.ticks_per_revolution)
40 | self.left_encoder = EncoderCounter(4)
41 | self.right_encoder = EncoderCounter(26)
42 |
43 | # Setup the Leds
44 | self.leds = leds_8_apa102c.Leds()
45 |
46 | # Set up servo motors for pan and tilt.
47 | self.servos = Servos(addr=motorhat_addr)
48 |
49 | def stop_all(self):
50 | self.stop_motors()
51 |
52 | # Clear any sensor handlers
53 | self.left_line_sensor.when_line = None
54 | self.left_line_sensor.when_no_line = None
55 | self.right_line_sensor.when_line = None
56 | self.right_line_sensor.when_no_line = None
57 |
58 | # Clear the display
59 | self.leds.clear()
60 | self.leds.show()
61 |
62 | # Reset the servos
63 | self.servos.stop_all()
64 |
65 | def convert_speed(self, speed):
66 | mode = Raspi_MotorHAT.RELEASE
67 | if speed > 0:
68 | mode = Raspi_MotorHAT.FORWARD
69 | elif speed < 0:
70 | mode = Raspi_MotorHAT.BACKWARD
71 | output_speed = (abs(speed) * 255) / 100
72 | return mode, int(output_speed)
73 |
74 | def set_left(self, speed):
75 | if not self.drive_enabled:
76 | return
77 | mode, output_speed = self.convert_speed(speed)
78 | self.left_motor.setSpeed(output_speed)
79 | self.left_motor.run(mode)
80 |
81 | def set_right(self, speed):
82 | if not self.drive_enabled:
83 | return
84 | mode, output_speed = self.convert_speed(speed)
85 | self.right_motor.setSpeed(output_speed)
86 | self.right_motor.run(mode)
87 |
88 | def stop_motors(self):
89 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
90 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
91 |
92 | def set_pan(self, angle):
93 | self.servos.set_servo_angle(1, angle)
94 |
95 | def set_tilt(self, angle):
96 | self.servos.set_servo_angle(0, angle)
97 |
--------------------------------------------------------------------------------
/chapter15/full_system/robot_modes.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 |
3 | class RobotModes(object):
4 | """Our robot behaviors and tests as running modes"""
5 |
6 |     # Mode config maps a "mode_name" to a script to run. Configured for lookup.
7 |     # We could convert the path segment into a Python script by adding '.py',
8 |     # but this is a bad idea for at least 2 reasons:
9 |     # * We may want to swap the script it really runs (for example, avoid_behavior.py for simple_avoid_behavior.py).
10 |     # * It's not a great security idea to let this run anything but the scripts we specify here.
11 | mode_config = {
12 | "avoid_behavior": {"script": "avoid_behavior.py"},
13 | "circle_head": {"script": "circle_pan_tilt_behavior.py"},
14 | "test_leds": {"script": "leds_test.py"},
15 | "test_hcsr04": {"script": "test_hcsr04.py"},
16 | "stop_at_line": {"script": "stop_at_line.py"},
17 | "line_following": {"script": "line_following_behavior.py"},
18 | "behavior_line": {"script": "drive_distance_behavior.py"},
19 | "behavior_path": {"script": "drive_square_behavior.py"},
20 | "color_track": {"script": "color_track_behavior.py", "server": True},
21 | "face_track": {"script": "face_track_behavior.py", "server": True},
22 | "manual_drive": {"script": "manual_drive.py", "server": True},
23 | "image_server": {"script": "image_server.py", "server": True}
24 | }
25 |
26 | # Menu config is a list of mode_names and text to display. Ordered as we'd like our menu.
27 | menu_config = [
28 | {"mode_name": "avoid_behavior", "text": "Avoid Behavior"},
29 | {"mode_name": "circle_head", "text": "Circle Head"},
30 | {"mode_name": "test_leds", "text": "Test LEDS"},
31 | {"mode_name": "test_hcsr04", "text": "Test HC-SR04"},
32 | {"mode_name": "stop_at_line", "text": "Stop At Line"},
33 | {"mode_name": "line_following", "text": "Line Following"},
34 | {"mode_name": "behavior_line", "text": "Drive In A Line"},
35 | {"mode_name": "behavior_path", "text": "Drive a Square Path"},
36 | {"mode_name": "image_server", "text": "Test camera server"},
37 | {"mode_name": "color_track", "text": "Track Colored Objects"},
38 | {"mode_name": "face_track", "text": "Track Faces"},
39 | {"mode_name": "manual_drive", "text": "Drive manually"},
40 | ]
41 |
42 | def __init__(self):
43 | self.current_process = None
44 |
45 | def is_running(self):
46 | """Check if there is a process running. Returncode is only set when a process finishes"""
47 | return self.current_process and self.current_process.returncode is None
48 |
49 | def run(self, mode_name):
50 |         """Run the mode as a subprocess, stopping any mode that is still running first"""
51 | while self.is_running():
52 | self.stop()
53 | script = self.mode_config[mode_name]['script']
54 | self.current_process = subprocess.Popen(["python", script])
55 |
56 | def should_redirect(self, mode_name):
57 | return self.mode_config[mode_name].get('server') is True and self.is_running()
58 |
59 | def stop(self):
60 | """Stop a process"""
61 | if self.is_running():
62 | # Sending the signal sigint is (on Linux) similar to pressing ctrl-c.
63 | # The behavior will do the same clean up.
64 | self.current_process.send_signal(subprocess.signal.SIGINT)
65 | self.current_process = None
66 |
--------------------------------------------------------------------------------
/chapter15/full_system/servos.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT.Raspi_PWM_Servo_Driver import PWM
2 |
3 | class Servos(object):
4 | def __init__(self, addr=0x6f, deflect_90_in_ms = 0.9):
5 | """addr: The i2c address of the PWM chip.
6 | deflect_90_in_ms: set this to calibrate the servo motors.
7 | it is what a deflection of 90 degrees is
8 | in terms of a pulse length in milliseconds."""
9 | self._pwm = PWM(addr)
10 | # This sets the timebase for it all
11 | pwm_frequency = 60
12 | self._pwm.setPWMFreq(pwm_frequency)
13 |
14 |         # Period is 1/frequency; since we are working in milliseconds, that is 1000 / frequency.
15 | period_in_ms = 1000.0 / pwm_frequency
16 | # The chip has 4096 steps in each period.
17 | pulse_steps = 4096.0
18 | # Mid point of the servo pulse length in milliseconds.
19 | servo_mid_point_ms = 1.5
20 | # Steps for every millisecond.
21 | steps_per_ms = pulse_steps / period_in_ms
22 | # Steps for a degree
23 | self.steps_per_degree = (deflect_90_in_ms * steps_per_ms) / 90.0
24 | # Mid point of the servo in steps
25 | self.servo_mid_point_steps = servo_mid_point_ms * steps_per_ms
26 |         # Start with the servos turned off
27 | self.stop_all()
28 |
29 | def stop_all(self):
30 | # 0 in start is nothing, 4096 sets the OFF bit.
31 | self._pwm.setPWM(0, 0, 4096)
32 | self._pwm.setPWM(1, 0, 4096)
33 | self._pwm.setPWM(14, 0, 4096)
34 | self._pwm.setPWM(15, 0, 4096)
35 |
36 | def _convert_degrees_to_pwm(self, position):
37 | return int(self.servo_mid_point_steps + (position * self.steps_per_degree))
38 |
39 | def set_servo_angle(self, channel, angle):
40 |         """angle: The angle in degrees from the center. -90 to 90"""
41 | # Validate
42 | if angle > 90 or angle < -90:
43 | raise ValueError("Angle outside of range")
44 | # Then set the position
45 | off_step = self._convert_degrees_to_pwm(angle)
46 | self._pwm.setPWM(channel, 0, off_step)
47 |
--------------------------------------------------------------------------------
/chapter15/full_system/static/display.css:
--------------------------------------------------------------------------------
1 | .slider_track {
2 | width: 10vw;
3 | height: 90vh;
4 | border: 1px solid blue;
5 | background-color: lightblue;
6 | }
7 | .slider_tick {
8 | fill: mistyrose;
9 | }
10 | #left_slider {
11 | position: absolute;
12 | left: 5vw;
13 | top: 5vh;
14 | }
15 | #right_slider {
16 | position: absolute;
17 | right: 5vw;
18 | top: 5vh;
19 | }
20 | .button {
21 | display: block;
22 | height: 10vh;
23 | text-align: center;
24 | font-size: 2em;
25 | line-height: 10vh;
26 | text-decoration: none;
27 | background-color: blue;
28 | color: white;
29 | }
30 | #exitbutton {
31 | position: absolute;
32 | width: 40vw;
33 | left: 30vw;
34 | top: 5vh;
35 | }
36 | #video {
37 | display: block;
38 | margin-top: 20vh;
39 | width: 80vmin;
40 | height: 80vmin;
41 | margin-left: auto;
42 | margin-right: auto;
43 | }
44 | #video img {
45 | width: 100%;
46 | padding-bottom: 75%;
47 | }
48 |
49 | .menu {
50 | width: 100%;
51 | margin-top: 0;
52 | margin-bottom: 0;
53 | padding: 0;
54 | }
55 |
56 | .menu li {
57 | list-style-type: none;
58 | list-style-position: initial;
59 | }
60 |
61 | .menu .button {
62 | margin-left: auto;
63 | margin-right: auto;
64 | width: 60vw;
65 | border: 1px solid lightblue;
66 | }
67 |
--------------------------------------------------------------------------------
/chapter15/full_system/static/display_sample.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learn-Robotics-Programming/89b9feb4985349b575bd1cd3cd80796a78ca7bde/chapter15/full_system/static/display_sample.jpg
--------------------------------------------------------------------------------
/chapter15/full_system/static/lib/jquery-readme.txt:
--------------------------------------------------------------------------------
1 | This folder contains jQuery 3.3.1 (minified).
2 |
3 | jQuery v3.3.1 | (c) JS Foundation and other contributors | jquery.org/license
4 |
5 | The jQuery license is at https://jquery.org/license/.
6 |
7 | jQuery is licensed under the MIT License.
8 | The MIT License is simple and easy to understand, and it places almost no restrictions on what you can do with the project.
9 |
10 | You are free to use the project in any other project (even commercial projects) as long as the copyright header is left intact.
11 |
--------------------------------------------------------------------------------
/chapter15/full_system/static/touch-slider.js:
--------------------------------------------------------------------------------
1 |
2 | function Slider(id, when_updated) {
3 | this.selector = '#' + id;
4 | this.when_updated = when_updated;
5 | };
6 |
7 | Slider.prototype = {
8 |   touched: false, // is a touch still occurring
9 | touchmove: function(event) {
10 | var touch = event.targetTouches[0];
11 | // Get the touch relative to the top of the slider
12 | var from_top = touch.pageY - $(this.selector).offset().top;
13 | // height of track in pixels
14 | var trackheight = $(this.selector).height();
15 |     // Convert this to a percentage of the track height (50 is the middle).
16 | var relative_touch = Math.round((from_top/trackheight) * 100);
17 | this.set_position(relative_touch);
18 | this.touched = true;
19 | event.preventDefault();
20 | },
21 | touchend: function(event) {
22 | this.touched = false;
23 | },
24 | setup: function() {
25 | $(this.selector).on('touchmove', this.touchmove.bind(this));
26 | $(this.selector).on('touchend', this.touchend.bind(this));
27 | setInterval(this.update.bind(this), 50);
28 | setInterval(this.update_if_changed.bind(this), 200);
29 | },
30 | changed: false,
31 | position : 50,
32 | set_position: function(new_position) {
33 | this.position = Math.round(new_position);
34 |     console.log(this.selector + " - " + this.position);
35 | $(this.selector).find('.slider_tick')[0].setAttribute('cy', this.position + '%');
36 | this.changed = true;
37 | },
38 | update: function() {
39 |     var error = 50 - this.position;  // distance from the center position (50)
40 |     var change = (0.3 * error) + (Math.sign(error) * 0.5);  // spring back towards the center, with a minimum step
41 | if(!this.touched && this.position != 50) {
42 | this.set_position(this.position + change);
43 | }
44 | },
45 | update_if_changed: function() {
46 | if(this.changed) {
47 | this.changed = false;
48 | this.when_updated(100 - (this.position * 2));
49 | }
50 | }
51 | };
52 |
--------------------------------------------------------------------------------
/chapter15/full_system/templates/color_track_behavior.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | Robot Image Server
6 |
7 |
8 | Robot Image Server
9 | Exit
10 | Start Stop
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/chapter15/full_system/templates/image_server.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Robot Image Server
4 |
5 |
6 | Robot Image Server
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/chapter15/full_system/templates/manual_drive.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Manually Drive The Robot
9 |
10 |
11 |
12 |
13 |
14 | Exit
15 |
16 |
17 |
18 |
19 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/chapter15/full_system/templates/menu.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | My Robot Menu
5 |
6 |
7 |
8 | My Robot Menu
9 | {% if message %}
10 | {{ message }}
11 | {% endif %}
12 |
18 |
19 |
--------------------------------------------------------------------------------
/chapter15/readme.md:
--------------------------------------------------------------------------------
1 | # Overview
2 |
3 | This code is intended to run on a Raspberry Pi on a robot running Raspbian (currently Stretch).
4 |
5 | # Setup
6 |
7 | Start by performing:
8 |
9 | pi@myrobot:~ $ sudo apt-get update && sudo apt-get upgrade -y
10 |
11 | Install the following Raspbian packages, along with those listed below:
12 |
13 |     pi@myrobot:~ $ sudo apt-get install -y git python-pip python-smbus i2c-tools
14 |
15 | * python-opencv
16 | * opencv-data
17 | * picamera
18 |
19 | In Raspi Config, enable:
20 |
21 | * I2C
22 | * SPI
23 | * Camera
24 |
25 | For Python, the following packages should be installed with pip:
26 |
27 |     pi@myrobot:~ $ pip install git+https://github.com/orionrobots/Raspi_MotorHAT
28 |
29 |     pi@myrobot:~ $ pip install flask gpiozero spidev
30 |
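31 | To enable the I2C, SPI and camera interfaces without the raspi-config menus, the non-interactive mode should also work (a sketch; the option names can vary between Raspbian releases):
32 |
33 |     pi@myrobot:~ $ sudo raspi-config nonint do_i2c 0
34 |     pi@myrobot:~ $ sudo raspi-config nonint do_spi 0
35 |     pi@myrobot:~ $ sudo raspi-config nonint do_camera 0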
--------------------------------------------------------------------------------
/chapter4/readme.md:
--------------------------------------------------------------------------------
1 | Creating A Headless Pi
2 | ======================
3 |
4 | This chapter is about making a Raspberry Pi headless, and reaching it from the network.
5 |
6 | Using WiFi and SSH
7 | ==================
8 |
9 | The example wpa_supplicant configuration in this folder should be copied onto the BOOT partition of a Raspberry Pi SD card as wpa_supplicant.conf.
10 | The SSID and PSK will need to be replaced with your own network details.
11 |
12 | Also an "ssh" file should be present on the SD Card BOOT partition to enable this too.
13 |
14 | These will make your Raspberry Pi contactable on your WiFi network.
15 |
16 | Contacting The Pi
17 | =================
18 |
19 | When it is turned on, the Raspberry Pi will respond on a network to the name "raspberrypi.local" until you change it to a more unique hostname.
20 |
21 | You will need to ensure your computer supports mDNS. Most current Linux and Apple Mac desktops already have this support.
22 |
23 | ## Windows
24 |
25 | You will need to install the Apple Bonjour software from https://support.apple.com/downloads/bonjour-for-windows, unless you already have iTunes or Skype installed.
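26 |
27 | Once the Pi has booted and joined your WiFi, you should be able to check it is reachable and log in (assuming the default "pi" user):
28 |
29 |     ping raspberrypi.local
30 |     ssh pi@raspberrypi.local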
--------------------------------------------------------------------------------
/chapter4/wpa_supplicant_example.conf:
--------------------------------------------------------------------------------
1 | country=
2 | update_config=1
3 | ctrl_interface=/var/run/wpa_supplicant
4 |
5 | network={
6 | ssid=""
7 | psk=""
8 | }
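9 |
10 | # Hypothetical filled-in example - replace with your own country code and network details:
11 | # country=GB
12 | # network={
13 | #     ssid="MyHomeWifi"
14 | #     psk="MySecretPassphrase"
15 | # }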
--------------------------------------------------------------------------------
/chapter5/hello.py:
--------------------------------------------------------------------------------
1 | import socket
2 | print('%s is alive!' % socket.gethostname())
3 |
--------------------------------------------------------------------------------
/chapter5/readme.md:
--------------------------------------------------------------------------------
1 | # Using sftp to Upload Files From Your PC
2 |
3 | This hello.py is just a sample file to upload to the Pi using SFTP and then run with Python.
4 |
5 | Use FileZilla, or your preferred SFTP client, to connect to the Pi and send this file to it, to get comfortable with SFTP file transfers.
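6 |
7 | If you prefer the command line, the standard sftp and ssh clients can do the same job (assuming the default hostname and "pi" user):
8 |
9 |     sftp pi@raspberrypi.local
10 |     sftp> put hello.py
11 |     sftp> exit
12 |     ssh pi@raspberrypi.local
13 |     pi@raspberrypi:~ $ python hello.py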
--------------------------------------------------------------------------------
/chapter7/behavior_line.py:
--------------------------------------------------------------------------------
1 | import robot
2 | from time import sleep
3 |
4 | r = robot.Robot()
5 | r.set_left(80)
6 | r.set_right(80)
7 | sleep(1)
8 |
--------------------------------------------------------------------------------
/chapter7/behavior_path.py:
--------------------------------------------------------------------------------
1 | import robot
2 | from time import sleep
3 |
4 | def straight(bot, seconds):
5 | bot.set_left(80)
6 | bot.set_right(70)
7 | sleep(seconds)
8 |
9 | def turn_left(bot, seconds):
10 | bot.set_left(20)
11 | bot.set_right(80)
12 | sleep(seconds)
13 |
14 | def turn_right(bot, seconds):
15 | bot.set_left(80)
16 | bot.set_right(20)
17 | sleep(seconds)
18 |
19 | def spin_left(bot, seconds):
20 | bot.set_left(-80)
21 | bot.set_right(80)
22 | sleep(seconds)
23 |
24 | bot = robot.Robot()
25 | straight(bot, 1)
26 | turn_right(bot, 0.6)
27 | straight(bot, 1)
28 | turn_left(bot, 0.6)
29 | straight(bot, 1)
30 | turn_left(bot, 0.6)
31 | straight(bot, 1)
32 | spin_left(bot, 1)
33 |
--------------------------------------------------------------------------------
/chapter7/readme.md:
--------------------------------------------------------------------------------
1 | # Move and Turn
2 |
3 | The code in this directory is the starting point for making a moving robot.
4 | It is designed to work with the "Full Function Stepper Motor Hat", a robotics board with support for DC motors, stepper motors and servo motors, which also has long passthrough header pins for easy access to all GPIO pins on the Raspberry Pi.
5 |
6 | First, you will need the following installed on your Pi:
7 |
8 |     pi@myrobot:~ $ sudo apt-get install -y git python-pip python-smbus i2c-tools
9 |     pi@myrobot:~ $ pip install git+https://github.com/orionrobots/Raspi_MotorHAT
10 |
11 | You will need to use Raspi Config to enable I2C and SPI.
12 |
13 | Run the test_ or behavior_ files to demonstrate your robot moving.
14 |
15 |
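16 | For example, to run both motors forward for a second:
17 |
18 |     pi@myrobot:~ $ python test_motors.py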
--------------------------------------------------------------------------------
/chapter7/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 |
3 | import atexit
4 |
5 | class Robot(object):
6 | def __init__(self, motorhat_addr=0x6f):
7 | # Setup the motorhat with the passed in address
8 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
9 |
10 | # get local variable for each motor
11 | self.left_motor = self._mh.getMotor(1)
12 | self.right_motor = self._mh.getMotor(2)
13 |
14 | # ensure the motors get stopped when the code exits
15 | atexit.register(self.stop_motors)
16 |
17 | def convert_speed(self, speed):
18 | # Choose the running mode
19 | mode = Raspi_MotorHAT.RELEASE
20 | if speed > 0:
21 | mode = Raspi_MotorHAT.FORWARD
22 | elif speed < 0:
23 | mode = Raspi_MotorHAT.BACKWARD
24 |
25 | # Scale the speed
26 | output_speed = (abs(speed) * 255) / 100
27 | return mode, int(output_speed)
28 |
29 | def set_left(self, speed):
30 | mode, output_speed = self.convert_speed(speed)
31 | self.left_motor.setSpeed(output_speed)
32 | self.left_motor.run(mode)
33 |
34 | def set_right(self, speed):
35 | mode, output_speed = self.convert_speed(speed)
36 | self.right_motor.setSpeed(output_speed)
37 | self.right_motor.run(mode)
38 |
39 | def stop_motors(self):
40 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
41 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
42 |
--------------------------------------------------------------------------------
/chapter7/test_gentleleft.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 |
3 | import time
4 | import atexit
5 |
6 | mh = Raspi_MotorHAT(addr=0x6f)
7 | lm = mh.getMotor(1)
8 | rm = mh.getMotor(2)
9 |
10 | def turn_off_motors():
11 | lm.run(Raspi_MotorHAT.RELEASE)
12 | rm.run(Raspi_MotorHAT.RELEASE)
13 | atexit.register(turn_off_motors)
14 |
15 | lm.setSpeed(50)
16 | rm.setSpeed(150)
17 |
18 | lm.run(Raspi_MotorHAT.FORWARD)
19 | rm.run(Raspi_MotorHAT.FORWARD)
20 | time.sleep(1)
21 |
--------------------------------------------------------------------------------
/chapter7/test_motors.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 |
3 | import time
4 | import atexit
5 |
6 | mh = Raspi_MotorHAT(addr=0x6f)
7 | lm = mh.getMotor(1)
8 | rm = mh.getMotor(2)
9 |
10 | def turn_off_motors():
11 | lm.run(Raspi_MotorHAT.RELEASE)
12 | rm.run(Raspi_MotorHAT.RELEASE)
13 | atexit.register(turn_off_motors)
14 |
15 | lm.setSpeed(150)
16 | rm.setSpeed(150)
17 |
18 | lm.run(Raspi_MotorHAT.FORWARD)
19 | rm.run(Raspi_MotorHAT.FORWARD)
20 | time.sleep(1)
21 |
--------------------------------------------------------------------------------
/chapter7/test_spinleft.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 |
3 | import time
4 | import atexit
5 |
6 | mh = Raspi_MotorHAT(addr=0x6f)
7 | lm = mh.getMotor(1)
8 | rm = mh.getMotor(2)
9 |
10 | def turn_off_motors():
11 | lm.run(Raspi_MotorHAT.RELEASE)
12 | rm.run(Raspi_MotorHAT.RELEASE)
13 | atexit.register(turn_off_motors)
14 |
15 | lm.setSpeed(150)
16 | rm.setSpeed(150)
17 |
18 | lm.run(Raspi_MotorHAT.BACKWARD)
19 | rm.run(Raspi_MotorHAT.FORWARD)
20 | time.sleep(1)
21 |
--------------------------------------------------------------------------------
/chapter7/test_spinright.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 |
3 | import time
4 | import atexit
5 |
6 | mh = Raspi_MotorHAT(addr=0x6f)
7 | lm = mh.getMotor(1)
8 | rm = mh.getMotor(2)
9 |
10 | def turn_off_motors():
11 | lm.run(Raspi_MotorHAT.RELEASE)
12 | rm.run(Raspi_MotorHAT.RELEASE)
13 | atexit.register(turn_off_motors)
14 |
15 | lm.setSpeed(150)
16 | rm.setSpeed(150)
17 |
18 | lm.run(Raspi_MotorHAT.FORWARD)
19 | rm.run(Raspi_MotorHAT.BACKWARD)
20 | time.sleep(1)
21 |
--------------------------------------------------------------------------------
/chapter8/line_following_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | class LineFollowingBehavior:
5 | # Note - this is the robot ON the line.
6 | def __init__(self, the_robot, forward_speed=30, cornering=-30):
7 | self.robot = the_robot
8 | self.forward_speed = forward_speed
9 | self.cornering = cornering
10 |
11 | def when_left_crosses_line(self):
12 | self.robot.set_left(self.cornering)
13 |
14 | def when_right_crosses_line(self):
15 | self.robot.set_right(self.cornering)
16 |
17 | def when_left_off_line(self):
18 | self.robot.set_left(self.forward_speed)
19 |
20 | def when_right_off_line(self):
21 | self.robot.set_right(self.forward_speed)
22 |
23 | def run(self):
24 | # Setup conditions
25 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
26 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
27 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
28 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
29 | # Start driving
30 | self.robot.set_left(self.forward_speed)
31 | self.robot.set_right(self.forward_speed)
32 | while True:
33 | sleep(0.02)
34 |
35 |
36 | bot = Robot()
37 | behavior = LineFollowingBehavior(bot)
38 | behavior.run()
39 |
40 |
--------------------------------------------------------------------------------
/chapter8/readme.md:
--------------------------------------------------------------------------------
1 | # Setup
2 |
3 | Currently, GPIOZero release 1.4.1 does not have the pull_up=True option for the LineSensor object.
4 | When 1.5 is available, this option will be present.
5 | Until then, the master branch can be used; on the Pi, type:
6 |
7 | pi@myrobot:~ $ pip install git+https://github.com/RPi-Distro/python-gpiozero.git
8 |
9 | If you use gpiozero 1.4.1, you are likely to see errors about the pull_up argument not existing.
10 |
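11 | To check which gpiozero version is installed (a quick sanity check):
12 |
13 |     pi@myrobot:~ $ python -c "import gpiozero; print(gpiozero.__version__)"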
--------------------------------------------------------------------------------
/chapter8/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 |
4 | import atexit
5 |
6 | class Robot(object):
7 | def __init__(self, motorhat_addr=0x6f):
8 | # Setup the motorhat with the passed in address
9 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
10 |
11 | # get local variable for each motor
12 | self.left_motor = self._mh.getMotor(1)
13 | self.right_motor = self._mh.getMotor(2)
14 |
15 | # ensure the motors get stopped when the code exits
16 | atexit.register(self.stop_all)
17 |
18 | # Setup the line sensors
19 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
20 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
21 |
22 | def stop_all(self):
23 | self.stop_motors()
24 |
25 | # Clear any sensor handlers
26 | self.left_line_sensor.when_line = None
27 | self.left_line_sensor.when_no_line = None
28 | self.right_line_sensor.when_line = None
29 | self.right_line_sensor.when_no_line = None
30 |
31 | def convert_speed(self, speed):
32 | mode = Raspi_MotorHAT.RELEASE
33 | if speed > 0:
34 | mode = Raspi_MotorHAT.FORWARD
35 | elif speed < 0:
36 | mode = Raspi_MotorHAT.BACKWARD
37 | output_speed = (abs(speed) * 255) / 100
38 | return mode, int(output_speed)
39 |
40 | def set_left(self, speed):
41 | mode, output_speed = self.convert_speed(speed)
42 | self.left_motor.setSpeed(output_speed)
43 | self.left_motor.run(mode)
44 |
45 | def set_right(self, speed):
46 | mode, output_speed = self.convert_speed(speed)
47 | self.right_motor.setSpeed(output_speed)
48 | self.right_motor.run(mode)
49 |
50 | def stop_motors(self):
51 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
52 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
53 |
--------------------------------------------------------------------------------
/chapter8/stop_at_line.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | r = Robot()
5 |
6 | # Reuse the robot's line sensors - creating new LineSensor objects on the same pins would fail with a "pin already in use" error from gpiozero.
7 | lsensor = r.left_line_sensor
8 | rsensor = r.right_line_sensor
9 |
10 | lsensor.when_line = r.stop_motors
11 | rsensor.when_line = r.stop_motors
12 | r.set_left(60)
13 | r.set_right(60)
14 | while True:
15 | sleep(0.02)
16 |
--------------------------------------------------------------------------------
/chapter8/test_line_sensor.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LineSensor
2 | from time import sleep
3 |
4 |
5 | lsensor = LineSensor(23)
6 | rsensor = LineSensor(16)
7 |
8 | while True:
9 |     print("Left %s Right %s" % (lsensor.is_active, rsensor.is_active))
10 | sleep(0.05)
11 |
--------------------------------------------------------------------------------
/chapter9/following_rainbows.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | import colorsys
3 | from time import sleep
4 |
5 | class FollowingRainbows:
6 | # Note - this is the robot ON the line.
7 | def __init__(self, the_robot, forward_speed=40, cornering=0):
8 | self.robot = the_robot
9 | self.forward_speed = forward_speed
10 | self.cornering = cornering
11 | self.left_index = 0
12 | self.left_brightness = 0
13 | self.right_index = 0
14 | self.right_brightness = 0
15 | self.main_index = 0
16 |
17 | led_qtr = int(self.robot.leds.count/4)
18 | self.right_indicator = range(0, led_qtr)
19 | self.left_indicator = range(self.robot.leds.count - led_qtr, self.robot.leds.count)
20 |
21 |
22 | def when_left_crosses_line(self):
23 | self.robot.set_left(self.cornering)
24 | self.left_brightness = 1.0
25 |
26 | def when_right_crosses_line(self):
27 | self.robot.set_right(self.cornering)
28 | self.right_brightness = 1.0
29 |
30 | def when_left_off_line(self):
31 | self.robot.set_left(self.forward_speed)
32 |
33 | def when_right_off_line(self):
34 | self.robot.set_right(self.forward_speed)
35 |
36 | def hsv_to_rgb(self, h, s, v):
37 | return [int(component*255) for component in colorsys.hsv_to_rgb(h, s, v)]
38 |
39 | def make_display(self):
40 | # main rainbow
41 | half_leds = int(self.robot.leds.count/2)
42 | qtr_leds = int(self.robot.leds.count/4)
43 | for n in range(0, half_leds):
44 | offset = (240/half_leds) * n
45 | ih = (self.main_index + offset) % 360
46 | ch = self.hsv_to_rgb(ih / 360.0, 1.0, 0.6)
47 |             rgb = ch  # hsv_to_rgb already returns 0-255 ints; don't scale by 255 again
48 | self.robot.leds.set_one(qtr_leds + n, rgb)
49 | self.main_index += 5
50 |         # Left and right indicators
51 | for n in range(0, qtr_leds):
52 | offset = (60/7.0) * n
53 | lh = (self.left_index + offset) % 360
54 | ch = self.hsv_to_rgb(lh / 360.0, 1.0, self.left_brightness)
55 |             rgb = ch  # already 0-255 ints
56 | self.robot.leds.set_one(n, rgb)
57 | rh = (self.right_index + offset) % 360
58 | ch = self.hsv_to_rgb(rh / 360.0, 1.0, self.right_brightness)
59 |             rgb = ch  # already 0-255 ints
60 | self.robot.leds.set_one(self.robot.leds.count-1-n, rgb)
61 | self.left_index += 5
62 | self.right_index -= 5
63 | if self.left_brightness >= 0.1:
64 | self.left_brightness -= 0.1
65 | if self.right_brightness >= 0.1:
66 | self.right_brightness -= 0.1
67 | self.robot.leds.show()
68 |
69 | def run(self):
70 | # Setup conditions
71 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
72 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
73 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
74 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
75 | # Start driving
76 | self.robot.set_left(self.forward_speed)
77 | self.robot.set_right(self.forward_speed)
78 | while True:
79 | sleep(0.01)
80 | self.make_display()
81 |
82 |
83 | bot = Robot()
84 | behavior = FollowingRainbows(bot)
85 | behavior.run()
86 |
87 |
--------------------------------------------------------------------------------
/chapter9/leds_8_apa102c.py:
--------------------------------------------------------------------------------
1 | import spidev
2 |
3 | class Leds(object):
4 | def __init__(self):
5 |         # MOSI (Master Out, Slave In) is the default output on GPIO 10. The clock is GPIO 11.
6 | self.device = spidev.SpiDev()
7 | self.device.open(0, 0)
8 | self.device.max_speed_hz = 15000
9 | self.colors = [(0,0,0)] * self.count
10 |
11 | @property
12 | def count(self):
13 | return 8
14 |
15 | def set_one(self, led_number, color):
16 | assert(len(color) == 3)
17 | self.colors[led_number] = color
18 |
19 | def set_range(self, a_range, color):
20 | for led_number in a_range:
21 | self.colors[led_number] = color
22 |
23 | def set_all(self, color):
24 | self.colors = [color] * self.count
25 |
26 | def clear(self):
27 | self.set_all((0, 0, 0))
28 |
29 | def show(self):
30 | # Create the wake up header
31 | data = [0] * 4
32 |         for color in self.colors:
33 |             data.append(0xe1)  # LED frame marker bits (0b111) plus a low brightness (1 of 31)
34 |             data.extend(color)
35 |         data.extend([0] * 4)  # end frame
36 | # send it
37 | self.device.xfer(data)
38 |
--------------------------------------------------------------------------------
/chapter9/leds_led_shim.py:
--------------------------------------------------------------------------------
1 | import ledshim
2 |
3 | class Leds(object):
4 | def __init__(self):
5 | self.count = 24
6 |
7 | def set_one(self, led_number, color):
8 | ledshim.set_pixel(led_number, *color)
9 |
10 | def set_range(self, a_range, color):
11 | ledshim.set_multiple_pixels(a_range, color)
12 |
13 | def set_all(self, color):
14 | ledshim.set_all(*color)
15 |
16 | def clear(self):
17 | ledshim.clear()
18 |
19 | def show(self):
20 | ledshim.show()
--------------------------------------------------------------------------------
/chapter9/leds_test.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | bot = Robot()
5 | red = (255, 0, 0)
6 | blue = (0, 0, 255)
7 |
8 | while True:
9 | print("red")
10 | bot.leds.set_all(red)
11 | bot.leds.show()
12 | sleep(0.5)
13 | print("blue")
14 | bot.leds.set_all(blue)
15 | bot.leds.show()
16 | sleep(0.5)
17 |
18 |
--------------------------------------------------------------------------------
/chapter9/line_following_behavior.py:
--------------------------------------------------------------------------------
1 | from robot import Robot
2 | from time import sleep
3 |
4 | cross_line_color = (255, 0, 0)
5 | off_line_color = (0, 0, 255)
6 |
7 | class LineFollowingBehavior:
8 | # Note - this is the robot ON the line.
9 | def __init__(self, the_robot, forward_speed=30, cornering=-30):
10 | self.robot = the_robot
11 | self.forward_speed = forward_speed
12 | self.cornering = cornering
13 |
14 | led_qtr = int(self.robot.leds.count/4)
15 | self.right_indicator = range(0, led_qtr)
16 | self.left_indicator = range(self.robot.leds.count - led_qtr, self.robot.leds.count)
17 |
18 | self.robot.leds.show()
19 |
20 | def when_left_crosses_line(self):
21 | self.robot.set_left(self.cornering)
22 | self.robot.leds.set_range(self.left_indicator, cross_line_color)
23 | self.robot.leds.show()
24 |
25 | def when_right_crosses_line(self):
26 | self.robot.set_right(self.cornering)
27 | self.robot.leds.set_range(self.right_indicator, cross_line_color)
28 | self.robot.leds.show()
29 |
30 | def when_left_off_line(self):
31 | self.robot.set_left(self.forward_speed)
32 | self.robot.leds.set_range(self.left_indicator, off_line_color)
33 | self.robot.leds.show()
34 |
35 | def when_right_off_line(self):
36 | self.robot.set_right(self.forward_speed)
37 | self.robot.leds.set_range(self.right_indicator, off_line_color)
38 | self.robot.leds.show()
39 |
40 | def run(self):
41 | # Setup conditions
42 | self.robot.left_line_sensor.when_line = self.when_left_crosses_line
43 | self.robot.left_line_sensor.when_no_line = self.when_left_off_line
44 | self.robot.right_line_sensor.when_line = self.when_right_crosses_line
45 | self.robot.right_line_sensor.when_no_line = self.when_right_off_line
46 | # Start driving
47 | self.robot.set_left(self.forward_speed)
48 | self.robot.set_right(self.forward_speed)
49 | while True:
50 | sleep(0.02)
51 |
52 |
53 | bot = Robot()
54 | behavior = LineFollowingBehavior(bot)
55 | behavior.run()
56 |
57 |
--------------------------------------------------------------------------------
/chapter9/readme.md:
--------------------------------------------------------------------------------
1 | # Setup
2 |
3 | You should already have SPI enabled as mentioned in chapter 7.
4 |
5 | For the APA102 LEDs, you will need the spidev library installed:
6 |
7 | pi@myrobot:~ $ pip install spidev
8 |
9 | # LED Shim
10 |
11 | To use the Pimoroni LED Shim instead, you will need to set it up first:
12 |
13 | pi@myrobot:~ $ curl https://get.pimoroni.com/ledshim | bash
14 |
15 |
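16 | The robot.py in this folder defaults to the APA102 LEDs. To switch to the LED Shim instead, swap which import is commented out at the top of robot.py and construct that Leds class, something like:
17 |
18 |     import leds_led_shim
19 |     #import leds_8_apa102c
20 |
21 |     # then, in Robot.__init__:
22 |     self.leds = leds_led_shim.Leds()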
--------------------------------------------------------------------------------
/chapter9/robot.py:
--------------------------------------------------------------------------------
1 | from Raspi_MotorHAT import Raspi_MotorHAT
2 | from gpiozero import LineSensor
3 | import atexit
4 | #import leds_led_shim
5 | import leds_8_apa102c
6 |
7 | class Robot(object):
8 | def __init__(self, motorhat_addr=0x6f):
9 | # Setup the motorhat with the passed in address
10 | self._mh = Raspi_MotorHAT(addr=motorhat_addr)
11 |
12 | # get local variable for each motor
13 | self.left_motor = self._mh.getMotor(1)
14 | self.right_motor = self._mh.getMotor(2)
15 |
16 | # ensure the motors get stopped when the code exits
17 | atexit.register(self.stop_all)
18 |
19 | # Setup the line sensors
20 | self.left_line_sensor = LineSensor(23, queue_len=3, pull_up=True)
21 | self.right_line_sensor = LineSensor(16, queue_len=3, pull_up=True)
22 |
23 | # Setup the Leds
24 | self.leds = leds_8_apa102c.Leds()
25 |
26 | def stop_all(self):
27 | self.stop_motors()
28 |
29 | # Clear any sensor handlers
30 | self.left_line_sensor.when_line = None
31 | self.left_line_sensor.when_no_line = None
32 | self.right_line_sensor.when_line = None
33 | self.right_line_sensor.when_no_line = None
34 |
35 | # Clear the display
36 | self.leds.clear()
37 | self.leds.show()
38 |
39 | def convert_speed(self, speed):
40 | mode = Raspi_MotorHAT.RELEASE
41 | if speed > 0:
42 | mode = Raspi_MotorHAT.FORWARD
43 | elif speed < 0:
44 | mode = Raspi_MotorHAT.BACKWARD
45 | output_speed = (abs(speed) * 255) / 100
46 | return mode, int(output_speed)
47 |
48 | def set_left(self, speed):
49 | mode, output_speed = self.convert_speed(speed)
50 | self.left_motor.setSpeed(output_speed)
51 | self.left_motor.run(mode)
52 |
53 | def set_right(self, speed):
54 | mode, output_speed = self.convert_speed(speed)
55 | self.right_motor.setSpeed(output_speed)
56 | self.right_motor.run(mode)
57 |
58 | def stop_motors(self):
59 | self.left_motor.run(Raspi_MotorHAT.RELEASE)
60 | self.right_motor.run(Raspi_MotorHAT.RELEASE)
61 |
--------------------------------------------------------------------------------