20 | {% endblock %}
21 |
--------------------------------------------------------------------------------
/packages/common/pitop/common/common_names.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class DeviceName(Enum):
5 | pi_top_ceed = "pi-topCEED"
6 | pi_top_3 = "pi-top [3]"
7 | pi_top_4 = "pi-top [4]"
8 |
9 |
10 | class PeripheralName(Enum):
11 | unknown = "Unknown"
12 | pi_top_pulse = "pi-topPULSE"
13 | pi_top_speaker_l = "pi-topSPEAKER (v1) - Left channel"
14 | pi_top_speaker_r = "pi-topSPEAKER (v1) - Right channel"
15 | pi_top_speaker_m = "pi-topSPEAKER (v1) - Mono"
16 | pi_top_speaker_v2 = "pi-topSPEAKER (v2)"
17 | pi_top_proto_plus = "pi-topPROTO+"
18 |
19 |
20 | class FirmwareDeviceName(Enum):
21 | pt4_hub = "pi-top [4]"
22 | pt4_foundation_plate = "pi-top [4] Foundation Plate"
23 | pt4_expansion_plate = "pi-top [4] Expansion Plate"
24 |
--------------------------------------------------------------------------------
/examples/system/miniscreen/miniscreen_display_animated_image_loop_in_background.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from PIL import Image
4 |
5 | from pitop import Pitop
6 |
7 | pitop = Pitop()
8 | miniscreen = pitop.miniscreen
9 |
10 | image = Image.open("/usr/lib/python3/dist-packages/pitop/miniscreen/images/rocket.gif")
11 |
12 | # Run animation loop in background by setting `background` to True
13 | miniscreen.play_animated_image(image, background=True, loop=True)
14 |
15 |
16 | # Do stuff while showing image
17 | print("Counting to 100 while showing animated image on miniscreen...")
18 |
19 | for i in range(100):
20 | print("\r{}".format(i), end="", flush=True)
21 | sleep(0.2)
22 |
23 | print("\rFinished!")
24 |
25 | # Stop animation
26 | miniscreen.stop_animated_image()
27 |
--------------------------------------------------------------------------------
/examples/camera/camera_image_preview_capture.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | import cv2
4 |
5 | from pitop import Camera, Pitop
6 |
7 | miniscreen = Pitop().miniscreen
8 |
9 | cam = Camera(format="OpenCV", flip_top_bottom=True)
10 | directory = "images/"
11 | button = miniscreen.select_button
12 | picture_count = 0
13 |
14 |
15 | while True:
16 | frame = cam.get_frame()
17 |
18 | cv2.imshow("Frame", frame)
19 | miniscreen.display_image(frame)
20 | if button.is_pressed:
21 | cv2.imwrite(f"{directory}image_{picture_count}.jpg", frame)
22 | print(f"Frame written to file with ID: {picture_count}\n")
23 | picture_count += 1
24 | sleep(0.5)
25 | if cv2.waitKey(1) & 0xFF == ord("q"):
26 | break
27 |
28 |
29 | cv2.destroyAllWindows()
30 |
--------------------------------------------------------------------------------
/examples/camera/camera_face_detector.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | import cv2
4 |
5 | from pitop import Camera
6 | from pitop.processing.algorithms.faces import FaceDetector
7 |
8 |
9 | def find_faces(frame):
10 | face = face_detector(frame)
11 | robot_view = face.robot_view
12 |
13 | cv2.imshow("Faces", robot_view)
14 | cv2.waitKey(1)
15 |
16 | if face.found:
17 | print(
18 | f"Face angle: {face.angle} \n"
19 | f"Face center: {face.center} \n"
20 | f"Face rectangle: {face.rectangle} \n"
21 | )
22 | else:
23 | print("Cannot find face!")
24 |
25 |
26 | camera = Camera(resolution=(640, 480), flip_top_bottom=True)
27 | face_detector = FaceDetector()
28 |
29 | camera.on_frame = find_faces
30 |
31 | pause()
32 |
--------------------------------------------------------------------------------
/packages/miniscreen/pitop/miniscreen/oled/core/contrib/luma/core/error.py:
--------------------------------------------------------------------------------
1 | class Error(Exception):
2 | """Base class for exceptions in this library."""
3 |
4 | pass
5 |
6 |
7 | class DeviceNotFoundError(Error):
8 | """Exception raised when a device cannot be found."""
9 |
10 |
11 | class DevicePermissionError(Error):
12 | """Exception raised when permission to access the device is denied."""
13 |
14 |
15 | class DeviceAddressError(Error):
16 | """Exception raised when an invalid device address is detected."""
17 |
18 |
19 | class DeviceDisplayModeError(Error):
20 | """Exception raised when an invalid device display mode is detected."""
21 |
22 |
23 | class UnsupportedPlatform(Error):
24 | """Exception raised when trying to use the library on an incompatible
25 | system."""
26 |
--------------------------------------------------------------------------------
/docs/more/license.rst:
--------------------------------------------------------------------------------
1 | =====================================================
2 | License
3 | =====================================================
4 |
5 |
6 | Copyright 2020 CEED Ltd.
7 |
8 | Licensed under the Apache License, Version 2.0 (the "License");
9 | you may not use this file except in compliance with the License.
10 | You may obtain a copy of the License at
11 |
12 | http://www.apache.org/licenses/LICENSE-2.0
13 |
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 |
20 | *Version 2.0, January 2004*
21 | http://www.apache.org/licenses/
22 |
--------------------------------------------------------------------------------
/tests/README.md:
--------------------------------------------------------------------------------
1 | # Tests
2 |
3 | ## Running the tests
4 |
5 | To run the tests, install the dependencies declared in `tests/requirements.txt`, then invoke `pytest`:
6 |
7 | ```
8 | $ pip3 install -r tests/requirements.txt
9 | $ pytest --verbose
10 | ```
11 |
12 | ## Using Docker
13 |
14 | Since downloading or building the dependencies can take a while, you can use a Docker image with them pre-installed. We provide a `Dockerfile` that builds an image suitable for running the tests.
15 |
16 | Build the image by running:
17 | ```
18 | $ cd tests
19 | $ docker build -t sdk-test-runner .
20 | ```
21 |
22 | Then, run the tests with:
23 | ```
24 | $ docker run \
25 | --rm \
26 | -it \
27 | --volume "$PWD/..":/sdk \
28 | --workdir /sdk \
29 | --entrypoint=pytest \
30 | sdk-test-runner \
31 | --verbose
32 | ```
33 |
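Anything given after the image name is passed to `pytest` (the container's entrypoint), so you can also run a subset of the tests, for example by filtering on a keyword. This is a sketch only; the `miniscreen` keyword expression is just an illustration:
```
$ docker run \
--rm \
-it \
--volume "$PWD/..":/sdk \
--workdir /sdk \
--entrypoint=pytest \
sdk-test-runner \
-k miniscreen \
--verbose
```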
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Need a place to start? Try the Knowledge Base!
4 | url: https://knowledgebase.pi-top.com/
5 | about: Find answers to commonly asked questions.
6 | - name: Need to go a little deeper? Try the Forum!
7 | url: https://forum.pi-top.com/c/pi-top-software
8 | about: Discuss and search through support topics.
9 | - name: Committed to creating a GitHub Issue? Read the 'Contributing to pi-topOS' Knowledge Base article!
10 | url: http://knowledgebase.pi-top.com/knowledge/contributing-to-pi-top-os
11 | about: Get information about how to create effective GitHub Issues.
12 | - name: Looking to learn about programming? Check out Further!
13 | url: https://further.pi-top.com/explore/
14 | about: Complete challenges to learn how to program your pi-top.
15 |
--------------------------------------------------------------------------------
/examples/recipes/prax_shake_head.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop import TiltRollHeadController
4 |
5 | # Create a head controller object
6 | head = TiltRollHeadController()
7 |
8 | # Initialize the servo angles
9 | head.roll.target_angle = 0
10 | head.tilt.target_angle = 50
11 | sleep(1)
12 |
13 |
14 | # Nod 6 times at max speed, 5 degrees either side of the current angle. Blocks program execution until finished.
15 | head.nod(times=6, angle=5, speed=100, block=True)
16 |
17 | # Shake 4 times at half speed, 10 degrees either side of the current angle. Blocks program execution until finished.
18 | head.shake(times=4, angle=10, speed=50, block=True)
19 |
20 | # Shake and nod at the same time with default speed and angle
21 | # Setting nod with block=False ensures the program continues to the next command
22 | head.nod(times=6, block=False)
23 | head.shake(times=6, block=True)
24 |
--------------------------------------------------------------------------------
/packages/keyboard/pitop/keyboard/vendor/pynput/_info.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # pynput
3 | # Copyright (C) 2015-2018 Moses Palmér
4 | #
5 | # This program is free software: you can redistribute it and/or modify it under
6 | # the terms of the GNU Lesser General Public License as published by the Free
7 | # Software Foundation, either version 3 of the License, or (at your option) any
8 | # later version.
9 | #
10 | # This program is distributed in the hope that it will be useful, but WITHOUT
11 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
13 | # details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 |
18 | __author__ = "Moses Palmér"
19 | __version__ = (1, 4, 2)
20 |
--------------------------------------------------------------------------------
/examples/pulse/leds-test_colors.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from pitop.pulse import ledmatrix
4 |
5 |
6 | def show_map(r, g, b):
7 | for x in range(0, 7):
8 | for y in range(0, 7):
9 | z = (float(y) + 7.0 * float(x)) / 49.0
10 | rr = int(z * r)
11 | gg = int(z * g)
12 | bb = int(z * b)
13 | ledmatrix.set_pixel(x, y, rr, gg, bb)
14 | ledmatrix.show()
15 |
16 |
17 | ledmatrix.rotation(0)
18 | ledmatrix.clear()
19 |
20 | # For each color combination, display a ramp of 49 intensities across the 7x7 matrix
21 | for r in range(0, 2):
22 | for g in range(0, 2):
23 | for b in range(2):
24 | if r + g + b > 0:
25 | rr = 255 * r
26 | gg = 255 * g
27 | bb = 255 * b
28 | print(rr, gg, bb)
29 | show_map(rr, gg, bb)
30 | time.sleep(5)
31 |
32 | ledmatrix.clear()
33 | ledmatrix.show()
34 |
--------------------------------------------------------------------------------
/packages/camera/pitop/camera/core/capture_actions/capture_action_base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from os import mkdir, path
3 | from pathlib import Path
4 | from time import strftime
5 |
6 |
7 | class CaptureActionBase(ABC):
8 | """Abstract class from which all capture actions classes must inherit
9 | from."""
10 |
11 | @abstractmethod
12 | def process(self, frame):
13 | pass
14 |
15 | @abstractmethod
16 | def stop(self):
17 | pass
18 |
19 | def _get_output_filename(self, directory, extension):
20 | return path.join(
21 | directory, "output_" + strftime("%Y-%m-%d-%H-%M-%S") + "." + extension
22 | )
23 |
24 | def _create_output_directory(self):
25 | output_directory = path.join(str(Path.home()), "Camera")
26 |
27 | if not path.isdir(output_directory):
28 | mkdir(output_directory)
29 |
30 | return output_directory
31 |
--------------------------------------------------------------------------------
/examples/recipes/robot_pan_tilt_face_tracker.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | import cv2
4 |
5 | from pitop import Camera, PanTiltController
6 | from pitop.processing.algorithms.faces import FaceDetector
7 |
8 |
9 | def track_face(frame):
10 | face = face_detector(frame)
11 | robot_view = face.robot_view
12 |
13 | cv2.imshow("Faces", robot_view)
14 | cv2.waitKey(1)
15 |
16 | if face.found:
17 | face_center = face.center
18 | pan_tilt.track_object(face_center)
19 | print(f"Face center: {face_center}")
20 | else:
21 | pan_tilt.track_object.stop()
22 | print("Cannot find face!")
23 |
24 |
25 | face_detector = FaceDetector()
26 |
27 | pan_tilt = PanTiltController(servo_pan_port="S0", servo_tilt_port="S3")
28 | pan_tilt.tilt_servo.target_angle = 0
29 | pan_tilt.pan_servo.target_angle = 0
30 |
31 | camera = Camera(resolution=(640, 480))
32 | camera.on_frame = track_face
33 |
34 | pause()
35 |
--------------------------------------------------------------------------------
/packages/processing/pitop/processing/algorithms/faces/core/emotion.py:
--------------------------------------------------------------------------------
1 | class Emotion:
2 | def __init__(self):
3 | self._type = None
4 | self._confidence = 0.0
5 | self._robot_view = None
6 |
7 | def clear(self):
8 | self.type = None
9 | self.confidence = 0.0
10 |
11 | @property
12 | def type(self):
13 | return self._type
14 |
15 | @type.setter
16 | def type(self, value):
17 | self._type = value
18 |
19 | @property
20 | def confidence(self):
21 | return self._confidence
22 |
23 | @confidence.setter
24 | def confidence(self, value):
25 | self._confidence = value
26 |
27 | @property
28 | def robot_view(self):
29 | return self._robot_view
30 |
31 | @robot_view.setter
32 | def robot_view(self, value):
33 | self._robot_view = value
34 |
35 | @property
36 | def found(self):
37 | return self.type is not None
38 |
--------------------------------------------------------------------------------
/examples/labs/photobooth_controller/styles.css:
--------------------------------------------------------------------------------
1 | header {
2 | padding: 5px;
3 | display: flex;
4 | flex-direction: row;
5 | justify-content: flex-end;
6 | }
7 |
8 | input {
9 | border-radius: 15px;
10 | height: 30px;
11 | width: 300px;
12 | padding: 0 15px;
13 | }
14 |
15 | button {
16 | border: 2px solid gainsboro;
17 | border-radius: 15px;
18 | color: white;
19 | font-weight: bold;
20 | height: 30px;
21 | min-width: 100px;
22 | padding: 0 10px;
23 | text-align: center;
24 | background-color: var(--background-color);
25 | opacity: 1;
26 | margin: 0 5px;
27 | }
28 |
29 | button:active {
30 | opacity: 0.8;
31 | }
32 |
33 | #flash {
34 | position: fixed;
35 | top: 0;
36 | left: 0;
37 | visibility: hidden;
38 | background: white;
39 | height: 100vh;
40 | width: 100vw;
41 | }
42 |
43 | .visible {
44 | visibility: visible !important;
45 | }
46 |
47 | .fadeout {
48 | opacity: 0;
49 | transition: opacity 0.75s ease-out;
50 | }
51 |
--------------------------------------------------------------------------------
/examples/camera/camera_emotion_detector.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | import cv2
4 |
5 | from pitop import Camera
6 | from pitop.processing.algorithms.faces import EmotionClassifier, FaceDetector
7 |
8 |
9 | def detect_emotion(frame):
10 | face = face_detector(frame)
11 | emotion = emotion_classifier(face)
12 |
13 | if emotion.found:
14 | print(f"{emotion_lookup[emotion.type]}", end="\r", flush=True)
15 | else:
16 | print("Face not found!")
17 |
18 | cv2.imshow("Emotion", emotion.robot_view)
19 | cv2.waitKey(1)
20 |
21 |
22 | camera = Camera(resolution=(640, 480), flip_top_bottom=True)
23 |
24 | face_detector = FaceDetector()
25 | emotion_classifier = EmotionClassifier()
26 | emotion_types = emotion_classifier.emotion_types
27 | ascii_emotions = [":|", ":c", "D:<", ":)", ":(", ":O"]
28 | emotion_lookup = {
29 | emotion_types[i]: ascii_emotions[i] for i in range(len(emotion_types))
30 | }
31 |
32 | camera.on_frame = detect_emotion
33 |
34 | pause()
35 |
--------------------------------------------------------------------------------
/packages/keyboard/pitop/keyboard/vendor/pynput/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # pynput
3 | # Copyright (C) 2015-2018 Moses Palmér
4 | #
5 | # This program is free software: you can redistribute it and/or modify it under
6 | # the terms of the GNU Lesser General Public License as published by the Free
7 | # Software Foundation, either version 3 of the License, or (at your option) any
8 | # later version.
9 | #
10 | # This program is distributed in the hope that it will be useful, but WITHOUT
11 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
13 | # details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public License
16 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
17 | """The main *pynput* module.
18 |
19 | This module imports ``keyboard`` and ``mouse``.
20 | """
21 |
22 | from . import keyboard, mouse
23 | from ._logger import _logger
24 |
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | import filecmp
2 | from io import BytesIO
3 | from time import sleep
4 | from typing import Callable
5 |
6 |
7 | def wait_until(condition: Callable, on_wait: Callable = None, timeout: int = 5) -> None:
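    """Poll ``condition`` roughly every 0.1 s until it returns True or ``timeout``
    seconds have elapsed, calling ``on_wait`` (if callable) between polls and
    raising TimeoutError if the timeout expires."""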
8 | t = 0
9 | delta = 0.1
10 | while not condition() and t <= timeout:
11 | sleep(delta)
12 | t += delta
13 | if callable(on_wait):
14 | on_wait()
15 | if t > timeout:
16 | raise TimeoutError("wait_until: timeout expired")
17 |
18 |
19 | def to_bytes(image):
20 | img_byte_arr = BytesIO()
21 | image.save(img_byte_arr, format="PNG")
22 | return img_byte_arr.getvalue()
23 |
24 |
25 | def file_content_is_identical(filename1, filename2, match_eof=False) -> bool:
26 | """Compares the contents of two files."""
27 | if match_eof:
28 | return filecmp.cmp(filename1, filename2)
29 |
30 | with open(filename1, "r") as f1:
31 | with open(filename2, "r") as f2:
32 | return f1.read().splitlines() == f2.read().splitlines()
33 |
--------------------------------------------------------------------------------
/packages/simulation/pitop/simulation/virtual_hardware/fonts.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import patch
2 |
3 |
4 | def mock_fonts():
5 | from pygame import font
6 |
7 | fonts = font.get_fonts()
8 |
9 | have_roboto = any([1 for f in fonts if "roboto" in f])
10 | have_vera = any([1 for f in fonts if "vera" in f])
11 |
12 | sans_fonts = [f for f in fonts if "sans" in f and "mono" not in f]
13 | default_sans = sans_fonts[0] if len(sans_fonts) else fonts[0]
14 |
15 | mono_fonts = [f for f in fonts if "mono" in f]
16 | default_mono = mono_fonts[0] if len(mono_fonts) else fonts[0]
17 |
18 | if not have_roboto:
19 | fallback = font.match_font(default_sans)
20 | patch(
21 | "pitop.miniscreen.oled.assistant.Fonts.regular", return_value=fallback
22 | ).start()
23 |
24 | if not have_vera:
25 |
26 | def mono(bold, italics):
27 | return font.match_font(default_mono, bold, italics)
28 |
29 | patch("pitop.miniscreen.oled.assistant.Fonts.mono", mono).start()
30 |
--------------------------------------------------------------------------------
/examples/recipes/robot_line_detect.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | from pitop import Camera, DriveController, Pitop
4 | from pitop.processing.algorithms.line_detect import process_frame_for_line
5 |
6 | # Assemble a robot
7 | robot = Pitop()
8 | robot.add_component(DriveController(left_motor_port="M3", right_motor_port="M0"))
9 | robot.add_component(Camera())
10 |
11 |
12 | # Set up logic based on line detection
13 | def drive_based_on_frame(frame):
14 | processed_frame = process_frame_for_line(frame)
15 |
16 | if processed_frame.line_center is None:
17 | print("Line is lost!", end="\r")
18 | robot.drive.stop()
19 | else:
20 | print(f"Target angle: {processed_frame.angle:.2f} deg ", end="\r")
21 | robot.drive.forward(0.25, hold=True)
22 | robot.drive.target_lock_drive_angle(processed_frame.angle)
23 | robot.miniscreen.display_image(processed_frame.robot_view)
24 |
25 |
26 | # On each camera frame, detect a line
27 | robot.camera.on_frame = drive_based_on_frame
28 |
29 |
30 | pause()
31 |
--------------------------------------------------------------------------------
/examples/recipes/robot_move_random.py:
--------------------------------------------------------------------------------
1 | from random import randint
2 | from time import sleep
3 |
4 | from pitop import Pitop
5 | from pitop.robotics.drive_controller import DriveController
6 |
7 | # Create a basic robot
8 | robot = Pitop()
9 | drive = DriveController(left_motor_port="M3", right_motor_port="M0")
10 | robot.add_component(drive)
11 |
12 |
13 | # Use miniscreen display
14 | robot.miniscreen.display_multiline_text("hey there!")
15 |
16 |
17 | def random_speed_factor():
18 | # 0.01 - 1, 0.01 resolution
19 | return randint(1, 100) / 100
20 |
21 |
22 | def random_sleep():
23 | # 0.5 - 2, 0.5 resolution
24 | return randint(1, 4) / 2
25 |
26 |
27 | # Move around randomly
28 | robot.drive.forward(speed_factor=random_speed_factor())
29 | sleep(random_sleep())
30 |
31 | robot.drive.left(speed_factor=random_speed_factor())
32 | sleep(random_sleep())
33 |
34 | robot.drive.backward(speed_factor=random_speed_factor())
35 | sleep(random_sleep())
36 |
37 | robot.drive.right(speed_factor=random_speed_factor())
38 | sleep(random_sleep())
39 |
--------------------------------------------------------------------------------
/debian/rules:
--------------------------------------------------------------------------------
1 | #!/usr/bin/make -f
2 |
3 | export DH_VERBOSE=1
4 | DEBIAN_PACKAGE_PREFIX := python3-pitop
5 | BUILD_FOLDER := $(CURDIR)/debian/tmp/
6 | LIB_FOLDER := $(shell mktemp -d)
7 |
8 | %:
9 | dh $@
10 |
11 | override_dh_auto_test:
12 | # Don't run the tests!
13 |
14 | override_dh_install:
15 | set -ex ;\
16 |
17 | for SUBPACKAGE_FOLDER in $(wildcard packages/*) ; do \
18 | cd $$SUBPACKAGE_FOLDER ;\
19 | SUBPACKAGE_NAME=`echo $$SUBPACKAGE_FOLDER | cut -d/ -f 2 | tr '_' '-'` ;\
20 | DEBIAN_PACKAGE_NAME=$(DEBIAN_PACKAGE_PREFIX) ;\
21 | [ $$SUBPACKAGE_NAME != "pitop" ] && DEBIAN_PACKAGE_NAME=$(DEBIAN_PACKAGE_PREFIX)-$$SUBPACKAGE_NAME ;\
22 | python3 -B setup.py install --root $(BUILD_FOLDER)/$$DEBIAN_PACKAGE_NAME --install-layout deb --verbose ;\
23 | cp -r $(BUILD_FOLDER)/$$DEBIAN_PACKAGE_NAME/* $(LIB_FOLDER) ;\
24 | cd - ;\
25 | dh_install -p $$DEBIAN_PACKAGE_NAME $$DEBIAN_PACKAGE_NAME/* / ;\
26 | done
27 |
28 | # Fail on warnings; make verbose
29 | sphinx-build -W -v -bhtml docs/ build/html
30 | sphinx-build -W -v -bman docs/ build/man
31 |
--------------------------------------------------------------------------------
/examples/recipes/robot_head_rotation_tracker.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | import cv2
4 |
5 | from pitop import Camera, Pitop, TiltRollHeadController
6 | from pitop.processing.algorithms.faces import FaceDetector
7 |
8 |
9 | def track_face(frame):
10 | face = face_detector(frame)
11 | robot_view = face.robot_view
12 |
13 | cv2.imshow("Faces", robot_view)
14 | cv2.waitKey(1)
15 |
16 | if face.found:
17 | face_angle = face.angle
18 | robot.head.track_head_angle(face_angle)
19 | print(f"Face angle: {face.angle}")
20 | else:
21 | robot.head.roll.sweep(speed=0)
22 | print("Cannot find face!")
23 |
24 |
25 | robot = Pitop()
26 |
27 | robot.add_component(TiltRollHeadController(servo_roll_port="S0", servo_tilt_port="S3"))
28 | robot.head.calibrate()
29 | robot.head.tilt.target_angle = 70
30 | robot.head.roll.target_angle = 0
31 |
32 | robot.add_component(Camera(resolution=(640, 480), flip_top_bottom=True))
33 |
34 | face_detector = FaceDetector()
35 |
36 | robot.camera.on_frame = track_face
37 |
38 | pause()
39 |
--------------------------------------------------------------------------------
/packages/simulation/pitop/simulation/images/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | HERE = os.path.abspath(os.path.dirname(__file__))
4 |
5 |
6 | Pitop_miniscreen_pos = (152, 340)
7 |
8 | Button = f"{HERE}/Button.png"
9 | Button_pressed = f"{HERE}/Button_pressed.png"
10 | Buzzer = f"{HERE}/Buzzer.png"
11 | LED_green_off = f"{HERE}/LED_green_off.png"
12 | LED_green_on = f"{HERE}/LED_green_on.png"
13 | LED_red_off = f"{HERE}/LED_red_off.png"
14 | LED_red_on = f"{HERE}/LED_red_on.png"
15 | LED_yellow_off = f"{HERE}/LED_yellow_off.png"
16 | LED_yellow_on = f"{HERE}/LED_yellow_on.png"
17 | LightSensor = f"{HERE}/LightSensor.png"
18 | Pitop = f"{HERE}/Pitop.png"
19 | Potentiometer = f"{HERE}/Potentiometer.png"
20 | SoundSensor = f"{HERE}/SoundSensor.png"
21 | UltrasonicSensor = f"{HERE}/UltrasonicSensor.png"
22 |
23 | angle_icon = f"{HERE}/angle_icon.png"
24 | buzzer_sound_icon = f"{HERE}/buzzer_sound_icon.png"
25 | distance_icon = f"{HERE}/distance_icon.png"
26 | lightbulb_icon = f"{HERE}/lightbulb_icon.png"
27 | speaker_icon = f"{HERE}/speaker_icon.png"
28 |
29 |
30 | PMA_CUBE_SIZE = (102, 102)
31 | PITOP_SIZE = (435, 573)
32 |
--------------------------------------------------------------------------------
/examples/system/miniscreen/miniscreen_buttons.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop import Pitop
4 |
5 | pitop = Pitop()
6 | miniscreen = pitop.miniscreen
7 | up = miniscreen.up_button
8 | down = miniscreen.down_button
9 |
10 |
11 | def do_up_thing():
12 | print("Up button was pressed")
13 |
14 |
15 | def do_down_thing():
16 | print("Down button was pressed")
17 |
18 |
19 | def do_another_thing():
20 | print("do_another_thing invoked")
21 |
22 |
23 | def select_something():
24 | print("select_something called")
25 |
26 |
27 | # To invoke a function when the button is pressed/released,
28 | # you can assign the function to the 'when_pressed' or 'when_released' data member of a button
29 | print("Configuring miniscreen's up and down button events...")
30 | up.when_pressed = do_up_thing
31 | down.when_pressed = do_down_thing
32 | down.when_released = do_another_thing
33 |
34 |
35 | # Another way to react to button events is to poll the is_pressed data member
36 | print("Polling for if select button is pressed...")
37 | while True:
38 | if miniscreen.select_button.is_pressed:
39 | select_something()
40 | sleep(0.1)
41 |
--------------------------------------------------------------------------------
/packages/pma/pitop/pma/common/servo_motor_registers.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class ServoRegisterTypes:
5 | ACC_MODE = 0
6 | SPEED = 1
7 | ANGLE_AND_SPEED = 2
8 |
9 |
10 | ServoMotorS0 = {
11 | ServoRegisterTypes.ACC_MODE: 0x50,
12 | ServoRegisterTypes.SPEED: 0x56,
13 | ServoRegisterTypes.ANGLE_AND_SPEED: 0x5C,
14 | }
15 |
16 | ServoMotorS1 = {
17 | ServoRegisterTypes.ACC_MODE: 0x51,
18 | ServoRegisterTypes.SPEED: 0x57,
19 | ServoRegisterTypes.ANGLE_AND_SPEED: 0x5D,
20 | }
21 |
22 | ServoMotorS2 = {
23 | ServoRegisterTypes.ACC_MODE: 0x52,
24 | ServoRegisterTypes.SPEED: 0x58,
25 | ServoRegisterTypes.ANGLE_AND_SPEED: 0x5E,
26 | }
27 |
28 | ServoMotorS3 = {
29 | ServoRegisterTypes.ACC_MODE: 0x53,
30 | ServoRegisterTypes.SPEED: 0x59,
31 | ServoRegisterTypes.ANGLE_AND_SPEED: 0x5F,
32 | }
33 |
34 |
35 | class ServoControlRegisters(Enum):
36 | S0 = ServoMotorS0
37 | S1 = ServoMotorS1
38 | S2 = ServoMotorS2
39 | S3 = ServoMotorS3
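    # Illustrative lookup using the register maps above, e.g.:
    # ServoControlRegisters.S0.value[ServoRegisterTypes.SPEED] == 0x56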
40 |
41 |
42 | class ServoMotorSetup:
43 | REGISTER_MIN_PULSE_WIDTH = 0x4A
44 | REGISTER_MAX_PULSE_WIDTH = 0x4B
45 | REGISTER_PWM_FREQUENCY = 0x4C
46 |
--------------------------------------------------------------------------------
/packages/robotics/pitop/robotics/simple_pid/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018-2021 Martin Lundberg
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/packages/robotics/pitop/robotics/filterpy/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 Roger R. Labbe Jr
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
--------------------------------------------------------------------------------
/examples/pma/imu.py:
--------------------------------------------------------------------------------
1 | from dataclasses import fields
2 | from time import sleep
3 |
4 | from pitop import IMU
5 |
6 | imu = IMU()
7 |
8 | while True:
9 | acc = imu.accelerometer
10 | acc_x, acc_y, acc_z = list(getattr(acc, field.name) for field in fields(acc))
11 | gyro = imu.gyroscope
12 | gyro_x, gyro_y, gyro_z = list(getattr(gyro, field.name) for field in fields(gyro))
13 | mag = imu.magnetometer
14 | mag_x, mag_y, mag_z = list(getattr(mag, field.name) for field in fields(mag))
15 |
16 | orientation_fusion = imu.orientation
17 | roll, pitch, yaw = list(
18 | getattr(orientation_fusion, field.name) for field in fields(orientation_fusion)
19 | )
20 |
21 | orientation_accelerometer = imu.accelerometer_orientation
22 | roll_acc, pitch_acc, _ = list(
23 | getattr(orientation_accelerometer, field.name)
24 | for field in fields(orientation_accelerometer)
25 | )
26 |
27 | print(f"acc: {acc_x}, {acc_y}, {acc_z}")
28 | print(f"gyro: {gyro_x}, {gyro_y}, {gyro_z}")
29 | print(f"mag: {mag_x}, {mag_y}, {mag_z}")
30 | print(f"orientation_fusion: {roll}, {pitch}, {yaw}")
31 | print(f"orientation_accelerometer: {roll_acc}, {pitch_acc}")
32 | sleep(0.1)
33 |
--------------------------------------------------------------------------------
/examples/camera/camera_motion_detector.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from time import localtime, sleep, strftime
3 |
4 | from pitop import Camera
5 |
6 | # Example code for Camera
7 | # Records videos of any motion captured by the camera
8 |
9 | cam = Camera()
10 |
11 | last_motion_detected = None
12 |
13 |
14 | def motion_detected():
15 | global last_motion_detected
16 |
17 | last_motion_detected = datetime.now().timestamp()
18 |
19 | if cam.is_recording() is False:
20 | print("Motion detected! Starting recording...")
21 | output_file_name = f"/home/pi/Desktop/My Motion Recording {strftime('%Y-%m-%d %H:%M:%S', localtime(last_motion_detected))}.avi"
22 | cam.start_video_capture(output_file_name=output_file_name)
23 |
24 | while (datetime.now().timestamp() - last_motion_detected) < 3:
25 | sleep(1)
26 |
27 | cam.stop_video_capture()
28 | print(f"Recording completed - saved to {output_file_name}")
29 |
30 |
31 | print("Motion detector starting...")
32 | cam.start_detecting_motion(
33 | callback_on_motion=motion_detected, moving_object_minimum_area=350
34 | )
35 |
36 | sleep(60)
37 |
38 | cam.stop_detecting_motion()
39 | print("Motion detector stopped")
40 |
--------------------------------------------------------------------------------
/examples/recipes/blockpi_rover.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop import BlockPiRover, UltrasonicSensor
4 |
5 | # Create a BlockPiRover, intended for use via Further's BlockPi coding
6 | rover = BlockPiRover(left_motor="M3", right_motor="M0")
7 |
8 | # Driving methods are available directly on the rover, without going through .drive
9 | speed = 50
10 |
11 | # The linear methods' optional 'hold' parameter is not exposed in BlockPi
12 | rover.forward(speed)
13 | sleep(1)
14 |
15 | rover.backward(speed)
16 | rover.left(speed)
17 | sleep(1)
18 |
19 | # The turning methods' optional 'turn_radius' parameter is not exposed in BlockPi
20 | rover.right(speed)
21 | sleep(1)
22 |
23 | # Stop and rotate
24 | rover.stop()
25 | # rotate()'s optional 'time_to_take' parameter is not exposed in BlockPi
26 | rover.rotate(180)
27 |
28 | # drive.robot_move is available as move()
29 | # move()'s optional 'turn_radius' parameter is not exposed in BlockPi
30 | angular_speed = 10
31 | rover.move(speed, angular_speed)
32 | sleep(1)
33 |
34 |
35 | # Advanced (not initially available via BlockPi)
36 |
37 | # Use miniscreen display
38 | rover.miniscreen.display_multiline_text("hey there!")
39 |
40 | # Add other components
41 | sensor = UltrasonicSensor("D3")
42 | rover.add_component(sensor)
43 | rover.ultrasonic.distance()
44 |
--------------------------------------------------------------------------------
/examples/labs/image_processing_explorer/index.html:
--------------------------------------------------------------------------------
(file contents not recoverable: the HTML markup was stripped from this dump)
--------------------------------------------------------------------------------
/docs/api_robotics.rst:
--------------------------------------------------------------------------------
1 | =====================
2 | API - pi-top Robotics
3 | =====================
4 |
5 | .. image:: _static/pma/robotics_kit/Alex.jpg
6 |
7 |
8 | With the pi-top Robotics and Electronics Kits, you can build different types of robots.
9 | In this SDK, we provide a set of classes that represent some useful configurations.
10 |
11 |
12 | .. _component-drive-controller:
13 |
14 | Drive Controller
15 | =================
16 |
17 | .. note::
18 | This is a composite component that contains two :ref:`EncoderMotor Components`.
19 |
20 | .. literalinclude:: ../examples/recipes/drive_controller.py
21 |
22 | .. autoclass:: pitop.robotics.DriveController
23 |
24 |
25 | .. _component-pan-tilt-controller:
26 |
27 | Pan Tilt Controller
28 | ===================
29 |
30 | .. note::
31 | This is a composite component that contains two :ref:`ServoMotor Components`.
32 |
33 | .. autoclass:: pitop.robotics.PanTiltController
34 |
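A minimal usage sketch, based on the pan-tilt face tracking recipe in this SDK's examples (the ``S0`` and ``S3`` servo ports simply match the ports used in that recipe):

.. code-block:: python

    from pitop import PanTiltController

    # Create the controller on the ports used by the face-tracking recipe
    pan_tilt = PanTiltController(servo_pan_port="S0", servo_tilt_port="S3")

    # Centre both servos
    pan_tilt.pan_servo.target_angle = 0
    pan_tilt.tilt_servo.target_angle = 0
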
35 |
36 | .. _component-pincer-controller:
37 |
38 | Pincer Controller
39 | =================
40 |
41 | .. note::
42 | This is a composite component that contains two :ref:`ServoMotor Components`.
43 |
44 | .. autoclass:: pitop.robotics.PincerController
45 |
--------------------------------------------------------------------------------
/packages/miniscreen/pitop/miniscreen/oled/core/contrib/luma/LICENSE.rst:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 | ---------------------
3 |
4 | Copyright (c) 2017-2021 Richard Hull and contributors
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/examples/labs/keyboard_controller/index.html:
--------------------------------------------------------------------------------
1 | {% extends "base-controller.html" %}
2 |
3 | {% block head %}
4 | {{ super() }}
5 |
6 | {% endblock %}
7 |
8 | {% block main %}
43 | {% endblock %}
44 |
--------------------------------------------------------------------------------
/packages/robotics/pitop/robotics/blockpi_rover.py:
--------------------------------------------------------------------------------
1 | from pitop.robotics.drive_controller import DriveController
2 | from pitop.system.pitop import Pitop
3 |
4 |
5 | class BlockPiRover(Pitop):
6 | """A rover class for use with BlockPi coding in Further, with a simplified
7 | API.
8 |
9 | Inherits from Pitop. The constructor adds a DriveController and exposes
10 | its driving methods directly on the instance.
11 | """
12 |
13 | def __init__(self, left_motor="M3", right_motor="M0"):
14 | Pitop.__init__(self)
15 |
16 | drive = DriveController(
17 | left_motor_port=left_motor, right_motor_port=right_motor
18 | )
19 | self.add_component(drive)
20 |
21 | def forward(self, *args, **kwargs):
22 | self.drive.forward(*args, **kwargs)
23 |
24 | def backward(self, *args, **kwargs):
25 | self.drive.backward(*args, **kwargs)
26 |
27 | def left(self, *args, **kwargs):
28 | self.drive.left(*args, **kwargs)
29 |
30 | def right(self, *args, **kwargs):
31 | self.drive.right(*args, **kwargs)
32 |
33 | def stop(self, *args, **kwargs):
34 | self.drive.stop(*args, **kwargs)
35 |
36 | def move(self, *args, **kwargs):
37 | self.drive.robot_move(*args, **kwargs)
38 |
--------------------------------------------------------------------------------
/packages/robotics/pitop/robotics/navigation/core/utils.py:
--------------------------------------------------------------------------------
1 | import math
2 |
3 |
4 | def normalize_angle(angle):
5 | """Converts to range -pi to +pi to prevent unstable behaviour when going
6 | from 0 to 2*pi with slight turn.
7 |
8 | :param angle: angle in radians
9 | :return: angle in radians normalized to range -pi to +pi
10 | """
11 | return (angle + math.pi) % (2 * math.pi) - math.pi
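# Worked example: normalize_angle(3 * math.pi / 2) == -math.pi / 2,
# i.e. a heading of 270 degrees wraps to -90 degrees.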
12 |
13 |
14 | def verify_callback(callback):
15 | if callback is None:
16 | return None
17 | if not callable(callback):
18 | raise ValueError("callback should be a callable function.")
19 |
20 | from inspect import getfullargspec
21 |
22 | arg_spec = getfullargspec(callback)
23 | number_of_arguments = len(arg_spec.args)
24 | number_of_default_arguments = (
25 | len(arg_spec.defaults) if arg_spec.defaults is not None else 0
26 | )
27 | if number_of_arguments == 0:
28 | return callback
29 | if (
30 | arg_spec.args[0] in ("self", "_mock_self")
31 | and (number_of_arguments - number_of_default_arguments) == 1
32 | ):
33 | return callback
34 | if number_of_arguments != number_of_default_arguments:
35 | raise ValueError("callback should have no non-default keyword arguments.")
36 | return callback
37 |
--------------------------------------------------------------------------------
/examples/labs/keyboard_controller/main.py:
--------------------------------------------------------------------------------
1 | from pitop import Camera, DriveController, Pitop
2 | from pitop.labs import WebController
3 |
4 | robot = Pitop()
5 | robot.add_component(DriveController())
6 | robot.add_component(Camera())
7 |
8 | speed = 0.2
9 |
10 |
11 | def key_down(data, send):
12 | global speed
13 |
14 | key = data.get("key")
15 | if key == "w":
16 | robot.drive.forward(speed, hold=True)
17 | elif key == "s":
18 | robot.drive.backward(speed, hold=True)
19 | elif key == "d":
20 | robot.drive.right(speed)
21 | elif key == "a":
22 | robot.drive.left(speed)
23 | elif key == "ArrowUp":
24 | speed = min(1, speed + 0.2)
25 | send({"type": "speed", "data": speed})
26 | elif key == "ArrowDown":
27 | speed = max(0, speed - 0.2)
28 | send({"type": "speed", "data": speed})
29 |
30 |
31 | def key_up(data):
32 | key = data.get("key")
33 | if key == "w" or key == "s":
34 | robot.drive.stop()
35 | elif key == "d":
36 | robot.drive.right(0)
37 | elif key == "a":
38 | robot.drive.left(0)
39 |
40 |
41 | controller = WebController(
42 | get_frame=robot.camera.get_frame,
43 | message_handlers={"key_down": key_down, "key_up": key_up},
44 | )
45 |
46 | controller.serve_forever()
47 |
--------------------------------------------------------------------------------
/examples/labs/keyboard_controller/run.py:
--------------------------------------------------------------------------------
1 | from pitop import Camera, DriveController, Pitop
2 | from pitop.labs import WebController
3 |
4 | robot = Pitop()
5 | robot.add_component(DriveController())
6 | robot.add_component(Camera())
7 |
8 | speed = 0.2
9 |
10 |
11 | def key_down(data, send):
12 | global speed
13 |
14 | key = data.get("key")
15 | if key == "w":
16 | robot.drive.forward(speed, hold=True)
17 | elif key == "s":
18 | robot.drive.backward(speed, hold=True)
19 | elif key == "d":
20 | robot.drive.right(speed)
21 | elif key == "a":
22 | robot.drive.left(speed)
23 | elif key == "ArrowUp":
24 | speed = min(1, speed + 0.2)
25 | send({"type": "speed", "data": speed})
26 | elif key == "ArrowDown":
27 | speed = max(0, speed - 0.2)
28 | send({"type": "speed", "data": speed})
29 |
30 |
31 | def key_up(data):
32 | key = data.get("key")
33 | if key == "w" or key == "s":
34 | robot.drive.stop()
35 | elif key == "d":
36 | robot.drive.right(0)
37 | elif key == "a":
38 | robot.drive.left(0)
39 |
40 |
41 | controller = WebController(
42 | get_frame=robot.camera.get_frame,
43 | message_handlers={"key_down": key_down, "key_up": key_up},
44 | )
45 |
46 | controller.serve_forever()
47 |
--------------------------------------------------------------------------------
/examples/system/battery.py:
--------------------------------------------------------------------------------
1 | from pitop import Pitop
2 |
3 | battery = Pitop().battery
4 |
5 | print(f"Battery capacity: {battery.capacity}")
6 | print(f"Battery time remaining: {battery.time_remaining}")
7 | print(f"Battery is charging: {battery.is_charging}")
8 | print(f"Battery is full: {battery.is_full}")
9 | print(f"Battery wattage: {battery.wattage}")
10 |
11 |
12 | def do_low_battery_thing():
13 | print("Battery is low!")
14 |
15 |
16 | def do_critical_battery_thing():
17 | print("Battery is critically low!")
18 |
19 |
20 | def do_full_battery_thing():
21 | print("Battery is full!")
22 |
23 |
24 | def do_charging_battery_thing():
25 | print("Battery is charging!")
26 |
27 |
28 | def do_discharging_battery_thing():
29 | print("Battery is discharging!")
30 |
31 |
32 | # To invoke a function when the battery changes state, you can assign the function to various 'when_' data members
33 | battery.when_low = do_low_battery_thing
34 | battery.when_critical = do_critical_battery_thing
35 | battery.when_full = do_full_battery_thing
36 | battery.when_charging = do_charging_battery_thing
37 | battery.when_discharging = do_discharging_battery_thing
38 |
39 |
40 | # Another way to react to battery events is to poll
41 | while True:
42 | if battery.is_full:
43 | do_full_battery_thing()
44 |
--------------------------------------------------------------------------------
/packages/pitop/README.rst:
--------------------------------------------------------------------------------
1 | =================
2 | pi-top Python SDK
3 | =================
4 |
5 | This is the top-level package of the pi-top Python SDK. It collects all the packages from the SDK,
6 | alongside other modules such as :code:`labs`.
7 |
8 | .. ###############################################
9 | .. # NOTE: THESE ARE EXTERNAL LINKS, AS THEY ARE #
10 | .. # REQUIRED FOR THE IMAGES TO SHOW ON PYPI #
11 | .. ###############################################
12 |
13 | Supports all pi-top devices:
14 |
15 | .. image:: https://github.com/pi-top/pi-top-Python-SDK/raw/master/docs/_static/overview/devices.jpg
16 |
17 | Supports pi-top Maker Architecture (PMA):
18 |
19 | .. image:: https://github.com/pi-top/pi-top-Python-SDK/raw/master/docs/_static/overview/pma.jpg
20 |
21 | Supports all pi-top peripherals:
22 |
23 | .. image:: https://github.com/pi-top/pi-top-Python-SDK/raw/master/docs/_static/overview/peripherals.jpg
24 |
25 | -------
26 | Details
27 | -------
28 |
29 | More information about this and other packages of the SDK can be found in its GitHub repository_.
30 |
31 | .. _repository: https://github.com/pi-top/pi-top-Python-SDK
32 |
33 | -------------
34 | Documentation
35 | -------------
36 |
37 | Comprehensive documentation is available here_.
38 |
39 | .. _here: https://docs.pi-top.com/python-sdk/
40 |
--------------------------------------------------------------------------------
/examples/camera/camera_coloured_ball_detect.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | import cv2
4 |
5 | from pitop.camera import Camera
6 | from pitop.processing.algorithms import BallDetector
7 |
8 |
9 | def process_frame(frame):
10 | detected_balls = ball_detector(frame, color=["red", "green", "blue"])
11 |
12 | red_ball = detected_balls.red
13 | if red_ball.found:
14 | print(f"Red ball center: {red_ball.center}")
15 | print(f"Red ball radius: {red_ball.radius}")
16 | print(f"Red ball angle: {red_ball.angle}")
17 | print()
18 |
19 | green_ball = detected_balls.green
20 | if green_ball.found:
21 | print(f"Green ball center: {green_ball.center}")
22 | print(f"Green ball radius: {green_ball.radius}")
23 | print(f"Green ball angle: {green_ball.angle}")
24 | print()
25 |
26 | blue_ball = detected_balls.blue
27 | if blue_ball.found:
28 | print(f"Blue ball center: {blue_ball.center}")
29 | print(f"Blue ball radius: {blue_ball.radius}")
30 | print(f"Blue ball angle: {blue_ball.angle}")
31 | print()
32 |
33 | cv2.imshow("Image", detected_balls.robot_view)
34 | cv2.waitKey(1)
35 |
36 |
37 | ball_detector = BallDetector()
38 | camera = Camera(resolution=(640, 480))
39 | camera.on_frame = process_frame
40 |
41 | pause()
42 |
--------------------------------------------------------------------------------
/packages/core/pitop/core/mixins/stateful.py:
--------------------------------------------------------------------------------
1 | from json import dumps
2 |
3 |
4 | class Stateful:
5 | """Represents an object with a particular set of important properties that
6 | represent its state."""
7 |
8 | def __init__(self, children=None):
9 | self.children = [] if children is None else children
10 |
11 | @property
12 | def own_state(self):
13 | """Representation of an object state that will be used to determine the
14 | current state of an object."""
15 | return {}
16 |
17 | def __child_state(self, child_name):
18 | child = getattr(self, child_name)
19 | if hasattr(child, "state"):
20 | return child.state
21 | return None
22 |
23 | @property
24 | def state(self):
25 | """Returns a dictionary with the state of the current object and all of
26 | its children."""
27 | state = self.own_state
28 | for k, v in state.items():
29 | if callable(v):
30 | state[k] = v()
31 | for child in self.children:
32 | child_state = self.__child_state(child)
33 | if child_state is None:
34 | continue
35 | state[child] = child_state
36 | return state
37 |
38 | def print_state(self):
39 | print(dumps(self.state, indent=4))
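# A minimal sketch of how this mixin is typically used (the `Led` subclass and its
# `is_lit` property are hypothetical, for illustration only):
#
#   class Led(Stateful):
#       @property
#       def own_state(self):
#           return {"is_lit": self.is_lit}
#
# Children listed by attribute name in `self.children` have their own `state`
# nested into this object's `state` automatically.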
40 |
--------------------------------------------------------------------------------
/packages/pitop/pitop/labs/web/blueprints/rover/helpers.py:
--------------------------------------------------------------------------------
1 | import math
2 |
3 | MAX_LINEAR_SPEED = 0.44
4 | MAX_ANGULAR_SPEED = 5.12
5 | MAX_SERVO_ANGLE = 90
6 |
7 |
8 | def calculate_direction(degree):
9 | direction = degree - 90
10 |
11 | # keeps direction between -180 and 180
12 | if direction > 180:
13 | return -(360 - direction)
14 |
15 | return direction
16 |
17 |
18 | def calculate_pan_tilt_angle(data):
19 | angle = data.get("angle", {})
20 | degree = angle.get("degree", 0)
21 | distance = data.get("distance", 0)
22 | magnitude = distance * MAX_SERVO_ANGLE / 100.0
23 | direction = calculate_direction(degree)
24 |
25 | return {
26 | "y": -math.cos(direction * math.pi / 180) * magnitude,
27 | "z": math.sin(direction * math.pi / 180) * magnitude,
28 | }
29 |
30 |
31 | def calculate_velocity_twist(data):
32 | angle = data.get("angle", {})
33 | degree = angle.get("degree", 0)
34 | distance = data.get("distance", 0)
35 | linear_speed = distance * MAX_LINEAR_SPEED / 100.0
36 | angular_speed = distance * MAX_ANGULAR_SPEED / 100.0
37 | direction = calculate_direction(degree)
38 |
39 | return {
40 | "linear": math.cos(direction * math.pi / 180) * linear_speed,
41 | "angular": math.sin(direction * math.pi / 180) * angular_speed,
42 | }
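# Worked example: a joystick pushed straight up (degree=90, distance=100) gives
# direction=0, so linear == MAX_LINEAR_SPEED and angular == 0.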
43 |
--------------------------------------------------------------------------------
/packages/camera/pitop/camera/camera_calibration/load_parameters.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pickle
3 |
4 | # directory where calibration output pickle file is located
5 | calibration_outputs_dir = ""
6 | script_dir = os.path.dirname(os.path.realpath(__file__))
7 | abs_file_path = os.path.join(script_dir, calibration_outputs_dir)
8 |
9 | # Filename used to save the camera calibration result (mtx, dist)
10 | calibration_mtx_dist_filename = "camera_cal_dist_pickle_640-480.p"
11 |
12 | # Camera resolution used for calibration
13 | calibration_width = 640
14 | calibration_height = 480
15 |
16 |
17 | def load_camera_cal(width: int, height: int):
18 | """Read in the saved camera matrix and distortion coefficients These are
19 | the arrays we calculated using cv2.calibrateCamera() Also scales
20 | calibration matrix based on resolution being used."""
21 |
22 | dist_pickle = pickle.load(
23 | open(os.path.join(abs_file_path, calibration_mtx_dist_filename), "rb")
24 | )
25 |
26 | mtx = dist_pickle["mtx"]
27 | dist = dist_pickle["dist"]
28 |
29 | scale_factor_x = width / calibration_width
30 | scale_factor_y = height / calibration_height
31 |
32 | mtx[0][0] = mtx[0][0] * scale_factor_x
33 | mtx[1][1] = mtx[1][1] * scale_factor_y
34 | mtx[0][2] = mtx[0][2] * scale_factor_x
35 | mtx[1][2] = mtx[1][2] * scale_factor_y
36 |
37 | return mtx, dist
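# Illustrative call: load_camera_cal(1280, 960) loads the 640x480 calibration
# and doubles the focal lengths and principal point coordinates to match.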
38 |
--------------------------------------------------------------------------------
/docs/more/faq.rst:
--------------------------------------------------------------------------------
1 | ==========================
2 | Frequently Asked Questions
3 | ==========================
4 |
5 | -----------------------
6 | How does this SDK work?
7 | -----------------------
8 |
9 | ------------
10 | What is PMA?
11 | ------------
12 |
13 | --------------------------------------------------
14 | I keep getting an Exception - what is the problem?
15 | --------------------------------------------------
16 |
17 | -----------------------------
18 | Where did this SDK come from?
19 | -----------------------------
20 | Note: epoch version
21 |
22 | ---------------------------------------------------------------------------------------
23 | I was using an older version of the Python libraries. How can I update to use this SDK?
24 | ---------------------------------------------------------------------------------------
25 | Check out the `Python SDK Migration`_ GitHub repository for more information about this.
26 |
27 | .. _Python SDK Migration: https://github.com/pi-top/pi-top-Python-SDK-Migration-Support
28 |
29 | You may also find it helpful to check out the examples to see how to use the new components.
30 |
31 | .. _faq-lost-miniscreen-menu:
32 |
33 | ----------------------------------------
34 | I lost my miniscreen menu - where is it?
35 | ----------------------------------------
36 | Check out :ref:`Key Concepts: pi-top [4] Miniscreen` for useful information about how this works.
37 |
--------------------------------------------------------------------------------
/examples/system/pitop_overview.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from PIL import Image
4 |
5 | from pitop import Pitop
6 |
7 | # Set up pi-top
8 | pitop = Pitop()
9 |
10 | # Say hi!
11 | pitop.miniscreen.display_text("Hello!")
12 | sleep(2)
13 |
14 | # Display battery info
15 | battery_capacity = pitop.battery.capacity
16 | battery_charging = pitop.battery.is_charging
17 |
18 | pitop.miniscreen.display_multiline_text(
19 | "Battery Status:\n"
20 | f"-Capacity: {battery_capacity}%\n"
21 | f"-Charging: {battery_charging}",
22 | font_size=15,
23 | )
24 | sleep(2)
25 |
26 |
27 | # Configure buttons to do something
28 | keep_running = True
29 |
30 |
31 | def display_gif_and_exit():
32 | image = Image.open(
33 | "/usr/lib/python3/dist-packages/pitop/miniscreen/images/rocket.gif"
34 | )
35 | pitop.miniscreen.play_animated_image(image)
36 | pitop.miniscreen.display_text("Bye!")
37 | sleep(2)
38 | global keep_running
39 | keep_running = False
40 |
41 |
42 | pitop.miniscreen.select_button.when_pressed = display_gif_and_exit
43 | pitop.miniscreen.cancel_button.when_pressed = display_gif_and_exit
44 | pitop.miniscreen.up_button.when_pressed = display_gif_and_exit
45 | pitop.miniscreen.down_button.when_pressed = display_gif_and_exit
46 |
47 | pitop.miniscreen.display_multiline_text("Press any button...", font_size=25)
48 |
49 | # Sleep until `display_gif_and_exit` runs
50 | while keep_running:
51 | sleep(0.3)
52 |
--------------------------------------------------------------------------------
/packages/robotics/pitop/robotics/json/alex.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "0.17.0",
3 | "classname": "Pitop",
4 | "module": "pitop.system.pitop",
5 | "components": {
6 | "camera": {
7 | "index": null,
8 | "resolution": [
9 | 640,
10 | 480
11 | ],
12 | "camera_type": 0,
13 | "path_to_images": "",
14 | "format": "PIL",
15 | "name": "camera",
16 | "classname": "Camera",
17 | "module": "pitop.camera.camera"
18 | },
19 | "ultrasonic": {
20 | "port_name": "D3",
21 | "queue_len": 9,
22 | "partial": false,
23 | "name": "ultrasonic",
24 | "classname": "UltrasonicSensor",
25 | "module": "pitop.pma.ultrasonic_sensor",
26 | "max_distance": 3,
27 | "threshold_distance": 0.3
28 | },
29 | "drive": {
30 | "left_motor_port": "M3",
31 | "right_motor_port": "M0",
32 | "name": "drive",
33 | "classname": "DriveController",
34 | "module": "pitop.robotics.drive_controller"
35 | },
36 | "pan_tilt": {
37 | "servo_pan_port": "S0",
38 | "servo_tilt_port": "S3",
39 | "name": "pan_tilt",
40 | "classname": "PanTiltController",
41 | "module": "pitop.robotics.pan_tilt_controller"
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/examples/system/miniscreen/miniscreen_display_clock.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | from PIL import Image, ImageDraw
4 |
5 | from pitop import Pitop
6 |
7 | pitop = Pitop()
8 | miniscreen = pitop.miniscreen
9 | miniscreen.set_max_fps(1)
10 |
11 | image = Image.new(
12 | miniscreen.mode,
13 | miniscreen.size,
14 | )
15 | canvas = ImageDraw.Draw(image)
16 |
17 | bounding_box = (32, 0, 95, 63)
18 |
19 | big_hand_box = (
20 | bounding_box[0] + 5,
21 | bounding_box[1] + 5,
22 | bounding_box[2] - 5,
23 | bounding_box[3] - 5,
24 | )
25 |
26 | little_hand_box = (
27 | bounding_box[0] + 15,
28 | bounding_box[1] + 15,
29 | bounding_box[2] - 15,
30 | bounding_box[3] - 15,
31 | )
32 |
33 | while True:
34 | current_time = datetime.now()
35 |
36 | # Clear
37 | canvas.rectangle(bounding_box, fill=0)
38 |
39 | # Draw face
40 | canvas.ellipse(bounding_box, fill=1)
41 |
42 | # Draw hands
43 | angle_second = (current_time.second * 360 / 60) - 90
44 | canvas.pieslice(big_hand_box, angle_second, angle_second + 2, fill=0)
45 |
46 | angle_minute = (current_time.minute * 360 / 60) - 90
47 | canvas.pieslice(big_hand_box, angle_minute, angle_minute + 5, fill=0)
48 |
49 | angle_hour = (
50 | (current_time.hour * 360 / 12) + (current_time.minute * 360 / 12 / 60)
51 | ) - 90
52 | canvas.pieslice(little_hand_box, angle_hour, angle_hour + 5, fill=0)
53 |
54 | # Display to screen
55 | miniscreen.display_image(image)
56 |
--------------------------------------------------------------------------------
/packages/common/pitop/common/file_ops.py:
--------------------------------------------------------------------------------
1 | from os import close, utime
2 | from re import compile
3 | from shutil import copystat, move
4 | from tempfile import NamedTemporaryFile, mkstemp
5 |
6 |
7 | def sed_inplace(filename, pattern, repl):
8 | """Perform the pure-Python equivalent of in-place `sed` substitution: e.g.,
9 |     `sed -i -e 's/'${pattern}'/'${repl}'/' "${filename}"`."""
10 | # For efficiency, precompile the passed regular expression.
11 | pattern_compiled = compile(pattern)
12 |
13 | # For portability, NamedTemporaryFile() defaults to mode "w+b" (i.e., binary
14 | # writing with updating). This is usually a good thing. In this case,
15 | # however, binary writing imposes non-trivial encoding constraints trivially
16 | # resolved by switching to text writing. Let's do that.
17 | with NamedTemporaryFile(mode="w", delete=False) as tmp_file:
18 | with open(filename) as src_file:
19 | for line in src_file:
20 | tmp_file.write(pattern_compiled.sub(repl, line))
21 |
22 | # Overwrite the original file with the munged temporary file in a
23 | # manner preserving file attributes (e.g., permissions).
24 | copystat(filename, tmp_file.name)
25 | move(tmp_file.name, filename)
26 |
27 |
28 | def touch_file(fname, times=None):
29 | with open(fname, "a"):
30 | utime(fname, times)
31 |
32 |
33 | def create_temp_file():
34 | temp_file_tuple = mkstemp()
35 | close(temp_file_tuple[0])
36 |
37 | return temp_file_tuple[1]
38 |
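39 |
40 | if __name__ == "__main__":
41 |     # Editor's illustrative sketch (not part of the package): exercise the
42 |     # helpers above by replacing "foo" with "bar" in a throwaway temp file.
43 |     demo_path = create_temp_file()
44 |     with open(demo_path, "w") as f:
45 |         f.write("foo one\nfoo two\n")
46 |     sed_inplace(demo_path, r"^foo", "bar")
47 |     with open(demo_path) as f:
48 |         print(f.read())  # expected output: "bar one" / "bar two"
49 |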
--------------------------------------------------------------------------------
/packages/miniscreen/pitop/miniscreen/oled/core/contrib/luma/core/threadpool.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2017-18 Richard Hull and contributors
3 | # See LICENSE.rst for details.
4 |
5 | # Adapted from http://code.activestate.com/recipes/577187-python-thread-pool/
6 | # Attribution: Created by Emilio Monti on Sun, 11 Apr 2010 (MIT License).
7 |
8 | from threading import Thread
9 |
10 |
11 | class worker(Thread):
12 | """Thread executing tasks from a given tasks queue."""
13 |
14 | def __init__(self, tasks):
15 | Thread.__init__(self)
16 | self.tasks = tasks
17 | self.daemon = True
18 | self.start()
19 |
20 | def run(self):
21 | while True:
22 | func, args, kargs = self.tasks.get()
23 | func(*args, **kargs)
24 | self.tasks.task_done()
25 |
26 |
27 | class threadpool:
28 | """Pool of threads consuming tasks from a queue."""
29 |
30 | def __init__(self, num_threads):
31 | try:
32 | from Queue import Queue
33 | except ImportError:
34 | from queue import Queue
35 |
36 | self.tasks = Queue(num_threads)
37 | for _ in range(num_threads):
38 | worker(self.tasks)
39 |
40 | def add_task(self, func, *args, **kargs):
41 | """Add a task to the queue."""
42 | self.tasks.put((func, args, kargs))
43 |
44 | def wait_completion(self):
45 | """Wait for completion of all the tasks in the queue."""
46 | self.tasks.join()
47 |
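48 |
49 | if __name__ == "__main__":
50 |     # Editor's illustrative sketch (not part of the library): queue a few
51 |     # tasks on the pool and block until every one of them has completed.
52 |     from time import sleep
53 |
54 |     def task(n):
55 |         sleep(0.1)
56 |         print("task", n, "done")
57 |
58 |     pool = threadpool(num_threads=4)
59 |     for i in range(8):
60 |         pool.add_task(task, i)
61 |     pool.wait_completion()
62 |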
--------------------------------------------------------------------------------
/tests/test_pma_ultrasonic.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase
2 | from unittest.mock import Mock, patch
3 |
4 | from pitop.pma.ultrasonic_sensor import UltrasonicSensor
5 |
6 |
7 | class UltrasonicSensorRPIMock(Mock):
8 | pass
9 |
10 |
11 | class UltrasonicSensorMCUMock(Mock):
12 | pass
13 |
14 |
15 | @patch("pitop.pma.ultrasonic_sensor.UltrasonicSensorRPI", new=UltrasonicSensorRPIMock)
16 | @patch("pitop.pma.ultrasonic_sensor.UltrasonicSensorMCU", new=UltrasonicSensorMCUMock)
17 | class UltrasonicSensorTestCase(TestCase):
18 | def test_analog_port_gives_mcu_device(self):
19 | for port in ("A1", "A3"):
20 | ultrasonic_sensor = UltrasonicSensor(port)
21 | assert isinstance(
22 | ultrasonic_sensor._UltrasonicSensor__ultrasonic_device,
23 | UltrasonicSensorMCUMock,
24 | )
25 |
26 | def test_digital_port_gives_rpi_device(self):
27 | for port in [f"D{i}" for i in range(0, 8)]:
28 | ultrasonic_sensor = UltrasonicSensor(port)
29 | assert isinstance(
30 | ultrasonic_sensor._UltrasonicSensor__ultrasonic_device,
31 | UltrasonicSensorRPIMock,
32 | )
33 |
34 | def test_threshold_distance(self):
35 | for port in ("A1", "D1"):
36 | new_value = 0.5
37 | ultrasonic_sensor = UltrasonicSensor(port)
38 | ultrasonic_sensor.threshold_distance = new_value
39 | self.assertEqual(new_value, ultrasonic_sensor.threshold_distance)
40 |
--------------------------------------------------------------------------------
/packages/pitop/pitop/__init__.py:
--------------------------------------------------------------------------------
1 | __path__ = __import__("pkgutil").extend_path(__path__, __name__)
2 |
3 | # import early in case the PITOP_VIRTUAL_HARDWARE env var indicates that mocks should be applied
4 | import pitop.simulation
5 |
6 | # System Devices
7 | from pitop.battery import Battery
8 | from pitop.camera import Camera
9 | from pitop.display import Display
10 | from pitop.keyboard import KeyboardButton
11 | from pitop.miniscreen import Miniscreen
12 |
13 | # PMA
14 | from pitop.pma import (
15 | IMU,
16 | LED,
17 | Button,
18 | Buzzer,
19 | EncoderMotor,
20 | LightSensor,
21 | Potentiometer,
22 | ServoMotor,
23 | )
24 | from pitop.pma import ServoMotorSetting
25 | from pitop.pma import ServoMotorSetting as ServoMotorState
26 | from pitop.pma import (
27 | SoundSensor,
28 | UltrasonicSensor,
29 | )
30 | from pitop.pma.parameters import BrakingType, Direction, ForwardDirection
31 |
32 | # Robotics
33 | from pitop.robotics.blockpi_rover import BlockPiRover
34 | from pitop.robotics.configurations import AlexRobot # deprecated
35 | from pitop.robotics.configurations import alex_config
36 | from pitop.robotics.drive_controller import DriveController
37 | from pitop.robotics.navigation import NavigationController
38 | from pitop.robotics.pan_tilt_controller import PanTiltController
39 | from pitop.robotics.pincer_controller import PincerController
40 | from pitop.robotics.tilt_roll_head_controller import TiltRollHeadController
41 |
42 | # Top-level
43 | from pitop.system.pitop import Pitop
44 | from pitop.version import __version__
45 |
--------------------------------------------------------------------------------
/examples/pulse/mic-demo.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop.pulse import ledmatrix, microphone
4 |
5 |
6 | def set_bit_rate_to_unsigned_8():
7 | print("Setting bit rate to 8...")
8 | microphone.set_bit_rate_to_unsigned_8()
9 |
10 |
11 | def set_bit_rate_to_signed_16():
12 | print("Setting bit rate to 16...")
13 | microphone.set_bit_rate_to_signed_16()
14 |
15 |
16 | def set_sample_rate_to_16khz():
17 | print("Setting sample rate to 16KHz...")
18 | microphone.set_sample_rate_to_16khz()
19 |
20 |
21 | def set_sample_rate_to_22khz():
22 | print("Setting sample rate to 22KHz...")
23 | microphone.set_sample_rate_to_22khz()
24 |
25 |
26 | def pause(length):
27 | ledmatrix.off()
28 | sleep(length)
29 |
30 |
31 | def record(record_time, output_file, pause_time=1):
32 | print("Recording audio for " + str(record_time) + "s...")
33 | ledmatrix.set_all(255, 0, 0)
34 | ledmatrix.show()
35 | microphone.record()
36 | sleep(record_time)
37 | microphone.stop()
38 | ledmatrix.off()
39 | microphone.save(output_file, True)
40 | print("Saved to " + output_file)
41 | print("")
42 | pause(pause_time)
43 |
44 |
45 | set_sample_rate_to_22khz()
46 |
47 | set_bit_rate_to_unsigned_8()
48 | record(5, "/tmp/test22-8.wav")
49 |
50 | set_bit_rate_to_signed_16()
51 | record(5, "/tmp/test22-16.wav")
52 |
53 |
54 | set_sample_rate_to_16khz()
55 |
56 | set_bit_rate_to_unsigned_8()
57 | record(5, "/tmp/test16-8.wav")
58 |
59 | set_bit_rate_to_signed_16()
60 | record(5, "/tmp/test16-16.wav")
61 |
--------------------------------------------------------------------------------
/examples/keyboard/keyboard_button.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop import KeyboardButton
4 |
5 |
6 | def on_up_pressed():
7 | print("up pressed")
8 |
9 |
10 | def on_up_released():
11 | print("up released")
12 |
13 |
14 | def on_down_pressed():
15 | print("down pressed")
16 |
17 |
18 | def on_down_released():
19 | print("down released")
20 |
21 |
22 | def on_left_pressed():
23 | print("left pressed")
24 |
25 |
26 | def on_left_released():
27 | print("left released")
28 |
29 |
30 | def on_right_pressed():
31 | print("right pressed")
32 |
33 |
34 | def on_right_released():
35 | print("right released")
36 |
37 |
38 | keyboard_btn_up = KeyboardButton("up")
39 | keyboard_btn_down = KeyboardButton("down")
40 | keyboard_btn_left = KeyboardButton("left")
41 | keyboard_btn_right = KeyboardButton("right")
42 | keyboard_btn_uppercase_z = KeyboardButton("Z")
43 |
44 | # Methods will be called when keys are pressed or released:
45 |
46 | keyboard_btn_up.when_pressed = on_up_pressed
47 | keyboard_btn_up.when_released = on_up_released
48 | keyboard_btn_down.when_pressed = on_down_pressed
49 | keyboard_btn_down.when_released = on_down_released
50 | keyboard_btn_left.when_pressed = on_left_pressed
51 | keyboard_btn_left.when_released = on_left_released
52 | keyboard_btn_right.when_pressed = on_right_pressed
53 | keyboard_btn_right.when_released = on_right_released
54 |
55 | # Or alternatively you can "poll" for key presses:
56 |
57 | while True:
58 | if keyboard_btn_uppercase_z.is_pressed is True:
59 | print("Z pressed!")
60 |
61 | sleep(0.1)
62 |
--------------------------------------------------------------------------------
/packages/cli/pitopcli/cli_base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod, abstractproperty
2 |
3 |
4 | class PitopCliException(Exception):
5 | pass
6 |
7 |
8 | class PitopCliInvalidArgument(Exception):
9 | pass
10 |
11 |
12 | class CliBaseClass(ABC):
13 | """Abstract class, used to create CLI commands."""
14 |
15 | @abstractmethod
16 | def __init__(self, args):
17 | """Class constructor.
18 |
19 | Args:
20 | args (Namespace): arguments as returned by Argparse.parse_args
21 | """
22 | pass
23 |
24 | @abstractmethod
25 | def run(self):
26 | """Executes the action performed by the CLI."""
27 | pass
28 |
29 | @classmethod
30 | @abstractmethod
31 | def add_parser_arguments(cls, parser):
32 | """Add CLI expected arguments to the provided parser.
33 |
34 | Args:
35 | parser (argparse._SubParsersAction | argparse.ArgumentParser): parser where class arguments will be appended
36 | """
37 | pass
38 |
39 | @property
40 | @abstractproperty
41 | def parser_help(self):
42 | """Help string to be displayed by ArgumentParser."""
43 | pass
44 |
45 | @property
46 | @abstractproperty
47 | def cli_name(self):
48 | """Name of the class CLI, without the 'pt-' prefix."""
49 | pass
50 |
51 | @property
52 | def parser(self):
53 | """ArgumentParser object used to parse the class."""
54 | pass
55 |
56 | def validate_args(self):
57 | """Method to perform further validation on arguments."""
58 | pass
59 |
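60 |
61 | if __name__ == "__main__":
62 |     # Editor's illustrative sketch (not part of the SDK): the hypothetical
63 |     # `GreetCli` below shows how a concrete command could implement the
64 |     # abstract interface defined above.
65 |     import argparse
66 |
67 |     class GreetCli(CliBaseClass):
68 |         parser_help = "print a greeting"
69 |         cli_name = "greet"
70 |
71 |         def __init__(self, args):
72 |             self.args = args
73 |
74 |         def run(self):
75 |             print(f"Hello, {self.args.name}!")
76 |
77 |         @classmethod
78 |         def add_parser_arguments(cls, parser):
79 |             parser.add_argument("name", help="name of the person to greet")
80 |
81 |     parser = argparse.ArgumentParser(description=GreetCli.parser_help)
82 |     GreetCli.add_parser_arguments(parser)
83 |     GreetCli(parser.parse_args()).run()
84 |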
--------------------------------------------------------------------------------
/debian/py3dist-overrides:
--------------------------------------------------------------------------------
1 | dlib python3-dlib; PEP386
2 | flask python3-flask; PEP386
3 | flask-cors python3-flask-cors; PEP386
4 | gevent python3-gevent; PEP386
5 | gevent-websocket python3-gevent-websocket; PEP386
6 | # Bullseye - `Depends: python3-gpiozero (>= 1.6.2) but 1.5.1 is to be installed`
7 | gpiozero python3-gpiozero
8 | imageio python3-imageio; PEP386
9 | imutils python3-imutils; PEP386
10 | isc_dhcp_leases python3-isc-dhcp-leases; PEP386
11 | matplotlib python3-matplotlib; PEP386
12 | monotonic python3-monotonic; PEP386
13 | netifaces python3-netifaces; PEP386
14 | # Numpy in Debian has epoch version 1
15 | numpy python3-numpy; s/^/1:/
16 | opencv-python python3-opencv; PEP386
17 | pitop python3-pitop
18 | pitop.battery python3-pitop-battery
19 | pitop.camera python3-pitop-camera
20 | pitop.common python3-pitop-common
21 | pitop.core python3-pitop-core
22 | pitop.display python3-pitop-display
23 | pitop.keyboard python3-pitop-keyboard
24 | pitop.miniscreen python3-pitop-miniscreen
25 | pitop.pma python3-pitop-pma
26 | pitop.processing python3-pitop-processing
27 | pitop.robotics python3-pitop-robotics
28 | pitop.system python3-pitop-system
29 | pitop.simulation python3-pitop-simulation
30 | pitopcli python3-pitop-cli
31 | pyinotify python3-pyinotify; PEP386
32 | RPi.GPIO python3-rpi.gpio; PEP386
33 | scipy python3-scipy; PEP386
34 | smbus2 python3-smbus2
35 | spidev python3-spidev
36 | systemd_python python3-systemd; PEP386
37 | wget python3-wget; PEP386
38 | pyzmq python3-zmq; PEP386
39 | pygame python3-pygame; PEP386
40 | xlib python3-xlib; PEP386
41 | psutil python3-psutil; PEP386
42 | werkzeug python3-werkzeug
43 |
--------------------------------------------------------------------------------
/examples/recipes/robot_bluedot.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 | from threading import Lock
3 |
4 | from bluedot import BlueDot
5 |
6 | from pitop import DriveController
7 |
8 | bd = BlueDot()
9 | bd.color = "#00B2A2"
10 | lock = Lock()
11 |
12 | drive = DriveController(left_motor_port="M3", right_motor_port="M0")
13 |
14 |
15 | def move(pos):
16 | if lock.locked():
17 | return
18 |
19 | if any(
20 | [
21 | pos.angle > 0 and pos.angle < 20,
22 | pos.angle < 0 and pos.angle > -20,
23 | ]
24 | ):
25 | drive.forward(pos.distance, hold=True)
26 | elif pos.angle > 0 and 20 <= pos.angle <= 160:
27 | turn_radius = 0 if 70 < pos.angle < 110 else pos.distance
28 | speed_factor = -pos.distance if pos.angle > 110 else pos.distance
29 | drive.right(speed_factor, turn_radius)
30 | elif pos.angle < 0 and -160 <= pos.angle <= -20:
31 | turn_radius = 0 if -110 < pos.angle < -70 else pos.distance
32 | speed_factor = -pos.distance if pos.angle < -110 else pos.distance
33 | drive.left(speed_factor, turn_radius)
34 | elif any(
35 | [
36 | pos.angle > 0 and pos.angle > 160,
37 | pos.angle < 0 and pos.angle < -160,
38 | ]
39 | ):
40 | drive.backward(pos.distance, hold=True)
41 |
42 |
43 | def stop(pos):
44 | lock.acquire()
45 | drive.stop()
46 |
47 |
48 | def start(pos):
49 | if lock.locked():
50 | lock.release()
51 | move(pos)
52 |
53 |
54 | bd.when_pressed = start
55 | bd.when_moved = move
56 | bd.when_released = stop
57 |
58 | pause()
59 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ master ]
6 | pull_request:
7 | # The branches below must be a subset of the branches above
8 | branches: [ master ]
9 | schedule:
10 | # Run daily at midnight
11 | - cron: '0 0 * * *'
12 |
13 | jobs:
14 | analyze:
15 | name: Analyze
16 | runs-on: ubuntu-latest
17 | permissions:
18 | actions: read
19 | contents: read
20 | security-events: write
21 |
22 | strategy:
23 | fail-fast: false
24 | matrix:
25 | language: [ 'python' ]
26 |
27 | steps:
28 | - name: Checkout repository
29 | uses: actions/checkout@v2
30 |
31 | # Initializes the CodeQL tools for scanning.
32 | - name: Initialize CodeQL
33 | uses: github/codeql-action/init@v1
34 | with:
35 | languages: ${{ matrix.language }}
36 |
37 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
38 | # If this step fails, then you should remove it and run the build manually (see below)
39 | - name: Autobuild
40 | uses: github/codeql-action/autobuild@v1
41 |
42 | # ℹ️ Command-line programs to run using the OS shell.
43 | # 📚 https://git.io/JvXDl
44 |
45 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
46 | # and modify them (or add more) to build your code if your project
47 | # uses a compiled language
48 |
49 | #- run: |
50 | # make bootstrap
51 | # make release
52 |
53 | - name: Perform CodeQL Analysis
54 | uses: github/codeql-action/analyze@v1
55 |
--------------------------------------------------------------------------------
/examples/recipes/camera_drowsiness_buzzer.py:
--------------------------------------------------------------------------------
1 | from signal import pause
2 |
3 | from pitop import Buzzer, Camera
4 | from pitop.processing.algorithms.faces import FaceDetector
5 |
6 |
7 | def alert_on():
8 | buzzer.on()
9 |
10 |
11 | def alert_off():
12 | global drowsy_counter
13 | drowsy_counter = 0
14 | buzzer.off()
15 |
16 |
17 | def calculate_eye_aspect_ratio(face):
18 | left_eye_width, left_eye_height = face.left_eye_dimensions
19 | right_eye_width, right_eye_height = face.right_eye_dimensions
20 |
21 | left_eye_aspect_ratio = left_eye_height / left_eye_width
22 | right_eye_aspect_ratio = right_eye_height / right_eye_width
23 |
24 | eye_aspect_ratio_mean = (left_eye_aspect_ratio + right_eye_aspect_ratio) / 2
25 |
26 | return eye_aspect_ratio_mean
27 |
28 |
29 | def find_faces(frame):
30 | global drowsy_counter
31 | face = face_detector(frame)
32 |
33 | if face.found:
34 | eye_aspect_ratio = calculate_eye_aspect_ratio(face)
35 |
36 | if eye_aspect_ratio < DROWSY_THRESHOLD:
37 | drowsy_counter += 1
38 | if drowsy_counter > ALERT_COUNT:
39 | alert_on()
40 | else:
41 | alert_off()
42 | else:
43 | buzzer.on() if lost_face_alert else buzzer.off()
44 | print("Cannot find face!")
45 |
46 |
47 | camera = Camera(resolution=(640, 480), flip_top_bottom=True)
48 | buzzer = Buzzer("D0")
49 | face_detector = FaceDetector()
50 |
51 | drowsy_counter = 0
52 | DROWSY_THRESHOLD = 0.25
53 | ALERT_COUNT = 10
54 | lost_face_alert = False
55 |
56 | camera.on_frame = find_faces
57 |
58 | pause()
59 |
--------------------------------------------------------------------------------
/examples/simulation/pitop_simulate.py:
--------------------------------------------------------------------------------
1 | from os import path
2 | from time import sleep
3 |
4 | from PIL import Image
5 |
6 | from pitop.simulation import simulate, use_virtual_hardware
7 |
8 | use_virtual_hardware()
9 |
10 | # imported after calling use_virtual_hardware so they use the mocks
11 | from pitop import Pitop # noqa: E402
12 | from pitop.pma import LED, Button # noqa: E402
13 |
14 | pitop = Pitop()
15 | pitop.add_component(LED("D0", name="led1"))
16 | pitop.add_component(Button("D1", name="button1"))
17 | pitop.add_component(LED("D2", name="led2", color="green"))
18 | pitop.add_component(Button("D3", name="button2"))
19 |
20 | pitop.button1.when_pressed = pitop.led1.on
21 | pitop.button1.when_released = pitop.led1.off
22 |
23 | pitop.button2.when_pressed = pitop.led2.on
24 | pitop.button2.when_released = pitop.led2.off
25 |
26 | pitop_sim = simulate(pitop, 0.5)
27 | led_sim = simulate(pitop.led1)
28 | button_sim = simulate(pitop.button1)
29 |
30 | sleep(5)
31 |
32 | pitop.add_component(LED("D4", name="led3"))
33 | pitop.add_component(Button("D5", name="button3"))
34 |
35 | pitop.button3.when_pressed = pitop.led3.on
36 | pitop.button3.when_released = pitop.led3.off
37 |
38 | sleep(5)
39 |
40 | pitop.add_component(LED("D6", name="led4", color="yellow"))
41 | pitop.add_component(Button("D7", name="button4"))
42 |
43 | pitop.button4.when_pressed = pitop.led4.on
44 | pitop.button4.when_released = pitop.led4.off
45 |
46 | rocket = Image.open(
47 | f"{path.dirname(path.realpath(__file__))}/../../packages/miniscreen/pitop/miniscreen/images/rocket.gif"
48 | )
49 |
50 | while True:
51 | pitop.miniscreen.play_animated_image(rocket)
52 |
--------------------------------------------------------------------------------
/examples/pulse/leds-cpu_usage.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from pitop.pulse import ledmatrix
4 |
5 | last_work = [0, 0, 0, 0]
6 | last_idle = [0, 0, 0, 0]
7 |
8 |
9 | def get_cpu_rates():
10 | global last_work, last_idle
11 | rate = [0, 0, 0, 0]
12 | f = open("/proc/stat", "r")
13 | line = ""
14 | for i in range(0, 4):
15 | while not "cpu" + str(i) in line:
16 | line = f.readline()
17 | # print(line)
18 | splitline = line.split()
19 | work = int(splitline[1]) + int(splitline[2]) + int(splitline[3])
20 | idle = int(splitline[4])
21 | diff_work = work - last_work[i]
22 | diff_idle = idle - last_idle[i]
23 | rate[i] = float(diff_work) / float(diff_idle + diff_work)
24 | last_work[i] = work
25 | last_idle[i] = idle
26 | f.close()
27 | return rate
28 |
29 |
30 | ledmatrix.rotation(0)
31 |
32 | try:
33 | while True:
34 | rate = get_cpu_rates()
35 | ledmatrix.clear()
36 | for i in range(0, 4):
37 | level = int(6.99 * rate[i])
38 | if level < 4:
39 | r = 0
40 | g = 255
41 | b = 0
42 | elif level < 6:
43 | r = 255
44 | g = 255
45 | b = 6
46 | else:
47 | r = 255
48 | g = 0
49 | b = 0
50 | for y in range(0, level + 1):
51 | ledmatrix.set_pixel(2 * i, y, r, g, b)
52 |
53 | ledmatrix.show()
54 | time.sleep(1)
55 |
56 | except KeyboardInterrupt:
57 | ledmatrix.clear()
58 | ledmatrix.show()
59 |
--------------------------------------------------------------------------------
/examples/pulse/leds-numbers.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from pitop.pulse import ledmatrix
4 |
5 | OFFSET_LEFT = 0
6 | OFFSET_TOP = 2
7 |
8 |
9 | # fmt: off
10 | NUMS = [1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, # 0
11 | 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, # 1
12 | 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, # 2
13 | 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, # 3
14 | 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, # 4
15 | 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, # 5
16 | 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, # 6
17 | 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, # 7
18 | 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, # 8
19 | 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1] # 9
20 | # fmt: on
21 |
22 |
23 | # Displays a single digit (0-9)
24 | def show_digit(val, xd, yd, r, g, b):
25 | offset = val * 15
26 | for p in range(offset, offset + 15):
27 | xt = p % 3
28 | yt = (p - offset) // 3
29 | ledmatrix.set_pixel(xt + xd, 7 - yt - yd, r * NUMS[p], g * NUMS[p], b * NUMS[p])
30 | ledmatrix.show()
31 |
32 |
33 | # Displays a two-digits positive number (0-99)
34 | def show_number(val, r, g, b):
35 | abs_val = abs(val)
36 | tens = abs_val // 10
37 | units = abs_val % 10
38 | if abs_val > 9:
39 | show_digit(tens, OFFSET_LEFT, OFFSET_TOP, r, g, b)
40 | show_digit(units, OFFSET_LEFT + 4, OFFSET_TOP, r, g, b)
41 |
42 |
43 | ledmatrix.rotation(0)
44 | ledmatrix.clear()
45 |
46 | for i in range(0, 100):
47 | show_number(i, 255, 255, 0)
48 | time.sleep(0.5)
49 |
50 | ledmatrix.clear()
51 | ledmatrix.show()
52 |
--------------------------------------------------------------------------------
/packages/system/pitop/system/pitop.py:
--------------------------------------------------------------------------------
1 | from pitop.common.singleton import Singleton
2 | from pitop.core.mixins import Componentable, SupportsBattery, SupportsMiniscreen
3 |
4 |
5 | class Pitop(SupportsMiniscreen, SupportsBattery, Componentable, metaclass=Singleton):
6 | """Represents a pi-top Device.
7 |
8 | When creating a `Pitop` object, multiple properties will be set,
9 |     depending on the pi-top device that the code is running on. For example, if run on
10 | a pi-top [4], a `miniscreen` attribute will be created as an interface to control the
11 | miniscreen OLED display, but that won't be available for other pi-top devices.
12 |
13 | The Pitop class is a Singleton. This means that only one instance per process will
14 |     be created. In practice, this means that if a project instantiates the Pitop
15 |     class in two different files, both objects will share the same internal state.
16 |
17 | *property* miniscreen
18 | If using a pi-top [4], this property returns a :class:`pitop.miniscreen.Miniscreen` object, to interact with the device's Miniscreen.
19 |
20 |
21 | *property* oled
22 | Refer to `miniscreen`.
23 |
24 |
25 | *property* battery
26 | If using a pi-top with a battery, this property returns a :class:`pitop.battery.Battery` object, to interact with the device's battery.
27 | """
28 |
29 | def __init__(self):
30 | SupportsMiniscreen.__init__(self)
31 | SupportsBattery.__init__(self)
32 | Componentable.__init__(self)
33 |
34 | @property
35 | def own_state(self):
36 | return {
37 | "miniscreen_image": self.miniscreen.image,
38 | }
39 |
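40 |
41 | if __name__ == "__main__":
42 |     # Editor's illustrative sketch (not part of the package, run on a pi-top
43 |     # device): because Pitop uses the Singleton metaclass, constructing it
44 |     # twice yields the same shared instance.
45 |     first = Pitop()
46 |     second = Pitop()
47 |     print(first is second)  # True - both names refer to one instance
48 |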
--------------------------------------------------------------------------------
/packages/miniscreen/pitop/miniscreen/oled/core/lock.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 |
3 | from pyinotify import IN_CLOSE_WRITE, IN_OPEN, Notifier, ProcessEvent, WatchManager
4 |
5 |
6 | class MiniscreenLockFileMonitor:
7 | def __init__(self, lock_path):
8 | self.thread = Thread(target=self._monitor_lockfile, daemon=True)
9 | self.when_user_stops_using_oled = None
10 | self.when_user_starts_using_oled = None
11 | self.lock_path = lock_path
12 | self.notifier = None
13 |
14 | def __exit__(self, exc_type, exc_value, exc_traceback):
15 | if self.notifier:
16 | self.notifier.stop()
17 |
18 |         if self.thread.is_alive():
19 |             self.thread.join()
20 |
21 | def _monitor_lockfile(self):
22 | eh = ProcessEvent()
23 | events_to_watch = 0
24 | if self.when_user_stops_using_oled:
25 | eh.process_IN_CLOSE_WRITE = lambda event: self.when_user_stops_using_oled()
26 | events_to_watch = events_to_watch | IN_CLOSE_WRITE
27 | if self.when_user_starts_using_oled:
28 | eh.process_IN_OPEN = lambda event: self.when_user_starts_using_oled()
29 | events_to_watch = events_to_watch | IN_OPEN
30 |
31 | wm = WatchManager()
32 | wm.add_watch(self.lock_path, events_to_watch)
33 | self.notifier = Notifier(wm, eh)
34 | self.notifier.loop()
35 |
36 | def start(self):
37 | self.stop()
38 | self.thread = Thread(target=self._monitor_lockfile, daemon=True)
39 | self.thread.start()
40 |
41 | def stop(self):
42 | if self.thread is not None and self.thread.is_alive():
43 | self.thread.join(0)
44 |
--------------------------------------------------------------------------------
/tests/test_buzzer.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | import pytest
4 |
5 |
6 | @pytest.fixture
7 | def make_buzzer():
8 | buzzers = []
9 |
10 | def _make_buzzer(port="D0", name="buzzer"):
11 | from pitop import Buzzer
12 |
13 | buzzer = Buzzer(port, name=name)
14 | buzzers.append(buzzer)
15 | return buzzer
16 |
17 | yield _make_buzzer
18 |
19 | for buzzer in buzzers:
20 | buzzer.close()
21 |
22 |
23 | def test_buzzer(make_buzzer):
24 | buzzer = make_buzzer()
25 |
26 | assert buzzer.config == {
27 | "classname": "Buzzer",
28 | "module": "pitop.pma.buzzer",
29 | "name": "buzzer",
30 | "port_name": "D0",
31 | "version": "0.17.0",
32 | }
33 |
34 | assert not buzzer.value
35 | assert not buzzer.state["is_active"]
36 |
37 | buzzer.on()
38 |
39 | assert buzzer.value
40 | assert buzzer.state["is_active"]
41 |
42 |
43 | @pytest.mark.simulationtest
44 | def test_buzzer_simulate(make_buzzer, create_sim, mocker, snapshot):
45 | mocker.patch(
46 | "pitop.simulation.sprites.is_virtual_hardware",
47 | return_value=True,
48 | )
49 |
50 | buzzer = make_buzzer()
51 |
52 | sim = create_sim(buzzer)
53 |
54 | # give time for the screen and sprites to be set up
55 | sleep(2)
56 | snapshot.assert_match(sim.snapshot(), "buzzer_off.png")
57 |
58 | # turn buzzer on
59 | buzzer.on()
60 |
61 | # these events are a bit slow
62 | sleep(0.5)
63 | snapshot.assert_match(sim.snapshot(), "buzzer_on.png")
64 |
65 | # turn buzzer off
66 | buzzer.off()
67 | sleep(0.5)
68 | snapshot.assert_match(sim.snapshot(), "buzzer_off.png")
69 |
--------------------------------------------------------------------------------
/packages/pitop/pitop/labs/web/blueprints/video/__init__.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 |
3 | import gevent
4 | from flask import Blueprint, Response, abort
5 |
6 |
7 | class VideoResponse(Response):
8 | def __init__(self, get_frame=None, *args, **kwargs):
9 | if get_frame is None:
10 | abort(500, "Unable to get frames")
11 |
12 | def _get_frame():
13 | try:
14 | frame = get_frame()
15 | buffered = BytesIO()
16 | frame.save(buffered, format="JPEG")
17 | return buffered.getvalue()
18 | except Exception as e:
19 | print(e)
20 |
21 | def generate_frames():
22 | pool = gevent.get_hub().threadpool
23 | while True:
24 | # get_frame in thread so it won't block handler greenlets
25 | frame_bytes = pool.spawn(_get_frame).get()
26 | yield (
27 | b"--frame\r\n"
28 | b"Content-Type: image/jpeg\r\n\r\n" + frame_bytes + b"\r\n"
29 | )
30 |
31 | Response.__init__(
32 | self,
33 | generate_frames(),
34 | mimetype="multipart/x-mixed-replace; boundary=frame",
35 | **kwargs,
36 | )
37 |
38 |
39 | class VideoBlueprint(Blueprint):
40 | def __init__(self, name="video", get_frame=None, **kwargs):
41 | Blueprint.__init__(
42 | self,
43 | name,
44 | __name__,
45 | static_folder="video",
46 | template_folder="templates",
47 | **kwargs,
48 | )
49 |
50 | @self.route(f"/{name}.mjpg")
51 | def video():
52 | return VideoResponse(get_frame=get_frame)
53 |
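54 |
55 | if __name__ == "__main__":
56 |     # Editor's illustrative sketch (not part of the package): serve a dummy
57 |     # MJPEG stream at /video.mjpg by registering the blueprint on a Flask app.
58 |     from flask import Flask
59 |     from gevent.pywsgi import WSGIServer
60 |     from PIL import Image
61 |
62 |     def get_frame():
63 |         # a plain grey image stands in for a real camera frame
64 |         return Image.new("RGB", (320, 240), color=(128, 128, 128))
65 |
66 |     app = Flask(__name__)
67 |     app.register_blueprint(VideoBlueprint(get_frame=get_frame))
68 |     WSGIServer(("0.0.0.0", 8080), app).serve_forever()
69 |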
--------------------------------------------------------------------------------
/examples/pma/servo_motor.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | from pitop import ServoMotor, ServoMotorSetting
4 |
5 | servo = ServoMotor("S0")
6 |
7 | # Scan back and forward across a 180 degree angle range in 30 degree hops using default servo speed
8 | for angle in range(90, -100, -30):
9 | print("Setting angle to", angle)
10 | servo.target_angle = angle
11 | sleep(0.5)
12 |
13 | # You can also set the angle at a speed different from the default
14 | servo_settings = ServoMotorSetting()
15 | servo_settings.speed = 25
16 |
17 | for angle in range(-90, 100, 30):
18 | print("Setting angle to", angle)
19 | servo_settings.angle = angle
20 | servo.setting = servo_settings
21 | sleep(0.5)
22 |
23 | sleep(1)
24 |
25 | # Scan back and forward displaying current angle and speed
26 | STOP_ANGLE = 80
27 | TARGET_SPEED = 40
28 |
29 | print("Sweeping using speed ", -TARGET_SPEED)
30 | servo.target_speed = -TARGET_SPEED
31 |
32 | current_state = servo.setting
33 | current_angle = current_state.angle
34 |
35 | # sweep using the already set servo speed
36 | servo.sweep()
37 | while current_angle > -STOP_ANGLE:
38 | current_state = servo.setting
39 | current_angle = current_state.angle
40 | current_speed = current_state.speed
41 | print(f"current_angle: {current_angle} | current_speed: {current_speed}")
42 | sleep(0.05)
43 |
44 | print("Sweeping using speed ", TARGET_SPEED)
45 |
46 | # You can also sweep by passing the speed directly to the sweep method
47 | servo.sweep(speed=TARGET_SPEED)
48 | while current_angle < STOP_ANGLE:
49 | current_state = servo.setting
50 | current_angle = current_state.angle
51 | current_speed = current_state.speed
52 | print(f"current_angle: {current_angle} | current_speed: {current_speed}")
53 | sleep(0.05)
54 |
--------------------------------------------------------------------------------
/examples/recipes/encoder_motor_rover.py:
--------------------------------------------------------------------------------
1 | from threading import Thread
2 | from time import sleep
3 |
4 | from pitop import BrakingType, EncoderMotor, ForwardDirection
5 |
6 | # Setup the motors for the rover configuration
7 |
8 | motor_left = EncoderMotor("M3", ForwardDirection.CLOCKWISE)
9 | motor_right = EncoderMotor("M0", ForwardDirection.COUNTER_CLOCKWISE)
10 |
11 | motor_left.braking_type = BrakingType.COAST
12 | motor_right.braking_type = BrakingType.COAST
13 |
14 |
15 | # Define some functions for easily controlling the rover
16 |
17 |
18 | def drive(target_rpm: float):
19 | print("Start driving at target", target_rpm, "rpm...")
20 | motor_left.set_target_rpm(target_rpm)
21 | motor_right.set_target_rpm(target_rpm)
22 |
23 |
24 | def stop_rover():
25 | print("Stopping rover...")
26 | motor_left.stop()
27 | motor_right.stop()
28 |
29 |
30 | def turn_left(rotation_speed: float):
31 | print("Turning left...")
32 | motor_left.stop()
33 | motor_right.set_target_rpm(rotation_speed)
34 |
35 |
36 | def turn_right(rotation_speed: float):
37 | print("Turning right...")
38 | motor_right.stop()
39 | motor_left.set_target_rpm(rotation_speed)
40 |
41 |
42 | # Start a thread to monitor the rover
43 |
44 |
45 | def monitor_rover():
46 | while True:
47 | print(
48 | "> Rover motor RPM's (L,R):",
49 | round(motor_left.current_rpm, 2),
50 | round(motor_right.current_rpm, 2),
51 | )
52 | sleep(1)
53 |
54 |
55 | monitor_thread = Thread(target=monitor_rover, daemon=True)
56 | monitor_thread.start()
57 |
58 | # Go!
59 |
60 | rpm_speed = 100
61 | for _ in range(4):
62 | drive(rpm_speed)
63 | sleep(5)
64 |
65 | turn_left(rpm_speed)
66 | sleep(5)
67 |
68 | stop_rover()
69 |
--------------------------------------------------------------------------------