├── .gitignore ├── LICENSE ├── README.md ├── __init__.py ├── crc16_python.py ├── examples ├── __init__.py ├── fancy_round_cv.py ├── gimbal_angle_control.py ├── gimbal_control.py ├── ml_object_tracker.py ├── object_tracker.py ├── rtcp_viewer.py ├── siyi_control.py ├── siyi_tracker.py ├── test_center_gimbal.py ├── test_follow_mode.py ├── test_fpv_mode.py ├── test_from_rtsp_to_rtmp.py ├── test_get_fw_ver.py ├── test_get_gimbal_info.py ├── test_get_hw_id.py ├── test_gimbal_rotation.py ├── test_lock_mode.py ├── test_record_video.py ├── test_rtmp_stream.py ├── test_rtsp.py ├── test_zoom.py └── udp_server.py ├── siyi_message.py ├── siyi_sdk.py ├── stream.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Dmitry Devitt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # siyi-python-sdk 2 | Python implementation of SIYI SDK for communication with ZR10 and A8 Mini cameras 3 | 4 | * Camera webpage: http://en.siyi.biz/en/Gimbal%20Camera/ZR10/overview/ 5 | * Documentation: http://en.siyi.biz/en/Gimbal%20Camera/ZR10/download/ 6 | 7 | Repo based on this repo: https://github.com/mzahana/siyi_sdk.git 8 | 9 | [![IMAGE ALT TEXT HERE](https://img.youtube.com/vi/yTnAmtcHlzc/0.jpg)](https://www.youtube.com/watch?v=yTnAmtcHlzc) 10 | 11 | 12 | # Setup 13 | * Clone this package 14 | ```bash 15 | git clone git@github.com:Innopolis-UAV-Team/siyi-python-sdk.git 16 | ``` 17 | * Connect the camera to PC or onboard computer using the ethernet cable that comes with it. The current implementation uses UDP communication. 18 | * Power on the camera 19 | * Do the PC wired network configuration. Make sure to assign a manual IP address to your computer 20 | * For example, IP `192.168.144.12` 21 | * Gateway `192.168.144.25` 22 | * Netmask `255.255.255.0` 23 | * Done. 24 | 25 | # Usage 26 | * Check the scripts in the `siyi_sdk/examples` directory to learn how to use the SDK 27 | 28 | * To import this module in your code, copy the `siyi_sdk.py` `siyi_message.py` `utility.py` `crc16_python.py` scripts in your code directory, and import as follows, and then follow the test examples 29 | ```python 30 | from siyi_sdk import SIYISDK 31 | ``` 32 | * Example: To run the `test_gimbal_rotation.py` run, 33 | ```bash 34 | cd siyi_sdk/tests 35 | python3 test_gimbal_rotation.py 36 | ``` 37 | # Video Streaming 38 | ## Requirements 39 | * OpenCV `sudo apt-get install python3-opencv -y` 40 | * imutils `pip install imutils` 41 | * Gstreamer `https://gstreamer.freedesktop.org/documentation/installing/index.html?gi-language=c` 42 | 43 | Ubuntu: 44 | ```bash 45 | sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libgstreamer-plugins-bad1.0-dev gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav gstreamer1.0-doc gstreamer1.0-tools gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-qt5 gstreamer1.0-pulseaudio -y 46 | ``` 47 | - Deepstream (only for Nvidia Jetson boards) 48 | (https://docs.nvidia.com/metropolis/deepstream/dev-guide/text/DS_Quickstart.html#jetson-setup) 49 | - For RTMP streaming 50 | ```bash 51 | sudo apt install ffmpeg -y 52 | pip install ffmpeg-python 53 | ``` 54 | 55 | ## Examples 56 | * An example of how to auto tracjing with Yolov8 and CV2 tracket to0, see `examples/ml_object_tracker.py` 57 | * An example of how to auto tracjing with CV2 tracket, see `examples/object_tracker.py` 58 | * An example of gimbal contol, see `examples/gimbal_control.py` 59 | 60 | # Tools 61 | * To run a nginx-rtmp server from a docker container 62 | ```bash 63 | docker run -d -p 1935:1935 --name nginx-rtmp tiangolo/nginx-rtmp 64 | ``` 65 | [Reference](https://hub.docker.com/r/tiangolo/nginx-rtmp/) 66 | 67 | * To play an rtmp stream, you can use the following command in a terminal (you will need to install mpv `sudo apt install mpv`) 68 | ```bash 69 | mpv --msg-color=yes --msg-module=yes --keepaspect=yes --no-correct-pts --untimed --vd-lavc-threads=1 --cache=no --cache-pause=no --demuxer-lavf-o-add="fflags=+nobuffer+fastseek+flush_packets" --demuxer-lavf-probe-info=nostreams --demuxer-lavf-analyzeduration=0.1 --demuxer-max-bytes=500MiB --demuxer-readahead-secs=0.1 --interpolation=no --hr-seek-framedrop=no --video-sync=display-resample --temporal-dither=yes --framedrop=decoder+vo --deband=no --dither=no --hwdec=auto-copy --hwdec-codecs=all --video-latency-hacks=yes --profile=low-latency --linear-downscaling=no --correct-downscaling=yes --sigmoid-upscaling=yes --scale=ewa_hanning --scale-radius=3.2383154841662362 --cscale=ewa_lanczossoft --dscale=mitchell --fs --osc=no --osd-duration=450 --border=no --no-pause --no-resume-playback --keep-open=no --network-timeout=0 --stream-lavf-o=reconnect_streamed=1 rtmp://127.0.0.1/live/webcam 70 | ``` 71 | **OR you can use VLC, but you may notice high latency!** 72 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Innopolis-UAV-Team/siyi-python-sdk/9ecf1d8b0f44e60c2e4e7f6faaddf58dcfd716a0/__init__.py -------------------------------------------------------------------------------- /crc16_python.py: -------------------------------------------------------------------------------- 1 | """ 2 | Computes CRC16 3 | Ref: https://gist.github.com/oysstu/68072c44c02879a2abf94ef350d1c7c6?permalink_comment_id=3943460#gistcomment-3943460 4 | """ 5 | 6 | from unittest import TestCase 7 | import logging 8 | 9 | def crc16(data: bytes): 10 | ''' 11 | CRC-16 (CCITT) implemented with a precomputed lookup table 12 | ''' 13 | table = [ 14 | 0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50A5, 0x60C6, 0x70E7, 0x8108, 0x9129, 0xA14A, 0xB16B, 0xC18C, 0xD1AD, 0xE1CE, 0xF1EF, 15 | 0x1231, 0x0210, 0x3273, 0x2252, 0x52B5, 0x4294, 0x72F7, 0x62D6, 0x9339, 0x8318, 0xB37B, 0xA35A, 0xD3BD, 0xC39C, 0xF3FF, 0xE3DE, 16 | 0x2462, 0x3443, 0x0420, 0x1401, 0x64E6, 0x74C7, 0x44A4, 0x5485, 0xA56A, 0xB54B, 0x8528, 0x9509, 0xE5EE, 0xF5CF, 0xC5AC, 0xD58D, 17 | 0x3653, 0x2672, 0x1611, 0x0630, 0x76D7, 0x66F6, 0x5695, 0x46B4, 0xB75B, 0xA77A, 0x9719, 0x8738, 0xF7DF, 0xE7FE, 0xD79D, 0xC7BC, 18 | 0x48C4, 0x58E5, 0x6886, 0x78A7, 0x0840, 0x1861, 0x2802, 0x3823, 0xC9CC, 0xD9ED, 0xE98E, 0xF9AF, 0x8948, 0x9969, 0xA90A, 0xB92B, 19 | 0x5AF5, 0x4AD4, 0x7AB7, 0x6A96, 0x1A71, 0x0A50, 0x3A33, 0x2A12, 0xDBFD, 0xCBDC, 0xFBBF, 0xEB9E, 0x9B79, 0x8B58, 0xBB3B, 0xAB1A, 20 | 0x6CA6, 0x7C87, 0x4CE4, 0x5CC5, 0x2C22, 0x3C03, 0x0C60, 0x1C41, 0xEDAE, 0xFD8F, 0xCDEC, 0xDDCD, 0xAD2A, 0xBD0B, 0x8D68, 0x9D49, 21 | 0x7E97, 0x6EB6, 0x5ED5, 0x4EF4, 0x3E13, 0x2E32, 0x1E51, 0x0E70, 0xFF9F, 0xEFBE, 0xDFDD, 0xCFFC, 0xBF1B, 0xAF3A, 0x9F59, 0x8F78, 22 | 0x9188, 0x81A9, 0xB1CA, 0xA1EB, 0xD10C, 0xC12D, 0xF14E, 0xE16F, 0x1080, 0x00A1, 0x30C2, 0x20E3, 0x5004, 0x4025, 0x7046, 0x6067, 23 | 0x83B9, 0x9398, 0xA3FB, 0xB3DA, 0xC33D, 0xD31C, 0xE37F, 0xF35E, 0x02B1, 0x1290, 0x22F3, 0x32D2, 0x4235, 0x5214, 0x6277, 0x7256, 24 | 0xB5EA, 0xA5CB, 0x95A8, 0x8589, 0xF56E, 0xE54F, 0xD52C, 0xC50D, 0x34E2, 0x24C3, 0x14A0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405, 25 | 0xA7DB, 0xB7FA, 0x8799, 0x97B8, 0xE75F, 0xF77E, 0xC71D, 0xD73C, 0x26D3, 0x36F2, 0x0691, 0x16B0, 0x6657, 0x7676, 0x4615, 0x5634, 26 | 0xD94C, 0xC96D, 0xF90E, 0xE92F, 0x99C8, 0x89E9, 0xB98A, 0xA9AB, 0x5844, 0x4865, 0x7806, 0x6827, 0x18C0, 0x08E1, 0x3882, 0x28A3, 27 | 0xCB7D, 0xDB5C, 0xEB3F, 0xFB1E, 0x8BF9, 0x9BD8, 0xABBB, 0xBB9A, 0x4A75, 0x5A54, 0x6A37, 0x7A16, 0x0AF1, 0x1AD0, 0x2AB3, 0x3A92, 28 | 0xFD2E, 0xED0F, 0xDD6C, 0xCD4D, 0xBDAA, 0xAD8B, 0x9DE8, 0x8DC9, 0x7C26, 0x6C07, 0x5C64, 0x4C45, 0x3CA2, 0x2C83, 0x1CE0, 0x0CC1, 29 | 0xEF1F, 0xFF3E, 0xCF5D, 0xDF7C, 0xAF9B, 0xBFBA, 0x8FD9, 0x9FF8, 0x6E17, 0x7E36, 0x4E55, 0x5E74, 0x2E93, 0x3EB2, 0x0ED1, 0x1EF0 30 | ] 31 | 32 | crc = 0x0 33 | for byte in data: 34 | crc = ((crc<<8)&0xff00) ^ table[((crc>>8)&0xff)^byte] 35 | return crc & 0xffff 36 | 37 | def crc16_str_swap(val: str): 38 | """ 39 | Custom function to compute the CRC16 of a string of hexadecimal data 40 | 41 | Params 42 | -- 43 | val [str] String of hexadecimal values 44 | 45 | Returns 46 | -- 47 | crc_str [str] string of 4 charcaters representing 2 bytes of crc16, in swaped order. 48 | """ 49 | LOG_FORMAT='%(asctime)s [CRC16PYTHON::%(funcName)s] [%(levelname)s]:\t%(message)s' 50 | logging.basicConfig(format=LOG_FORMAT, level=logging.INFO) 51 | log = logging.getLogger(__name__) 52 | crc_str = "" 53 | if not isinstance(val, str): 54 | log.error("Message is not string") 55 | return crc_str 56 | b = bytes.fromhex(val) 57 | crc_int = crc16(b) 58 | crc_str = format(crc_int, 'x') 59 | 60 | # Just a sanity check: 61 | if len(crc_str)>4: 62 | crc_str='ffff' 63 | return crc_str 64 | 65 | # Make sure crc is 4 chatracters (2 for each byte) with swaped bytes 66 | if len(crc_str)==3: 67 | crc_str = "0"+crc_str 68 | if len(crc_str)==2: 69 | crc_str='00'+crc_str 70 | if len(crc_str)==1: 71 | crc_str='000'+crc_str 72 | 73 | # Reverse bytes, according to SIYI SDK 74 | low_byte = crc_str[2:] 75 | high_byte = crc_str[0:2] 76 | # swap bytes 77 | crc_str = low_byte+high_byte 78 | 79 | return(crc_str) 80 | 81 | def crc16_test(): 82 | LOG_FORMAT='%(asctime)s [CRC16PYTHON::%(funcName)s] [%(levelname)s]:\t%(message)s' 83 | logging.basicConfig(format=LOG_FORMAT, level=logging.INFO) 84 | log = logging.getLogger(__name__) 85 | data="5566010100000005FF" 86 | expected_crc = "5c6a" 87 | data_crc = crc16_str_swap(data) 88 | log.info("data_crc %s", data_crc) 89 | 90 | if data_crc != expected_crc: 91 | log.error("CRC16 of data %s did not match expected CRC %s", data_crc, expected_crc) 92 | else: 93 | log.info("CRC16 Pass!") 94 | 95 | if __name__=="__main__": 96 | crc16_test() -------------------------------------------------------------------------------- /examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Innopolis-UAV-Team/siyi-python-sdk/9ecf1d8b0f44e60c2e4e7f6faaddf58dcfd716a0/examples/__init__.py -------------------------------------------------------------------------------- /examples/fancy_round_cv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | # For Details Reference Link: 5 | # http://stackoverflow.com/questions/46036477/drawing-fancy-rectangle-around-face 6 | def draw_border(img, pt1, pt2, color, thickness, r, d): 7 | x1,y1 = pt1 8 | x2,y2 = pt2 9 | 10 | # Top left 11 | cv2.line(img, (x1 + r, y1), (x1 + r + d, y1), color, thickness) 12 | cv2.line(img, (x1, y1 + r), (x1, y1 + r + d), color, thickness) 13 | cv2.ellipse(img, (x1 + r, y1 + r), (r, r), 180, 0, 90, color, thickness) 14 | 15 | # Top right 16 | cv2.line(img, (x2 - r, y1), (x2 - r - d, y1), color, thickness) 17 | cv2.line(img, (x2, y1 + r), (x2, y1 + r + d), color, thickness) 18 | cv2.ellipse(img, (x2 - r, y1 + r), (r, r), 270, 0, 90, color, thickness) 19 | 20 | # Bottom left 21 | cv2.line(img, (x1 + r, y2), (x1 + r + d, y2), color, thickness) 22 | cv2.line(img, (x1, y2 - r), (x1, y2 - r - d), color, thickness) 23 | cv2.ellipse(img, (x1 + r, y2 - r), (r, r), 90, 0, 90, color, thickness) 24 | 25 | # Bottom right 26 | cv2.line(img, (x2 - r, y2), (x2 - r - d, y2), color, thickness) 27 | cv2.line(img, (x2, y2 - r), (x2, y2 - r - d), color, thickness) 28 | cv2.ellipse(img, (x2 - r, y2 - r), (r, r), 0, 0, 90, color, thickness) 29 | 30 | # ============================================================================ 31 | 32 | if __name__ == '__main__': 33 | 34 | img = np.zeros((512,512,3), dtype=np.uint8) 35 | img[:]=(100,100,100) 36 | # b g r 37 | draw_border(img, (10,10), (100, 80), (255,0,0), 2, 10, 10) 38 | draw_border(img, (128,128), (340, 340), (255,0,0), 2, 10, 10) 39 | 40 | cv2.imshow('Fancy Rectangle',img) 41 | cv2.waitKey() 42 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /examples/gimbal_angle_control.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_gimbal_rotation.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to set/get gimbal rotation 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | from time import sleep 10 | import sys 11 | import os 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | if not cam.connect(): 23 | print("No connection ") 24 | exit(1) 25 | print("Current motion mode: ", cam._motionMode_msg.mode) 26 | 27 | cam.requestCenterGimbal 28 | 29 | cam.setGimbalRotation(yaw=0,pitch=-85,err_thresh=0.1,kp=0.1) 30 | 31 | print("Attitude (yaw,pitch,roll) eg:", cam.getAttitude()) 32 | 33 | cam.disconnect() 34 | 35 | if __name__ == "__main__": 36 | test() -------------------------------------------------------------------------------- /examples/gimbal_control.py: -------------------------------------------------------------------------------- 1 | from time import sleep 2 | import sys 3 | import os 4 | from pynput.keyboard import Key, Listener 5 | import threading 6 | 7 | current = os.path.dirname(os.path.realpath(__file__)) 8 | parent_directory = os.path.dirname(current) 9 | 10 | sys.path.append(parent_directory) 11 | 12 | from siyi_sdk import SIYISDK 13 | 14 | zoom = 1 15 | yaw = 0 16 | pitch = 0 17 | init_flag = False 18 | 19 | def keybord_control(key): 20 | global cam, zoom, yaw, pitch 21 | print('\nYou Entered {0}'.format(key)) 22 | print("type:", type(key)) 23 | 24 | try: 25 | if key.char == "n" or key.char == "C": 26 | print('You Pressed N Key! Center') 27 | yaw = 0 28 | pitch = 0 29 | cam.setRotation(yaw,pitch) 30 | cam.requestCenterGimbal() 31 | 32 | if key.char == "e" or key.char == "E": 33 | print('You Pressed E Key! Zoom+') 34 | zoom += 1 35 | if zoom > 30: 36 | zoom = 30 37 | cam.set_zoom(zoom) 38 | 39 | if key.char == "q" or key.char == "Q": 40 | print('You Pressed Q Key! Zoom-') 41 | zoom -= 1 42 | if zoom < 1: 43 | zoom = 1 44 | cam.set_zoom(zoom) 45 | 46 | if key.char == "w" or key.char == "W": 47 | print('UP') 48 | pitch += 2.0 49 | cam.setRotation(yaw,pitch) 50 | if key.char == "s" or key.char == "S": 51 | print('Down') 52 | pitch -= 2.0 53 | cam.setRotation(yaw,pitch) 54 | if key.char == "d" or key.char == "D": 55 | print('Right') 56 | yaw -= 2.0 57 | cam.setRotation(yaw,pitch) 58 | if key.char == "a" or key.char == "A": 59 | print('Left') 60 | yaw += 2.0 61 | cam.setRotation(yaw,pitch) 62 | 63 | if key.char == "1": 64 | print('FPV mode') 65 | cam.requestFPVMode() 66 | sleep(2) 67 | print("Current motion mode: ", cam._motionMode_msg.mode) 68 | if key.char == "2": 69 | print('Lock mode') 70 | cam.requestLockMode() 71 | sleep(2) 72 | print("Current motion mode: ", cam._motionMode_msg.mode) 73 | if key.char == "3": 74 | print('Follow mode') 75 | cam.requestFollowMode() 76 | sleep(2) 77 | print("Current motion mode: ", cam._motionMode_msg.mode) 78 | if key.char == "p" or key.char == "P": 79 | print('Pick') 80 | cam.requestPhoto() 81 | except: 82 | pass 83 | 84 | if key == Key.delete: 85 | # Stop listener 86 | return False 87 | 88 | def listener_tread(): 89 | with Listener(on_press = keybord_control) as listener: 90 | listener.join() 91 | 92 | if __name__ == "__main__": 93 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 94 | if not cam.connect(): 95 | print("No connection ") 96 | exit(1) 97 | print("Current motion mode: ", cam._motionMode_msg.mode) 98 | cam.requestFollowMode() 99 | 100 | _list_thread = threading.Thread(target=listener_tread, 101 | args=()) 102 | 103 | _list_thread.start() 104 | sleep(1) 105 | while True: 106 | print("Attitude (yaw,pitch,roll) eg:", cam.getAttitude()) 107 | sleep(1) 108 | if not init_flag: 109 | att = cam.getAttitude() 110 | yaw = att[0] 111 | pitch = att[1] 112 | cam.setRotation(yaw,pitch) 113 | init_flag = True 114 | -------------------------------------------------------------------------------- /examples/ml_object_tracker.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | from ultralytics import YOLO 3 | import numpy as np 4 | import cv2 5 | import os 6 | from time import sleep, time 7 | import threading 8 | from collections import defaultdict 9 | from multiprocessing import Process, Queue 10 | from siyi_control import SIYIControl 11 | from scipy.spatial import distance as dist 12 | 13 | import queue 14 | 15 | class Track_Object: 16 | def __init__(self) -> None: 17 | self.xyxy = [0, 0, 0, 0] 18 | self._xywh = [0, 0, 0, 0] 19 | self.name = "" 20 | self.centerPoint = [] 21 | self.class_id = -1 22 | self.track_id = -1 23 | 24 | def xywh(self): 25 | w = int(self.xyxy[2] - self.xyxy[0]) 26 | h = int(self.xyxy[3] - self.xyxy[1]) 27 | self._xywh = (int(self.xyxy[0]), int(self.xyxy[1]), w, h) 28 | return self._xywh 29 | 30 | class VideoCapture: 31 | 32 | def __init__(self, name): 33 | 34 | self.cap = cv2.VideoCapture(name, cv2.CAP_FFMPEG, [cv2.CAP_PROP_HW_ACCELERATION, cv2.VIDEO_ACCELERATION_ANY]) 35 | self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 10) 36 | 37 | self.q = queue.Queue() 38 | t = threading.Thread(target=self._reader) 39 | t.daemon = True 40 | t.start() 41 | 42 | # read frames as soon as they are available, keeping only most recent one 43 | def _reader(self): 44 | while True: 45 | ret, frame = self.cap.read() 46 | # frame = cv2.resize(frame, None, fx=0.6, fy=0.6) 47 | if not ret: 48 | break 49 | if not self.q.empty(): 50 | try: 51 | self.q.get_nowait() # discard previous (unprocessed) frame 52 | except queue.Empty: 53 | pass 54 | self.q.put(frame) 55 | def read(self): 56 | print("qsize", self.q.qsize()) 57 | return self.q.get() 58 | 59 | 60 | 61 | model = YOLO('yolov8m.pt') 62 | 63 | ml_results = None 64 | track_history_ml = defaultdict(lambda: []) 65 | track_history = list() 66 | 67 | is_click = False 68 | selected_point = None 69 | drawing = False # True while ROI is actively being drawn by mouse 70 | show_drawing = False # True while ROI is drawn but is pending use or cancel 71 | p1, p2 = (0, 0), (0, 0) 72 | click_point = [] 73 | select_obj_point = None 74 | 75 | bbox_ROI = (0, 0, 0, 0) 76 | bbox_old = (0, 0, 0, 0) 77 | # Our ROI, defined by two points 78 | state = 0 79 | tracker_roi = None 80 | 81 | def draw_border(img, pt1, pt2, color, thickness, r, d): 82 | x1,y1 = pt1 83 | x2,y2 = pt2 84 | 85 | # Top left 86 | cv2.line(img, (x1 + r, y1), (x1 + r + d, y1), color, thickness) 87 | cv2.line(img, (x1, y1 + r), (x1, y1 + r + d), color, thickness) 88 | cv2.ellipse(img, (x1 + r, y1 + r), (r, r), 180, 0, 90, color, thickness) 89 | 90 | # Top right 91 | cv2.line(img, (x2 - r, y1), (x2 - r - d, y1), color, thickness) 92 | cv2.line(img, (x2, y1 + r), (x2, y1 + r + d), color, thickness) 93 | cv2.ellipse(img, (x2 - r, y1 + r), (r, r), 270, 0, 90, color, thickness) 94 | 95 | # Bottom left 96 | cv2.line(img, (x1 + r, y2), (x1 + r + d, y2), color, thickness) 97 | cv2.line(img, (x1, y2 - r), (x1, y2 - r - d), color, thickness) 98 | cv2.ellipse(img, (x1 + r, y2 - r), (r, r), 90, 0, 90, color, thickness) 99 | 100 | # Bottom right 101 | cv2.line(img, (x2 - r, y2), (x2 - r - d, y2), color, thickness) 102 | cv2.line(img, (x2, y2 - r), (x2, y2 - r - d), color, thickness) 103 | cv2.ellipse(img, (x2 - r, y2 - r), (r, r), 0, 0, 90, color, thickness) 104 | from operator import itemgetter 105 | 106 | def return_nearest_obj(results, targetPose, trashhold=100): 107 | global model 108 | # Get array of all found objects 109 | detections = np.array(results[0].boxes.xyxy.cpu(), dtype="int") # Используйте xyxy вместо boxes 110 | classes = np.array(results[0].boxes.cls.cpu(), dtype="int") 111 | track_ids = np.array(results[0].boxes.id.cpu(), dtype="int") 112 | 113 | # Check if detec samething 114 | if len(detections) > 0: 115 | # Sort object detection by dist from center of image 116 | distances = [np.sqrt((((x[0] + x[2])/2)-targetPose[0])**2 + (((x[1] + x[3])/2)-targetPose[1])**2) for x in detections] 117 | sorted_indices = np.argsort(distances) 118 | detections = detections[sorted_indices] 119 | # Сортировка detections по distances 120 | 121 | # Found nearest object 122 | nearest_detection = detections[0] 123 | dist = np.sqrt((targetPose[0] - ((nearest_detection[0] + nearest_detection[2])/2))**2 + (targetPose[1] - ((nearest_detection[1] + nearest_detection[3])/2))**2) 124 | if dist > trashhold: 125 | print(" dist > trashhold") 126 | return None 127 | 128 | # Coords of nearest object 129 | x1, y1, x2, y2 = nearest_detection[:4] 130 | 131 | # # ID and name of nearest object 132 | class_id = classes[sorted_indices][0] 133 | track_id = track_ids[sorted_indices][0] 134 | name_id = model.names[class_id] 135 | 136 | center_x = int((x1 + x2) / 2) 137 | center_y = int((y1 + y2) / 2) 138 | 139 | ml_obj = Track_Object() 140 | ml_obj.name = name_id 141 | ml_obj.centerPoint = (center_x, center_y) 142 | ml_obj.class_id = class_id 143 | ml_obj.track_id = track_id 144 | ml_obj.xyxy = nearest_detection[:4] 145 | 146 | return ml_obj 147 | else: 148 | print("Object nit found.") 149 | return None 150 | 151 | # Called every time a mouse event happen 152 | def on_mouse(event, x, y, flags, userdata): 153 | global click_point, is_click 154 | global p1, p2, drawing, show_drawing, bbox_ROI 155 | 156 | if event == cv2.EVENT_LBUTTONDOWN: 157 | # Left click down (select first point) 158 | drawing = True 159 | show_drawing = True 160 | p1 = x, y 161 | p2 = x, y 162 | is_click = True 163 | click_point = x, y 164 | 165 | elif event == cv2.EVENT_MOUSEMOVE: 166 | # Drag to second point 167 | if drawing: 168 | p2 = x, y 169 | elif event == cv2.EVENT_LBUTTONUP: 170 | # Left click up (select second point) 171 | drawing = False 172 | p2 = x, y 173 | p3 = (p1[0], p2[1]) 174 | p4 = (p2[0], p1[1]) 175 | 176 | pts = np.array([p1, p2, p3, p4]) 177 | xyx1y1= norm_2d_points(pts) 178 | w = int(xyx1y1[2] - xyx1y1[0]) 179 | h = int(xyx1y1[3] - xyx1y1[1]) 180 | bbox_ROI = (int(xyx1y1[0]), int(xyx1y1[1]), w, h) 181 | 182 | def draw_center(frame, track_point, color=(255, 255, 255), _thickness=1): 183 | h, w, _ = frame.shape 184 | cv2.line(frame, (track_point[0], 0), (track_point[0], h), color=color, thickness=_thickness) 185 | cv2.line(frame, (0, track_point[1]), (w, track_point[1]), color=color, thickness=_thickness) 186 | 187 | def norm_2d_points(pts): 188 | """ 189 | This function, norm_2d_points, is designed to normalize the coordinates of 2D points. 190 | It takes as input a pts array of four 2D points. 191 | """ 192 | # sort the points based on their x-coordinates 193 | xSorted = pts[np.argsort(pts[:, 0]), :] 194 | # grab the left-most and right-most points from the sorted 195 | # x-roodinate points 196 | leftMost = xSorted[:2, :] 197 | rightMost = xSorted[2:, :] 198 | 199 | leftMost = leftMost[np.argsort(leftMost[:, 1]), :] 200 | (tl, bl) = leftMost 201 | 202 | D = dist.cdist(tl[np.newaxis], rightMost, "euclidean")[0] 203 | (br, tr) = rightMost[np.argsort(D)[::-1], :] 204 | # return the coordinates in top-left, top-right, 205 | # bottom-right, and bottom-left order 206 | return np.array([tl,br], dtype="int16").flatten().tolist() 207 | 208 | def tracking_camera(frame, track_point, control, power=0.03): 209 | """ 210 | Calculation of offset from tracking 211 | """ 212 | 213 | h, w, _ = frame.shape 214 | dy, dx = int(h / 2), int(w / 2) 215 | # Calculate diff 216 | offset = (dx-track_point[0], dy-track_point[1]) 217 | print("offset", offset) 218 | control.set_offset(yaw_off=offset[0], pitch_offset=offset[1],power=power) 219 | 220 | 221 | if __name__ == "__main__": 222 | dt = time() 223 | old_dt = dt 224 | currnt_time = time() 225 | timer = 0 226 | is_bisy = False 227 | 228 | RTSP_URL = "rtsp://192.168.144.25:8554/main.264" 229 | 230 | siyi_cap = VideoCapture(RTSP_URL) 231 | 232 | cv2.namedWindow("output", cv2.WINDOW_NORMAL) 233 | cv2.setMouseCallback('output', on_mouse) 234 | siyi_control = SIYIControl() 235 | 236 | while True: 237 | 238 | # Read frame from camera 239 | if is_click: 240 | selected_point = click_point 241 | is_click = False 242 | 243 | frame = siyi_cap.read() 244 | 245 | # If a ROI is selected, draw it 246 | if show_drawing: 247 | # Fix p2 to be always within the frame 248 | p2 = ( 249 | 0 if p2[0] < 0 else (p2[0] if p2[0] < frame.shape[1] else frame.shape[1]), 250 | 0 if p2[1] < 0 else (p2[1] if p2[1] < frame.shape[0] else frame.shape[0]) 251 | ) 252 | cv2.rectangle(frame, p1, p2, (255, 0, 0), 2) 253 | 254 | # ML track and detection 255 | ml_results = model.track(frame, persist=True, tracker="botsort.yaml", conf=0.1, iou=0.5, device="mps") 256 | if ml_results: 257 | is_bisy = False 258 | # overlay masks on original image 259 | if ml_results[0].boxes.id is not None: 260 | bboxes = np.array(ml_results[0].boxes.xyxy.cpu(), dtype="int") 261 | classes = np.array(ml_results[0].boxes.cls.cpu(), dtype="int") 262 | track_ids = np.array(ml_results[0].boxes.id.cpu(), dtype="int") 263 | 264 | if selected_point: 265 | print("selected_point", selected_point) 266 | select_obj_point = return_nearest_obj(ml_results, selected_point, 100) 267 | else: 268 | select_obj_point = None 269 | 270 | for cls, bbox, track_id in zip(classes, bboxes, track_ids): 271 | (x, y, x2, y2) = bbox 272 | 273 | if select_obj_point and track_id == select_obj_point.track_id: 274 | # draw points track 275 | track = track_history_ml[track_id] 276 | center_x = int((x + x2) / 2) 277 | center_y = int((y + y2) / 2) 278 | track.append((int(center_x), int(center_y))) # x, y center point 279 | if len(track) > 10: # retain 10 tracks 280 | track.pop(0) 281 | points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2)) 282 | cv2.polylines(frame, [points], isClosed=False, color=(255, 255, 255), thickness=2) 283 | 284 | # draw selected border 285 | draw_border(frame, (x,y), (x2, y2), (0,0,255), 2, 5, 10) 286 | cv2.circle(frame, select_obj_point.centerPoint, 5, (0, 0, 255), 5) 287 | draw_center(frame, selected_point, color=(255, 255, 255), _thickness=1) 288 | 289 | else: 290 | draw_border(frame, (x,y), (x2, y2), (255,170,0), 2, 5, 10) 291 | 292 | name = "ID: %s %s " % (track_id, model.names[cls]) 293 | cv2.putText(frame, name, (x, y - 5), cv2.FONT_HERSHEY_PLAIN, 1, (255, 255, 225), 2) 294 | 295 | # add center point of point 296 | center_x = int((x + x2) / 2) 297 | center_y = int((y + y2) / 2) 298 | cv2.circle(frame, (center_x, center_y), 5, (255, 255, 255), 2) 299 | 300 | # Run tracker 301 | if tracker_roi: 302 | ret_tracking, bbox = tracker_roi.update(frame) 303 | if bbox_old != bbox: 304 | bbox_old = bbox 305 | 306 | # Draw bbox 307 | if ret_tracking: 308 | p1_ot = (int(bbox[0]), int(bbox[1])) 309 | p2_ot = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) 310 | draw_border(frame, p1_ot, p2_ot, (0,128,255), 2, 5, 10) 311 | # draw line 312 | (x, y, x2, y2) = (p1_ot[0], p1_ot[1], p2_ot[0], p2_ot[1]) 313 | center_x = int((x + x2) / 2) 314 | center_y = int((y + y2) / 2) 315 | track_history.append((int(center_x), int(center_y))) # x, y center point 316 | if len(track_history) > 10: # retain 10 tracks for 10 frames 317 | track_history.pop(0) 318 | 319 | points = np.hstack(track_history).astype(np.int32).reshape((-1, 1, 2)) 320 | cv2.polylines(frame, [points], isClosed=False, color=(255, 255, 255), thickness=3) 321 | 322 | tracking_camera(frame, track_history[-1], siyi_control) 323 | selected_point = track_history[-1] 324 | else: 325 | if select_obj_point: 326 | ret = tracker_roi.init(frame, select_obj_point.xywh) 327 | else: 328 | selected_point = None 329 | 330 | cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2) 331 | 332 | 333 | 334 | # Show window 335 | cv2.imshow("output", frame) 336 | 337 | pressed = cv2.waitKey(1) 338 | if pressed in [13, 32]: 339 | # Pressed Enter or Space to use ROI 340 | if tracker_roi: 341 | tracker_roi = None 342 | tracker_roi = cv2.TrackerCSRT.create() 343 | drawing = False 344 | show_drawing = False 345 | print("bbox", bbox_ROI) 346 | track_history.clear() 347 | 348 | if select_obj_point: 349 | # select by ML 350 | ret = tracker_roi.init(frame, select_obj_point.xywh()) 351 | else: 352 | # select by ROI 353 | if bbox_ROI[2] == 0 or bbox_ROI[3] == 0: 354 | print("Roi not celect") 355 | drawing = False 356 | show_drawing = False 357 | track_history.clear() 358 | selected_point = None 359 | if tracker_roi: 360 | tracker_roi = None 361 | else: 362 | ret = tracker_roi.init(frame, bbox_ROI) 363 | 364 | # here do something with ROI points values (p1 and p2) 365 | elif pressed in [ord('c'), ord('C')]: 366 | # Pressed C or Esc to cancel ROI 367 | drawing = False 368 | show_drawing = False 369 | track_history.clear() 370 | selected_point = None 371 | if tracker_roi: 372 | tracker_roi = None 373 | elif pressed in [27]: 374 | # Pressed Esc to cancel 375 | drawing = False 376 | show_drawing = False 377 | track_history.clear() 378 | if tracker_roi: 379 | tracker_roi = None 380 | break 381 | 382 | cv2.destroyAllWindows() 383 | siyi_cap.cap.release() 384 | -------------------------------------------------------------------------------- /examples/object_tracker.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import time 3 | 4 | # Videl load 5 | 6 | cap = cv2.VideoCapture(0) 7 | time.sleep(3) 8 | 9 | # Read frame 10 | ret, frame = cap.read() 11 | cv2.namedWindow('Tracking', cv2.WINDOW_NORMAL) 12 | # Our ROI, defined by two points 13 | state = 0 14 | # Our ROI, defined by two points 15 | p1, p2 = (0, 0), (0, 0) 16 | drawing = False # True while ROI is actively being drawn by mouse 17 | show_drawing = False # True while ROI is drawn but is pending use or cancel 18 | blue = (255, 0, 0) 19 | 20 | # Called every time a mouse event happen 21 | def on_mouse(event, x, y, flags, userdata): 22 | global p1, p2, drawing, show_drawing 23 | 24 | if event == cv2.EVENT_LBUTTONDOWN: 25 | # Left click down (select first point) 26 | drawing = True 27 | show_drawing = True 28 | p1 = x, y 29 | p2 = x, y 30 | elif event == cv2.EVENT_MOUSEMOVE: 31 | # Drag to second point 32 | if drawing: 33 | p2 = x, y 34 | elif event == cv2.EVENT_LBUTTONUP: 35 | # Left click up (select second point) 36 | drawing = False 37 | p2 = x, y 38 | 39 | # Register the mouse callback 40 | cv2.setMouseCallback('Tracking', on_mouse) 41 | 42 | tracker = None 43 | bbox_old = (0, 0, 0, 0) 44 | 45 | while (cap.isOpened()): 46 | ret, frame = cap.read() 47 | if not ret: 48 | break 49 | # If a ROI is selected, draw it 50 | if show_drawing: 51 | # Fix p2 to be always within the frame 52 | p2 = ( 53 | 0 if p2[0] < 0 else (p2[0] if p2[0] < frame.shape[1] else frame.shape[1]), 54 | 0 if p2[1] < 0 else (p2[1] if p2[1] < frame.shape[0] else frame.shape[0]) 55 | ) 56 | cv2.rectangle(frame, p1, p2, blue, 2) 57 | 58 | # Обновление трекера 59 | if tracker: 60 | ret_ot, bbox = tracker.update(frame) 61 | if bbox_old != bbox: 62 | bbox_old = bbox 63 | print("track", bbox) 64 | # Рисование ограничивающего прямоугольника 65 | if ret_ot: 66 | p1_ot = (int(bbox[0]), int(bbox[1])) 67 | p2_ot = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) 68 | cv2.rectangle(frame, p1_ot, p2_ot, (255,155,0), 2, 1) 69 | else: 70 | cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2) 71 | 72 | # Отображение результатов отслеживания 73 | cv2.imshow("Tracking", frame) 74 | 75 | pressed = cv2.waitKey(1) 76 | if pressed in [13, 32]: 77 | # Pressed Enter or Space to use ROI 78 | if tracker: 79 | tracker.clear() 80 | tracker = cv2.TrackerCSRT.create() 81 | drawing = False 82 | show_drawing = False 83 | print(p1,p2) 84 | bbox1 = (p1[0],p1[1],p2[0],p2[1]) 85 | print("bbox", bbox1) 86 | ret = tracker.init(frame, bbox1) 87 | 88 | # here do something with ROI points values (p1 and p2) 89 | elif pressed in [ord('c'), ord('C'), 27]: 90 | # Pressed C or Esc to cancel ROI 91 | drawing = False 92 | show_drawing = False 93 | 94 | 95 | 96 | # Закрытие окна отображения 97 | cap.release() 98 | cv2.destroyAllWindows() -------------------------------------------------------------------------------- /examples/rtcp_viewer.py: -------------------------------------------------------------------------------- 1 | import ffmpegcv 2 | import cv2 3 | 4 | # Создайте объект VideoCaptureStream с URL вашего RTCP 5 | stream = ffmpegcv.VideoCaptureStream("rtsp://192.168.144.25:8554/main.264") 6 | 7 | 8 | while True: 9 | # Считывайте кадры из потока 10 | ret, frame = stream.read() 11 | 12 | # Если кадр успешно считан, отобразите его 13 | if ret: 14 | cv2.imshow('Frame', frame) 15 | 16 | # Если нажата клавиша 'q', выйдите из цикла 17 | if cv2.waitKey(1) & 0xFF == 27: 18 | break 19 | 20 | # Закройте поток и освободите ресурсы 21 | stream.close() 22 | cv2.destroyAllWindows() 23 | -------------------------------------------------------------------------------- /examples/siyi_control.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_gimbal_rotation.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to set/get gimbal rotation 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | from time import sleep 10 | import sys 11 | import os 12 | from pynput.keyboard import Key, Listener 13 | import threading 14 | 15 | current = os.path.dirname(os.path.realpath(__file__)) 16 | parent_directory = os.path.dirname(current) 17 | 18 | sys.path.append(parent_directory) 19 | 20 | from siyi_sdk import SIYISDK 21 | 22 | class SIYIControl: 23 | def __init__(self, _server_ip="192.168.144.25", _port=37260): 24 | 25 | self.zoom = 1 26 | self.yaw = 0 27 | self.pitch = 0 28 | self.init_flag = False 29 | 30 | 31 | self.cam = SIYISDK(server_ip=_server_ip, port=_port) 32 | if not self.cam.connect(): 33 | print("No connection ") 34 | exit(1) 35 | print("Current motion mode: ", self.cam._motionMode_msg.mode) 36 | self.cam.requestFollowMode() 37 | 38 | _list_thread = threading.Thread(target=self.listener_tread, 39 | args=()) 40 | 41 | _list_thread.start() 42 | sleep(1) 43 | 44 | print("Attitude (yaw,pitch,roll) eg:", self.cam.getAttitude()) 45 | sleep(1) 46 | if not self.init_flag: 47 | att = self.cam.getAttitude() 48 | yaw = att[0] 49 | pitch = att[1] 50 | self.cam.setRotation(yaw,pitch) 51 | self.init_flag = True 52 | 53 | 54 | def keybord_control(self, key): 55 | print('\nYou Entered {0}'.format(key)) 56 | print("type:", type(key)) 57 | 58 | try: 59 | if key.char == "n" or key.char == "C": 60 | print('You Pressed N Key! Center') 61 | yaw = 0 62 | pitch = 0 63 | self.cam.setRotation(yaw,pitch) 64 | self.cam.requestCenterGimbal() 65 | 66 | if key.char == "e" or key.char == "E": 67 | print('You Pressed E Key! Zoom+') 68 | self.zoom += 1 69 | if self.zoom > 30: 70 | self.zoom = 30 71 | self.cam.set_zoom(self.zoom) 72 | 73 | if key.char == "q" or key.char == "Q": 74 | print('You Pressed Q Key! Zoom-') 75 | self.zoom -= 1 76 | if self.zoom < 1: 77 | self.zoom = 1 78 | self.cam.set_zoom(self.zoom) 79 | 80 | if key.char == "w" or key.char == "W": 81 | print('UP') 82 | self.pitch += 2.0 83 | self.cam.setRotation(self.yaw,self.pitch) 84 | if key.char == "s" or key.char == "S": 85 | print('Down') 86 | self.pitch -= 2.0 87 | self.cam.setRotation(self.yaw,self.pitch) 88 | if key.char == "d" or key.char == "D": 89 | print('Right') 90 | self.yaw -= 2.0 91 | self.cam.setRotation(self.yaw,self.pitch) 92 | if key.char == "a" or key.char == "A": 93 | print('Left') 94 | self.yaw += 2.0 95 | self.cam.setRotation(self.yaw,self.pitch) 96 | 97 | if key.char == "1": 98 | print('FPV mode') 99 | self.cam.requestFPVMode() 100 | sleep(2) 101 | print("Current motion mode: ", self.cam._motionMode_msg.mode) 102 | if key.char == "2": 103 | print('Lock mode') 104 | self.cam.requestLockMode() 105 | sleep(2) 106 | print("Current motion mode: ", self.cam._motionMode_msg.mode) 107 | if key.char == "3": 108 | print('Follow mode') 109 | self.cam.requestFollowMode() 110 | sleep(2) 111 | print("Current motion mode: ", self.cam._motionMode_msg.mode) 112 | if key.char == "p" or key.char == "P": 113 | print('Pick') 114 | self.cam.requestPhoto() 115 | except: 116 | pass 117 | 118 | if key == Key.delete: 119 | # Stop listener 120 | return False 121 | 122 | def listener_tread(self): 123 | with Listener(on_press = self.keybord_control) as listener: 124 | listener.join() 125 | 126 | def set_offset(self, yaw_off, pitch_offset, power): 127 | att = self.cam.getAttitude() 128 | self.yaw = att[0] + (yaw_off * power) 129 | self.pitch = att[1] + (pitch_offset * power) 130 | 131 | print("set offset", self.yaw, self.pitch) 132 | self.cam.setRotation(int(self.yaw), int(self.pitch)) 133 | -------------------------------------------------------------------------------- /examples/siyi_tracker.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | 3 | import numpy as np 4 | import os 5 | from time import sleep, time 6 | import threading 7 | from collections import defaultdict 8 | from multiprocessing import Process, Queue 9 | import queue 10 | from scipy.spatial import distance as dist 11 | 12 | from siyi_control import SIYIControl 13 | 14 | track_history = list() 15 | queue_from_cam = Queue(maxsize=10) 16 | tracker_roi = None 17 | 18 | bbox_ROI = (0, 0, 0, 0) 19 | bbox_old = (0, 0, 0, 0) 20 | # Our ROI, defined by two points 21 | state = 0 22 | # Our ROI, defined by two points 23 | p1, p2 = (0, 0), (0, 0) 24 | click_point = (0, 0) 25 | 26 | drawing = False # True while ROI is actively being drawn by mouse 27 | show_drawing = False # True while ROI is drawn but is pending use or cancel 28 | 29 | class VideoCapture: 30 | def __init__(self, name): 31 | # os.environ["OPENCV_FFMPEG_CAPTURE_OPTIONS"]="video_codec;hevc" 32 | self.cap = cv2.VideoCapture(name, cv2.CAP_FFMPEG, [cv2.CAP_PROP_HW_ACCELERATION, cv2.VIDEO_ACCELERATION_ANY ]) 33 | self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 10) 34 | 35 | self.q = queue.Queue() 36 | t = threading.Thread(target=self._reader) 37 | t.daemon = True 38 | t.start() 39 | 40 | # read frames as soon as they are available, keeping only most recent one 41 | def _reader(self): 42 | while True: 43 | ret, frame = self.cap.read() 44 | 45 | # frame = cv2.resize(frame, None, fx=1, fy=1) 46 | if not ret: 47 | break 48 | if not self.q.empty(): 49 | try: 50 | self.q.get_nowait() # discard previous (unprocessed) frame 51 | except queue.Empty: 52 | pass 53 | self.q.put(frame) 54 | 55 | def read(self): 56 | return self.q.get() 57 | 58 | def draw_border(img, pt1, pt2, color, thickness, r, d): 59 | x1,y1 = pt1 60 | x2,y2 = pt2 61 | 62 | # Top left 63 | cv2.line(img, (x1 + r, y1), (x1 + r + d, y1), color, thickness) 64 | cv2.line(img, (x1, y1 + r), (x1, y1 + r + d), color, thickness) 65 | cv2.ellipse(img, (x1 + r, y1 + r), (r, r), 180, 0, 90, color, thickness) 66 | 67 | # Top right 68 | cv2.line(img, (x2 - r, y1), (x2 - r - d, y1), color, thickness) 69 | cv2.line(img, (x2, y1 + r), (x2, y1 + r + d), color, thickness) 70 | cv2.ellipse(img, (x2 - r, y1 + r), (r, r), 270, 0, 90, color, thickness) 71 | 72 | # Bottom left 73 | cv2.line(img, (x1 + r, y2), (x1 + r + d, y2), color, thickness) 74 | cv2.line(img, (x1, y2 - r), (x1, y2 - r - d), color, thickness) 75 | cv2.ellipse(img, (x1 + r, y2 - r), (r, r), 90, 0, 90, color, thickness) 76 | 77 | # Bottom right 78 | cv2.line(img, (x2 - r, y2), (x2 - r - d, y2), color, thickness) 79 | cv2.line(img, (x2, y2 - r), (x2, y2 - r - d), color, thickness) 80 | cv2.ellipse(img, (x2 - r, y2 - r), (r, r), 0, 0, 90, color, thickness) 81 | 82 | def norm_2d_points(pts): 83 | """ 84 | Эта функция, norm_2d_points, предназначена для нормализации координат 2D точек. Она принимает на вход массив pts из четырех 2D точек. 85 | """ 86 | # sort the points based on their x-coordinates 87 | xSorted = pts[np.argsort(pts[:, 0]), :] 88 | # grab the left-most and right-most points from the sorted 89 | # x-roodinate points 90 | leftMost = xSorted[:2, :] 91 | rightMost = xSorted[2:, :] 92 | 93 | leftMost = leftMost[np.argsort(leftMost[:, 1]), :] 94 | (tl, bl) = leftMost 95 | 96 | D = dist.cdist(tl[np.newaxis], rightMost, "euclidean")[0] 97 | (br, tr) = rightMost[np.argsort(D)[::-1], :] 98 | # return the coordinates in top-left, top-right, 99 | # bottom-right, and bottom-left order 100 | return np.array([tl,br], dtype="int16").flatten().tolist() 101 | 102 | # Called every time a mouse event happen 103 | def on_mouse(event, x, y, flags, userdata): 104 | global p1, p2, drawing, show_drawing, bbox_ROI 105 | if event == cv2.EVENT_LBUTTONDOWN: 106 | # Left click down (select first point) 107 | drawing = True 108 | show_drawing = True 109 | p1 = x, y 110 | p2 = x, y 111 | elif event == cv2.EVENT_MOUSEMOVE: 112 | # Drag to second point 113 | if drawing: 114 | p2 = x, y 115 | elif event == cv2.EVENT_LBUTTONUP: 116 | # Left click up (select second point) 117 | drawing = False 118 | p2 = x, y 119 | p3 = (p1[0], p2[1]) 120 | p4 = (p2[0], p1[1]) 121 | 122 | pts = np.array([p1, p2, p3, p4]) 123 | xyx1y1= norm_2d_points(pts) 124 | w = int(xyx1y1[2] - xyx1y1[0]) 125 | h = int(xyx1y1[3] - xyx1y1[1]) 126 | bbox_ROI = (int(xyx1y1[0]), int(xyx1y1[1]), w, h) 127 | 128 | def draw_center(frame, track_point, color=(255, 255, 255), _thickness=1): 129 | h, w, _ = frame.shape 130 | dy, dx = int(h / 2), int(w / 2) 131 | cv2.line(frame, (dx, 0), (dx, h), color=color, thickness=_thickness) 132 | cv2.line(frame, (0, dy), (w, dy), color=color, thickness=_thickness) 133 | 134 | if len(track_point) > 0: 135 | points = track_point[-1] 136 | print("points", points) 137 | cv2.line(frame, (dx, dy), points, color=color, thickness=_thickness) 138 | 139 | def tracking_camera(frame, track_point, control, power=0.03): 140 | """ 141 | Расчет смещения от трекинга 142 | """ 143 | if len(track_point) == 0: 144 | print("len(track_point) == 0. EXIT") 145 | return 146 | h, w, _ = frame.shape 147 | dy, dx = int(h / 2), int(w / 2) 148 | points = track_point[-1] 149 | # Calculate diff 150 | offset = (dx-points[0], dy-points[1]) 151 | print("offset", offset) 152 | control.set_offset(yaw_off=offset[0], pitch_offset=offset[1],power=power) 153 | 154 | 155 | if __name__ == "__main__": 156 | dt = time() 157 | old_dt = dt 158 | currnt_time = time() 159 | timer = 0 160 | is_bisy = False 161 | 162 | RTSP_URL = "rtsp://192.168.144.25:8554/main.264" 163 | siyi_cap = VideoCapture(RTSP_URL) 164 | siyi_control = SIYIControl() 165 | 166 | cv2.namedWindow("output", cv2.WINDOW_NORMAL) 167 | # Register the mouse callback 168 | cv2.setMouseCallback('output', on_mouse) 169 | 170 | while True: 171 | # Read opencv 172 | frame = siyi_cap.read() 173 | 174 | # If a ROI is selected, draw it 175 | if show_drawing: 176 | # Fix p2 to be always within the frame 177 | p2 = ( 178 | 0 if p2[0] < 0 else (p2[0] if p2[0] < frame.shape[1] else frame.shape[1]), 179 | 0 if p2[1] < 0 else (p2[1] if p2[1] < frame.shape[0] else frame.shape[0]) 180 | ) 181 | cv2.rectangle(frame, p1, p2, (255, 0, 0), 2) 182 | 183 | if tracker_roi: 184 | ret_ot, bbox = tracker_roi.update(frame) 185 | if bbox_old != bbox: 186 | bbox_old = bbox 187 | # print("track", bbox) 188 | # Рисование ограничивающего прямоугольника 189 | if ret_ot: 190 | p1_ot = (int(bbox[0]), int(bbox[1])) 191 | p2_ot = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3])) 192 | draw_border(frame, p1_ot, p2_ot, (255,170,0), 2, 10, 10) 193 | 194 | # draw line 195 | (x, y, x2, y2) = (p1_ot[0], p1_ot[1], p2_ot[0], p2_ot[1]) 196 | 197 | center_x = int((x + x2) / 2) 198 | center_y = int((y + y2) / 2) 199 | track_history.append((int(center_x), int(center_y))) # x, y center point 200 | if len(track_history) > 10: # retain 90 tracks for 90 frames 201 | track_history.pop(0) 202 | points = np.hstack(track_history).astype(np.int32).reshape((-1, 1, 2)) 203 | cv2.polylines(frame, [points], isClosed=False, color=(255, 255, 255), thickness=3) 204 | 205 | # draw line 206 | # draw_center(frame, track_history) 207 | # Set traking 208 | tracking_camera(frame, track_history, siyi_control) 209 | 210 | else: 211 | cv2.putText(frame, "Tracking failure detected", (100,80), cv2.FONT_HERSHEY_SIMPLEX, 0.75,(0,0,255),2) 212 | 213 | cv2.imshow("output", frame) 214 | 215 | pressed = cv2.waitKey(1) 216 | if pressed in [13, 32]: 217 | # Pressed Enter or Space to use ROI 218 | if tracker_roi: 219 | tracker_roi = None 220 | tracker_roi = cv2.TrackerCSRT.create() 221 | drawing = False 222 | show_drawing = False 223 | print("bbox", bbox_ROI) 224 | ret = tracker_roi.init(frame, bbox_ROI) 225 | track_history.clear() 226 | 227 | # here do something with ROI points values (p1 and p2) 228 | elif pressed in [ord('c'), ord('C'), 27]: 229 | # Pressed C or Esc to cancel ROI 230 | drawing = False 231 | show_drawing = False 232 | track_history.clear() 233 | if tracker_roi: 234 | tracker_roi = None 235 | elif pressed in [27]: 236 | # Pressed Esc to cancel ROI 237 | drawing = False 238 | show_drawing = False 239 | break 240 | 241 | cv2.destroyAllWindows() 242 | siyi_cap.cap.release() -------------------------------------------------------------------------------- /examples/test_center_gimbal.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_center_gimbal.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to bring the gimbal to center position 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | 23 | if not cam.connect(): 24 | print("No connection ") 25 | exit(1) 26 | 27 | val = cam.requestCenterGimbal() 28 | sleep(1) 29 | 30 | print("Centering gimbal: ", cam.getFunctionFeedback()) 31 | print("Gimbal angles (yaw,pitch,roll) deg: ", cam.getAttitude()) 32 | 33 | cam.disconnect() 34 | 35 | if __name__ == "__main__": 36 | test() -------------------------------------------------------------------------------- /examples/test_follow_mode.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_follow_mode.py 3 | @Description: This is a test script for using the ZR10 SDK Python implementation to set follow mode 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | 23 | if not cam.connect(): 24 | print("No connection ") 25 | exit(1) 26 | 27 | cam.requestFollowMode() 28 | sleep(2) 29 | print("Current motion mode: ", cam._motionMode_msg.mode) 30 | 31 | cam.disconnect() 32 | 33 | if __name__ == "__main__": 34 | test() -------------------------------------------------------------------------------- /examples/test_fpv_mode.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_fpv_mode.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to set FPV mode 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | 23 | if not cam.connect(): 24 | print("No connection ") 25 | exit(1) 26 | 27 | cam.requestFPVMode() 28 | sleep(2) 29 | print("Current motion mode: ", cam._motionMode_msg.mode) 30 | 31 | cam.disconnect() 32 | 33 | if __name__ == "__main__": 34 | test() -------------------------------------------------------------------------------- /examples/test_from_rtsp_to_rtmp.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_from_rtsp_to_rtmp.py 3 | @Description: Test receiving RTSP stream from IP camera and sending it to RTMP server 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from imutils.video import VideoStream 12 | from time import sleep 13 | 14 | current = os.path.dirname(os.path.realpath(__file__)) 15 | parent_directory = os.path.dirname(current) 16 | 17 | sys.path.append(parent_directory) 18 | 19 | from stream import SIYIRTSP, RTMPSender 20 | 21 | def test(): 22 | rtsp = SIYIRTSP(rtsp_url="rtsp://192.168.144.25:8554/main.264",debug=False) 23 | rtsp.setShowWindow(True) 24 | 25 | rtmp = RTMPSender(rtmp_url="rtmp://127.0.0.1:1935/live/webcam") 26 | rtmp.start() 27 | 28 | try: 29 | while(True): 30 | frame=rtsp.getFrame() 31 | rtmp.setFrame(frame) 32 | except KeyboardInterrupt: 33 | rtsp.close() 34 | rtmp.stop() 35 | # quit 36 | exit(0) 37 | 38 | if __name__ == "__main__": 39 | test() -------------------------------------------------------------------------------- /examples/test_get_fw_ver.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_fw_ver.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to get Firmware version 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | import sys 9 | import os 10 | from time import sleep 11 | 12 | current = os.path.dirname(os.path.realpath(__file__)) 13 | parent_directory = os.path.dirname(current) 14 | 15 | sys.path.append(parent_directory) 16 | 17 | from siyi_sdk import SIYISDK 18 | 19 | def test(): 20 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 21 | if not cam.connect(): 22 | print("No connection ") 23 | exit(1) 24 | 25 | cam.requestFirmwareVersion() 26 | sleep(1) 27 | 28 | print("Camera Firmware version: ", cam.getFirmwareVersion()) 29 | cam.disconnect() 30 | 31 | if __name__ == "__main__": 32 | test() -------------------------------------------------------------------------------- /examples/test_get_gimbal_info.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_get_gimbal_info.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to get gimbal configuration information 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | if not cam.connect(): 23 | print("No connection ") 24 | exit(1) 25 | 26 | val = cam.requestGimbalInfo() 27 | sleep(1) 28 | print("Recording state: ", cam.getRecordingState()) 29 | print("Motion mode: ", cam.getMotionMode()) 30 | print("Mounting direction: ", cam.getMountingDirection()) 31 | 32 | cam.disconnect() 33 | 34 | if __name__ == "__main__": 35 | test() -------------------------------------------------------------------------------- /examples/test_get_hw_id.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_get_hw_id.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to get hardware ID 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | if not cam.connect(): 23 | print("No connection ") 24 | exit(1) 25 | 26 | cam.requestHardwareID() 27 | sleep(1) 28 | print("Camera hardware ID: ", cam.getHardwareID()) 29 | cam.disconnect() 30 | 31 | if __name__ == "__main__": 32 | test() -------------------------------------------------------------------------------- /examples/test_gimbal_rotation.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_gimbal_rotation.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to set/get gimbal rotation 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | from time import sleep 10 | import sys 11 | import os 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | if not cam.connect(): 23 | print("No connection ") 24 | exit(1) 25 | print("Current motion mode: ", cam._motionMode_msg.mode) 26 | 27 | # cam.requestCenterGimbal() 28 | # sleep(2) 29 | while True: 30 | cam.setPitchAngle(0) 31 | cam.setYawAngle(-90) 32 | sleep(4) 33 | cam.setYawAngle(90) 34 | cam.setPitchAngle(-90) 35 | sleep(4) 36 | 37 | 38 | 39 | 40 | print("Attitude (yaw,pitch,roll) eg:", cam.getAttitude()) 41 | 42 | cam.disconnect() 43 | 44 | if __name__ == "__main__": 45 | test() -------------------------------------------------------------------------------- /examples/test_lock_mode.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_lock_mode.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to set Lock mode 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | 23 | if not cam.connect(): 24 | print("No connection ") 25 | exit(1) 26 | 27 | cam.requestLockMode() 28 | sleep(2) 29 | print("Current motion mode: ", cam._motionMode_msg.mode) 30 | 31 | cam.disconnect() 32 | 33 | if __name__ == "__main__": 34 | test() -------------------------------------------------------------------------------- /examples/test_record_video.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_record_video.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to record video 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def test(): 21 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 22 | if not cam.connect(): 23 | print("No connection ") 24 | exit(1) 25 | 26 | if (cam.getRecordingState()<0): 27 | print("Toggle recording") 28 | cam.requestRecording() 29 | sleep(5) 30 | 31 | if (cam.getRecordingState()==cam._record_msg.TF_EMPTY): 32 | print("TF card lsot is empty") 33 | 34 | if (cam.getRecordingState()==cam._record_msg.ON): 35 | print("Recording is ON. Sending requesdt to stop recording") 36 | cam.requestRecording() 37 | sleep(2) 38 | 39 | print("Recording state: ", cam.getRecordingState()) 40 | 41 | cam.disconnect() 42 | 43 | if __name__ == "__main__": 44 | test() -------------------------------------------------------------------------------- /examples/test_rtmp_stream.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_rtmp_stream.py 3 | @Description: Test sending webcam image frames to an RTMP server 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from imutils.video import VideoStream 12 | from time import sleep 13 | 14 | current = os.path.dirname(os.path.realpath(__file__)) 15 | parent_directory = os.path.dirname(current) 16 | 17 | sys.path.append(parent_directory) 18 | 19 | from stream import RTMPSender 20 | 21 | def test(): 22 | # Webcam 23 | try: 24 | # Assuming the webcam /dev/video0 25 | # Change according to your webcam source 26 | wc = VideoStream(src=0).start() 27 | print("Connected to webcam.") 28 | sleep(2) 29 | except Exception as e: 30 | print("Error in opening webcam. Exit...") 31 | exit(1) 32 | 33 | rtmp = RTMPSender(rtmp_url="rtmp://127.0.0.1:1935/live/webcam") 34 | rtmp.setFPS(20) 35 | rtmp.setGrayFrame(False) 36 | rtmp.setImageSize(320,240) 37 | print("Starting RTMP stream...") 38 | sleep(1) 39 | rtmp.start() 40 | 41 | try: 42 | while(True): 43 | frame=wc.read() 44 | rtmp.setFrame(frame) 45 | except KeyboardInterrupt: 46 | rtmp.stop() 47 | wc.stop() 48 | 49 | # quit 50 | exit(0) 51 | 52 | if __name__ == "__main__": 53 | test() -------------------------------------------------------------------------------- /examples/test_rtsp.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_rtsp.py 3 | @Description: Test receiving RTSP stream from IP camera 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from imutils.video import VideoStream 12 | from time import sleep 13 | 14 | current = os.path.dirname(os.path.realpath(__file__)) 15 | parent_directory = os.path.dirname(current) 16 | 17 | sys.path.append(parent_directory) 18 | 19 | from stream import SIYIRTSP 20 | 21 | def test(): 22 | rtsp = SIYIRTSP(rtsp_url="rtsp://192.168.144.25:8554/main.264",debug=False) 23 | rtsp.setShowWindow(True) 24 | if __name__ == "__main__": 25 | test() -------------------------------------------------------------------------------- /examples/test_zoom.py: -------------------------------------------------------------------------------- 1 | """ 2 | @file test_center_gimbal.py 3 | @Description: This is a test script for using the SIYI SDK Python implementation to adjust zoom level 4 | @Author: Mohamed Abdelkader 5 | @Contact: mohamedashraf123@gmail.com 6 | All rights reserved 2022 7 | """ 8 | 9 | import sys 10 | import os 11 | from time import sleep 12 | 13 | current = os.path.dirname(os.path.realpath(__file__)) 14 | parent_directory = os.path.dirname(current) 15 | 16 | sys.path.append(parent_directory) 17 | 18 | from siyi_sdk import SIYISDK 19 | 20 | def set_zoom(_cam, level): 21 | _cam.requestZoomHold() 22 | val = _cam.getZoomLevel() 23 | 24 | print("Init Zoom level: ", val, " goal:", level) 25 | 26 | if val > level: 27 | while val > level: 28 | _cam.requestZoomOut() 29 | sleep(0.1) 30 | val = _cam.getZoomLevel() 31 | print("Zoom level: ", val) 32 | _cam.requestZoomHold() 33 | return True 34 | else: 35 | while val < level: 36 | _cam.requestZoomIn() 37 | sleep(0.1) 38 | val = _cam.getZoomLevel() 39 | print("Zoom level: ", val) 40 | _cam.requestZoomHold() 41 | return True 42 | 43 | def main(): 44 | cam = SIYISDK(server_ip="192.168.144.25", port=37260) 45 | 46 | if not cam.connect(): 47 | print("No connection ") 48 | exit(1) 49 | val = cam.requestZoomOut() 50 | sleep(0.1) 51 | 52 | set_zoom(cam,4) 53 | # sleep(3) 54 | # set_zoom(cam,30) 55 | # sleep(1) 56 | # set_zoom(cam,1) 57 | 58 | 59 | cam.disconnect() 60 | 61 | if __name__ == "__main__": 62 | main() -------------------------------------------------------------------------------- /examples/udp_server.py: -------------------------------------------------------------------------------- 1 | """ 2 | This server is just for testing 3 | """ 4 | import socket 5 | 6 | UDP_IP = "127.0.0.1" 7 | UDP_PORT = 5005 8 | 9 | sock = socket.socket(socket.AF_INET, # Internet 10 | socket.SOCK_DGRAM) # UDP 11 | sock.bind((UDP_IP, UDP_PORT)) 12 | sock.settimeout(5) 13 | 14 | while True: 15 | try: 16 | data, addr = sock.recvfrom(1024) # buffer size is 1024 bytes 17 | print("Received some data") 18 | msg = data.hex() 19 | print("received message: %s" % msg) 20 | sock.sendto(data, addr) 21 | except Exception as e: 22 | print("Error: {}".format(e)) 23 | break -------------------------------------------------------------------------------- /siyi_message.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python implementation of ZR10 SDK by SIYI 3 | ZR10 webpage: http://en.siyi.biz/en/Gimbal%20Camera/ZR10/overview/ 4 | Author : Mohamed Abdelkader 5 | Email: mohamedashraf123@gmail.com 6 | Copyright 2022 7 | 8 | """ 9 | from os import stat 10 | from crc16_python import crc16_str_swap 11 | import logging 12 | from utils import toHex 13 | import numpy as np 14 | import struct 15 | 16 | 17 | class FirmwareMsg: 18 | seq=0 19 | code_board_ver='' 20 | gimbal_firmware_ver='' 21 | zoom_firmware_ver='' 22 | 23 | class HardwareIDMsg: 24 | seq=0 25 | id='' 26 | 27 | class AutoFocusMsg: 28 | seq=0 29 | success=False 30 | 31 | class ManualZoomMsg: 32 | seq=0 33 | level=-1 34 | 35 | class ManualFocusMsg: 36 | seq=0 37 | success=False 38 | 39 | class GimbalSpeedMsg: 40 | seq=0 41 | success=False 42 | 43 | class GimbalRotateMsg: 44 | seq=0 45 | yaw= 0.0 46 | pitch= 0.0 47 | roll= 0.0 48 | 49 | class CenterMsg: 50 | seq=0 51 | success=False 52 | 53 | class RecordingMsg: 54 | seq=0 55 | state=-1 56 | OFF=0 57 | ON=1 58 | TF_EMPTY=2 59 | TD_DATA_LOSS=3 60 | 61 | class MountDirMsg: 62 | seq=0 63 | dir=-1 64 | NORMAL=0 65 | UPSIDE=1 66 | 67 | class MotionModeMsg: 68 | seq=0 69 | mode=-1 70 | LOCK=0 71 | FOLLOW=1 72 | FPV=2 73 | 74 | 75 | class FuncFeedbackInfoMsg: 76 | seq=0 77 | info_type=None 78 | SUCCESSFUL=0 79 | PHOTO_FAIL=1 80 | HDR_ON=2 81 | HDR_OFF=3 82 | RECROD_FAIL=4 83 | 84 | class AttitdueMsg: 85 | seq= 0 86 | stamp= 0 # seconds 87 | yaw= 0.0 88 | pitch= 0.0 89 | roll= 0.0 90 | yaw_speed= 0.0 # deg/s 91 | pitch_speed=0.0 92 | roll_speed= 0.0 93 | 94 | 95 | class COMMAND: 96 | ACQUIRE_FW_VER = '01' 97 | ACQUIRE_HW_ID = '02' 98 | AUTO_FOCUS = '04' 99 | MANUAL_ZOOM = '05' 100 | MANUAL_FOCUS = '06' 101 | GIMBAL_SPEED = '07' 102 | CENTER = '08' 103 | ACQUIRE_GIMBAL_INFO = '0a' 104 | FUNC_FEEDBACK_INFO = '0b' 105 | PHOTO_VIDEO_HDR = '0c' 106 | ACQUIRE_GIMBAL_ATT = '0d' 107 | GIMBAL_ROT = '0e' 108 | 109 | 110 | ############################################# 111 | class SIYIMESSAGE: 112 | """ 113 | Structure of SIYI camera messages 114 | """ 115 | def __init__(self, debug=False) -> None: 116 | self._debug= debug # print debug messages 117 | if self._debug: 118 | d_level = logging.DEBUG 119 | else: 120 | d_level = logging.INFO 121 | LOG_FORMAT='[%(levelname)s] %(asctime)s [SIYIMessage::%(funcName)s] :\t%(message)s' 122 | logging.basicConfig(format=LOG_FORMAT, level=d_level) 123 | self._logger = logging.getLogger(self.__class__.__name__) 124 | 125 | self.HEADER='5566'# STX, 2 bytes 126 | self._ctr ='01' 127 | 128 | self._seq= 0 129 | 130 | self._cmd_id='00' # 1 byte 131 | 132 | self._data_len = 0 133 | 134 | # String of data byes (in hex) 135 | self._data='' 136 | 137 | self._crc16='0000' # low byte (2 characters) on the left! 138 | 139 | 140 | def incrementSEQ(self, val): 141 | """ 142 | Increments sequence number by one, converts them to hex, and revereses the byte order. 143 | 144 | Params 145 | -- 146 | - val [int] Integer value , max is 65535 147 | 148 | Returns 149 | -- 150 | seq_str: [string] String value of the sequence number in reveresed byte order 151 | """ 152 | 153 | if not isinstance(val, int): 154 | self._logger.warning("Sequence value is not integer. Returning zero") 155 | return "0000" 156 | if val> 65535: 157 | self._logger.warning("Sequence value is greater than 65535. Resetting to zero") 158 | self._seq = 0 159 | return "0000" 160 | if val<0: 161 | self._logger.warning("Sequence value is negative. Resetting to zero") 162 | return "0000" 163 | 164 | seq = val+1 165 | self._seq = seq 166 | 167 | seq_hex = hex(seq) 168 | seq_hex = seq_hex[2:] # remove '0x' 169 | if len(seq_hex)==3: 170 | seq_hex = '0'+seq_hex 171 | elif len(seq_hex)==1: 172 | seq_hex = '000'+seq_hex 173 | elif len(seq_hex)==2: 174 | seq_str = '00'+seq_hex 175 | else: 176 | seq='0000' 177 | 178 | low_b = seq_hex[-2:] 179 | high_b = seq_hex[0:2] 180 | seq_str = low_b+high_b 181 | 182 | return seq_str 183 | 184 | def computeDataLen(self, data): 185 | """ 186 | Computes the data lenght (number of bytes) of data, and return a string of two bytes in reveresed order 187 | 188 | Params 189 | -- 190 | data [string] string of data bytes in hex 191 | 192 | Returns 193 | -- 194 | [string] String of two bytes (for characters), in reversed order, represents length of data in hex 195 | """ 196 | 197 | if not isinstance(data, str): 198 | self._logger.error("Data is not of type string") 199 | return "0000" 200 | # We expect number of chartacters to be even (each byte is represented by two cahrs e.g. '0A') 201 | if (len(data)%2) != 0: 202 | data = '0'+data # Pad 0 from the left, as sometimes it's ignored! 203 | L = int(len(data)/2) 204 | 205 | len_hex = hex(L) 206 | len_hex = len_hex[2:] # remove '0x' 207 | if len(len_hex)==3: 208 | len_hex = '0'+len_hex 209 | elif len(len_hex)==1: 210 | len_hex = '000'+len_hex 211 | elif len(len_hex)==2: 212 | len_hex = '00'+len_hex 213 | else: 214 | len_hex='0000' 215 | 216 | low_b = len_hex[-2:] 217 | high_b = len_hex[0:2] 218 | len_str = low_b+high_b 219 | 220 | return len_str 221 | 222 | def decodeMsg(self, msg): 223 | """ 224 | Decodes messages string, and returns the DATA bytes. 225 | 226 | Params 227 | -- 228 | msg: [str] full message stinf in hex 229 | 230 | Returns 231 | -- 232 | - data [str] string of hexadecimal of data bytes. 233 | - data_len [int] Number of data bytes 234 | - cmd_id [str] command ID 235 | - seq [int] message sequence 236 | """ 237 | data = None 238 | 239 | if not isinstance(msg, str): 240 | self._logger.error("Input message is not a string") 241 | return data 242 | 243 | # 10 bytes: STX+CTRL+Data_len+SEQ+CMD_ID+CRC16 244 | # 2 + 1 + 2 + 2 + 1 + 2 245 | MINIMUM_DATA_LENGTH=10*2 246 | if len(msg)0: 278 | data = msg[16:16+char_len] 279 | else: 280 | data='' 281 | 282 | self._data = data 283 | self._data_len = data_len 284 | self._cmd_id = cmd_id 285 | 286 | return data, data_len, cmd_id, seq 287 | 288 | def encodeMsg(self, data, cmd_id): 289 | """ 290 | Encodes a msg according to SDK protocol 291 | 292 | Returns 293 | -- 294 | [str] Encoded msg. Empty string if crc16 is not successful 295 | """ 296 | seq = self.incrementSEQ(self._seq) 297 | data_len = self.computeDataLen(data) 298 | # msg_front = self.HEADER+self._ctr+data_len+seq+cmd_id+data 299 | msg_front = self.HEADER+self._ctr+data_len+'0000'+cmd_id+data 300 | 301 | crc = crc16_str_swap(msg_front) 302 | if crc is not None: 303 | msg = msg_front+crc 304 | self._logger.debug("Encoded msg: %s", msg) 305 | return msg 306 | else: 307 | self._logger.error("Could not encode message. crc16 is None") 308 | return '' 309 | 310 | ######################################################## 311 | # Message definitions # 312 | ######################################################## 313 | 314 | def firmwareVerMsg(self): 315 | """ 316 | Returns message string of the Acqsuire Firmware Version msg 317 | """ 318 | data="" 319 | cmd_id = COMMAND.ACQUIRE_FW_VER 320 | return self.encodeMsg(data, cmd_id) 321 | 322 | def hwIdMsg(self): 323 | """ 324 | Returns message string for the Acquire Hardware ID 325 | """ 326 | data="" 327 | cmd_id = COMMAND.ACQUIRE_HW_ID 328 | return self.encodeMsg(data, cmd_id) 329 | 330 | def gimbalInfoMsg(self): 331 | """ 332 | Gimbal status information msg 333 | """ 334 | data="" 335 | cmd_id = COMMAND.ACQUIRE_GIMBAL_INFO 336 | return self.encodeMsg(data, cmd_id) 337 | 338 | def funcFeedbackMsg(self): 339 | """ 340 | Function feedback information msg 341 | """ 342 | data="" 343 | cmd_id = COMMAND.FUNC_FEEDBACK_INFO 344 | return self.encodeMsg(data, cmd_id) 345 | 346 | def takePhotoMsg(self): 347 | """ 348 | Take photo msg 349 | """ 350 | data="00" 351 | cmd_id = COMMAND.PHOTO_VIDEO_HDR 352 | return self.encodeMsg(data, cmd_id) 353 | 354 | def recordMsg(self): 355 | """ 356 | Video Record msg 357 | """ 358 | data="02" 359 | cmd_id = COMMAND.PHOTO_VIDEO_HDR 360 | return self.encodeMsg(data, cmd_id) 361 | 362 | def autoFocusMsg(self): 363 | """ 364 | Auto focus msg 365 | """ 366 | data="01" 367 | cmd_id = COMMAND.AUTO_FOCUS 368 | return self.encodeMsg(data, cmd_id) 369 | 370 | def centerMsg(self): 371 | """ 372 | Center gimbal msg 373 | """ 374 | data="01" 375 | cmd_id = COMMAND.CENTER 376 | return self.encodeMsg(data, cmd_id) 377 | 378 | def lockModeMsg(self): 379 | """ 380 | Lock mode msg 381 | """ 382 | data="03" 383 | cmd_id = COMMAND.PHOTO_VIDEO_HDR 384 | return self.encodeMsg(data, cmd_id) 385 | 386 | def followModeMsg(self): 387 | """ 388 | Follow mode msg 389 | """ 390 | data="04" 391 | cmd_id = COMMAND.PHOTO_VIDEO_HDR 392 | return self.encodeMsg(data, cmd_id) 393 | 394 | def fpvModeMsg(self): 395 | """ 396 | FPV mode msg 397 | """ 398 | data="05" 399 | cmd_id = COMMAND.PHOTO_VIDEO_HDR 400 | return self.encodeMsg(data, cmd_id) 401 | 402 | def gimbalAttMsg(self): 403 | """ 404 | Acquire Gimbal Attiude msg 405 | """ 406 | data="" 407 | cmd_id = COMMAND.ACQUIRE_GIMBAL_ATT 408 | return self.encodeMsg(data, cmd_id) 409 | 410 | def zoomInMsg(self): 411 | """ 412 | Zoom in Msg 413 | """ 414 | data="01" 415 | cmd_id = COMMAND.MANUAL_ZOOM 416 | return self.encodeMsg(data, cmd_id) 417 | 418 | def zoomOutMsg(self): 419 | """ 420 | Zoom out Msg 421 | """ 422 | data="ff" 423 | cmd_id = COMMAND.MANUAL_ZOOM 424 | return self.encodeMsg(data, cmd_id) 425 | 426 | def stopZoomMsg(self): 427 | """ 428 | Stop Zoom Msg 429 | """ 430 | data="00" 431 | cmd_id = COMMAND.MANUAL_ZOOM 432 | return self.encodeMsg(data, cmd_id) 433 | 434 | def longFocusMsg(self): 435 | """ 436 | Focus 1 Msg 437 | """ 438 | data="01" 439 | cmd_id = COMMAND.MANUAL_FOCUS 440 | return self.encodeMsg(data, cmd_id) 441 | 442 | def closeFocusMsg(self): 443 | """ 444 | Focus -1 Msg 445 | """ 446 | data="ff" 447 | cmd_id = COMMAND.MANUAL_FOCUS 448 | return self.encodeMsg(data, cmd_id) 449 | 450 | def stopFocusMsg(self): 451 | """ 452 | Focus 0 Msg 453 | """ 454 | data="00" 455 | cmd_id = COMMAND.MANUAL_FOCUS 456 | return self.encodeMsg(data, cmd_id) 457 | 458 | def gimbalSpeedMsg(self, yaw_speed, pitch_speed): 459 | """ 460 | Gimbal rotation Msg. 461 | Values -100~0~100: Negative and positive represent two directions, 462 | higher or lower the number is away from 0, faster the rotation speed is. 463 | Send 0 when released from control command and gimbal stops rotation. 464 | 465 | Params 466 | -- 467 | - yaw_speed [int] in degrees 468 | - pitch_speed [int] in degrees 469 | """ 470 | if yaw_speed>100: 471 | yaw_speed=100 472 | if yaw_speed<-100: 473 | yaw_speed=-100 474 | 475 | if pitch_speed>100: 476 | pitch_speed=100 477 | if pitch_speed<-100: 478 | pitch_speed=-100 479 | 480 | data1=toHex(yaw_speed, 8) 481 | data2=toHex(pitch_speed, 8) 482 | data=data1+data2 483 | cmd_id = COMMAND.GIMBAL_SPEED 484 | return self.encodeMsg(data, cmd_id) 485 | 486 | 487 | 488 | def gimbalRotateMsg(self, yaw_angle:int, pitch_angle:int): 489 | """ 490 | Gimbal rotation Msg. 491 | Angle Control Range 492 | Yaw: 493 | A8 mini: -135.0 ~ 135.0 degree 494 | R10 / ZR30: Same with A8 mini 495 | ZT30: Limitless 496 | Pitch: 497 | A8 mini: -90.0 ~ 25.0 degree 498 | ZR10 / ZR30 / ZT30: Same with A8 mini 499 | 500 | Mark: 501 | 1. The accuracy of the control angle is in one decimal place. Eg: Set yaw as 60.5 degrees, the command 502 | number should be set as 605. 503 | 2. The actual angle data returned to be divided by 10 is the actual degree, accuracy in one decimal place. 504 | 505 | Params 506 | -- 507 | - yaw_angle in degrees 508 | - pitch_angle in degrees 509 | """ 510 | 511 | if yaw_angle>135: 512 | yaw_angle=135 513 | if yaw_angle<-135: 514 | yaw_angle=-135 515 | 516 | if pitch_angle>25: 517 | pitch_angle=25 518 | if pitch_angle<-90: 519 | pitch_angle=-90 520 | 521 | # read mark 522 | yaw_angle_int = np.int16(yaw_angle*10) 523 | pitch_angle_int = np.int16(pitch_angle*10) 524 | 525 | 526 | data1=toHex(yaw_angle_int, 16) 527 | data2=toHex(pitch_angle_int, 16) 528 | 529 | data = data1+data2 530 | 531 | print("data:", data) 532 | 533 | cmd_id = COMMAND.GIMBAL_ROT 534 | return self.encodeMsg(data, cmd_id) 535 | 536 | 537 | -------------------------------------------------------------------------------- /siyi_sdk.py: -------------------------------------------------------------------------------- 1 | """ 2 | Python implementation of SIYI SDK 3 | ZR10 webpage: http://en.siyi.biz/en/Gimbal%20Camera/ZR10/overview/ 4 | Author : Mohamed Abdelkader 5 | Email: mohamedashraf123@gmail.com 6 | Copyright 2022 7 | 8 | """ 9 | import socket 10 | from siyi_message import * 11 | from time import sleep, time 12 | import logging 13 | from utils import toInt 14 | import threading 15 | from simple_pid import PID 16 | 17 | class SIYISDK: 18 | def __init__(self, server_ip="192.168.144.25", port=37260, debug=False): 19 | """ 20 | 21 | Params 22 | -- 23 | - server_ip [str] IP address of the camera 24 | - port: [int] UDP port of the camera 25 | """ 26 | self._debug= debug # print debug messages 27 | if self._debug: 28 | d_level = logging.DEBUG 29 | else: 30 | d_level = logging.INFO 31 | LOG_FORMAT=' [%(levelname)s] %(asctime)s [SIYISDK::%(funcName)s] :\t%(message)s' 32 | logging.basicConfig(format=LOG_FORMAT, level=d_level) 33 | self._logger = logging.getLogger(self.__class__.__name__) 34 | 35 | # Message sent to the camera 36 | self._out_msg = SIYIMESSAGE(debug=self._debug) 37 | 38 | # Message received from the camera 39 | self._in_msg = SIYIMESSAGE(debug=self._debug) 40 | 41 | self._server_ip = server_ip 42 | self._port = port 43 | 44 | self._BUFF_SIZE=1024 45 | 46 | self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 47 | self._rcv_wait_t = 2 # Receiving wait time 48 | self._socket.settimeout(self._rcv_wait_t) 49 | 50 | self._connected = False 51 | 52 | self._fw_msg = FirmwareMsg() 53 | self._hw_msg = HardwareIDMsg() 54 | self._autoFocus_msg = AutoFocusMsg() 55 | self._manualZoom_msg=ManualZoomMsg() 56 | self._manualFocus_msg=ManualFocusMsg() 57 | self._gimbalSpeed_msg=GimbalSpeedMsg() 58 | self._gimbalRot_msg=GimbalRotateMsg() 59 | self._center_msg=CenterMsg() 60 | self._record_msg=RecordingMsg() 61 | self._mountDir_msg=MountDirMsg() 62 | self._motionMode_msg=MotionModeMsg() 63 | self._funcFeedback_msg=FuncFeedbackInfoMsg() 64 | self._att_msg=AttitdueMsg() 65 | self._last_att_seq=-1 66 | 67 | # Stop threads 68 | self._stop = False # used to stop the above thread 69 | 70 | self._recv_thread = threading.Thread(target=self.recvLoop) 71 | 72 | # Connection thread 73 | self._last_fw_seq=0 # used to check on connection liveness 74 | self._conn_loop_rate = 1 # seconds 75 | self._conn_thread = threading.Thread(target=self.connectionLoop, args=(self._conn_loop_rate,)) 76 | 77 | # Gimbal info thread @ 1Hz 78 | self._gimbal_info_loop_rate = 1 79 | self._g_info_thread = threading.Thread(target=self.gimbalInfoLoop, 80 | args=(self._gimbal_info_loop_rate,)) 81 | 82 | # Gimbal attitude thread @ 10Hz 83 | self._gimbal_att_loop_rate = 0.1 84 | self._g_att_thread = threading.Thread(target=self.gimbalAttLoop, 85 | args=(self._gimbal_att_loop_rate,)) 86 | 87 | self._pid_angle_thread = threading.Thread(target=self.PID_contol, 88 | args=()) 89 | # PID 90 | self.yaw_pid = PID(1.3, 0.1, 0.1, setpoint=0) 91 | self.yaw_pid.output_limits = (-100, 100) 92 | 93 | self.pitch_pid = PID(1.3, 0.1, 0.1, setpoint=0) 94 | self.pitch_pid.output_limits = (-100, 100) 95 | self._goal_yaw = 0. 96 | self._goal_pitch = 0. 97 | self.is_pause_pid = False 98 | 99 | def resetVars(self): 100 | """ 101 | Resets variables to their initial values. For example, to prepare for a fresh connection 102 | """ 103 | self._connected = False 104 | self._fw_msg = FirmwareMsg() 105 | self._hw_msg = HardwareIDMsg() 106 | self._autoFocus_msg = AutoFocusMsg() 107 | self._manualZoom_msg=ManualZoomMsg() 108 | self._manualFocus_msg=ManualFocusMsg() 109 | self._gimbalSpeed_msg=GimbalSpeedMsg() 110 | self._gimbalRot_msg=GimbalRotateMsg() 111 | 112 | self._center_msg=CenterMsg() 113 | self._record_msg=RecordingMsg() 114 | self._mountDir_msg=MountDirMsg() 115 | self._motionMode_msg=MotionModeMsg() 116 | self._funcFeedback_msg=FuncFeedbackInfoMsg() 117 | self._att_msg=AttitdueMsg() 118 | 119 | 120 | return True 121 | 122 | def connect(self, maxWaitTime=3.0): 123 | """ 124 | Makes sure there is conenction with the camera before doing anything. 125 | It requests Frimware version for some time before it gives up 126 | 127 | Params 128 | -- 129 | maxWaitTime [int] Maximum time to wait before giving up on connection 130 | """ 131 | self._recv_thread.start() 132 | self._conn_thread.start() 133 | t0 = time() 134 | while(True): 135 | if self._connected: 136 | self._g_info_thread.start() 137 | self._g_att_thread.start() 138 | self._pid_angle_thread.start() 139 | return True 140 | if (time() - t0)>maxWaitTime and not self._connected: 141 | self.disconnect() 142 | self._logger.error("Failed to connect to camera") 143 | return False 144 | 145 | def disconnect(self): 146 | self._logger.info("Stopping all threads") 147 | self._stop = True # stop the connection checking thread 148 | self.resetVars() 149 | self._socket.close() 150 | 151 | 152 | def checkConnection(self): 153 | """ 154 | checks if there is live connection to the camera by requesting the Firmware version. 155 | This function is to be run in a thread at a defined frequency 156 | """ 157 | self._connected = True 158 | 159 | # self.requestFirmwareVersion() 160 | # sleep(1) 161 | # if self._fw_msg.seq!=self._last_fw_seq and len(self._fw_msg.gimbal_firmware_ver)>0: 162 | # self._connected = True 163 | # self._last_fw_seq=self._fw_msg.seq 164 | # else: 165 | # self._connected = False 166 | 167 | def connectionLoop(self, t): 168 | """ 169 | This function is used in a thread to check connection status periodically 170 | 171 | Params 172 | -- 173 | t [float] message frequency, secnod(s) 174 | """ 175 | while(True): 176 | if self._stop: 177 | self._connected=False 178 | self.resetVars() 179 | self._logger.warning("Connection checking loop is stopped. Check your connection!") 180 | break 181 | self.checkConnection() 182 | sleep(t) 183 | 184 | def isConnected(self): 185 | return self._connected 186 | 187 | def gimbalInfoLoop(self, t): 188 | """ 189 | This function is used in a thread to get gimbal info periodically 190 | 191 | Params 192 | -- 193 | t [float] message frequency, secnod(s) 194 | """ 195 | while(True): 196 | if not self._connected: 197 | self._logger.warning("Gimbal info thread is stopped. Check connection") 198 | sleep(1) 199 | break 200 | self.requestGimbalInfo() 201 | sleep(t) 202 | 203 | def gimbalAttLoop(self, t): 204 | """ 205 | This function is used in a thread to get gimbal attitude periodically 206 | 207 | Params 208 | -- 209 | t [float] message frequency, secnod(s) 210 | """ 211 | while(True): 212 | if not self._connected: 213 | self._logger.warning("Gimbal attitude thread is stopped. Check connection") 214 | break 215 | self.requestGimbalAttitude() 216 | sleep(t) 217 | 218 | def sendMsg(self, msg): 219 | """ 220 | Sends a message to the camera 221 | 222 | Params 223 | -- 224 | msg [str] Message to send 225 | """ 226 | b = bytes.fromhex(msg) 227 | try: 228 | self._socket.sendto(b, (self._server_ip, self._port)) 229 | return True 230 | except Exception as e: 231 | self._logger.error("Could not send bytes") 232 | return False 233 | 234 | def rcvMsg(self): 235 | data=None 236 | try: 237 | data,addr = self._socket.recvfrom(self._BUFF_SIZE) 238 | except Exception as e: 239 | self._logger.warning("%s. Did not receive message within %s second(s)", e, self._rcv_wait_t) 240 | return data 241 | 242 | def recvLoop(self): 243 | self._logger.debug("Started data receiving thread") 244 | while( not self._stop): 245 | self.bufferCallback() 246 | self._logger.debug("Exiting data receiving thread") 247 | 248 | 249 | def bufferCallback(self): 250 | """ 251 | Receives messages and parses its content 252 | """ 253 | buff,addr = self._socket.recvfrom(self._BUFF_SIZE) 254 | 255 | buff_str = buff.hex() 256 | self._logger.debug("Buffer: %s", buff_str) 257 | 258 | # 10 bytes: STX+CTRL+Data_len+SEQ+CMD_ID+CRC16 259 | # 2 + 1 + 2 + 2 + 1 + 2 260 | MINIMUM_DATA_LENGTH=10*2 261 | 262 | HEADER='5566' 263 | # Go through the buffer 264 | while(len(buff_str)>=MINIMUM_DATA_LENGTH): 265 | if buff_str[0:4]!=HEADER: 266 | # Remove the 1st element and continue 267 | tmp=buff_str[1:] 268 | buff_str=tmp 269 | continue 270 | 271 | # Now we got minimum amount of data. Check if we have enough 272 | # Data length, bytes are reversed, according to SIYI SDK 273 | low_b = buff_str[6:8] # low byte 274 | high_b = buff_str[8:10] # high byte 275 | data_len = high_b+low_b 276 | data_len = int('0x'+data_len, base=16) 277 | char_len = data_len*2 278 | 279 | # Check if there is enough data (including payload) 280 | if(len(buff_str) < (MINIMUM_DATA_LENGTH+char_len)): 281 | # No useful data 282 | buff_str='' 283 | break 284 | 285 | packet = buff_str[0:MINIMUM_DATA_LENGTH+char_len] 286 | buff_str = buff_str[MINIMUM_DATA_LENGTH+char_len:] 287 | 288 | # Finally decode the packet! 289 | val = self._in_msg.decodeMsg(packet) 290 | if val is None: 291 | continue 292 | 293 | data, data_len, cmd_id, seq = val[0], val[1], val[2], val[3] 294 | 295 | if cmd_id==COMMAND.ACQUIRE_FW_VER: 296 | self.parseFirmwareMsg(data, seq) 297 | elif cmd_id==COMMAND.ACQUIRE_HW_ID: 298 | self.parseHardwareIDMsg(data, seq) 299 | elif cmd_id==COMMAND.ACQUIRE_GIMBAL_INFO: 300 | self.parseGimbalInfoMsg(data, seq) 301 | elif cmd_id==COMMAND.ACQUIRE_GIMBAL_ATT: 302 | self.parseAttitudeMsg(data, seq) 303 | elif cmd_id==COMMAND.FUNC_FEEDBACK_INFO: 304 | self.parseFunctionFeedbackMsg(data, seq) 305 | elif cmd_id==COMMAND.GIMBAL_SPEED: 306 | self.parseGimbalSpeedMsg(data, seq) 307 | elif cmd_id==COMMAND.GIMBAL_ROT: 308 | self.parseGimbalRotateMsg(data, seq) 309 | elif cmd_id==COMMAND.AUTO_FOCUS: 310 | self.parseAutoFocusMsg(data, seq) 311 | elif cmd_id==COMMAND.MANUAL_FOCUS: 312 | self.parseManualFocusMsg(data, seq) 313 | elif cmd_id==COMMAND.MANUAL_ZOOM: 314 | self.parseZoomMsg(data, seq) 315 | elif cmd_id==COMMAND.CENTER: 316 | self.parseGimbalCenterMsg(data, seq) 317 | else: 318 | self._logger.warning("CMD ID is not recognized") 319 | 320 | return 321 | 322 | ################################################## 323 | # Request functions # 324 | ################################################## 325 | def requestFirmwareVersion(self): 326 | """ 327 | Sends request for firmware version 328 | 329 | Returns 330 | -- 331 | [bool] True: success. False: fail 332 | """ 333 | msg = self._out_msg.firmwareVerMsg() 334 | if not self.sendMsg(msg): 335 | return False 336 | return True 337 | 338 | def requestHardwareID(self): 339 | """ 340 | Sends request for Hardware ID 341 | 342 | Returns 343 | -- 344 | [bool] True: success. False: fail 345 | """ 346 | msg = self._out_msg.hwIdMsg() 347 | if not self.sendMsg(msg): 348 | return False 349 | return True 350 | 351 | def requestGimbalAttitude(self): 352 | """ 353 | Sends request for gimbal attitude 354 | 355 | Returns 356 | -- 357 | [bool] True: success. False: fail 358 | """ 359 | msg = self._out_msg.gimbalAttMsg() 360 | if not self.sendMsg(msg): 361 | return False 362 | return True 363 | 364 | def requestGimbalInfo(self): 365 | """ 366 | Sends request for gimbal information 367 | 368 | Returns 369 | -- 370 | [bool] True: success. False: fail 371 | """ 372 | msg = self._out_msg.gimbalInfoMsg() 373 | if not self.sendMsg(msg): 374 | return False 375 | return True 376 | 377 | def requestFunctionFeedback(self): 378 | """ 379 | Sends request for function feedback msg 380 | 381 | Returns 382 | -- 383 | [bool] True: success. False: fail 384 | """ 385 | msg = self._out_msg.funcFeedbackMsg() 386 | if not self.sendMsg(msg): 387 | return False 388 | return True 389 | 390 | def requestAutoFocus(self): 391 | """ 392 | Sends request for auto focus 393 | 394 | Returns 395 | -- 396 | [bool] True: success. False: fail 397 | """ 398 | msg = self._out_msg.autoFocusMsg() 399 | if not self.sendMsg(msg): 400 | return False 401 | return True 402 | 403 | def requestZoomIn(self): 404 | """ 405 | Sends request for zoom in 406 | 407 | Returns 408 | -- 409 | [bool] True: success. False: fail 410 | """ 411 | msg = self._out_msg.zoomInMsg() 412 | if not self.sendMsg(msg): 413 | return False 414 | return True 415 | 416 | def requestZoomOut(self): 417 | """ 418 | Sends request for zoom out 419 | 420 | Returns 421 | -- 422 | [bool] True: success. False: fail 423 | """ 424 | msg = self._out_msg.zoomOutMsg() 425 | if not self.sendMsg(msg): 426 | return False 427 | return True 428 | 429 | def requestZoomHold(self): 430 | """ 431 | Sends request for stopping zoom 432 | 433 | Returns 434 | -- 435 | [bool] True: success. False: fail 436 | """ 437 | msg = self._out_msg.stopZoomMsg() 438 | if not self.sendMsg(msg): 439 | return False 440 | return True 441 | 442 | def requestLongFocus(self): 443 | """ 444 | Sends request for manual focus, long shot 445 | 446 | Returns 447 | -- 448 | [bool] True: success. False: fail 449 | """ 450 | msg = self._out_msg.longFocusMsg() 451 | if not self.sendMsg(msg): 452 | return False 453 | return True 454 | 455 | def requestCloseFocus(self): 456 | """ 457 | Sends request for manual focus, close shot 458 | 459 | Returns 460 | -- 461 | [bool] True: success. False: fail 462 | """ 463 | msg = self._out_msg.closeFocusMsg() 464 | if not self.sendMsg(msg): 465 | return False 466 | return True 467 | 468 | def requestFocusHold(self): 469 | """ 470 | Sends request for manual focus, stop 471 | 472 | Returns 473 | -- 474 | [bool] True: success. False: fail 475 | """ 476 | msg = self._out_msg.stopFocusMsg() 477 | if not self.sendMsg(msg): 478 | return False 479 | return True 480 | 481 | def requestCenterGimbal(self): 482 | """ 483 | Sends request for gimbal centering 484 | 485 | Returns 486 | -- 487 | [bool] True: success. False: fail 488 | """ 489 | msg = self._out_msg.centerMsg() 490 | if not self.sendMsg(msg): 491 | return False 492 | return True 493 | 494 | def requestGimbalSpeed(self, yaw_speed:int, pitch_speed:int): 495 | """ 496 | Sends request for gimbal centering 497 | 498 | Params 499 | -- 500 | yaw_speed [int] -100~0~100. away from zero -> fast, close to zero -> slow. Sign is for direction 501 | pitch_speed [int] Same as yaw_speed 502 | 503 | Returns 504 | -- 505 | [bool] True: success. False: fail 506 | """ 507 | msg = self._out_msg.gimbalSpeedMsg(yaw_speed, pitch_speed) 508 | if not self.sendMsg(msg): 509 | return False 510 | return True 511 | 512 | def requestGimbalRot(self, yaw_angle, pitch_angle): 513 | """ 514 | Sends request for gimbal rotation 515 | 516 | Params 517 | -- 518 | Returns 519 | -- 520 | """ 521 | print("set yaw, pitch", yaw_angle, pitch_angle) 522 | msg = self._out_msg.gimbalRotateMsg(yaw_angle, pitch_angle) 523 | if not self.sendMsg(msg): 524 | return False 525 | return True 526 | 527 | 528 | 529 | def requestPhoto(self): 530 | """ 531 | Sends request for taking photo 532 | 533 | Returns 534 | -- 535 | [bool] True: success. False: fail 536 | """ 537 | msg = self._out_msg.takePhotoMsg() 538 | if not self.sendMsg(msg): 539 | return False 540 | return True 541 | 542 | def requestRecording(self): 543 | """ 544 | Sends request for toglling video recording 545 | 546 | Returns 547 | -- 548 | [bool] True: success. False: fail 549 | """ 550 | msg = self._out_msg.recordMsg() 551 | if not self.sendMsg(msg): 552 | return False 553 | return True 554 | 555 | def requestFPVMode(self): 556 | """ 557 | Sends request for setting FPV mode 558 | 559 | Returns 560 | -- 561 | [bool] True: success. False: fail 562 | """ 563 | msg = self._out_msg.fpvModeMsg() 564 | self.is_pause_pid = True 565 | 566 | if not self.sendMsg(msg): 567 | return False 568 | return True 569 | 570 | def requestLockMode(self): 571 | """ 572 | Sends request for setting Lock mode 573 | 574 | Returns 575 | -- 576 | [bool] True: success. False: fail 577 | """ 578 | msg = self._out_msg.lockModeMsg() 579 | self.is_pause_pid = True 580 | 581 | if not self.sendMsg(msg): 582 | return False 583 | return True 584 | 585 | def requestFollowMode(self): 586 | """ 587 | Sends request for setting Follow mode 588 | 589 | Returns 590 | -- 591 | [bool] True: success. False: fail 592 | """ 593 | msg = self._out_msg.followModeMsg() 594 | self.is_pause_pid = True 595 | if not self.sendMsg(msg): 596 | return False 597 | return True 598 | 599 | #################################################### 600 | # Parsing functions # 601 | #################################################### 602 | def parseFirmwareMsg(self, msg:str, seq:int): 603 | try: 604 | self._fw_msg.gimbal_firmware_ver= msg[8:16] 605 | self._fw_msg.seq=seq 606 | 607 | self._logger.debug("Firmware version: %s", self._fw_msg.gimbal_firmware_ver) 608 | 609 | return True 610 | except Exception as e: 611 | self._logger.error("Error %s", e) 612 | return False 613 | 614 | def parseHardwareIDMsg(self, msg:str, seq:int): 615 | try: 616 | self._hw_msg.seq=seq 617 | self._hw_msg.id = msg 618 | self._logger.debug("Hardware ID: %s", self._hw_msg.id) 619 | 620 | return True 621 | except Exception as e: 622 | self._logger.error("Error %s", e) 623 | return False 624 | 625 | def parseAttitudeMsg(self, msg:str, seq:int): 626 | 627 | try: 628 | self._att_msg.seq=seq 629 | self._att_msg.yaw = toInt(msg[2:4]+msg[0:2]) /10. 630 | self._att_msg.pitch = toInt(msg[6:8]+msg[4:6]) /10. 631 | self._att_msg.roll = toInt(msg[10:12]+msg[8:10]) /10. 632 | self._att_msg.yaw_speed = toInt(msg[14:16]+msg[12:14]) /10. 633 | self._att_msg.pitch_speed = toInt(msg[18:20]+msg[16:18]) /10. 634 | self._att_msg.roll_speed = toInt(msg[22:24]+msg[20:22]) /10. 635 | 636 | self._logger.debug("(yaw, pitch, roll= (%s, %s, %s)", 637 | self._att_msg.yaw, self._att_msg.pitch, self._att_msg.roll) 638 | self._logger.debug("(yaw_speed, pitch_speed, roll_speed= (%s, %s, %s)", 639 | self._att_msg.yaw_speed, self._att_msg.pitch_speed, self._att_msg.roll_speed) 640 | return True 641 | except Exception as e: 642 | self._logger.error("Error %s", e) 643 | return False 644 | 645 | def parseGimbalInfoMsg(self, msg:str, seq:int): 646 | try: 647 | self._record_msg.seq=seq 648 | self._mountDir_msg.seq=seq 649 | self._motionMode_msg.seq=seq 650 | 651 | self._record_msg.state = int('0x'+msg[6:8], base=16) 652 | self._motionMode_msg.mode = int('0x'+msg[8:10], base=16) 653 | self._mountDir_msg.dir = int('0x'+msg[10:12], base=16) 654 | 655 | self._logger.debug("Recording state %s", self._record_msg.state) 656 | self._logger.debug("Mounting direction %s", self._mountDir_msg.dir) 657 | self._logger.debug("Gimbal motion mode %s", self._motionMode_msg.mode) 658 | return True 659 | except Exception as e: 660 | self._logger.error("Error %s", e) 661 | return False 662 | 663 | def parseAutoFocusMsg(self, msg:str, seq:int): 664 | 665 | try: 666 | self._autoFocus_msg.seq=seq 667 | self._autoFocus_msg.success = bool(int('0x'+msg, base=16)) 668 | 669 | 670 | self._logger.debug("Auto focus success: %s", self._autoFocus_msg.success) 671 | 672 | return True 673 | except Exception as e: 674 | self._logger.error("Error %s", e) 675 | return False 676 | 677 | def parseZoomMsg(self, msg:str, seq:int): 678 | 679 | try: 680 | self._manualZoom_msg.seq=seq 681 | self._manualZoom_msg.level = int('0x'+msg[2:4]+msg[0:2], base=16) /10. 682 | 683 | 684 | self._logger.debug("Zoom level %s", self._manualZoom_msg.level) 685 | 686 | return True 687 | except Exception as e: 688 | self._logger.error("Error %s", e) 689 | return False 690 | 691 | def parseManualFocusMsg(self, msg:str, seq:int): 692 | 693 | try: 694 | self._manualFocus_msg.seq=seq 695 | self._manualFocus_msg.success = bool(int('0x'+msg, base=16)) 696 | 697 | 698 | self._logger.debug("Manual focus success: %s", self._manualFocus_msg.success) 699 | 700 | return True 701 | except Exception as e: 702 | self._logger.error("Error %s", e) 703 | return False 704 | 705 | def parseGimbalSpeedMsg(self, msg:str, seq:int): 706 | 707 | try: 708 | self._gimbalSpeed_msg.seq=seq 709 | self._gimbalSpeed_msg.success = bool(int('0x'+msg, base=16)) 710 | 711 | 712 | self._logger.debug("Gimbal speed success: %s", self._gimbalSpeed_msg.success) 713 | 714 | return True 715 | except Exception as e: 716 | self._logger.error("Error %s", e) 717 | return False 718 | 719 | def parseGimbalRotateMsg(self, msg:str, seq:int): 720 | pass 721 | 722 | try: 723 | self._gimbalRot_msg.seq=seq 724 | 725 | self._gimbalRot_msg.yaw = toInt(msg[2:4]+msg[0:2]) /10. 726 | self._gimbalRot_msg.pitch = toInt(msg[6:8]+msg[4:6]) /10. 727 | self._gimbalRot_msg.roll = toInt(msg[10:12]+msg[8:10]) /10. 728 | 729 | self._logger.debug("Gimbal rotate success: %s", self._gimbalRot_msg) 730 | 731 | return True 732 | except Exception as e: 733 | self._logger.error("Error %s", e) 734 | return False 735 | 736 | 737 | def parseGimbalCenterMsg(self, msg:str, seq:int): 738 | 739 | try: 740 | self._center_msg.seq=seq 741 | self._center_msg.success = bool(int('0x'+msg, base=16)) 742 | 743 | 744 | self._logger.debug("Gimbal center success: %s", self._center_msg.success) 745 | 746 | return True 747 | except Exception as e: 748 | self._logger.error("Error %s", e) 749 | return False 750 | 751 | def parseFunctionFeedbackMsg(self, msg:str, seq:int): 752 | 753 | try: 754 | self._funcFeedback_msg.seq=seq 755 | self._funcFeedback_msg.info_type = int('0x'+msg, base=16) 756 | 757 | 758 | self._logger.debug("Function Feedback Code: %s", self._funcFeedback_msg.info_type) 759 | 760 | return True 761 | except Exception as e: 762 | self._logger.error("Error %s", e) 763 | return False 764 | 765 | ################################################## 766 | # Get functions # 767 | ################################################## 768 | def getAttitude(self): 769 | return(self._att_msg.yaw, self._att_msg.pitch, self._att_msg.roll) 770 | 771 | def getAttitudeSpeed(self): 772 | return(self._att_msg.yaw_speed, self._att_msg.pitch_speed, self._att_msg.roll_speed) 773 | 774 | def getFirmwareVersion(self): 775 | return(self._fw_msg.gimbal_firmware_ver) 776 | 777 | def getHardwareID(self): 778 | return(self._hw_msg.id) 779 | 780 | def getRecordingState(self): 781 | return(self._record_msg.state) 782 | 783 | def getMotionMode(self): 784 | return(self._motionMode_msg.mode) 785 | 786 | def getMountingDirection(self): 787 | return(self._mountDir_msg.dir) 788 | 789 | def getFunctionFeedback(self): 790 | return(self._funcFeedback_msg.info_type) 791 | 792 | def getZoomLevel(self): 793 | return(self._manualZoom_msg.level) 794 | 795 | ################################################# 796 | # Set functions # 797 | ################################################# 798 | 799 | def set_zoom(self, level): 800 | # Set zoom min-max: 1-30x 801 | 802 | if level > 30: 803 | level = 30 804 | elif level < 1: 805 | level = 1 806 | 807 | self.requestZoomHold() 808 | val = self.getZoomLevel() 809 | 810 | print("Init Zoom level: ", val, " goal:", level) 811 | 812 | if val > level: 813 | while val > level: 814 | self.requestZoomOut() 815 | sleep(0.1) 816 | val = self.getZoomLevel() 817 | print("Zoom level: ", val) 818 | self.requestZoomHold() 819 | return True 820 | else: 821 | while val < level: 822 | self.requestZoomIn() 823 | sleep(0.1) 824 | val = self.getZoomLevel() 825 | print("Zoom level: ", val) 826 | self.requestZoomHold() 827 | return True 828 | 829 | 830 | def PID_contol(self): 831 | start_time = time() 832 | last_time = start_time 833 | 834 | 835 | while True: 836 | if not self._connected: 837 | self._logger.warning("Gimbal PID thread is stopped. Check connection") 838 | break 839 | current_time = time() 840 | dt = current_time - last_time 841 | # get error 842 | self.requestGimbalAttitude() 843 | if self._att_msg.seq==self._last_att_seq: 844 | # self._logger.info("Did not get new attitude msg") 845 | self.requestGimbalSpeed(0,0) 846 | continue 847 | if self._motionMode_msg.mode != MotionModeMsg.FOLLOW: 848 | print("PID on pause") 849 | sleep(1) 850 | continue 851 | else: 852 | self.is_pause_pid = False 853 | 854 | if self.is_pause_pid: 855 | print("Pause PID") 856 | continue 857 | 858 | self._last_att_seq = self._att_msg.seq 859 | 860 | yaw_err = -self._goal_yaw + self._att_msg.yaw # NOTE for some reason it's reversed!! 861 | pitch_err = self._att_msg.pitch - self._goal_pitch 862 | # print("yaw_err=", yaw_err, "pitch_err=", pitch_err) 863 | # print("goal_pitch= %s", self._goal_pitch) 864 | # print("Attitude (yaw,pitch,roll) eg:", self.getAttitude()) 865 | if abs(yaw_err) > 0.5: 866 | yaw_output = self.yaw_pid(yaw_err) 867 | else: 868 | yaw_output = self.yaw_pid(0) 869 | if abs(pitch_err) > 0.5: 870 | pitch_output = self.pitch_pid(pitch_err) 871 | else: 872 | pitch_output = self.pitch_pid(0) 873 | self.requestGimbalSpeed(int(-yaw_output), int(pitch_output)) 874 | sleep(0.05) 875 | 876 | def setYawAngle(self, value): 877 | self._goal_yaw = value 878 | 879 | def setPitchAngle(self, value): 880 | self._goal_pitch = value 881 | def setRotation(self,_yaw,_pitch): 882 | if _yaw > 170: 883 | _yaw = 170 884 | 885 | if _yaw < -170: 886 | _yaw = -170 887 | if _pitch > 20: 888 | _pitch = 20 889 | if _pitch < -90: 890 | _pitch = 90 891 | 892 | self._goal_yaw = _yaw 893 | self._goal_pitch = _pitch 894 | # self.requestGimbalRot(int(self._goal_yaw), int(self._goal_pitch)) 895 | -------------------------------------------------------------------------------- /stream.py: -------------------------------------------------------------------------------- 1 | #!/bin/env/python3 2 | """ 3 | RTSP client for getting video stream from SIYI cameras, e.g. ZR10, A8 Mini 4 | ZR10 webpage: http://en.siyi.biz/en/Gimbal%20Camera/ZR10/overview/ 5 | A8 mini page: https://shop.siyi.biz/products/siyi-a8-mini?VariantsId=10611 6 | Author : Mohamed Abdelkader 7 | Email: mohamedashraf123@gmail.com 8 | Copyright 2022 9 | 10 | Required: 11 | - OpenCV 12 | (sudo apt-get install python3-opencv -y) 13 | - imutils 14 | pip install imutils 15 | - Gstreamer (https://gstreamer.freedesktop.org/documentation/installing/index.html?gi-language=c) 16 | (Ubuntu: sudo apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libgstreamer-plugins-bad1.0-dev gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly gstreamer1.0-libav gstreamer1.0-doc gstreamer1.0-tools gstreamer1.0-x gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-qt5 gstreamer1.0-pulseaudio -y) 17 | - Deepstream (only for Nvidia Jetson boards) 18 | (https://docs.nvidia.com/metropolis/deepstream/dev-guide/text/DS_Quickstart.html#jetson-setup) 19 | - For RTMP streaming 20 | sudo apt install ffmpeg -y 21 | pip install ffmpeg-python 22 | """ 23 | import cv2 24 | from imutils.video import VideoStream 25 | import logging 26 | from time import time, sleep 27 | import subprocess 28 | import threading 29 | 30 | 31 | class SIYIRTSP: 32 | def __init__(self, rtsp_url="rtsp://192.168.144.25:8554/main.264", cam_name="ZR10", debug=False) -> None: 33 | ''' 34 | Receives video stream from SIYI cameras 35 | 36 | Params 37 | -- 38 | - rtsp_url [str] RTSP url 39 | - cam_name [str] camera name (optional) 40 | - debug [bool] print debug messages 41 | ''' 42 | self._rtsp_url=rtsp_url 43 | 44 | self._cam_name=cam_name 45 | 46 | # Desired image width/height 47 | self._width=1280 48 | self._height=720 49 | 50 | # Stored image frame 51 | self._frame=None 52 | 53 | self._debug= debug # print debug messages 54 | if self._debug: 55 | d_level = logging.DEBUG 56 | else: 57 | d_level = logging.INFO 58 | LOG_FORMAT=' [%(levelname)s] %(asctime)s [SIYIRTSP::%(funcName)s] :\t%(message)s' 59 | logging.basicConfig(format=LOG_FORMAT, level=d_level) 60 | self._logger = logging.getLogger(self.__class__.__name__) 61 | 62 | # Flad to stop frame grapping loop, and close windows 63 | self._stopped=False 64 | 65 | # Show grapped frame in a window (mainly for debugging) 66 | self._show_window=False 67 | 68 | self._last_image_time=time() 69 | 70 | # timeout (seconds) before closing everything 71 | # If the returned frame is None for this amount of time, exit 72 | self._connection_timeout=2.0 73 | 74 | # Image receiving thread 75 | self._recv_thread = threading.Thread(target=self.loop) 76 | 77 | # start stream 78 | self.start() 79 | 80 | def setShowWindow(self, f: bool): 81 | self._show_window=f 82 | 83 | def getFrame(self): 84 | """ 85 | Returns current image frame 86 | """ 87 | return(self._frame) 88 | 89 | def start(self): 90 | """ 91 | Start receiving thread 92 | """ 93 | try: 94 | self._logger.info("Connecting to %s...", self._cam_name) 95 | self._stream = VideoStream(self._rtsp_url).start() 96 | self._recv_thread.start() 97 | self._stopped=False 98 | except Exception as e: 99 | self._logger.error(" Could not receive stream from %s. Error %s", self._cam_name, e) 100 | exit(1) 101 | 102 | def close(self): 103 | self._logger.info("Closing stream of %s...", self._cam_name) 104 | cv2.destroyAllWindows() 105 | self._stream.stop() 106 | self._stopped=True 107 | 108 | def loop(self): 109 | self._last_image_time=time() 110 | while not self._stopped: 111 | self._logger.debug("Reading frame form %s ...", self._cam_name) 112 | self._frame = self._stream.read() 113 | 114 | if (time()-self._last_image_time)> self._connection_timeout: 115 | self._logger.warning("Connection timeout. Exiting") 116 | self.close() 117 | 118 | if self._frame is None: 119 | continue 120 | 121 | self._last_image_time=time() 122 | 123 | # self._frame = imutils.resize(frame, width=self._width, height=self._height) 124 | 125 | 126 | if self._show_window: 127 | cv2.imshow('{} Stream'.format(self._cam_name), self._frame) 128 | 129 | key = cv2.waitKey(25) & 0xFF 130 | 131 | if key == ord('q'): 132 | self.close() 133 | break 134 | 135 | self._logger.warning("RTSP receiving loop is done") 136 | return 137 | 138 | class RTMPSender: 139 | ''' 140 | Streams image frames to an RTMP server 141 | ''' 142 | def __init__(self, rtmp_url="rtmp://127.0.0.1:1935/live/webcam", debug=False) -> None: 143 | ''' 144 | Params 145 | -- 146 | - rtmp_url [str] RTMP server URL 147 | - debug [bool] Printing debug message 148 | ''' 149 | self._rtmp_url=rtmp_url 150 | # Desired frequency of streaming to rtmp server 151 | self._fps =30 152 | 153 | self._last_send_time = time() 154 | self._current_send_time = time() 155 | 156 | 157 | # Frame to send 158 | self._frame = None 159 | 160 | # Desired image height 161 | self._height = 480 162 | # Desired image width 163 | self._width = 640 164 | 165 | self._toGray=False 166 | if self._toGray: 167 | self._pix_fmt="gray" 168 | else: 169 | self._pix_fmt="bgr24" 170 | 171 | # Flag to stop streaming loop 172 | self._stopped = False 173 | 174 | self._debug= debug # print debug messages 175 | if self._debug: 176 | d_level = logging.DEBUG 177 | else: 178 | d_level = logging.INFO 179 | LOG_FORMAT=' [%(levelname)s] %(asctime)s [RTMPSender::%(funcName)s] :\t%(message)s' 180 | logging.basicConfig(format=LOG_FORMAT, level=d_level) 181 | self._logger = logging.getLogger(self.__class__.__name__) 182 | 183 | # Streaming thread 184 | self._st_thread = threading.Thread(target=self.loop) 185 | 186 | 187 | def setImageSize(self, w=640, h=480): 188 | self._width=w 189 | self._height=h 190 | 191 | def setFPS(self, fps=20): 192 | self._fps=fps 193 | 194 | def setGrayFrame(self, b: bool): 195 | ''' 196 | Params 197 | -- 198 | - b [bool] True: sends grayscale image. Otherwise sends color image 199 | ''' 200 | self._toGray = b 201 | if self._toGray: 202 | self._pix_fmt="gray" 203 | else: 204 | self._pix_fmt="bgr24" 205 | 206 | 207 | def setFrame(self, frame): 208 | self._frame=frame 209 | 210 | 211 | def start(self): 212 | command = ["ffmpeg", 213 | "-y", 214 | "-f", "rawvideo", 215 | "-vcodec", "rawvideo", 216 | "-pix_fmt", self._pix_fmt, 217 | "-s", "{}x{}".format(self._width, self._height), 218 | "-r", str(self._fps), 219 | "-i", "-", 220 | "-c:v", "libx264", 221 | '-pix_fmt', 'yuv420p', 222 | "-preset", "ultrafast", 223 | "-f", "flv", 224 | "-tune", "zerolatency", 225 | self._rtmp_url] 226 | # using subprocess and pipe to fetch frame data 227 | try: 228 | self._p = subprocess.Popen(command, stdin=subprocess.PIPE) 229 | except Exception as e: 230 | self._logger.error("Could not create ffmpeg pipeline. Error %s", e) 231 | exit(1) 232 | 233 | try: 234 | self._st_thread.start() 235 | except Exception as e: 236 | self._logger.error("Could not start RTMP streaming thread") 237 | exit(1) 238 | 239 | def stop(self): 240 | """ 241 | Stops streaming thread 242 | """ 243 | self._logger.warning("RTMP streaming is stopped.") 244 | self._stopped=True 245 | self._p.kill() 246 | 247 | def sendFrame(self) -> bool: 248 | ''' 249 | Sends current image frame stored in self._frame 250 | 251 | Returns 252 | -- 253 | True if all is fine. False otherwise 254 | ''' 255 | if self._frame is None: 256 | return False 257 | 258 | try: 259 | val = self._frame.shape 260 | rows = val[0] 261 | cols=val[1] 262 | # Resize the image, if needed 263 | if rows != self._height or cols != self._width: 264 | self._frame = cv2.resize(self._frame, (self._width,self._height), interpolation = cv2.INTER_AREA) 265 | 266 | if self._toGray and (len(self._frame.shape) > 2): 267 | self._frame = cv2.cvtColor(self._frame, cv2.COLOR_BGR2GRAY) 268 | 269 | self._p.stdin.write(self._frame.tobytes()) 270 | 271 | return(True) 272 | except Exception as e: 273 | self._logger.error(" Error in sending: %s", e) 274 | return(False) 275 | 276 | def loop(self): 277 | while(not self._stopped): 278 | dt = time() - self._last_send_time 279 | start_t=time() 280 | self.sendFrame() 281 | end_t=time() 282 | dt=end_t-start_t 283 | duration = 1/self._fps 284 | if dt < duration: 285 | sleep(duration-dt) 286 | 287 | self._logger.warning("RTMP streaming loop is done") 288 | return 289 | 290 | 291 | 292 | 293 | def test(): 294 | # rtsp = SIYIRTSP(debug=False) 295 | # rtsp.setShowWindow(True) 296 | # Webcam 297 | try: 298 | wc = VideoStream(src=0).start() 299 | except Exception as e: 300 | print("Error in opening webcam") 301 | exit(1) 302 | 303 | rtmp = RTMPSender(rtmp_url="rtmp://127.0.0.1:1935/live/webcam") 304 | rtmp.start() 305 | try: 306 | while(True): 307 | # frame=stream.getFrame() 308 | frame=wc.read() 309 | rtmp.setFrame(frame) 310 | except KeyboardInterrupt: 311 | # rtsp.close() 312 | rtmp.stop() 313 | wc.stop() 314 | cv2.destroyAllWindows() 315 | # quit 316 | exit(0) 317 | 318 | 319 | 320 | if __name__=="__main__": 321 | test() 322 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Some useful functions to manipulate bytes 3 | Author: Mohamed Abdelkader 4 | Contact: mohamedashraf123@gmail.com 5 | """ 6 | 7 | def toHex(intval, nbits): 8 | """ 9 | Converts an integer to hexdecimal. 10 | Useful for negative integers where hex() doesn't work as expected 11 | 12 | Params 13 | -- 14 | intaval: [int] Integer number 15 | nbits: [int] Number of bits 16 | 17 | Returns 18 | -- 19 | String of the hexdecimal value 20 | """ 21 | h = format((intval + (1 << nbits)) % (1 << nbits),'x') 22 | if len(h)==1: 23 | h="0"+h 24 | return h 25 | 26 | def toInt(hexval): 27 | """ 28 | Converts hexidecimal value to an integer number, which can be negative 29 | Ref: https://www.delftstack.com/howto/python/python-hex-to-int/ 30 | 31 | Params 32 | -- 33 | hexval: [string] String of the hex value 34 | """ 35 | bits = 16 36 | val = int(hexval, bits) 37 | if val & (1 << (bits-1)): 38 | val -= 1 << bits 39 | return val --------------------------------------------------------------------------------