├── .gitignore
├── LICENSE
├── README.md
├── modules
│   ├── __init__.py
│   ├── advertise.py
│   ├── app.py
│   ├── config.py
│   ├── janus.py
│   ├── logger.py
│   ├── rtspfactory.py
│   ├── streamer.py
│   ├── webrtc.py
│   └── webrtc_signalserver.py
├── visiond
│   ├── visiond.conf
│   └── visiond.service

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g.
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 GoodRobots 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # visiond 2 | Python/Gstreamer based project to stream video from embedded system cameras in various ways 3 | -------------------------------------------------------------------------------- /modules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/goodrobots/visiond/1b6751576c30a47c200be22f7ab3766dd0120578/modules/__init__.py -------------------------------------------------------------------------------- /modules/advertise.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import logging 3 | import socket 4 | import queue 5 | import uuid 6 | from zeroconf import IPVersion, ServiceInfo, Zeroconf 7 | 8 | 9 | class StreamAdvert(threading.Thread): 10 | def __init__(self, config): 11 | threading.Thread.__init__(self) 12 | self.daemon = True 13 | self.config = config 14 | self.logger = logging.getLogger("visiond." 
+ __name__) 15 | 16 | # Attempt to redirect the default handler into our log files 17 | default_zeroconf_logger = logging.getLogger("zeroconf") 18 | default_zeroconf_logger.setLevel(logging.INFO) # TODO: Set based on config 19 | default_zeroconf_logger.propagate = True 20 | for handler in logging.getLogger("visiond").handlers: 21 | default_zeroconf_logger.addHandler(handler) 22 | 23 | self.zeroconf = None 24 | self._should_shutdown = threading.Event() 25 | self._q = queue.Queue() 26 | 27 | self.ip_version = IPVersion.V4Only # IPVersion.All 28 | self.service_info = self.build_service_info() 29 | 30 | def build_service_info(self, props=None, _type='visiond'): 31 | if _type == 'visiond': 32 | _subdesc = "{}:{}".format(socket.gethostname(), self.config.args.name if self.config.args.name else self.config.args.output_port) 33 | _rtspurl = f"rtsp://{socket.getfqdn()}:{self.config.args.output_port}/video" 34 | return ServiceInfo( 35 | "_rtsp._udp.local.", 36 | f"{_type} ({_subdesc}) ._rtsp._udp.local.", 37 | addresses=[socket.inet_aton(self.config.args.output_dest)], 38 | port=int(self.config.args.output_port), 39 | properties={ 40 | "port": self.config.args.output_port, 41 | "name": _subdesc, 42 | "service_type": "visiond", 43 | "rtspUrl": _rtspurl, 44 | "uuid": self.instance_uuid(_rtspurl), 45 | } 46 | ) 47 | elif _type == 'webrtc': 48 | _subdesc = "{}:{}".format(socket.gethostname(), self.config.args.name if self.config.args.name else 6011) 49 | _wsEndpoint = f"wss://{socket.getfqdn()}:6011" 50 | return ServiceInfo( 51 | "_webrtc._udp.local.", 52 | f"visiond-webrtc ({_subdesc})._webrtc._udp.local.", 53 | addresses=[socket.inet_aton('0.0.0.0')], 54 | port=6011, 55 | properties={ 56 | "hostname": socket.getfqdn(), 57 | "port": 6011, 58 | "name": _subdesc, 59 | "service_type": "webrtc", 60 | "wsEndpoint": _wsEndpoint, 61 | "uuid": self.instance_uuid(_wsEndpoint), 62 | }, 63 | ) 64 | 65 | def instance_uuid(self, url): 66 | # Create a repeatable uuid based on unique url 67 | return str(uuid.uuid5(uuid.NAMESPACE_URL, url)) 68 | 69 | def run(self): 70 | self.logger.info("Zeroconf advertisement thread is starting...") 71 | try: 72 | self.zeroconf = Zeroconf(ip_version=self.ip_version) 73 | self.register_service(self.service_info) 74 | except OSError as e: 75 | # the port was blocked 76 | self.logger.info.error( 77 | f"Unable to start zeroconf advertisement thread due to {e}" 78 | ) 79 | self.clean_up() 80 | 81 | while not self._should_shutdown.is_set(): 82 | try: 83 | # The following will block for at most [timeout] seconds 84 | desc_update = self._q.get(block=True, timeout=2) 85 | except queue.Empty: 86 | desc_update = None 87 | if desc_update: 88 | self.update_service(desc_update) 89 | 90 | # We only get here when shutdown has been called 91 | self.clean_up() 92 | 93 | def clean_up(self): 94 | self.logger.info("Zeroconf advertisement thread is stopping...") 95 | if self.zeroconf: 96 | self.zeroconf.unregister_all_services() 97 | self.zeroconf.close() 98 | self.logger.info("Zeroconf advertisement thread has stopped.") 99 | 100 | def register_service(self, service_info): 101 | self.zeroconf.register_service(service_info, cooperating_responders=True) 102 | 103 | def update_service(self, desc_update): 104 | # it does not look like there is a nice way to update 105 | # the properties field of a service. 106 | # Make a new service with the same details, 107 | # but update the properties. 
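# (Flow, for reference: update() below simply queues the changed properties from another
# thread; the run() loop picks them up and calls update_service(), which rebuilds the
# ServiceInfo via build_service_info() and re-announces it with Zeroconf.update_service().)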
108 | 109 | # Merge the dicts and apply the updates 110 | self.service_info = self.build_service_info(desc_update) 111 | self.zeroconf.update_service(self.service_info) 112 | 113 | def unregister_service(self): 114 | self.zeroconf.unregister_service(self.service_info) 115 | 116 | def shutdown(self): 117 | self._should_shutdown.set() 118 | 119 | def update(self, desc_update): 120 | self._q.put_nowait(desc_update) 121 | -------------------------------------------------------------------------------- /modules/app.py: -------------------------------------------------------------------------------- 1 | import errno 2 | import gi 3 | import glob 4 | import io 5 | import logging 6 | import os 7 | import re 8 | import time 9 | import v4l2 10 | import sdnotify 11 | import signal 12 | import sys 13 | import traceback 14 | from fcntl import ioctl 15 | 16 | from .config import * 17 | from .streamer import * 18 | from .advertise import StreamAdvert 19 | from .janus import JanusInterface 20 | 21 | gi.require_version('Gst', '1.0') 22 | from gi.repository import GLib,Gst 23 | Gst.init(None) 24 | 25 | ### Main visiond App Class 26 | class visiondApp(): 27 | def __init__(self, config): 28 | self.config = config 29 | self.logger = logging.getLogger('visiond.' + __name__) 30 | self.stream = None 31 | self.zeroconf = None 32 | self.janus = None 33 | self._should_shutdown = False 34 | self.notify = sdnotify.SystemdNotifier() 35 | 36 | signal.signal(signal.SIGINT, self.signal_handler) 37 | signal.signal(signal.SIGTERM, self.signal_handler) 38 | 39 | def signal_handler(self, sig, frame): 40 | self.shutdown() 41 | 42 | def run(self): 43 | self.logger.info("Starting maverick-visiond") 44 | 45 | if 'debug' in self.config.args and self.config.args.debug: 46 | Gst.debug_set_active(True) 47 | Gst.debug_set_default_threshold(self.config.args.debug) 48 | 49 | if 'retry' not in self.config.args or not self.config.args.retry: 50 | self.retry = 30 51 | else: 52 | self.retry = float(self.config.args.retry) 53 | 54 | # Start the zeroconf thread 55 | if self.config.args.zeroconf: 56 | self.zeroconf = StreamAdvert(self.config) 57 | self.zeroconf.start() 58 | else: 59 | self.zeroconf = None 60 | 61 | self.janus = JanusInterface(self.config, self.zeroconf) 62 | self.janus.start() 63 | 64 | # Start the pipeline. Trap any errors and wait for 30sec before trying again. 
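# (The actual wait comes from the 'retry' config option parsed above into self.retry;
# 30 seconds is only the fallback used when no retry value has been configured.)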
65 | while not self._should_shutdown: 66 | try: 67 | if 'pipeline_override' in self.config.args and self.config.args.pipeline_override: 68 | self.logger.info("pipeline_override set, constructing manual pipeline") 69 | self.manualconstruct() 70 | else: 71 | self.logger.info("pipeline_override is not set, auto-constructing pipeline") 72 | self.autoconstruct() 73 | except ValueError as e: 74 | self.logger.critical("Error constructing pipeline: {}, retrying in {} sec".format(repr(e), self.retry)) 75 | # Inform systemd that start is complete 76 | #self.logger.info("Notifying systemd of startup failure") 77 | #self.notify.notify("ERRNO=1") 78 | #self.notify.notify("STATUS=Error constructing pipeline: {}".format(repr(e))) 79 | self.logger.info("Notifying systemd of startup completion") 80 | self.notify.notify("READY=1") 81 | self.notify.notify("STATUS=Manual Pipeline Initialisation Complete") 82 | sys.exit(0) 83 | 84 | def manualconstruct(self): 85 | if self.config.args.pipeline_override not in self.config.args: 86 | self.logger.critical('manualconstruct() called but no pipeline_override config argument specified') 87 | sys.exit(0) 88 | self.logger.info("Manual Pipeline Construction") 89 | self.logger.info("Creating pipeline from config: " + self.config.args.pipeline_override) 90 | try: 91 | # Create the pipeline from config override 92 | self.pipeline = Gst.parse_launch(self.config.args.pipeline_override) 93 | # Set pipeline to playing 94 | self.pipeline.set_state(Gst.State.PLAYING) 95 | except Exception as e: 96 | raise ValueError('Error constructing manual pipeline specified: {}'.format(repr(e))) 97 | 98 | # Inform systemd that start is complete 99 | self.logger.info("Notifying systemd of startup completion") 100 | self.notify.notify("READY=1") 101 | self.notify.notify("STATUS=Manual Pipeline Initialisation Complete") 102 | 103 | while True: 104 | time.sleep(5) 105 | 106 | def autoconstruct(self): 107 | # If camera device set in config use it, otherwise autodetect 108 | cameradev = None 109 | devicepaths = glob.glob("/dev/video*") 110 | if self.config.args.camera_device: 111 | self.logger.debug('camera_device specified: {}'.format(self.config.args.camera_device)) 112 | cameradev = self.config.args.camera_device 113 | else: 114 | # device not set, carry on and try to autodetect 115 | for devicepath in sorted(devicepaths): 116 | if not cameradev and self.check_input(devicepath): 117 | cameradev = devicepath 118 | self.logger.info('v4l2 device '+devicepath+' is a camera, autoselecting') 119 | elif not cameradev: 120 | self.logger.debug('v4l2 device '+devicepath+' is not a camera, ignoring') 121 | if not cameradev: 122 | raise ValueError('Error detecting camera video device') 123 | 124 | # Check the camera has a valid input 125 | try: 126 | self.vd = io.TextIOWrapper(open(cameradev, "r+b", buffering=0)) 127 | cp = v4l2.v4l2_capability() 128 | except Exception as e: 129 | raise ValueError("Camera not specified in config, or camera not valid: {}".format(repr(e))) 130 | if not self.check_input(): 131 | raise ValueError('Specified camera not valid') 132 | 133 | # Log info 134 | self.camera_info() 135 | 136 | # Try and autodetect Jetson/Tegra CSI connection 137 | if self.driver == 'tegra-video': 138 | self.logger.info('Nvidia Jetson/Tegra CSI connection detected, switching to nvarguscamerasrc') 139 | self.input = "nvarguscamerasrc" 140 | elif 'input' not in self.config.args or not self.config.args.input: 141 | self.input = "v4l2src" 142 | else: 143 | self.input = self.config.args.input 144 | 145 | # 
Try and autodetect MFC device 146 | self.mfcdev = None 147 | for devicepath in devicepaths: 148 | dp = io.TextIOWrapper(open(devicepath, "r+b", buffering=0)) 149 | ioctl(dp, v4l2.VIDIOC_QUERYCAP, cp) 150 | if cp.card == "s5p-mfc-enc": 151 | self.mfcdev = dp 152 | self.logger.info(f'MFC Hardware encoder detected, autoselecting {devicepath}') 153 | 154 | # If format set in config use it, otherwise autodetect 155 | streamtype = None 156 | if self.config.args.format: 157 | streamtype = self.config.args.format 158 | else: 159 | if self.input == "nvarguscamerasrc": 160 | self.logger.info('Nvidia Jetson/Tegra input detected, forcing Tegra stream format') 161 | streamtype = 'tegra' 162 | elif re.search("C920", self.card): 163 | self.logger.info("Logitech C920 detected, forcing H264 passthrough") 164 | streamtype = 'h264' 165 | # format not set, carry on and try to autodetect 166 | elif self.check_format('yuv'): 167 | self.logger.info('Camera YUV stream available, using yuv stream') 168 | streamtype = 'yuv' 169 | # Otherwise, check for an mjpeg->h264 encoder pipeline. 170 | elif self.check_format('mjpeg'): 171 | self.logger.info('Camera MJPEG stream available, using mjpeg stream') 172 | streamtype = 'mjpeg' 173 | # Lastly look for a h264 stream 174 | elif self.check_format('h264'): 175 | self.logger.info('Camera H264 stream available, using H264 stream') 176 | streamtype = 'h264' 177 | if not streamtype: 178 | raise ValueError('Error detecting camera video format') 179 | 180 | # If encoder set in config use it, otherwise set to h264 181 | encoder = None 182 | if self.config.args.encoder: 183 | encoder = self.config.args.encoder 184 | if not encoder: 185 | encoder = "h264" 186 | self.logger.debug("Using encoder: {}".format(encoder)) 187 | 188 | # If raspberry camera detected set pixelformat to I420, otherwise set to YUY2 by default 189 | pixelformat = "YUY2" 190 | ioctl(self.vd, v4l2.VIDIOC_QUERYCAP, cp) 191 | if cp.driver == "bm2835 mmal": 192 | self.logger.info("Raspberry Pi Camera detected, setting pixel format to I420") 193 | pixelformat = "I420" 194 | 195 | # If raw pixelformat set in config override the defaults 196 | if 'pixelformat' in self.config.args and self.config.args.pixelformat: 197 | pixelformat = self.config.args.pixelformat 198 | self.logger.debug("Using pixelformat: {}".format(pixelformat)) 199 | 200 | # Create and start the stream 201 | try: 202 | self.logger.info("Creating stream object - device: {}, stream: {}, pixelformat: {}, encoder: {}, input: {}".format(cameradev, streamtype, pixelformat, encoder, self.input)) 203 | Streamer(self.config, streamtype, pixelformat, encoder, self.input, cameradev) 204 | if self.zeroconf: 205 | # Update the stream advertisement with the new info 206 | self.zeroconf.update({"stream":"replace_with_stream_info"}) 207 | except Exception as e: 208 | if self.zeroconf: 209 | self.zeroconf.update({"stream":""}) 210 | raise ValueError('Error creating {} stream: {}'.format(streamtype, repr(e))) 211 | 212 | # Inform systemd that start is complete 213 | self.logger.info("Notifying systemd of startup completion") 214 | self.notify.notify("READY=1") 215 | self.notify.notify("STATUS=Automatic Pipeline Initialisation Complete") 216 | 217 | while not self._should_shutdown: 218 | time.sleep(1) 219 | 220 | def camera_info(self): 221 | # Log capability info 222 | cp = v4l2.v4l2_capability() 223 | ioctl(self.vd, v4l2.VIDIOC_QUERYCAP, cp) 224 | self.logger.debug("driver: " + cp.driver.decode()) 225 | self.logger.debug("card: " + cp.card.decode()) 226 | self.driver = 
cp.driver.decode() 227 | self.card = cp.card.decode() 228 | 229 | # Log controls available 230 | queryctrl = v4l2.v4l2_queryctrl(v4l2.V4L2_CID_BASE) 231 | while queryctrl.id < v4l2.V4L2_CID_LASTP1: 232 | try: 233 | ioctl(self.vd, v4l2.VIDIOC_QUERYCTRL, queryctrl) 234 | except IOError as e: 235 | # this predefined control is not supported by this device 236 | assert e.errno == errno.EINVAL 237 | queryctrl.id += 1 238 | continue 239 | self.logger.debug("Camera control: " + queryctrl.name.decode()) 240 | queryctrl = v4l2.v4l2_queryctrl(queryctrl.id + 1) 241 | queryctrl.id = v4l2.V4L2_CID_PRIVATE_BASE 242 | while True: 243 | try: 244 | ioctl(self.vd, v4l2.VIDIOC_QUERYCTRL, queryctrl) 245 | except IOError as e: 246 | # no more custom controls available on this device 247 | assert e.errno == errno.EINVAL 248 | break 249 | self.logger.debug("Camera control: " + queryctrl.name.decode()) 250 | queryctrl = v4l2.v4l2_queryctrl(queryctrl.id + 1) 251 | 252 | # Log formats available 253 | capture = v4l2.v4l2_fmtdesc() 254 | capture.index = 0 255 | capture.type = v4l2.V4L2_BUF_TYPE_VIDEO_CAPTURE 256 | try: 257 | while (ioctl(self.vd, v4l2.VIDIOC_ENUM_FMT, capture) >= 0): 258 | self.logger.debug("Camera format: " + capture.description.decode()) 259 | capture.index += 1 260 | except: 261 | pass 262 | 263 | def check_input(self, vd=None, index=0): 264 | if vd == None: 265 | vd = self.vd 266 | else: 267 | vd = io.TextIOWrapper(open(vd, "r+b", buffering=0)) 268 | input = v4l2.v4l2_input(index) 269 | try: 270 | ioctl(vd, v4l2.VIDIOC_ENUMINPUT, input) 271 | self.logger.debug('V4l2 device input: ' + input.name.decode() + ':' + str(input.type)) 272 | if input.type != 2: 273 | return False # If input type is not camera (2) then return false 274 | return True 275 | except Exception as e: 276 | self.logger.debug("Error checking input: {}".format(repr(e))) 277 | return False 278 | 279 | def check_format(self, format): 280 | capture = v4l2.v4l2_fmtdesc() 281 | capture.index = 0 282 | capture.type = v4l2.V4L2_BUF_TYPE_VIDEO_CAPTURE 283 | available = False 284 | try: 285 | while (ioctl(self.vd, v4l2.VIDIOC_ENUM_FMT, capture) >= 0): 286 | self.logger.debug("Checking format: {} : {}".format(format, capture.description.decode())) 287 | if format.lower() == "h264": 288 | if re.search('H264', capture.description.decode().lower()) or re.search('H.264', capture.description.decode().lower()): 289 | available = True 290 | elif format.lower() == "mjpeg": 291 | if re.search('jpeg', capture.description.decode().lower()): 292 | available = True 293 | elif format.lower() == "yuv" or format.lower() == "raw": 294 | if re.search('^yu', capture.description.decode().lower()): 295 | available = True 296 | else: 297 | if re.search(format.lower(), capture.description.decode().lower()): 298 | available = True 299 | capture.index += 1 300 | except: 301 | pass 302 | return available 303 | 304 | def shutdown(self): 305 | self._should_shutdown = True 306 | self.logger.info("Shutting down visiond") 307 | if self.stream: 308 | if self.stream.webrtc: 309 | self.stream.webrtc.shutdown() 310 | if self.stream.webrtc_signal_server: 311 | self.stream.webrtc_signal_server.shutdown() 312 | self.stream.webrtc_signal_server.join() 313 | self.stream.stop() 314 | if self.janus: 315 | self.janus.shutdown() 316 | self.janus.join() 317 | if self.zeroconf: 318 | self.zeroconf.shutdown() 319 | self.zeroconf.join() 320 | -------------------------------------------------------------------------------- /modules/config.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from __future__ import print_function 4 | 5 | import sys 6 | import argparse 7 | import configparser 8 | import os 9 | 10 | class visiondConfig: 11 | 12 | def __init__(self, config_file = './visiond.conf', arglist = []): 13 | self.config_file = config_file 14 | self.args = None 15 | self.setup(arglist) 16 | 17 | def setup(self, arglist): 18 | # Declare args parser 19 | self.parser = argparse.ArgumentParser(description='Visiond Video Streaming') 20 | 21 | # Setup common args 22 | self.parser.add_argument('--bitrate', '-br', default='1000000', help="Target stream bitrate in bits/second") 23 | self.parser.add_argument('--brightness', '-b', default=0, help="Brightness - 0 is automatic") 24 | self.parser.add_argument('--camera_device', '-dev', help="Camera device, usually /dev/video0 for a single normal camera") 25 | self.parser.add_argument('--config', '-c', default=self.config_file, help="config file location, defaults to visiond directory") 26 | self.parser.add_argument('--debug', '-d', help="Debug: Turns on gstreamer debug to the specified level. Note level 4 and above is very verbose") 27 | self.parser.add_argument('--encoder', '-e', help="Encoder, if not specified will default to h264. Values are h264, mjpeg or none") 28 | self.parser.add_argument('--encoder_type', '-et', help="Encoder type, if not specified will fall back to sensible default for the encoder") 29 | self.parser.add_argument('--format', '-f', help="Camera format, if not specified here will autodetect yuv->mjpeg->h264") 30 | self.parser.add_argument('--framerate', '-fr', default=30, help="Framerate of video stream, must be valid for camera") 31 | self.parser.add_argument('--height', '-ht', default=480, help="Resolution height of video stream, must be valid for camera") 32 | self.parser.add_argument('--input', '-i', default="v4l2src", help="Stream input type: v4l2src (fpv), appsrc (cv), nvarguscamerasrc (nvidia jetson csi)") 33 | self.parser.add_argument('--logdest', '-ld', default='both', help="Log destination - can be file, console or both (if run through systemd, console will log to system journal)") 34 | self.parser.add_argument('--logdir', '-li', default="/var/tmp/visiond", help="Log directory, if file logging set") 35 | self.parser.add_argument('--name', '-n', default=None, help="Descriptive name of the visiond instance / camera. Used for display in zeroconf and -web interface.") 36 | self.parser.add_argument('--output', '-o', default="rtsp", help="Stream output type: file (save video), udp (stream video), wcast (wifibroadcast), rtsp (rtsp server), webrtc (webrtc server") 37 | self.parser.add_argument('--output_dest', '-od', default="0.0.0.0", help="Output destination: filename (file output), IP address (udp/rtsp output), Interface (wcast output)") 38 | self.parser.add_argument('--output_port', '-op', default="5600", help="Output port: Port number (eg. 5000) for network destination, Channel for wifibroadcast output (eg. 
1)") 39 | self.parser.add_argument('--pipeline_override', '-po', help="Pipeline Override - This is used to provide a manual pipeline if the auto construction fails") 40 | self.parser.add_argument('--pixelformat', '-p', help="Pixel Format, could be (fourcc)YUV2, I420, RGB etc") 41 | self.parser.add_argument('--retry', '-r', default=10, help="Retry timeout - number of seconds visiond will wait before trying to recreate pipeline after error") 42 | self.parser.add_argument('--rotate', '-ro', default=0, help="Rotate image") 43 | self.parser.add_argument('--ssl_keyfile', '-sk', help="Set the path to SSL key for webrtc signalling server") 44 | self.parser.add_argument('--ssl_certfile', '-sc', help="Set the path to SSL cert for webrtc signalling server") 45 | self.parser.add_argument('--width', '-wt', default=640, help="Resolution width of video stream, must be valid for camera") 46 | self.parser.add_argument('--zeroconf', '-z', default=True, help="Control if the service is advertised via zeroconf") 47 | 48 | self.args = self.parser.parse_args() 49 | 50 | # First parse config file, and set defaults 51 | defaults = {} 52 | if os.path.isfile(self.args.config): 53 | config = configparser.RawConfigParser() 54 | config.read([self.args.config]) 55 | try: 56 | for key, value in config.items("Defaults"): 57 | defaults[key] = self.get_config_value(config, "Defaults", key) 58 | self.parser.set_defaults(**defaults) 59 | self.args = self.parser.parse_args() 60 | except Exception as e: 61 | print("Error reading config file {}: {}".format(self.config_file, repr(e))) 62 | sys.exit(1) 63 | else: 64 | print("Error: Config file "+str(self.args.config)+" does not exist") 65 | sys.exit(1) 66 | 67 | # Return correctly typed config parser option 68 | def get_config_value(self, config, section, option): 69 | # Parses config value as python type 70 | try: 71 | return config.getint(section, option) 72 | except ValueError: 73 | pass 74 | try: 75 | return config.getfloat(section, option) 76 | except ValueError: 77 | pass 78 | try: 79 | return config.getboolean(section, option) 80 | except ValueError: 81 | pass 82 | return config.get(section, option) 83 | 84 | 85 | if __name__ == "__main__": 86 | print("Error: This should only be called as a module") 87 | sys.exit(1) 88 | 89 | -------------------------------------------------------------------------------- /modules/janus.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import logging 3 | import asyncio 4 | import requests 5 | from urllib3.exceptions import InsecureRequestWarning 6 | import socket 7 | import tornado.ioloop 8 | import tornado.web 9 | import tornado.websocket 10 | from tornado.options import define, options 11 | from zeroconf import IPVersion, ServiceInfo, Zeroconf 12 | 13 | define("port", default=1235, help="Port to listen on", type=int) 14 | define( 15 | "interface", 16 | default="127.0.0.1", 17 | type=str, 18 | help="Interface to listen on: 0.0.0.0 represents all interfaces", 19 | ) 20 | 21 | 22 | class TApp(tornado.web.Application): 23 | def __init__(self, zeroconf, config): 24 | # Setup websocket handler 25 | handlers = [(r"/", JanusHandler, {'zeroconf': zeroconf, 'config': config})] 26 | settings = dict( 27 | cookie_secret="asdlkfjhfiguhefgrkjbfdlgkjadfh", xsrf_cookies=True, 28 | ) 29 | super(TApp, self).__init__(handlers, **settings) 30 | 31 | 32 | class JanusHandler(tornado.websocket.WebSocketHandler): 33 | def initialize(self, zeroconf, config): 34 | self.zeroconf = zeroconf 35 | self.config 
= config 36 | 37 | def open(self): 38 | self.logger = logging.getLogger("visiond.janushandler") 39 | self.logger.info("Opening JanusHandler websocket connection") 40 | 41 | def on_close(self): 42 | self.logger.info("Closing JanusHandler websocket connection") 43 | 44 | def on_message(self, message): 45 | parsed = tornado.escape.json_decode(message) 46 | self.logger.debug("got message %r", message) 47 | if parsed['type'] == 256: 48 | _serviceinfo = self.zeroconf.build_service_info({}, _type='webrtc') 49 | try: 50 | self.zeroconf.register_service(_serviceinfo) 51 | except Exception as e: 52 | self.logger.warning(f"Error trying to advertise service: {repr(e)}") 53 | 54 | def get_compression_options(self): 55 | return {} 56 | 57 | def check_origin(self, origin): 58 | return True 59 | 60 | class JanusInterface(threading.Thread): 61 | def __init__(self, config, zeroconf): 62 | threading.Thread.__init__(self) 63 | self.daemon = True 64 | self.config = config 65 | self.zeroconf = zeroconf 66 | self.logger = logging.getLogger("visiond." + __name__) 67 | 68 | self.get_info() 69 | 70 | # Attempt to redirect the default handlers into our log files 71 | tornado_loggers = [ 72 | "tornado.websocket", 73 | "tornado.application", 74 | "tornado.general", 75 | "tornado.access", 76 | ] 77 | for tornado_logger in tornado_loggers: 78 | default_tornado_logger = logging.getLogger(tornado_logger) 79 | default_tornado_logger.setLevel(logging.DEBUG) # TODO: Set based on config 80 | default_tornado_logger.propagate = True 81 | for handler in logging.getLogger("visiond").handlers: 82 | default_tornado_logger.addHandler(handler) 83 | 84 | self._should_shutdown = threading.Event() 85 | 86 | def get_info(self): 87 | try: 88 | url = 'https://localhost:6795/janus/info' 89 | requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) 90 | _request = requests.get(url='https://localhost:6795/janus/info', verify=False) 91 | _data = _request.json() 92 | if _data['janus'] == 'server_info' and _data['server-name'] == 'Maverick': 93 | self.logger.info("Maverick janus webrtc service detected, registering with zeroconf") 94 | _serviceinfo = self.zeroconf.build_service_info({}, _type='webrtc') 95 | self.zeroconf.register_service(_serviceinfo) 96 | except Exception as e: 97 | self.logger.info("Maverick janus webrtc service not detected, skipping zeroconf registration") 98 | 99 | def run(self): 100 | self.logger.info("Janus interface thread is starting...") 101 | asyncio.set_event_loop(asyncio.new_event_loop()) 102 | self.ioloop = tornado.ioloop.IOLoop.current() 103 | tornado.ioloop.PeriodicCallback(self.check_for_shutdown, 1000, jitter = 0.1).start() 104 | application = TApp(self.zeroconf, self.config) 105 | server = tornado.httpserver.HTTPServer(application, ssl_options=None) 106 | server.listen(port=options.port, address=options.interface) 107 | self.ioloop.start() 108 | # this function blocks at this point until the server 109 | # is asked to exit via shutdown() 110 | self.logger.info("Janus interface thread has stopped.") 111 | 112 | def check_for_shutdown(self): 113 | if self._should_shutdown.is_set(): 114 | self.ioloop.add_callback(self.ioloop.stop) 115 | self.logger.info("Janus interface thread is stopping...") 116 | 117 | def shutdown(self): 118 | self._should_shutdown.set() 119 | -------------------------------------------------------------------------------- /modules/logger.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | import sys 4 | 
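# visiondLogger (below) sets up the shared 'visiond' logger: depending on the logdest
# option it attaches a file handler under logdir, a stdout handler, or both, and the
# other modules then log through logging.getLogger('visiond.<module>').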
5 | class visiondLogger(): 6 | def __init__(self, cwd, config): 7 | self.cwd = cwd 8 | self.config = config 9 | self.handle = self.setup_logger() 10 | 11 | def setup_logdir(self): 12 | if 'logdir' in self.config.args: 13 | self.logdir = self.config.args.logdir 14 | else: 15 | self.logdir = os.path.join(self.cwd, 'logs') 16 | print("Using log directory: {}".format(self.logdir)) 17 | if not os.path.exists(self.logdir): 18 | os.makedirs(self.logdir) 19 | 20 | def setup_logger(self): 21 | 22 | if 'logdest' in self.config.args: 23 | self.logdest = self.config.args.logdest 24 | else: 25 | self.logdest = 'both' 26 | 27 | root = logging.getLogger('visiond') 28 | root.setLevel(logging.DEBUG) 29 | formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 30 | 31 | if self.logdest == 'both' or self.logdest == 'file': 32 | self.setup_logdir() 33 | fhandler = logging.FileHandler(os.path.join(self.logdir, "visiond.log")) 34 | fhandler.setLevel(logging.DEBUG) 35 | fhandler.setFormatter(formatter) 36 | root.addHandler(fhandler) 37 | 38 | if self.logdest == 'both' or self.logdest == 'console': 39 | chandler = logging.StreamHandler(sys.stdout) 40 | chandler.setLevel(logging.DEBUG) 41 | chandler.setFormatter(formatter) 42 | root.addHandler(chandler) 43 | 44 | return root -------------------------------------------------------------------------------- /modules/rtspfactory.py: -------------------------------------------------------------------------------- 1 | import gi 2 | import logging 3 | 4 | gi.require_version('Gst', '1.0') 5 | gi.require_version('GstRtspServer', '1.0') 6 | from gi.repository import Gst, GstRtspServer 7 | 8 | ### Create an RTSP Media Factory from an existing pipeline 9 | class MavRTSPMediaFactory(GstRtspServer.RTSPMediaFactory): 10 | def __init__(self, pipeline): 11 | self.logger = logging.getLogger('visiond.' 
+ __name__) 12 | self.logger.info("Overriding RTSPMediaFactory with constructed pipeline") 13 | self.pipeline = pipeline 14 | GstRtspServer.RTSPMediaFactory.__init__(self) 15 | 16 | def do_create_element(self, url): 17 | self.logger.info("Creating RTSP factory element: {}".format(url.abspath)) 18 | return self.pipeline 19 | 20 | def do_configure(self, rtsp_media): 21 | self.logger.debug('Configuring RTSPMedia: {}'.format(rtsp_media)) 22 | rtsp_media.set_reusable(True) 23 | rtsp_media.set_shared(True) 24 | rtsp_media.set_buffer_size(0) 25 | rtsp_media.set_latency(0) 26 | rtsp_media.prepare() 27 | rtsp_media.set_pipeline_state(Gst.State.PLAYING) 28 | -------------------------------------------------------------------------------- /modules/streamer.py: -------------------------------------------------------------------------------- 1 | import gi 2 | import logging 3 | import os 4 | import signal 5 | import subprocess 6 | import threading 7 | import sys 8 | 9 | from .rtspfactory import * 10 | from .webrtc import * 11 | from .webrtc_signalserver import * 12 | 13 | gi.require_version('Gst', '1.0') 14 | gi.require_version('GstRtspServer', '1.0') 15 | gi.require_version('GstVideo', '1.0') 16 | from gi.repository import GLib, Gst, GstRtspServer, GstVideo 17 | Gst.init(None) 18 | 19 | ### Streamer Class to build up Gstreamer pipeline from in to out 20 | class Streamer(object): 21 | def __init__(self, config, format, pixelformat, encoder, input, device): 22 | self.config = config 23 | self.size = 0 24 | self.playing = False 25 | self.paused = False 26 | self.format = format 27 | self.encoder = encoder 28 | self.encoder_type = self.config.args.encoder_type 29 | self.payload = None # This is worked out later based on encoding and output type 30 | self.device = device 31 | self.pixelformat = pixelformat 32 | self.width = int(self.config.args.width) 33 | self.height = int(self.config.args.height) 34 | self.framerate = int(self.config.args.framerate) 35 | self.output = self.config.args.output 36 | self.dest = self.config.args.output_dest 37 | self.port = int(self.config.args.output_port) 38 | self.brightness = int(self.config.args.brightness) 39 | self.bitrate = int(self.config.args.bitrate) 40 | self.webrtc = None 41 | self.webrtc_signal_server = None 42 | self.glib_mainloop = None 43 | self.glib_thread = None 44 | self.logger = logging.getLogger('visiond.' 
+ __name__) 45 | 46 | # Start with creating a pipeline from source element 47 | if input == "appsrc": 48 | self.input_appsrc() 49 | elif input == "v4l2src" or input == "v4l2": 50 | self.input_v4l2() 51 | elif input == "nvarguscamerasrc": 52 | self.input_tegra() 53 | 54 | # Next deal with each input format separately and interpret the stream and encoding method to the pipeline 55 | if format == "h264": 56 | self.capstring = 'video/x-h264,width='+str(self.width)+',height='+str(self.height)+',framerate='+str(self.framerate)+'/1' 57 | self.stream_h264() 58 | elif format == "mjpeg": 59 | self.capstring = 'image/jpeg,width='+str(self.width)+',height='+str(self.height)+',framerate='+str(self.framerate)+'/1' 60 | self.stream_mjpeg() 61 | elif format == "yuv": 62 | self.capstring = 'video/x-raw,format='+self.pixelformat+',width='+str(self.width)+',height='+str(self.height)+',framerate='+str(self.framerate)+'/1' 63 | self.stream_yuv() 64 | elif format == "tegra": 65 | self.capstring = 'video/x-raw(memory:NVMM), format=NV12,width='+str(self.width)+',height='+str(self.height)+',framerate='+str(self.framerate)+'/1' 66 | self.stream_tegra() 67 | else: 68 | self.logger.critical("Stream starting with unrecognised video format: " + str(format)) 69 | return 70 | 71 | # Next insert processing components, eg. rotation 72 | # First set the processing_attach point which self.rotate() can override if successful 73 | self.processing_attach = self.source_attach 74 | if int(self.config.args.rotate): 75 | self.rotate() 76 | 77 | # Next choose the encoder 78 | if encoder == format: 79 | self.encode_attach = self.source_attach 80 | pass 81 | elif encoder == "h264": 82 | self.encode_h264() 83 | elif encoder == "mjpeg": 84 | self.encode_mjpeg() 85 | elif encoder == "yuv": 86 | self.encode_yuv() 87 | else: 88 | self.logger.critical("Stream starting with unrecognised encoder: " + str(encoder)) 89 | return 90 | 91 | # Then work out which payload we want. 
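# As a rough illustration only (assuming a raw YUV camera, software x264 encoding and an
# rtsp/udp output), the chain assembled above and below is roughly equivalent to:
#   v4l2src device=/dev/video0 ! video/x-raw,format=YUY2,width=640,height=480,framerate=30/1
#     ! queue ! videoconvert ! x264enc ! h264parse ! rtph264pay name=pay0 pt=96
# The actual elements vary with the detected camera, hardware encoder and output type.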
92 | if self.output == "udp" or self.output == "rtsp" or self.output == "webrtc": 93 | # For now, we fix webrtc to h264, which automatically invokes the rtp264pay that we also want 94 | if self.output == "webrtc": 95 | encoder = "h264" 96 | # Now set payloads according to the encoder 97 | if encoder == "h264": 98 | self.payload = "rtp264pay" 99 | self.payload_h264() 100 | elif encoder == "mjpeg": 101 | self.payload = "rtpjpegpay" 102 | self.payload_mjpeg() 103 | else: 104 | self.payload_attach = self.encode_attach 105 | 106 | # Finally connect the requested output to the end of the pipeline 107 | if self.output == "file": 108 | self.output_file() 109 | elif self.output == "udp": 110 | self.output_udp() 111 | elif self.output == "dynudp": 112 | self.output_dynudp() 113 | elif self.output == "rtsp": 114 | self.output_rtsp() 115 | elif self.output == "wcast": 116 | self.output_wcast() 117 | elif self.output == "webrtc": 118 | self.output_webrtc() 119 | 120 | # Start the pipeline 121 | self.show_pipeline() 122 | self.bus() 123 | self.start() 124 | 125 | def show_pipeline(self): 126 | # Output the resulting pipeline construction to log 127 | pipeline_iterator = Gst.Bin.iterate_elements(self.pipeline) 128 | pipe_elements = [] 129 | while True: 130 | res = Gst.Iterator.next(pipeline_iterator) 131 | if res[1]: 132 | elemstr = res[1].name 133 | # Extract caps if capsfilter element 134 | if res[1].name == "capsfilter": 135 | elemstr += " '" + Gst.Caps.to_string(res[1].get_property('caps')) + "'" 136 | # Extract device if v4l2src element 137 | if res[1].name == "v4l2-source": 138 | elemstr += " " + res[1].get_property('device') 139 | # Add element to pipeline output 140 | pipe_elements.append(elemstr) 141 | # logger.debug("Element: "+str(res[1])) 142 | if res[0] == Gst.IteratorResult.DONE: 143 | break 144 | self.logger.info("Pipeline: \"" + " ! 
".join(list(reversed(pipe_elements))) +"\"") 145 | 146 | ### Input methods 147 | def input_appsrc(self): 148 | self.logger.info("Attaching input 'appsrc'") 149 | self.pipeline = Gst.Pipeline.new() 150 | self.source = Gst.ElementFactory.make("appsrc", "source") 151 | self.pipeline.add(self.source) 152 | # Set appsrc stream to live 153 | self.source.set_property("is-live",True) 154 | # Let appsrc set the timestamp so we don't have to do it 155 | self.source.set_property("do-timestamp",True) 156 | self.source.set_property("min-latency",0) 157 | 158 | def input_v4l2(self): 159 | if not self.device: 160 | self.device = "/dev/video0" 161 | self.logger.info("Attaching input 'v4l2': "+str(self.device)) 162 | self.pipeline = Gst.Pipeline.new() 163 | self.source = Gst.ElementFactory.make("v4l2src", "v4l2-source") 164 | self.source.set_property("device", self.device) 165 | self.source.set_property("brightness", self.brightness) 166 | self.pipeline.add(self.source) 167 | 168 | def input_tegra(self): 169 | self.logger.info("Attaching input 'tegra' using nvarguscamerasrc") 170 | self.pipeline = Gst.Pipeline.new() 171 | self.source = Gst.ElementFactory.make("nvarguscamerasrc", "nvarguscamerasrc-source") 172 | self.pipeline.add(self.source) 173 | 174 | ### Stream methods 175 | def stream_h264(self): 176 | self.logger.info("Attaching stream 'h264'") 177 | capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter") 178 | capsfilter.set_property('caps', Gst.Caps.from_string(self.capstring)) 179 | self.pipeline.add(capsfilter) 180 | self.source.link(capsfilter) 181 | self.source_attach = capsfilter 182 | 183 | def stream_mjpeg(self): 184 | self.logger.info("Attaching stream 'mjpeg'") 185 | capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter") 186 | capsfilter.set_property('caps', Gst.Caps.from_string(self.capstring)) 187 | self.pipeline.add(capsfilter) 188 | self.source.link(capsfilter) 189 | # Try and construct a parse element. 
190 | parse = Gst.ElementFactory.make("jpegparse", "jpegparse") 191 | if parse: 192 | self.pipeline.add(parse) 193 | capsfilter.link(parse) 194 | queue = Gst.ElementFactory.make("queue", "queue") 195 | if self.format != self.encoder: 196 | dec = None 197 | # if Gst.ElementFactory.find("omxmjpegdec"): 198 | if Gst.ElementFactory.find("omxmjpegdecDISABLED"): 199 | self.logger.info("Raspberry hardware decoder detected, using omxmjpegdec as mjpeg decoder") 200 | dec = Gst.ElementFactory.make("omxmjpegdec", "omxmjpegdec") 201 | elif Gst.ElementFactory.find("jpegdec"): 202 | dec = Gst.ElementFactory.make("jpegdec", "jpegdec") 203 | if not dec: 204 | self.logger.critical("Error: No jpeg decoder found for mjpeg stream, exiting") 205 | sys.exit(1) 206 | self.pipeline.add(dec) 207 | if parse: 208 | parse.link(dec) 209 | else: 210 | capsfilter.link(dec) 211 | self.pipeline.add(queue) 212 | dec.link(queue) 213 | else: 214 | self.pipeline.add(queue) 215 | if parse: 216 | parse.link(queue) 217 | else: 218 | capsfilter.link(queue) 219 | vconvert = Gst.ElementFactory.make("videoconvert", "videoconvert") 220 | self.pipeline.add(vconvert) 221 | queue.link(vconvert) 222 | self.source_attach = vconvert 223 | 224 | def stream_yuv(self): 225 | self.logger.info("Attaching stream 'yuv'") 226 | if self.capstring: 227 | capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter") 228 | capsfilter.set_property('caps', Gst.Caps.from_string(self.capstring)) 229 | self.pipeline.add(capsfilter) 230 | self.source.link(capsfilter) 231 | queue = Gst.ElementFactory.make("queue", "queue") 232 | self.pipeline.add(queue) 233 | if self.capstring: 234 | capsfilter.link(queue) 235 | else: 236 | self.source.link(queue) 237 | vconvert = Gst.ElementFactory.make("autovideoconvert", "autovideoconvert") 238 | if not vconvert: 239 | vconvert = Gst.ElementFactory.make("videoconvert", "videoconvert") 240 | self.pipeline.add(vconvert) 241 | queue.link(vconvert) 242 | self.source_attach = vconvert 243 | 244 | def stream_tegra(self): 245 | self.logger.info("Attaching stream 'tegra'") 246 | capsfilter = Gst.ElementFactory.make("capsfilter", "capsfilter") 247 | capsfilter.set_property('caps', Gst.Caps.from_string(self.capstring)) 248 | self.pipeline.add(capsfilter) 249 | self.source.link(capsfilter) 250 | self.source_attach = capsfilter 251 | 252 | ### Processing methods 253 | def rotate(self): 254 | self.logger.info(f"Attaching component to rotate image by {self.config.args.rotate} degrees") 255 | # If nvidia hardware component is available then use that in preference 256 | 257 | if Gst.ElementFactory.find("nvvidconv"): 258 | rotatecmp = Gst.ElementFactory.make("nvvidconv", "rotate") 259 | # Define a dict that provides lookup mappings of rotation degrees to flip-method values 260 | method_table = { 261 | 270: 1, 262 | 180: 2, 263 | 90: 3 264 | } 265 | try: 266 | rotatecmp.set_property('flip-method', method_table[self.config.args.rotate]) 267 | # Replace source_attach with rotation component for encoders to attach directly to 268 | self.pipeline.add(rotatecmp) 269 | self.source_attach.link(rotatecmp) 270 | rotate_capsfilter = Gst.ElementFactory.make("capsfilter", "rotate_capsfilter") 271 | rotate_capsfilter.set_property('caps', Gst.Caps.from_string('video/x-raw(memory:NVMM), format=(string)I420')) 272 | self.pipeline.add(rotate_capsfilter) 273 | rotatecmp.link(rotate_capsfilter) 274 | self.processing_attach = rotate_capsfilter 275 | except: 276 | self.logger.warning(f"Rotation value {self.config.args.rotate} cannot be used with nvidia 
hardware, must be 90, 180 or 270 degrees exactly") 277 | else: 278 | self.logger.info('eek') 279 | rotatecmp = Gst.ElementFactory.make("videoflip", "rotate") 280 | # Define a dict that provides lookup mappings of rotation degrees to video-direction values 281 | method_table = { 282 | 90: 1, 283 | 180: 2, 284 | 270: 3 285 | } 286 | try: 287 | rotatecmp.set_property('video-direction', method_table[self.config.args.rotate]) 288 | # Replace source_attach with rotation component for encoders to attach directly to 289 | self.pipeline.add(rotatecmp) 290 | self.source_attach.link(rotatecmp) 291 | self.processing_attach = rotatecmp 292 | except: 293 | self.logger.warning(f"Rotation value {self.config.args.rotate} cannot be used with nvidia hardware, must be 90, 180 or 270 degrees exactly") 294 | 295 | ### Encoding methods 296 | def encode_h264(self): 297 | self.logger.info("Attaching encoding 'h264'") 298 | ### First attempt to detect the best encoder type for the platform 299 | _encoder_type = None 300 | # If encoder type is manually set, use it as an override 301 | if self.encoder_type: 302 | _encoder_type = self.encoder_type 303 | self.logger.info("Encoder type override set: {}".format(_encoder_type)) 304 | # Detect Nvidia encoder - note tegra hardware usually also has omx available so we detect this first 305 | elif Gst.ElementFactory.find("nvv4l2h264enc"): 306 | _encoder_type = "nvv4l2h264enc" 307 | # Detect OMX hardware 308 | elif Gst.ElementFactory.find("omxh264enc"): 309 | _encoder_type = "omxh264enc" 310 | # Detect Intel/VAAPI 311 | elif Gst.ElementFactory.find("vaapih264enc"): 312 | _encoder_type = "vaapih264enc" 313 | # If h264 encoding hardware was not detected, use software encoder 314 | else: 315 | _encoder_type = "x264" 316 | 317 | ### Create encoder element 318 | self.h264enc = None 319 | 320 | # Nvidia hardware 321 | if _encoder_type == "nvv4l2h264enc": 322 | self.logger.info("Nvidia hardware encoder detected, using nvv4l2h264enc as h264 encoder") 323 | self.h264enc = Gst.ElementFactory.make("nvv4l2h264enc", "nvidia-h264-encode") 324 | self.h264enc.set_property('control-rate', 0) # 0=variable, 1=constant 325 | self.h264enc.set_property('bitrate', self.bitrate) 326 | self.h264enc.set_property('maxperf-enable', 1) 327 | self.h264enc.set_property('preset-level', 1) # 1 = UltraFast 328 | #self.h264enc.set_property('MeasureEncoderLatency', 1) 329 | self.h264enc.set_property('profile', 0) # 0 = BaseProfile which should usually be set for max compatibility particularly with webrtc. 
2 = Main, 4 = High 330 | 331 | # OMX hardware 332 | elif _encoder_type == "omxh264enc": 333 | self.logger.info("OMX hardware encoder detected, using omxh264enc as h264 encoder") 334 | self.h264enc = Gst.ElementFactory.make("omxh264enc", "omx-h264-encode") 335 | self.h264enc.set_property('control-rate', 3) # 1: variable, 2: constant, 3: variable-skip-frames, 4: constant-skip-frames 336 | self.h264enc.set_property('target-bitrate', self.bitrate) 337 | 338 | # Intel hardware 339 | elif _encoder_type == "vaapih264enc": 340 | self.logger.info("VAAPI hardware encoder detected, using vaapih264enc as h264 encoder") 341 | self.h264enc = Gst.ElementFactory.make("vaapih264enc", "vaapi-h264-encode") 342 | self.h264enc.set_property('control-rate', 0) # 0=variable, 1=constant 343 | self.h264enc.set_property('target-bitrate', self.bitrate) 344 | 345 | # Software encoder 346 | elif _encoder_type == "x264": 347 | self.logger.info("No hardware encoder detected, using software x264 encoder") 348 | self.h264enc = Gst.ElementFactory.make("x264enc", "x264-encode") 349 | self.h264enc.set_property('speed-preset', 1) 350 | self.h264enc.set_property('tune', 0x00000004) 351 | self.h264enc.set_property('bitrate', self.bitrate / 1024) 352 | 353 | # Attach the h264 element 354 | self.pipeline.add(self.h264enc) 355 | self.processing_attach.link(self.h264enc) 356 | 357 | # If using omx hardware encoder, specify caps explicitly otherwise it can get upset when using rtspserver 358 | if _encoder_type == "omxh264enc": 359 | h264capsfilter = Gst.ElementFactory.make("capsfilter", "h264capsfilter") 360 | h264capsfilter.set_property('caps', Gst.Caps.from_string("video/x-h264,profile=high,width={},height={},framerate={}/1".format(self.width,self.height,self.framerate))) 361 | self.pipeline.add(h264capsfilter) 362 | self.h264enc.link(h264capsfilter) 363 | self.encode_attach = h264capsfilter 364 | else: 365 | self.encode_attach = self.h264enc 366 | 367 | def encode_mjpeg(self): 368 | # TODO: Add actual mjpeg encoding, currently we just pass through the source to the encoder attach points 369 | if self.format == self.encoder: 370 | self.encode_attach = self.processing_attach 371 | else: 372 | self.encode_attach = self.processing_attach 373 | 374 | def encode_yuv(self): 375 | # Nothing todo, just hang the source onto the encoder attach point 376 | self.encode_attach = self.processing_attach 377 | 378 | ### Payload methods 379 | def payload_h264(self): 380 | self.logger.info("Attaching payload 'h264'") 381 | # Attach an h264parse element. 
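# (h264parse, when available, normalises the encoder output so that rtph264pay and the
# RTSP/WebRTC consumers can negotiate caps; if the plugin is missing, the payloader is
# linked directly to the encoder instead.)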
382 | parse = Gst.ElementFactory.make("h264parse", "h264parse") 383 | if parse: 384 | self.logger.debug('h264parse element created') 385 | self.pipeline.add(parse) 386 | self.encode_attach.link(parse) 387 | h264pay = Gst.ElementFactory.make("rtph264pay", "h264-payload") 388 | h264pay.set_property("config-interval", 1) 389 | h264pay.set_property("pt", 96) 390 | h264pay.set_property("name", "pay0") # Set pay%d for rtsp stream pickup 391 | self.pipeline.add(h264pay) 392 | if parse: 393 | self.logger.debug('Attaching h264pay to h264parse') 394 | parse.link(h264pay) 395 | else: 396 | self.logger.debug('Attaching h264pay direct to h264 encoder') 397 | self.encode_attach.link(h264pay) 398 | self.payload_attach = h264pay 399 | 400 | def payload_mjpeg(self): 401 | self.logger.info("Attaching payload 'mjpeg'") 402 | mjpegpay = Gst.ElementFactory.make("rtpjpegpay", "mjpeg-payload") 403 | mjpegpay.set_property("pt", 26) 404 | self.pipeline.add(mjpegpay) 405 | self.encode_attach.link(mjpegpay) 406 | self.payload_attach = mjpegpay 407 | 408 | ### Output methods 409 | def output_file(self): 410 | self.logger.info("Attaching output 'file'") 411 | mux = Gst.ElementFactory.make("mpegtsmux", "mux") 412 | self.pipeline.add(mux) 413 | self.payload_attach.link(mux) 414 | 415 | sink = Gst.ElementFactory.make("filesink", "sink") 416 | sink.set_property("location", self.dest) 417 | self.pipeline.add(sink) 418 | mux.link(sink) 419 | 420 | def output_udp(self): 421 | if not self.dest: 422 | self.logger.warn("UDP destination must be set") 423 | return 424 | self.logger.info("Attaching output 'udp', sending to "+str(self.dest)+":"+str(self.port)) 425 | sink = Gst.ElementFactory.make("udpsink", "udpsink") 426 | sink.set_property("host", self.dest) 427 | sink.set_property("port", self.port) 428 | sink.set_property("sync", False) 429 | self.pipeline.add(sink) 430 | self.payload_attach.link(sink) 431 | 432 | def output_wcast(self): 433 | self.logger.info("Attaching output 'wcast'") 434 | # Create an OS pipe so we can attach the gstream pipeline to one end, and wifibroadcast tx to the other end 435 | read, write = os.pipe() 436 | # Create an fdsink to dump the pipeline out of 437 | sink = Gst.ElementFactory.make("fdsink", "fdsink") 438 | # Attach the sink to one end of the os pipe 439 | sink.set_property("fd", write) 440 | sink.set_property("sync", False) 441 | self.pipeline.add(sink) 442 | self.payload_attach.link(sink) 443 | # Spawn wifibroadcast tx 444 | self.wcast_tx = subprocess.Popen(['/srv/maverick/software/wifibroadcast/tx','-b 8', '-r 4', '-f 1024',self.dest], stdin=read) 445 | self.logger.info("wcast tx pid:" + str(self.wcast_tx.pid)) 446 | signal.signal(signal.SIGTERM, self.shutdown_tx) 447 | 448 | def output_dynudp(self): 449 | self.logger.info("Attaching output 'dynudp'") 450 | sink = Gst.ElementFactory.make("dynudpsink", "dynudpsink") 451 | sink.set_property("sync", False) 452 | sink.set_property("bind-address", "0.0.0.0") 453 | sink.set_property("bind-port", self.port) 454 | self.pipeline.add(sink) 455 | self.encode_attach.link(sink) 456 | 457 | def output_rtsp(self): 458 | self.logger.info("Attaching output 'rtsp'") 459 | self.rtspserver = GstRtspServer.RTSPServer() 460 | self.rtspserver.set_address(self.dest) 461 | self.rtspserver.set_service(str(self.port)) 462 | 463 | try: 464 | # Here we override RTSPMediaFactory to use the constructed object pipeline rather than the usual 465 | # set_launch which parses a pipeline string. 
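# (For comparison, the stock factory would normally be used roughly like this:
#      factory = GstRtspServer.RTSPMediaFactory()
#      factory.set_launch('( v4l2src ! ... ! rtph264pay name=pay0 pt=96 )')
#  whereas MavRTSPMediaFactory.do_create_element() simply returns the pipeline object that
#  was already constructed above.)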
466 | self.rtspfactory = MavRTSPMediaFactory(self.pipeline) 467 | except Exception as e: 468 | self.logger.critical("Error creating rstpfactory: "+repr(e)) 469 | 470 | # Set the media to shared and reusable 471 | self.rtspfactory.set_shared(True) 472 | self.rtspfactory.set_eos_shutdown(False) 473 | self.rtspfactory.set_stop_on_disconnect(False) 474 | self.rtspfactory.set_suspend_mode(GstRtspServer.RTSPSuspendMode.NONE) # Do not suspend the media 475 | 476 | # Tune for minimum latency 477 | self.rtspfactory.set_buffer_size(0) 478 | self.rtspfactory.set_latency(0) 479 | try: 480 | self.rtspfactory.set_do_retransmission(False) 481 | except: 482 | self.logger.info('RTSP set_do_retransmission not available in this version of GStreamer') 483 | 484 | # Add the /video endpoint. More/dynamic endpoints will be added in the future 485 | self.rtspfactory.set_transport_mode(GstRtspServer.RTSPTransportMode.PLAY) 486 | self.rtspmounts = self.rtspserver.get_mount_points() 487 | self.rtspmounts.add_factory('/video', self.rtspfactory) 488 | self.rtspserver.attach(None) 489 | 490 | # Attach a signal callback for when the media is constructed 491 | self.rtspfactory.connect("media_constructed", self.on_rtsp_media) 492 | 493 | # Create a fake rtsp client that sits and listens permanently, to stop GSTMedia pipeline from being destroyed 494 | self.rtsp_fakeclient() 495 | 496 | self.logger.info("RTSP stream running at rtsp://"+str(self.dest)+":"+str(self.port)+"/video") 497 | 498 | def on_rtsp_media(self, rtspfactory, rtspmedia): 499 | self.rtspmedia = rtspmedia 500 | self.rtspmedia.set_reusable(True) 501 | self.rtspmedia.set_shared(True) 502 | self.rtspmedia.set_buffer_size(0) 503 | self.rtspmedia.set_latency(0) 504 | self.logger.info("RTSPMedia constructed: reusable: {}, shared: {}, stopondisconnect: {}, latency: {}".format(self.rtspmedia.is_reusable(), self.rtspmedia.is_shared(), self.rtspmedia.is_stop_on_disconnect(), self.rtspmedia.get_latency())) 505 | 506 | def rtsp_fakeclient(self): 507 | self.logger.info("Creating fake RTSP client to hold RTSPMedia open and prevent the pipeline from collapsing in the future") 508 | client_pipeline = Gst.parse_launch("rtspsrc name=rtspclient latency=0 ! 
fakesink sync=false") 509 | source = client_pipeline.get_by_name("rtspclient") 510 | source.props.location = "rtsp://localhost:{}/video".format(self.port) 511 | client_pipeline.set_state(Gst.State.PLAYING) 512 | 513 | def output_webrtc(self): 514 | self.logger.info("Creating WebRTC Signal Server") 515 | self.webrtc_signal_server = MavWebRTCSignalServer(self.config) 516 | self.logger.info("Attaching output 'webrtc'") 517 | sink = Gst.ElementFactory.make("webrtcbin", "webrtc") 518 | self.pipeline.add(sink) 519 | self.payload_attach.link(sink) 520 | self.our_webrtcid = 12345 521 | self.webrtc = MavWebRTC(self.pipeline, self.our_webrtcid, self.config) 522 | self.webrtc.start() 523 | 524 | ### Misc methods (glib introspection) 525 | def on_message(self, bus, message): 526 | t = message.type 527 | if t == Gst.MessageType.EOS: 528 | self.playing = False 529 | self.pipeline.set_state(Gst.State.READY) 530 | self.logger.info("Stream EOS, setting piepline state to READY") 531 | elif t == Gst.MessageType.ERROR: 532 | self.pipeline.set_state(Gst.State.NULL) 533 | err, debug = message.parse_error() 534 | print("Error: %s" % err, debug) 535 | self.playing = False 536 | 537 | def bus(self): 538 | bus = self.pipeline.get_bus() 539 | bus.add_signal_watch() 540 | bus.connect("message", self.on_message) 541 | 542 | ### Action methods 543 | def start(self): 544 | # TODO: check to make sure we are not already playing... 545 | if self.output not in ["rtsp", "webrtc"]: 546 | self.pipeline.set_state(Gst.State.PLAYING) 547 | self.playing = True 548 | self.logger.info('Starting camera stream') 549 | self.glib_mainloop = GLib.MainLoop() 550 | # self.glib_mainloop.run() is a blocking call 551 | # Wrap the call in a thread: 552 | self.glib_thread = threading.Thread(target=self.glib_mainloop.run, daemon=True) 553 | self.glib_thread.start() 554 | 555 | # or let it block here... 556 | # self.glib_mainloop.run() 557 | 558 | def write(self,s): 559 | gstbuff = Gst.Buffer.new_wrapped(s) 560 | self.source.emit("push-buffer",gstbuff) 561 | 562 | def stop(self): 563 | self.logger.info('Stopping camera stream') 564 | if self.glib_thread and self.glib_thread.is_alive() and self.glib_mainloop.is_running(): 565 | # if self.glib_mainloop.is_running(): 566 | self.glib_mainloop.quit() 567 | self.glib_thread.join() # wait for the thread to finish 568 | self.playing = False 569 | self.pipeline.set_state(Gst.State.READY) 570 | 571 | def flush(self): 572 | self.stop() 573 | 574 | def shutdown_tx(self, signum, frame): 575 | os.kill(self.wcast_tx.pid, signal.SIGTERM) 576 | sys.exit() 577 | 578 | -------------------------------------------------------------------------------- /modules/webrtc.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import logging 4 | import ssl 5 | import threading 6 | import websockets 7 | 8 | import gi 9 | gi.require_version('Gst', '1.0') 10 | from gi.repository import Gst 11 | gi.require_version('GstWebRTC', '1.0') 12 | from gi.repository import GstWebRTC 13 | gi.require_version('GstSdp', '1.0') 14 | from gi.repository import GstSdp 15 | 16 | class MavWebRTC(threading.Thread): 17 | def __init__(self, pipeline, our_id, config): 18 | threading.Thread.__init__(self) 19 | self.daemon = True 20 | self.pipeline = pipeline 21 | self.logger = logging.getLogger('visiond.' 
+ __name__) 22 | self.config = config 23 | self._should_shutdown = threading.Event() 24 | self.conn = None 25 | self.peer_id = None 26 | self.our_id = our_id 27 | self.server = 'wss://localhost:8443' 28 | self.webrtc = self.pipeline.get_by_name('webrtc') 29 | self.connection_timeout = 3.0 # seconds 30 | 31 | @property 32 | def connected(self): 33 | if self.conn: 34 | return True 35 | return False 36 | 37 | def run(self): 38 | self.logger.info("Webrtc stream is starting...") 39 | self.loop = asyncio.new_event_loop() 40 | asyncio.set_event_loop(self.loop) 41 | self.loop.run_until_complete(self.main()) 42 | self.loop.close() 43 | self.logger.info("Webrtc stream has exited") 44 | 45 | def shutdown(self): 46 | self._should_shutdown.set() 47 | 48 | async def main(self): 49 | self.tasks = [] 50 | connect_loop_task = asyncio.create_task(self.connect_loop_tasks()) 51 | processing_loop_task = asyncio.create_task(self.processing_loop_tasks()) 52 | self.tasks.append(connect_loop_task) 53 | self.tasks.append(processing_loop_task) 54 | await asyncio.gather(*self.tasks, return_exceptions=True) 55 | 56 | async def connect_loop_tasks(self): 57 | while not self._should_shutdown.is_set(): 58 | await asyncio.sleep(1) 59 | await self.connect_loop() 60 | 61 | async def connect_loop(self): 62 | if not self.connected: 63 | try: 64 | self.logger.info("Starting peer connection with signalling server") 65 | await asyncio.wait_for(self.connect(), timeout=self.connection_timeout) 66 | #except asyncio.TimeoutError: 67 | except Exception as e: 68 | self.logger.warning("connect_loop error: {}".format(repr(e))) 69 | self.conn = None 70 | 71 | async def processing_loop_tasks(self): 72 | while not self._should_shutdown.is_set(): 73 | await asyncio.sleep(2) # TODO: assess this timeout 74 | await self.processing_loop() 75 | 76 | async def connect(self): 77 | sslctx = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH) 78 | self.conn = await websockets.connect(self.server, ssl=sslctx) 79 | await self.conn.send('HELLO %d' % self.our_id) 80 | self.logger.info("WebRTC: registered with signalling server, peer id {}".format(self.our_id)) 81 | 82 | async def setup_call(self): 83 | await self.conn.send('SESSION {}'.format(self.peer_id)) 84 | 85 | def send_sdp_offer(self, offer): 86 | text = offer.sdp.as_text() 87 | self.logger.info('Sending offer:\n%s' % text) 88 | msg = json.dumps({'sdp': {'type': 'offer', 'sdp': text}}) 89 | loop = asyncio.new_event_loop() 90 | loop.run_until_complete(self.conn.send(msg)) 91 | 92 | def on_offer_created(self, promise, _, __): 93 | promise.wait() 94 | reply = promise.get_reply() 95 | offer = reply.get_value('offer') 96 | promise = Gst.Promise.new() 97 | self.webrtc.emit('set-local-description', offer, promise) 98 | promise.interrupt() 99 | self.send_sdp_offer(offer) 100 | 101 | def on_negotiation_needed(self, element): 102 | promise = Gst.Promise.new_with_change_func(self.on_offer_created, element, None) 103 | element.emit('create-offer', None, promise) 104 | 105 | def send_ice_candidate_message(self, _, mlineindex, candidate): 106 | icemsg = json.dumps({'ice': {'candidate': candidate, 'sdpMLineIndex': mlineindex}}) 107 | loop = asyncio.new_event_loop() 108 | loop.run_until_complete(self.conn.send(icemsg)) 109 | 110 | def on_incoming_decodebin_stream(self, _, pad): 111 | if not pad.has_current_caps(): 112 | self.logger.info(pad, 'has no caps, ignoring') 113 | return 114 | 115 | caps = pad.get_current_caps() 116 | assert (len(caps)) 117 | s = caps[0] 118 | name = s.get_name() 119 | if 
name.startswith('video'): 120 | q = Gst.ElementFactory.make('queue') 121 | conv = Gst.ElementFactory.make('videoconvert') 122 | sink = Gst.ElementFactory.make('autovideosink') 123 | self.pipeline.add(q, conv, sink) 124 | self.pipeline.sync_children_states() 125 | pad.link(q.get_static_pad('sink')) 126 | q.link(conv) 127 | conv.link(sink) 128 | elif name.startswith('audio'): 129 | q = Gst.ElementFactory.make('queue') 130 | conv = Gst.ElementFactory.make('audioconvert') 131 | resample = Gst.ElementFactory.make('audioresample') 132 | sink = Gst.ElementFactory.make('autoaudiosink') 133 | self.pipeline.add(q, conv, resample, sink) 134 | self.pipeline.sync_children_states() 135 | pad.link(q.get_static_pad('sink')) 136 | q.link(conv) 137 | conv.link(resample) 138 | resample.link(sink) 139 | 140 | def on_incoming_stream(self, _, pad): 141 | if pad.direction != Gst.PadDirection.SRC: 142 | return 143 | 144 | decodebin = Gst.ElementFactory.make('decodebin') 145 | decodebin.connect('pad-added', self.on_incoming_decodebin_stream) 146 | self.pipeline.add(decodebin) 147 | decodebin.sync_state_with_parent() 148 | self.webrtc.link(decodebin) 149 | 150 | def start_pipeline(self): 151 | self.webrtc = self.pipeline.get_by_name('webrtc') 152 | 153 | ### Set transceiver to SENDONLY 154 | # https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceivers 155 | # https://gstreamer.freedesktop.org/documentation/webrtclib/webrtc_fwd.html?gi-language=c#GstWebRTCRTPTransceiverDirection 156 | # https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceiver 157 | # ^^ get_transceivers returns GLib.Array which is not useable in python introspection. get_transceiver added but only works > 1.16 158 | # https://stackoverflow.com/a/57464086 159 | """ 160 | # Need to translate this to python 161 | g_signal_emit_by_name (receiver_entry->webrtcbin, "get-transceivers", &transceivers); 162 | g_assert (transceivers != NULL && transceivers->len > 0); 163 | trans = g_array_index (transceivers, GstWebRTCRTPTransceiver *, 0); 164 | trans->direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY; 165 | """ 166 | #pay = self.pipeline.get_by_name('pay0') 167 | #self.logger.debug("pay: {}".format(pay.get_caps())) 168 | #direction = GstWebRTC.WebRTCRTPTransceiverDirection.SENDONLY 169 | #caps = Gst.caps_from_string("application/x-rtp,media=video,encoding-name=VP8/9000,payload=96") 170 | #self.webrtc.emit('add-transceiver', direction, caps) 171 | 172 | self.webrtc.connect('on-negotiation-needed', self.on_negotiation_needed) 173 | self.webrtc.connect('on-ice-candidate', self.send_ice_candidate_message) 174 | self.webrtc.connect('pad-added', self.on_incoming_stream) 175 | self.logger.info("Setting WebRTC pipeline to active") 176 | self.pipeline.set_state(Gst.State.PLAYING) 177 | 178 | async def handle_sdp(self, message): 179 | assert (self.webrtc) 180 | msg = json.loads(message) 181 | if 'sdp' in msg: 182 | sdp = msg['sdp'] 183 | assert(sdp['type'] == 'answer') 184 | sdp = sdp['sdp'] 185 | self.logger.info('Received answer:\n%s' % sdp) 186 | res, sdpmsg = GstSdp.SDPMessage.new() 187 | GstSdp.sdp_message_parse_buffer(bytes(sdp.encode()), sdpmsg) 188 | answer = GstWebRTC.WebRTCSessionDescription.new(GstWebRTC.WebRTCSDPType.ANSWER, sdpmsg) 189 | promise = Gst.Promise.new() 190 | self.webrtc.emit('set-remote-description', answer, promise) 191 | promise.interrupt() 192 | elif 'ice' in msg: 193 | ice = msg['ice'] 194 | candidate = ice['candidate'] 195 | sdpmlineindex = 
ice['sdpMLineIndex'] 196 | self.webrtc.emit('add-ice-candidate', sdpmlineindex, candidate) 197 | 198 | async def processing_loop(self): 199 | if self.connected and not self._should_shutdown.is_set(): 200 | # TODO: add a timeout to self.conn here so we don't await forever 201 | # https://stackoverflow.com/questions/50241696/how-to-iterate-over-an-asynchronous-iterator-with-a-timeout 202 | async for message in self.conn: 203 | self.logger.debug("Message: {}".format(message)) 204 | if message == 'HELLO': 205 | self.logger.info("Received registration response from signalling server: {}".format(message)) 206 | #self.start_pipeline() 207 | #await self.setup_call() 208 | elif message == 'SESSION_OK': 209 | self.logger.info("Received SESSION_OK, starting pipeline") 210 | # self.start_pipeline() 211 | elif message == 'SEND_SDP': 212 | self.logger.info('Received SEND_SDP, starting pipeline') 213 | self.start_pipeline() 214 | elif message.startswith('ERROR'): 215 | self.logger.warning(message) 216 | return 1 217 | else: 218 | await self.handle_sdp(message) 219 | return 0 220 | else: 221 | return 1 222 | -------------------------------------------------------------------------------- /modules/webrtc_signalserver.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Example 1-1 call signalling server 4 | # 5 | # Copyright (C) 2017 Centricular Ltd. 6 | # 7 | # Author: Nirbheek Chauhan 8 | # 9 | 10 | import os 11 | import sys 12 | import ssl 13 | import time 14 | import logging 15 | import asyncio 16 | import websockets 17 | import http 18 | import multiprocessing 19 | 20 | ############### data info ############### 21 | 22 | # self.peers 23 | # Format: {uid: (Peer WebSocketServerProtocol, 24 | # remote_address, 25 | # <'session'|room_id|None>)} 26 | 27 | # self.sessions 28 | # Format: {caller_uid: callee_uid, 29 | # callee_uid: caller_uid} 30 | # Bidirectional mapping between the two peers 31 | 32 | # self.rooms 33 | # Format: {room_id: {peer1_id, peer2_id, peer3_id, ...}} 34 | # Room dict with a set of peers in each room 35 | 36 | class MavWebRTCSignalServer(multiprocessing.Process): 37 | def __init__(self, config): 38 | multiprocessing.Process.__init__(self) 39 | self.daemon = True 40 | self.config = config 41 | self.logger = logging.getLogger('visiond.' 
+ __name__) 42 | self._should_shutdown = multiprocessing.Event() 43 | self.signal_server = None 44 | self.peers = dict() 45 | self.sessions = dict() 46 | self.rooms = dict() 47 | 48 | # Attempt to redirect the default handler into our log files 49 | default_server_logger = logging.getLogger('websockets.server') 50 | default_server_logger.setLevel(logging.DEBUG) # TODO: Set based on options 51 | default_server_logger.propagate = True 52 | for handler in logging.getLogger('visiond').handlers: 53 | default_server_logger.addHandler(handler) 54 | 55 | self.disable_ssl = False # TODO: pass these in as options 56 | self.ADDR_PORT = ("0.0.0.0", 8443) # TODO: pass these in as options 57 | self.health_check_path = "/health" # TODO: pass these in as options 58 | self.keepalive_timeout = 30 # TODO: pass these in as options 59 | self.start() # the server will self start 60 | 61 | def shutdown(self): 62 | self.logger.info("shutdown was called") 63 | self._should_shutdown.set() 64 | 65 | def run(self): 66 | self.logger.info("Webrtc signal server starting...") 67 | self.loop = asyncio.new_event_loop() 68 | asyncio.set_event_loop(self.loop) 69 | self.loop.run_until_complete(self.main()) 70 | self.loop.close() 71 | self.logger.info("Webrtc signal server exited") 72 | 73 | async def main(self): 74 | self.tasks = [] 75 | server_monitor_loop_task = asyncio.create_task(self.server_monitor_task()) 76 | websocket_server_start_loop_task = asyncio.create_task(self.server_starter_task()) 77 | self.tasks.append(server_monitor_loop_task) 78 | self.tasks.append(websocket_server_start_loop_task) 79 | await asyncio.gather(*self.tasks, return_exceptions=False) 80 | 81 | async def server_monitor_task(self): 82 | while not self._should_shutdown.is_set(): 83 | await asyncio.sleep(1) 84 | # if we get here shutdown() has been called 85 | self.logger.info("Stopping the event loop") 86 | if self.signal_server: 87 | self.signal_server.close() 88 | await self.signal_server.wait_closed() 89 | # self.loop.stop() 90 | 91 | async def server_starter_task(self): 92 | while ((not self.signal_server) and (not self._should_shutdown.is_set())): 93 | # attempt to start the signal server 94 | self.signal_server = await self.start_signal_server() 95 | await asyncio.sleep(3) 96 | 97 | async def start_signal_server(self): 98 | # called in the loop of run() 99 | sslctx = None 100 | 101 | if not self.disable_ssl: 102 | # Create an SSL context to be used by the websocket server 103 | if 'ssl_keyfile' in self.config.args: 104 | self.ssl_keyfile = self.config.args.ssl_keyfile 105 | else: 106 | self.ssl_keyfile = os.path.join(os.path.dirname(__file__), 'key.pem') 107 | if 'ssl_certfile' in self.config.args: 108 | self.ssl_certfile = self.config.args.ssl_certfile 109 | else: 110 | self.ssl_certfile = os.path.join(os.path.dirname(__file__), 'cert.pem') 111 | sslctx = ssl.create_default_context() 112 | self.logger.info("Using ssl keyfile: {}, certfile: {}".format(self.ssl_keyfile, self.ssl_certfile)) 113 | try: 114 | #sslctx.load_verify_locations(cafile="/srv/maverick/data/security/ssl/ca/mavCA.pem") 115 | sslctx.load_cert_chain(self.ssl_certfile, keyfile=self.ssl_keyfile) 116 | except Exception as e: 117 | self.logger.critical("Error loading certificates: {}".format(repr(e))) 118 | # we can't run the signal server without ssl, so bail out here 119 | self.logger.critical("Server startup aborted, SSL is required") 120 | sys.exit(1) 121 | # FIXME 122 | sslctx.check_hostname = False 123 | sslctx.verify_mode = ssl.CERT_NONE 124 | 125 | 
self.logger.info("Listening on https://{}:{}".format(*self.ADDR_PORT)) 126 | 127 | # Websocket server 128 | start_server = websockets.serve(self.handler, *self.ADDR_PORT, ssl=sslctx, process_request=self.health_check, 129 | # Maximum number of messages that websockets will pop 130 | # off the asyncio and OS buffers per connection. See: 131 | # https://websockets.readthedocs.io/en/stable/api.html#websockets.protocol.WebSocketCommonProtocol 132 | max_queue=16) 133 | server = await start_server 134 | return server 135 | 136 | async def health_check(self, path, request_headers): 137 | if path == self.health_check_path: 138 | return http.HTTPStatus.OK, [], b"OK\n" 139 | 140 | async def recv_msg_ping(self, ws, raddr): 141 | ''' 142 | Wait for a message forever, and send a regular ping to prevent bad routers 143 | from closing the connection. 144 | ''' 145 | msg = None 146 | while msg is None: 147 | try: 148 | msg = await asyncio.wait_for(ws.recv(), self.keepalive_timeout) 149 | except asyncio.TimeoutError: 150 | # self.logger.debug('Sending keepalive ping to {!r} in recv'.format(raddr)) 151 | await ws.ping() 152 | return msg 153 | 154 | async def disconnect(self, ws, peer_id): 155 | ''' 156 | Remove @peer_id from the list of sessions and close our connection to it. 157 | This informs the peer that the session and all calls have ended, and it 158 | must reconnect. 159 | ''' 160 | if peer_id in self.sessions: 161 | del self.sessions[peer_id] 162 | # Close connection 163 | if ws and ws.open: 164 | # Don't care about errors 165 | asyncio.ensure_future(ws.close(reason='hangup')) 166 | 167 | async def cleanup_session(self, uid): 168 | if uid in self.sessions: 169 | other_id = self.sessions[uid] 170 | del self.sessions[uid] 171 | self.logger.info("Cleaned up {} session".format(uid)) 172 | if other_id in self.sessions: 173 | del self.sessions[other_id] 174 | self.logger.info("Also cleaned up {} session".format(other_id)) 175 | # If there was a session with this peer, also 176 | # close the connection to reset its state. 
177 | if other_id in self.peers: 178 | self.logger.info("Closing connection to {}".format(other_id)) 179 | wso, oaddr, _ = self.peers[other_id] 180 | del self.peers[other_id] 181 | await wso.close() 182 | 183 | async def cleanup_room(self, uid, room_id): 184 | room_peers = self.rooms[room_id] 185 | if uid not in room_peers: 186 | return 187 | room_peers.remove(uid) 188 | for pid in room_peers: 189 | wsp, paddr, _ = self.peers[pid] 190 | msg = 'ROOM_PEER_LEFT {}'.format(uid) 191 | self.logger.debug('room {}: {} -> {}: {}'.format(room_id, uid, pid, msg)) 192 | await wsp.send(msg) 193 | 194 | async def remove_peer(self, uid): 195 | await self.cleanup_session(uid) 196 | if uid in self.peers: 197 | ws, raddr, status = self.peers[uid] 198 | if status and status != 'session': 199 | await self.cleanup_room(uid, status) 200 | del self.peers[uid] 201 | await ws.close() 202 | self.logger.info("Disconnected from peer {!r} at {!r}".format(uid, raddr)) 203 | 204 | ############### Handler functions ############### 205 | 206 | async def connection_handler(self, ws, uid): 207 | raddr = ws.remote_address 208 | peer_status = None 209 | self.peers[uid] = [ws, raddr, peer_status] 210 | self.logger.info("Registered peer {!r} at {!r}".format(uid, raddr)) 211 | while True: 212 | # Receive command, wait forever if necessary 213 | msg = await self.recv_msg_ping(ws, raddr) 214 | # Update current status 215 | peer_status = self.peers[uid][2] 216 | # We are in a session or a room, messages must be relayed 217 | if peer_status is not None: 218 | # We're in a session, route message to connected peer 219 | if peer_status == 'session': 220 | other_id = self.sessions[uid] 221 | wso, oaddr, status = self.peers[other_id] 222 | assert(status == 'session') 223 | self.logger.debug("{} -> {}: {}".format(uid, other_id, msg)) 224 | await wso.send(msg) 225 | # We're in a room, accept room-specific commands 226 | elif peer_status: 227 | # ROOM_PEER_MSG peer_id MSG 228 | if msg.startswith('ROOM_PEER_MSG'): 229 | _, other_id, msg = msg.split(maxsplit=2) 230 | if other_id not in self.peers: 231 | await ws.send('ERROR peer {!r} not found' 232 | ''.format(other_id)) 233 | continue 234 | wso, oaddr, status = self.peers[other_id] 235 | if status != room_id: 236 | await ws.send('ERROR peer {!r} is not in the room' 237 | ''.format(other_id)) 238 | continue 239 | msg = 'ROOM_PEER_MSG {} {}'.format(uid, msg) 240 | self.logger.debug('room {}: {} -> {}: {}'.format(room_id, uid, other_id, msg)) 241 | await wso.send(msg) 242 | elif msg == 'ROOM_PEER_LIST': 243 | room_id = self.peers[peer_id][2] 244 | room_peers = ' '.join([pid for pid in self.rooms[room_id] if pid != peer_id]) 245 | msg = 'ROOM_PEER_LIST {}'.format(room_peers) 246 | self.logger.debug('room {}: -> {}: {}'.format(room_id, uid, msg)) 247 | await ws.send(msg) 248 | else: 249 | await ws.send('ERROR invalid msg, already in room') 250 | continue 251 | else: 252 | raise AssertionError('Unknown peer status {!r}'.format(peer_status)) 253 | # Requested a session with a specific peer 254 | elif msg.startswith('SESSION'): 255 | self.logger.info("{!r} command {!r}".format(uid, msg)) 256 | _, callee_id = msg.split(maxsplit=1) 257 | if callee_id not in self.peers: 258 | await ws.send('ERROR peer {!r} not found'.format(callee_id)) 259 | continue 260 | if peer_status is not None: 261 | await ws.send('ERROR peer {!r} busy'.format(callee_id)) 262 | continue 263 | await ws.send('SESSION_OK') 264 | wsc = self.peers[callee_id][0] 265 | self.logger.info('Session from {!r} ({!r}) to {!r} ({!r})' 266 | 
''.format(uid, raddr, callee_id, wsc.remote_address)) 267 | # Register session 268 | self.peers[uid][2] = peer_status = 'session' 269 | self.sessions[uid] = callee_id 270 | self.peers[callee_id][2] = 'session' 271 | self.sessions[callee_id] = uid 272 | # Requested joining or creation of a room 273 | elif msg.startswith('ROOM'): 274 | self.logger.info('{!r} command {!r}'.format(uid, msg)) 275 | _, room_id = msg.split(maxsplit=1) 276 | # Room name cannot be 'session', empty, or contain whitespace 277 | if room_id == 'session' or room_id.split() != [room_id]: 278 | await ws.send('ERROR invalid room id {!r}'.format(room_id)) 279 | continue 280 | if room_id in self.rooms: 281 | if uid in self.rooms[room_id]: 282 | raise AssertionError('How did we accept a ROOM command ' 283 | 'despite already being in a room?') 284 | else: 285 | # Create room if required 286 | self.rooms[room_id] = set() 287 | room_peers = ' '.join([pid for pid in self.rooms[room_id]]) 288 | await ws.send('ROOM_OK {}'.format(room_peers)) 289 | # Enter room 290 | self.peers[uid][2] = peer_status = room_id 291 | self.rooms[room_id].add(uid) 292 | for pid in self.rooms[room_id]: 293 | if pid == uid: 294 | continue 295 | wsp, paddr, _ = self.peers[pid] 296 | msg = 'ROOM_PEER_JOINED {}'.format(uid) 297 | self.logger.debug('room {}: {} -> {}: {}'.format(room_id, uid, pid, msg)) 298 | await wsp.send(msg) 299 | else: 300 | self.logger.info('Ignoring unknown message {!r} from {!r}'.format(msg, uid)) 301 | 302 | async def hello_peer(self, ws): 303 | ''' 304 | Exchange hello, register peer 305 | ''' 306 | raddr = ws.remote_address 307 | hello = await ws.recv() 308 | hello, uid = hello.split(maxsplit=1) 309 | if hello != 'HELLO': 310 | await ws.close(code=1002, reason='invalid protocol') 311 | raise Exception("Invalid hello from {!r}".format(raddr)) 312 | if not uid or uid in self.peers or uid.split() != [uid]: # no whitespace 313 | await ws.close(code=1002, reason='invalid peer uid') 314 | raise Exception("Invalid uid {!r} from {!r}".format(uid, raddr)) 315 | # Send back a HELLO 316 | await ws.send('HELLO') 317 | return uid 318 | 319 | async def handler(self, ws, path): 320 | ''' 321 | All incoming messages are handled here. @path is unused. 
322 | ''' 323 | raddr = ws.remote_address 324 | self.logger.info("Connected to {!r}".format(raddr)) 325 | peer_id = await self.hello_peer(ws) 326 | try: 327 | await self.connection_handler(ws, peer_id) 328 | except websockets.ConnectionClosed: 329 | self.logger.info("Connection to peer {!r} closed, exiting handler".format(raddr)) 330 | finally: 331 | await self.remove_peer(peer_id) 332 | -------------------------------------------------------------------------------- /visiond: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # (c) 2016-2020 GoodRobots 4 | # http://github.com/goodrobots/visiond 5 | # This is a simple python project that controls a video source, transmutes formats and transmits it over the network to various clients 6 | 7 | import os 8 | 9 | from modules.config import * 10 | from modules.logger import * 11 | from modules.app import * 12 | 13 | if __name__ == "__main__": 14 | print("Starting visiond") 15 | 16 | # Setup config 17 | cwd = os.path.dirname(os.path.abspath(__file__)) 18 | config = visiondConfig(os.path.join(cwd, 'visiond.conf')) 19 | 20 | # Setup logger 21 | logger = visiondLogger(cwd, config) 22 | 23 | # Create app instance and daemonize 24 | app = visiondApp(config) 25 | app.run() 26 | -------------------------------------------------------------------------------- /visiond.conf: -------------------------------------------------------------------------------- 1 | [Defaults] 2 | 3 | ### Config options in alphabetical order. Values are set to the defaults. 4 | 5 | # Target stream bitrate in bits/second 6 | # bitrate = 2000000 7 | 8 | # Brightness - 0 is automatic 9 | # brightness = 0 10 | 11 | # Camera device, usually /dev/video0 for a single normal camera 12 | # camera_device = /dev/video0 13 | 14 | # Debug: Turns on gstreamer debug to the specified level. Note level 3 and above can be very verbose 15 | # debug = 0 16 | 17 | # Encoder, if not specified will default to h264. Values are h264, mjpeg or none 18 | # encoder = h264 19 | 20 | # Encoder type, if not specified will fall back to sensible default for the encoder 21 | # encoder_type = 22 | 23 | # Camera format, if not specified here will autodetect yuv->mjpeg->h264 24 | # format = yuv 25 | 26 | # Framerate of video stream, must be valid for camera 27 | # framerate = 30 28 | 29 | # Resolution height of video stream, must be valid for camera 30 | # height = 480 31 | 32 | # Stream input type: v4l2src (fpv), appsrc (cv), nvarguscamerasrc (Nvidia Jetson Platform) 33 | # input = v4l2src 34 | 35 | # Log destination - can be file, console or both (if run through systemd, console will log to system journal) 36 | # logdest = both 37 | 38 | # Log directory, if file logging set 39 | # logdir = /var/tmp/visiond 40 | 41 | # Descriptive name of the visiond instance / camera. Used for display in zeroconf and -web interface. 42 | # name = 43 | 44 | # Stream output type: file (save video), udp (stream video), wcast (wifibroadcast), rtsp (rtsp server), webrtc (webrtc service) 45 | # output = rtsp 46 | 47 | # Output destination: filename (file output), IP address (udp output), Interface (wcast output) 48 | # output_dest = 0.0.0.0 49 | 50 | # Output port: Port number (eg. 5000) for network destination, Channel for wifibroadcast output (eg. 1) 51 | # output_port = 5600 52 | 53 | ## Pipeline Override - This is used to provide a manual pipeline if the auto construction fails o 54 | # eg. 
Intel hardware, stream to known udp destination 55 | # pipeline_override = v4l2src device=/dev/video2 ! video/x-raw,format=YUY2,width=640,height=480,framerate=30/1 ! autovideoconvert ! vaapih264enc ! h264parse ! rtph264pay config-interval=1 pt=96 ! udpsink host=192.168.1.111 port=5000 sync=false 56 | # 57 | # eg. Flir ONE colourized thermal stream 58 | # pipeline_override = v4l2src device=/dev/video3 ! queue ! autovideoconvert ! omxh264enc 59 | 60 | # Pixel Format, could be (fourcc)YUV2, I420, RGB etc 61 | # pixelformat = I420 62 | 63 | # Retry timeout - number of seconds visiond will wait before trying to recreate pipeline after error 64 | # retry = 10 65 | 66 | # Set the path to SSL certs for webrtc signalling server. 67 | # ssl_keyfile = /srv/visiond/ssl/visiond.key 68 | # ssl_certfile = /srv/visiond/ssl/visiond.crt 69 | 70 | # Resolution width of video stream, must be valid for camera 71 | # width = 640 72 | 73 | # If True (default), advertise the video stream over mDNS/Zeroconf 74 | # zeroconf=True -------------------------------------------------------------------------------- /visiond.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Maverick Vision Daemon 3 | After=network-online.target 4 | Requires=network-online.target 5 | StartLimitBurst=3 6 | 7 | [Service] 8 | TimeoutStartSec=0 9 | ExecStart=/srv/visiond/visiond 10 | User=mav 11 | Restart=on-failure 12 | RestartSec=30 13 | Type=notify 14 | 15 | [Install] 16 | WantedBy=multi-user.target 17 | 18 | --------------------------------------------------------------------------------
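Note on the SENDONLY transceiver TODO in modules/webrtc.py (start_pipeline): the commented-out C snippet there asks for a Python translation. The sketch below is one possible translation, not the project's confirmed approach; it assumes GStreamer >= 1.16 for webrtcbin's "get-transceiver" signal and >= 1.18 for a writable "direction" property on GstWebRTCRTPTransceiver, and the helper name force_sendonly is illustrative only.

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstWebRTC', '1.0')
from gi.repository import Gst, GstWebRTC

def force_sendonly(webrtcbin):
    # Hypothetical sketch: "get-transceivers" returns a GLib.Array that PyGObject
    # cannot unpack, so fetch the first transceiver by index instead
    # ("get-transceiver" action signal, GStreamer >= 1.16).
    trans = webrtcbin.emit('get-transceiver', 0)
    if trans is not None:
        # The writable "direction" property requires GStreamer >= 1.18.
        trans.set_property('direction', GstWebRTC.WebRTCRTPTransceiverDirection.SENDONLY)

If adopted, this could be called from start_pipeline() right after self.webrtc is resolved and before the on-negotiation-needed / on-ice-candidate handlers are connected.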