├── .gitignore
├── mjpeg
│   ├── server.py
│   ├── client.py
│   ├── __init__.py
│   └── aioclient.py
├── setup.py
├── LICENSE
└── README.md

/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /dist
3 | /py_mjpeg.egg-info
4 | *.pyc
5 | __pycache__
6 | 
7 | 
--------------------------------------------------------------------------------
/mjpeg/server.py:
--------------------------------------------------------------------------------
1 | from flask import Response
2 | 
3 | __all__ = ['mjpeg_generator', 'MJPEGResponse']
4 | 
5 | 
6 | def mjpeg_generator(boundary, frames):
7 |     hdr = '--%s\r\nContent-Type: image/jpeg\r\n' % boundary
8 | 
9 |     prefix = ''
10 |     for f in frames:
11 |         msg = prefix + hdr + 'Content-Length: %d\r\n\r\n' % len(f)
12 |         yield msg.encode('utf-8') + f
13 |         prefix = '\r\n'
14 | 
15 | 
16 | def MJPEGResponse(it):
17 |     boundary = 'herebedragons'
18 |     return Response(mjpeg_generator(boundary, it), mimetype='multipart/x-mixed-replace;boundary=%s' % boundary)
19 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | 
3 | import setuptools
4 | 
5 | with open('README.md', 'r') as fh:
6 |     long_description = fh.read()
7 | 
8 | setuptools.setup(
9 |     name = 'py-mjpeg',
10 |     version = '1.0.1',
11 |     author = 'Jan Janak',
12 |     author_email = 'jan@janakj.org',
13 |     description = 'MJPEG Streaming Tools',
14 |     long_description = long_description,
15 |     long_description_content_type = 'text/markdown',
16 |     url = 'https://github.com/janakj/py-mjpeg',
17 |     packages = setuptools.find_packages(),
18 |     extras_require = {
19 |         'asyncio': ['aiohttp'],
20 |     },
21 |     classifiers = [
22 |         'Programming Language :: Python :: 3',
23 |         'License :: OSI Approved :: MIT License',
24 |         'Operating System :: OS Independent',
25 |         'Development Status :: 5 - Production/Stable'
26 |     ],
27 |     python_requires='>=3'
28 | )
29 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2016-2020 Jan Janak
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 | 
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 | 
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # MJPEG Streaming Utilities for Python 3.x
2 | 
3 | This library provides utility functions for working with MJPEG streams.
4 | MJPEG is a simple streaming protocol that runs on top of HTTP and is used by many existing webcams.
5 | The library provides a threaded client and a streaming generator for Flask-based servers.
6 | 
7 | The library is only compatible with Python 3.x (tested on Python 3.4 and 3.5).
8 | It was tested with [mjpg_streamer](https://github.com/jacksonliam/mjpg-streamer).
9 | 
10 | ## Installation
11 | ```sh
12 | pip3 install py-mjpeg
13 | ```
14 | 
15 | ## Client API
16 | 
17 | The library provides a simple threaded streaming client in the file `mjpeg/client.py`.
18 | The client is designed to run in a separate background thread so that it can continue reading from the stream
19 | while the main thread is blocked.
20 | The client automatically reconnects to the server if it gets disconnected.
21 | 
22 | Here is a simple example:
23 | ```python
24 | from mjpeg.client import MJPEGClient
25 | 
26 | url='http://example.com:8080/?action=stream'
27 | 
28 | # Create a new client thread
29 | client = MJPEGClient(url)
30 | 
31 | # Allocate memory buffers for frames
32 | bufs = client.request_buffers(65536, 50)
33 | for b in bufs:
34 |     client.enqueue_buffer(b)
35 | 
36 | # Start the client in a background thread
37 | client.start()
38 | ```
39 | To obtain frame data, the application creates a list of memory buffers via `client.request_buffers`.
40 | Each buffer holds exactly one JPEG frame.
41 | The application then requests the buffers to be filled by calling `client.enqueue_buffer`.
42 | Once a buffer is enqueued, the application must no longer touch it.
43 | 
44 | To receive finished frames, the application calls `client.dequeue_buffer()` repeatedly:
45 | 
46 | ```python
47 | while True:
48 |     buf = client.dequeue_buffer()
49 | 
50 |     client.enqueue_buffer(buf)
51 | ```
52 | 
53 | The call to `dequeue_buffer` is blocking.
54 | Each buffer object provides the following attributes:
55 | 
56 | - **length**: The total number of bytes that can fit into the data portion of the buffer
57 | - **used**: The number of bytes occupied by frame data in this buffer
58 | - **timestamp**: The timestamp of the first byte within this buffer (obtained via time.time())
59 | - **sequence**: The frame's sequence number
60 | - **data**: The actual frame data
61 | 
62 | You can use a memory view to obtain frame data from the buffer:
63 | ```python
64 | data = memoryview(buf.data)[:buf.used]
65 | ```
66 | 
67 | When the client runs out of buffers to store frames, it will continue receiving the stream, but any frame data will be discarded.
68 | If the connection is lost or if the client detects a protocol error, it will try to reconnect to the stream automatically.
69 | If the client receives a frame that is larger than the destination buffer, the frame will be discarded.
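Putting the pieces above together, the following is a minimal sketch of a consumer loop
(it assumes the `client` created earlier is running; the drop detection via `sequence` is
only an illustration, not something the library does for you):

```python
last_seq = None

while True:
    buf = client.dequeue_buffer()        # blocks until a finished frame is available

    # Gaps in the sequence numbers indicate frames that were discarded, e.g.
    # because of buffer overruns. The counter restarts after a reconnect.
    if last_seq is not None and buf.sequence > last_seq + 1:
        print('dropped %d frame(s)' % (buf.sequence - last_seq - 1))
    last_seq = buf.sequence

    # Copy the JPEG data out of the buffer before handing the buffer back.
    jpeg = bytes(memoryview(buf.data)[:buf.used])
    # ... decode or store `jpeg` here ...

    client.enqueue_buffer(buf)           # return the buffer to the client
```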
70 | 
71 | The client can be stopped via its stop() method:
72 | ```python
73 | client.stop()
74 | ```
75 | 
76 | To restart the client after calling stop(), create a new instance, since a thread can only be started once:
77 | ```python
78 | # Create a new client thread
79 | client = MJPEGClient(url)
80 | ```
81 | 
82 | The client provides a method called `print_stats` which prints statistics useful for debugging:
83 | ```
84 | MJPEGClient:
85 |   URL             : http://example.com:8080/?action=stream
86 |   FPS             : 30
87 |   Buffer overruns : 2
88 |   Reconnects      : 0
89 |   Total frames    : 2984
90 |   Discarded frames: 704
91 |   Buffer queue    : 0
92 | ```
93 | 
94 | ## Server API
95 | The file `mjpeg/server.py` provides a response generator for Flask that can be used to serve an MJPEG stream built from an iterator of JPEG frames.
96 | 
97 | Here is a simple "echo" example which sends the frames received by the MJPEG client from the Client API section back out as an MJPEG stream (a complete end-to-end sketch follows below):
98 | ```python
99 | from flask import Flask
100 | from mjpeg.server import MJPEGResponse
101 | 
102 | app = Flask(__name__)
103 | 
104 | def relay():
105 |     while True:
106 |         buf = client.dequeue_buffer()
107 |         yield memoryview(buf.data)[:buf.used]
108 |         client.enqueue_buffer(buf)
109 | 
110 | @app.route('/')
111 | def stream():
112 |     return MJPEGResponse(relay())
113 | 
114 | if __name__ == '__main__':
115 |     app.run(host='0.0.0.0', port=8080)
116 | ```
117 | 
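For completeness, here is a hedged end-to-end sketch that wires the two APIs together: it relays an upstream MJPEG source (for example mjpg_streamer) through a local Flask server. The upstream URL, buffer size and buffer count are placeholders you will want to adjust.

```python
from flask import Flask
from mjpeg.client import MJPEGClient
from mjpeg.server import MJPEGResponse

UPSTREAM = 'http://example.com:8080/?action=stream'  # placeholder URL

app = Flask(__name__)

# One shared client: it reads the upstream stream in a background thread.
client = MJPEGClient(UPSTREAM)
for b in client.request_buffers(65536, 50):
    client.enqueue_buffer(b)
client.start()

def relay():
    while True:
        buf = client.dequeue_buffer()
        yield memoryview(buf.data)[:buf.used]
        client.enqueue_buffer(buf)

@app.route('/')
def stream():
    return MJPEGResponse(relay())

if __name__ == '__main__':
    # Note: all HTTP clients share the single MJPEGClient in this sketch, so
    # concurrent viewers will compete for frames.
    app.run(host='0.0.0.0', port=8080, threaded=True)
```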
--------------------------------------------------------------------------------
/mjpeg/client.py:
--------------------------------------------------------------------------------
1 | import traceback
2 | import urllib.request
3 | from time import time, sleep
4 | from queue import Queue
5 | from threading import Thread
6 | from collections import deque
7 | from mjpeg import open_mjpeg_stream, read_mjpeg_frame
8 | 
9 | 
10 | __all__ = ['Buffer', 'MJPEGClient']
11 | 
12 | 
13 | class Buffer(object):
14 |     '''Memory buffer object used by the MJPEG streamer.
15 | 
16 |     Attribute data contains a bytearray buffer. Attribute length contains the
17 |     total number of bytes that can be stored in the bytearray. This value can
18 |     also be obtained via len(bytearray).
19 | 
20 |     Attribute used is set to the number of bytes currently stored in the
21 |     buffer. Obviously, used <= length.
22 | 
23 |     Attribute timestamp is set to the timestamp (obtained via time.time()) of
24 |     the first byte in the buffer. For outgoing data the timestamp indicates to
25 |     the sender that the frame is not to be sent before this time.
26 | 
27 |     The attribute sequence contains the sequence number of the frame within a
28 |     stream. The number is set to 0 when a new stream is opened and incremented
29 |     for each frame, even if frames are skipped. This is useful for detecting
30 |     buffer overruns.
31 |     '''
32 |     def __init__(self, length):
33 |         self.length = length
34 |         self.used = 0
35 |         self.timestamp = 0
36 |         self.sequence = 0
37 |         self.data = bytearray(length)
38 | 
39 | 
40 | 
41 | class MJPEGClient(Thread):
42 |     '''A threaded MJPEG streaming client.
43 | 
44 |     This thread implements an MJPEG client. Given a URL, the streamer will open
45 |     the URL as an MJPEG stream and read frames from the stream until the stream
46 |     is closed or the streamer is stopped. When the stream closes or an error
47 |     occurs, the streamer automatically reconnects after 'reconnect_interval'.
48 |     '''
49 |     def __init__(self, url, log_interval=5, reconnect_interval=5):
50 |         Thread.__init__(self, None)
51 |         self.url = url
52 |         self.log_interval = log_interval
53 |         self.reconnect_interval = reconnect_interval
54 |         self.daemon = True
55 |         self._incoming = deque()
56 |         self._outgoing = Queue()
57 | 
58 |         # Internal variable used to control internal processing loops
59 |         self._stop_loops = False
60 | 
61 |         # Keep track of the total number of overruns in this attribute
62 |         self.overruns = 0
63 |         # Keep track of the total number of reconnect attempts in this attribute
64 |         self.reconnects = 0
65 | 
66 |         # When set to true, the streamer is out of buffers and is throwing away received frames.
67 |         self.in_overrun = False
68 | 
69 |         # The total number of received frames across stream reconnects
70 |         self.frames = 0
71 |         # The total number of discarded frames across stream reconnects
72 |         self.discarded_frames = 0
73 | 
74 |     def stop(self):
75 |         self._stop_loops = True
76 | 
77 |     def request_buffers(self, length, count):
78 |         rv = []
79 |         for i in range(0, count):
80 |             rv.append(Buffer(length))
81 |         return rv
82 | 
83 |     def enqueue_buffer(self, buf):
84 |         self._incoming.append(buf)
85 | 
86 |     def dequeue_buffer(self, *args, **kwargs):
87 |         buf = self._outgoing.get(*args, **kwargs)
88 |         self._outgoing.task_done()
89 |         return buf
90 | 
91 |     def _init_fps(self):
92 |         self.fps = 0
93 |         self._frame = 0
94 |         self._start = int(time())
95 |         self._cur = self._prev = self._start
96 | 
97 |     def _update_fps(self):
98 |         self._frame += 1
99 |         self._cur = int(time())
100 |         if self._cur >= self._prev + self.log_interval:
101 |             self.fps = int(self._frame / self.log_interval)
102 |             self._prev = self._cur
103 |             self._frame = 0
104 | 
105 |     def process_stream(self, stream):
106 |         boundary = open_mjpeg_stream(stream)
107 |         seq = 0
108 | 
109 |         while not self._stop_loops:
110 |             try:
111 |                 buf = self._incoming.pop()
112 |                 mem = buf.data
113 |                 length = buf.length
114 |                 self.in_overrun = False
115 |             except IndexError:
116 |                 buf = None
117 |                 mem = None
118 |                 length = 0
119 |                 if not self.in_overrun:
120 |                     self.overruns += 1
121 |                     self.in_overrun = True
122 | 
123 |             timestamp, clen = read_mjpeg_frame(stream, boundary, mem, length)
124 |             self._update_fps()
125 |             self.frames += 1
126 | 
127 |             if buf is not None and length >= clen:
128 |                 buf.timestamp = timestamp
129 |                 buf.used = clen
130 |                 buf.sequence = seq
131 |                 self._outgoing.put(buf)
132 |             else:
133 |                 self.discarded_frames += 1
134 | 
135 |             seq += 1
136 | 
137 |     def print_stats(self):
138 |         print('MJPEGClient:')
139 |         print('  URL             : %s' % self.url)
140 |         print('  FPS             : %d' % self.fps)
141 |         print('  Buffer overruns : %d' % self.overruns)
142 |         print('  Reconnects      : %d' % self.reconnects)
143 |         print('  Total frames    : %d' % self.frames)
144 |         print('  Discarded frames: %d' % self.discarded_frames)
145 |         print('  Buffer queue    : %d' % len(self._incoming))
146 | 
147 |     def run(self):
148 |         self._stop_loops = False
149 |         self._init_fps()
150 | 
151 |         while not self._stop_loops:
152 |             try:
153 |                 with urllib.request.urlopen(self.url) as s:
154 |                     self.process_stream(s)
155 |             except EOFError:
156 |                 pass
157 |             except Exception:
158 |                 traceback.print_exc()
159 | 
160 |             sleep(self.reconnect_interval)
161 |             self.reconnects += 1
162 | 
--------------------------------------------------------------------------------
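The next file, `mjpeg/__init__.py`, contains the low-level protocol helpers used by the clients above. As a rough, hedged illustration (not part of the package), they can also be driven directly from a `urllib` response; the URL and buffer size below are placeholders:

```python
import urllib.request
from mjpeg import open_mjpeg_stream, read_mjpeg_frame

url = 'http://example.com:8080/?action=stream'   # placeholder URL
buf = bytearray(65536)                           # must be large enough for one JPEG frame

with urllib.request.urlopen(url) as resp:
    boundary = open_mjpeg_stream(resp)           # validates the response, returns the part boundary
    try:
        while True:
            timestamp, clen = read_mjpeg_frame(resp, boundary, buf, len(buf))
            if clen <= len(buf):
                jpeg = bytes(buf[:clen])         # frames larger than the buffer were skipped
                # ... process `jpeg` ...
    except EOFError:
        pass                                     # a zero-length frame marks the end of the stream
```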
/mjpeg/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | from time import time
4 | import asyncio
5 | from typing import TYPE_CHECKING
6 | if TYPE_CHECKING:
7 |     import aiohttp
8 | 
9 | __all__ = [
10 |     'ProtoError',
11 |     'open_mjpeg_stream',
12 |     'read_mjpeg_frame']
13 | 
14 | 
15 | class ProtoError(Exception):
16 |     pass
17 | 
18 | 
19 | def read_header_line(stream):
20 |     '''Read one header line within the stream.
21 | 
22 |     The headers come right after the boundary marker and usually contain
23 |     headers like Content-Type and Content-Length which determine the type and
24 |     length of the data portion.
25 |     '''
26 |     return stream.readline().decode('utf-8').strip()
27 | 
28 | 
29 | def read_headers(stream, boundary):
30 |     '''Read and return stream headers.
31 | 
32 |     Each stream data packet starts with an empty line, followed by a boundary
33 |     marker, followed by zero or more headers, followed by an empty line,
34 |     followed by actual data. This function reads and parses the entire header
35 |     section. It returns a dictionary with all the headers. Header names are
36 |     converted to lower case. Each value in the dictionary is a list of header
37 |     field values.
38 |     '''
39 |     l = read_header_line(stream)
40 |     if l == '':
41 |         l = read_header_line(stream)
42 |     if l != boundary:
43 |         raise ProtoError('Boundary string expected, but not found')
44 | 
45 |     headers = {}
46 |     while True:
47 |         l = read_header_line(stream)
48 |         # An empty line indicates the end of the header section
49 |         if l == '':
50 |             break
51 | 
52 |         # Parse the header into lower case header name and header body
53 |         i = l.find(':')
54 |         if i == -1:
55 |             raise ProtoError('Invalid header line: ' + l)
56 |         name = l[:i].lower()
57 |         body = l[i+1:].strip()
58 | 
59 |         lst = headers.get(name, list())
60 |         lst.append(body)
61 |         headers[name] = lst
62 | 
63 |     return headers
64 | 
65 | 
66 | def skip_data(stream, left):
67 |     while left:
68 |         rv = stream.read(left)
69 |         if len(rv) == 0 and left:
70 |             raise ProtoError('Not enough data in chunk')
71 |         left -= len(rv)
72 | 
73 | 
74 | async def askip_data(stream: aiohttp.BodyPartReader, left: int):
75 |     buf = bytearray(left)
76 |     await aread_data(buf, stream, left)
77 | 
78 | 
79 | def read_data(buf, stream, length):
80 |     '''Read the given number of bytes into an existing bytearray buffer.
81 | 
82 |     The caller must supply the memory buffer and is responsible for ensuring
83 |     that the buffer is big enough. This function will read from the response
84 |     object repeatedly until it has read 'length' bytes. Throws an exception if
85 |     the response ends prematurely.
86 |     '''
87 |     v = memoryview(buf)[:length]
88 |     while len(v):
89 |         n = stream.readinto(v)
90 |         if n == 0 and len(v):
91 |             raise ProtoError('Not enough data in chunk')
92 |         v = v[n:]
93 |     return buf
94 | 
95 | 
96 | async def aread_data(buf, stream: aiohttp.BodyPartReader, length: int):
97 |     chunk_size = 8192
98 |     i = 0
99 |     remaining = length
100 |     while remaining > 0:
101 |         if remaining < chunk_size:
102 |             chunk_size = remaining
103 |         data = await stream.read_chunk(chunk_size)
104 |         n = len(data)
105 |         remaining -= n
106 |         if n > 0:
107 |             buf[i:i+n] = data
108 |         if n == 0 and remaining > 0:
109 |             raise ProtoError('Not enough data in chunk')
110 |         i += n
111 |     return buf
112 | 
113 | 
114 | def parse_content_length(headers):
115 |     # Parse and check Content-Length. The header must be present in
116 |     # each chunk, otherwise we wouldn't know how much data to read.
117 |     clen = headers.get('content-length', None)
118 |     try:
119 |         return int(clen[0])
120 |     except (ValueError, TypeError):
121 |         raise ProtoError('Invalid or missing Content-Length')
122 | 
123 | 
124 | def check_content_type(headers, type_):
125 |     ctype = headers.get('content-type', None)
126 |     if ctype is None:
127 |         raise ProtoError('Missing Content-Type header')
128 |     ctype = ctype[0]
129 | 
130 |     i = ctype.find(';')
131 |     if i != -1:
132 |         ctype = ctype[:i]
133 | 
134 |     if ctype != type_:
135 |         raise ProtoError('Wrong Content-Type: %s' % ctype)
136 | 
137 |     return True
138 | 
139 | def check_boundary_string(boundary: str) -> str:
140 |     if not boundary.startswith('--'):
141 |         return '--' + boundary
142 |     return boundary
143 | 
144 | 
145 | def open_mjpeg_stream(stream):
146 |     '''Open an MJPEG stream.
147 | 
148 |     Given a response from urllib, ensure that all the headers are correct,
149 |     obtain the boundary string that delimits frames and return it. Raises
150 |     ProtoError on errors.
151 |     '''
152 |     if stream.status != 200:
153 |         raise ProtoError('Invalid response from server: %d' % stream.status)
154 |     h = stream.info()
155 | 
156 |     boundary = h.get_param('boundary', header='content-type', unquote=True)
157 |     if boundary is None:
158 |         raise ProtoError('Content-Type header does not provide boundary string')
159 |     boundary = check_boundary_string(boundary)
160 | 
161 |     return boundary
162 | 
163 | 
164 | def read_mjpeg_frame(stream, boundary, buf, length, skip_big=True):
165 |     '''Read one MJPEG frame from the given stream.
166 | 
167 |     The stream must be a response object returned by urllib. This function
168 |     processes exactly one frame. End of stream events are detected when the
169 |     length of the next frame is 0. Ensures that Content-Type is present and
170 |     set to 'image/jpeg'.
171 | 
172 |     If skip_big is set to True, frames bigger than the destination buffer are
173 |     silently skipped. The function reads the data, but does not store it in
174 |     the provided buffer. If the flag is set to False, a ProtoError exception
175 |     will be raised.
176 | 
177 |     The function returns a tuple (timestamp, clen) where timestamp is the
178 |     timestamp of the first byte of the frame and clen is the total number of
179 |     bytes in the frame.
180 | 
181 |     To skip data when a buffer is not available, simply pass buf=None, length=0,
182 |     skip_big=True and the next frame will be silently popped from the stream
183 |     and discarded.
184 |     '''
185 |     hdr = read_headers(stream, boundary)
186 | 
187 |     clen = parse_content_length(hdr)
188 |     if clen == 0:
189 |         raise EOFError('End of stream reached')
190 | 
191 |     if clen > length and not skip_big:
192 |         raise ProtoError('Received chunk too big: %d' % clen)
193 | 
194 |     check_content_type(hdr, 'image/jpeg')
195 | 
196 |     timestamp = time()
197 |     if length >= clen:
198 |         read_data(buf, stream, clen)
199 |     else:
200 |         skip_data(stream, clen)
201 | 
202 |     return (timestamp, clen)
203 | 
204 | async def aread_mjpeg_frame(
205 |     stream: aiohttp.BodyPartReader,
206 |     buf,
207 |     length: int,
208 |     skip_big: bool = True
209 | ) -> tuple[float, int]:
210 |     '''Read one MJPEG frame from the given stream asynchronously.
211 | 
212 |     This function processes exactly one frame.
213 |     End of stream events are detected when the length of the next frame is 0.
214 |     Ensures that Content-Type is present and set to 'image/jpeg'.
215 | 
216 |     Arguments:
217 |         stream: An :class:`aiohttp.BodyPartReader` instance produced by
218 |             :class:`aiohttp.MultipartReader`
219 |         buf: A :class:`bytearray` to store the frame data in (e.g. ``Buffer.data``)
220 |         length: Maximum number of bytes to read
221 |         skip_big: If ``True`` and the frame data is larger than the given *length*,
222 |             the frame is skipped and the buffer is not written to.
223 |             If ``False`` and the frame data is larger than *length*,
224 |             a :class:`ProtoError` is raised. (default is ``True``)
225 | 
226 |     Returns
227 |     -------
228 |     timestamp : float
229 |         POSIX timestamp of the first byte of the frame
230 |     clen : int
231 |         Total number of bytes in the frame
232 | 
233 | 
234 |     To skip data when a buffer is not available, simply pass buf=None, length=0,
235 |     skip_big=True and the next frame will be silently popped from the stream
236 |     and discarded.
237 |     '''
238 | 
239 |     loop = asyncio.get_event_loop()
240 |     clen = stream._length
241 |     assert clen is not None
242 | 
243 |     if clen == 0:
244 |         raise EOFError('End of stream reached')
245 | 
246 |     if clen > length and not skip_big:
247 |         raise ProtoError('Received chunk too big: %d' % clen)
248 | 
249 |     ctype = stream.headers['Content-type'].split(';', 1)[0].strip()
250 |     if ctype != 'image/jpeg':
251 |         raise ProtoError('Wrong Content-Type: %s' % ctype)
252 | 
253 |     timestamp = loop.time()
254 | 
255 |     if length >= clen:
256 |         await aread_data(buf, stream, clen)
257 |     else:
258 |         await askip_data(stream, clen)
259 | 
260 |     return (timestamp, clen)
261 | 
--------------------------------------------------------------------------------
/mjpeg/aioclient.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import traceback
4 | import asyncio
5 | from collections import deque
6 | 
7 | import aiohttp
8 | 
9 | from mjpeg import aread_mjpeg_frame
10 | from .client import Buffer
11 | 
12 | 
13 | __all__ = ['AioMJPEGClient']
14 | 
15 | 
16 | class AioMJPEGClient:
17 |     """An asynchronous MJPEG streaming client.
18 | 
19 |     This is an MJPEG client implemented using :mod:`asyncio`.
20 |     Given a URL, the streamer will open the URL as an MJPEG stream and read
21 |     frames from it until the stream is closed or the streamer is stopped.
22 |     When the stream closes or an error occurs, the streamer automatically
23 |     reconnects after :attr:`reconnect_interval` until the number of attempts
24 |     reaches :attr:`reconnect_limit` (if set).
25 | 
26 |     Use as an :term:`asynchronous context manager` is supported (in an
27 |     :keyword:`async with` block). Note that this method does not allow an
28 |     existing :class:`~aiohttp.ClientSession` to be reused as described in
29 |     the :meth:`open` method.
30 | 
31 |     >>> import asyncio
32 |     >>> from mjpeg.aioclient import AioMJPEGClient
33 |     >>> url = 'http://{your_stream_url}'
34 |     >>> async def run_client():
35 |     ...     async with AioMJPEGClient(url) as client:
36 |     ...         ...
37 |     >>> asyncio.run(run_client())
38 | 
39 | 
40 |     Use as an :term:`asynchronous iterator` is also supported (in an
41 |     :keyword:`async for` statement). The loop will continue until the stream is
42 |     closed.
43 | 
44 |     >>> async def run_client():
45 |     ...     async with AioMJPEGClient(url) as client:
46 |     ...         async for bfr in client:
47 |     ...             # do something with the buffer
48 |     ...             ...
49 |     >>> asyncio.run(run_client())
50 | 
51 |     """
52 | 
53 |     url: str
54 |     """The stream url"""
55 | 
56 |     log_interval: int|float
57 |     """Interval between updates of the :attr:`fps` estimate"""
58 | 
59 |     reconnect_interval: int|float
60 |     """Time (in seconds) to wait before reconnecting"""
61 | 
62 |     reconnect_limit: int|None
63 |     """Maximum number of connection attempts to make before closing.
64 |     ``None`` will reconnect indefinitely until :meth:`close` is called.
65 |     """
66 | 
67 |     overruns: int
68 |     """Number of frames received with no buffer available to write to"""
69 | 
70 |     reconnects: int
71 |     """Current number of reconnects"""
72 | 
73 |     in_overrun: bool
74 |     """Flag indicating a buffer overrun state. This will be ``True`` if no
75 |     write buffers are available for received frames.
76 |     """
77 | 
78 |     frames: int
79 |     """Total number of frames received (whether discarded or not)"""
80 | 
81 |     discarded_frames: int
82 |     """Number of frames skipped while in the :attr:`overrun <in_overrun>` state"""
83 | 
84 |     fps: float
85 |     """Calculated estimate of the stream framerate"""
86 | 
87 |     exc: BaseException|None
88 |     """If an :class:`Exception` was caught while connecting to or processing
89 |     the stream, it will be available here. Otherwise ``None``
90 |     """
91 | 
92 |     exc_notify: asyncio.Condition
93 |     """A :class:`Condition <asyncio.Condition>` object that can be used to
94 |     :term:`await` for an Exception (available as :attr:`exc`).
95 |     Waiters will also be notified when the stream ends (for graceful shutdown).
96 |     """
97 | 
98 |     is_open: bool
99 |     """Flag indicating if the stream is currently open"""
100 | 
101 |     _stop_loops: bool
102 |     """Internal variable used to control internal processing loops"""
103 | 
104 |     _incoming: deque
105 |     """Container for :class:`.client.Buffer` objects to write received
106 |     frames to
107 |     """
108 | 
109 |     _outgoing: asyncio.Queue
110 |     """Container for :class:`.client.Buffer` objects with received frame data"""
111 | 
112 |     def __init__(
113 |         self,
114 |         url: str,
115 |         log_interval: int|float = .5,
116 |         reconnect_interval: int|float = 5,
117 |         reconnect_limit: int|None = None,
118 |     ):
119 |         self.url = url
120 |         self.log_interval = log_interval
121 |         self.reconnect_interval = reconnect_interval
122 |         self.reconnect_limit = reconnect_limit
123 |         self._incoming = deque()
124 |         self._outgoing = asyncio.Queue()
125 |         self._stop_loops = False
126 |         self.overruns = 0
127 |         self.reconnects = 0
128 |         self.in_overrun = False
129 |         self.frames = 0
130 |         self.discarded_frames = 0
131 |         self.loop = asyncio.get_event_loop()
132 |         self._run_task = None
133 |         self.exc = None
134 |         self.exc_notify = asyncio.Condition()
135 |         self.is_open = False
136 | 
137 |     async def open(self, session: aiohttp.ClientSession|None = None):
138 |         """Open the client and begin streaming in a background task
139 | 
140 |         Arguments:
141 |             session: If provided, an existing :class:`aiohttp.ClientSession`
142 |                 to use. This is recommended when streaming from multiple
143 |                 sources. If ``None``, a new instance will be created
144 | 
145 |         Note that if *session* is provided, it will be left open when the
146 |         client closes; closing it is the caller's responsibility.
147 | 
148 |         If *session* is not provided, the session created here will be
149 |         closed together with the client.
150 |         """
151 |         if self.is_open or self._run_task is not None:
152 |             raise RuntimeError(f'{self} already open')
153 |         self._run_task = asyncio.create_task(self.run(session))
154 | 
155 |     async def close(self):
156 |         """Close the client and stop any background tasks
157 |         """
158 |         self._stop_loops = True
159 |         t = self._run_task
160 |         try:
161 |             if t is not None:
162 |                 self._run_task = None
163 |                 if asyncio.current_task() is not t:
164 |                     t.cancel()
165 |                     try:
166 |                         await t
167 |                     except asyncio.CancelledError:
168 |                         pass
169 |                     assert not self.is_open
170 |         finally:
171 |             await self._outgoing.put(None)
172 | 
173 |     async def __aenter__(self):
174 |         await self.open()
175 |         return self
176 | 
177 |     async def __aexit__(self, *args):
178 |         await self.close()
179 | 
180 |     def __aiter__(self):
181 |         return self
182 | 
183 |     async def __anext__(self):
184 |         if self._stop_loops:
185 |             raise StopAsyncIteration
186 |         buf = await self.dequeue_buffer()
187 |         if buf is None:
188 |             raise StopAsyncIteration
189 |         return buf
190 | 
191 |     def request_buffers(self, length: int, count: int) -> list[Buffer]:
192 |         """Shortcut to create :class:`~.client.Buffer` instances
193 | 
194 |         Arguments:
195 |             length: The buffer length
196 |             count: Number of instances to create
197 | 
198 |         """
199 |         rv = []
200 |         for i in range(0, count):
201 |             rv.append(Buffer(length))
202 |         return rv
203 | 
204 |     def enqueue_buffer(self, buf: Buffer):
205 |         """Add a :class:`~.client.Buffer` instance to be written to
206 |         """
207 |         self._incoming.append(buf)
208 | 
209 |     async def dequeue_buffer(self) -> Buffer|None:
210 |         """Block until a :class:`~.client.Buffer` is available to be read from
211 |         and return it.
212 | 
213 |         If the stream is closed (by either the :meth:`close` method or
214 |         :attr:`reconnect_limit`), ``None`` is returned. This allows the caller
215 |         to handle graceful shutdown without using timeout logic
216 |         (such as :func:`asyncio.wait_for`).
217 | 
218 |         If successful, the returned instance will contain data received
219 |         from the stream.
220 | 
221 |         .. note::
222 | 
223 |             This is called behind the scenes within an :keyword:`async for` loop.
224 |             Unexpected behavior may occur if this method is called manually during
225 |             the loop.
226 |         """
227 |         buf = await self._outgoing.get()
228 |         self._outgoing.task_done()
229 |         return buf
230 | 
231 |     def _init_fps(self):
232 |         self.fps = 0
233 |         self._frame = 0
234 |         self._start = int(self.loop.time())
235 |         self._cur = self._prev = self._start
236 | 
237 |     def _update_fps(self):
238 |         self._frame += 1
239 |         self._cur = int(self.loop.time())
240 |         if self._cur >= self._prev + self.log_interval:
241 |             self.fps = int(self._frame / self.log_interval)
242 |             self._prev = self._cur
243 |             self._frame = 0
244 | 
245 |     async def process_stream(self, resp: aiohttp.ClientResponse):
246 |         reader = aiohttp.MultipartReader.from_response(resp)
247 | 
248 |         # Can be refactored to use the changes in PR #2 if merged
249 |         if reader.stream._boundary.startswith(b'----'):
250 |             reader.stream._boundary = reader.stream._boundary[2:]
251 | 
252 |         seq = 0
253 | 
254 |         while not self._stop_loops:
255 |             try:
256 |                 buf = self._incoming.pop()
257 |                 mem = buf.data
258 |                 length = buf.length
259 |                 self.in_overrun = False
260 |             except IndexError:
261 |                 buf = None
262 |                 mem = None
263 |                 length = 0
264 |                 if not self.in_overrun:
265 |                     self.overruns += 1
266 |                     self.in_overrun = True
267 | 
268 |             try:
269 |                 part = await asyncio.wait_for(reader.next(), timeout=10)
270 |             except asyncio.TimeoutError:
271 |                 raise
272 | 
273 |             if self._stop_loops:
274 |                 break
275 | 
276 |             if part is None:
277 |                 raise EOFError('End of stream reached')
278 |             timestamp, clen = await aread_mjpeg_frame(part, mem, length)
279 |             self._update_fps()
280 |             self.frames += 1
281 | 
282 |             if buf is not None and length >= clen:
283 |                 buf.timestamp = timestamp
284 |                 buf.used = clen
285 |                 buf.sequence = seq
286 |                 await self._outgoing.put(buf)
287 |             else:
288 |                 self.discarded_frames += 1
289 | 
290 |             seq += 1
291 | 
292 |     def print_stats(self):
293 |         print('AioMJPEGClient:')
294 |         print('  URL             : %s' % self.url)
295 |         print('  FPS             : %d' % self.fps)
296 |         print('  Buffer overruns : %d' % self.overruns)
297 |         print('  Reconnects      : %d' % self.reconnects)
298 |         print('  Total frames    : %d' % self.frames)
299 |         print('  Discarded frames: %d' % self.discarded_frames)
300 |         print('  Buffer queue    : %d' % len(self._incoming))
301 | 
302 |     async def run(self, session: aiohttp.ClientSession|None = None):
303 |         self.is_open = True
304 |         self._stop_loops = False
305 |         self._init_fps()
306 | 
307 |         async def set_exc(exc: BaseException|None):
308 |             async with self.exc_notify:
309 |                 self.exc = exc
310 |                 self.exc_notify.notify_all()
311 | 
312 |         if session is None:
313 |             session = aiohttp.ClientSession()
314 |             owns_session = True
315 |         else:
316 |             owns_session = False
317 | 
318 |         try:
319 |             while not self._stop_loops:
320 |                 if self.exc is not None:
321 |                     await set_exc(None)
322 |                 try:
323 |                     async with session.get(self.url) as resp:
324 |                         resp.raise_for_status()
325 |                         await self.process_stream(resp)
326 |                 except EOFError:
327 |                     pass
328 |                 except Exception as exc:
329 |                     traceback.print_exc()
330 |                     await set_exc(exc)
331 | 
332 |                 if not self._stop_loops:
333 |                     limit = self.reconnect_limit
334 |                     if limit is not None and self.reconnects >= limit:
335 |                         break
336 |                     await asyncio.sleep(self.reconnect_interval)
337 |                     self.reconnects += 1
338 |         finally:
339 |             self.is_open = False
340 |             if owns_session:
341 |                 await session.close()
342 |             await set_exc(None)
343 | 
--------------------------------------------------------------------------------
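For reference, here is a minimal, hedged usage sketch of `AioMJPEGClient`, mirroring the threaded example from the README (the URL is a placeholder and error handling is omitted):

```python
import asyncio
from mjpeg.aioclient import AioMJPEGClient

async def main():
    url = 'http://example.com:8080/?action=stream'   # placeholder URL
    async with AioMJPEGClient(url) as client:
        # Hand the client a pool of frame buffers, just like the threaded client.
        for b in client.request_buffers(65536, 50):
            client.enqueue_buffer(b)

        async for buf in client:                     # ends when the stream is closed
            jpeg = bytes(memoryview(buf.data)[:buf.used])
            # ... decode or store `jpeg` here ...
            client.enqueue_buffer(buf)               # return the buffer to the pool

asyncio.run(main())
```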