├── README.md
├── launch_pipeline
│   ├── pipeline_with_factory.py
│   ├── pipeline_with_parse_launch.py
│   ├── run_appsink.py
│   ├── run_appsrc.py
│   └── run_rtsp.py
└── requirements.txt

/README.md:
--------------------------------------------------------------------------------
### Description
- Simple examples of running GStreamer pipelines with Python

### Installation
```bash
python3 -m venv venv
source venv/bin/activate

pip install --upgrade wheel pip setuptools
pip install --upgrade --requirement requirements.txt
```

### Run examples

#### Run GStreamer pipeline in Python using Gst.ElementFactory
```bash
python launch_pipeline/pipeline_with_factory.py
```

#### Run GStreamer pipeline in Python using Gst.parse_launch
```bash
python launch_pipeline/pipeline_with_parse_launch.py -p "videotestsrc num-buffers=100 pattern=1 ! autovideosink"
```

#### Capture frames (np.ndarray) from any GStreamer pipeline
```bash
python launch_pipeline/run_appsink.py -p "videotestsrc num-buffers=100 ! capsfilter caps=video/x-raw,format=RGB,width=640,height=480 ! appsink emit-signals=True"
```

#### Push images (np.ndarray) to any GStreamer pipeline
```bash
python launch_pipeline/run_appsrc.py -p "appsrc emit-signals=True is-live=True caps=video/x-raw,format=RGB,width=640,height=480 ! queue ! videoconvert ! autovideosink" -n 1000
```

--------------------------------------------------------------------------------
/launch_pipeline/pipeline_with_factory.py:
--------------------------------------------------------------------------------
import traceback
import sys

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib  # noqa:F401,F402


# Initializes GStreamer, its variables and paths
Gst.init(sys.argv)


def on_message(bus: Gst.Bus, message: Gst.Message, loop: GLib.MainLoop):
    """Bus message handler.

    GStreamer message types and how to parse them:
    https://lazka.github.io/pgi-docs/Gst-1.0/flags.html#Gst.MessageType
    """
    mtype = message.type

    if mtype == Gst.MessageType.EOS:
        print("End of stream")
        loop.quit()

    elif mtype == Gst.MessageType.ERROR:
        err, debug = message.parse_error()
        print(err, debug)
        loop.quit()

    elif mtype == Gst.MessageType.WARNING:
        err, debug = message.parse_warning()
        print(err, debug)

    return True


# Gst.Pipeline https://lazka.github.io/pgi-docs/Gst-1.0/classes/Pipeline.html
pipeline = Gst.Pipeline()

# Creates element by name
# https://lazka.github.io/pgi-docs/Gst-1.0/classes/ElementFactory.html#Gst.ElementFactory.make
src_name = "my_video_test_src"
src = Gst.ElementFactory.make("videotestsrc", src_name)
src.set_property("num-buffers", 50)
src.set_property("pattern", "ball")

sink = Gst.ElementFactory.make("gtksink")

pipeline.add(src, sink)

src.link(sink)

# https://lazka.github.io/pgi-docs/Gst-1.0/classes/Bus.html
bus = pipeline.get_bus()

# allow bus to emit messages to main thread
bus.add_signal_watch()

# Start pipeline
pipeline.set_state(Gst.State.PLAYING)

# Init GLib main loop to handle GStreamer bus events
loop = GLib.MainLoop()

# Add handler to specific signal
# https://lazka.github.io/pgi-docs/GObject-2.0/classes/Object.html#GObject.Object.connect
| bus.connect("message", on_message, loop) 65 | 66 | try: 67 | loop.run() 68 | except Exception: 69 | traceback.print_exc() 70 | loop.quit() 71 | 72 | # Stop Pipeline 73 | pipeline.set_state(Gst.State.NULL) 74 | del pipeline 75 | -------------------------------------------------------------------------------- /launch_pipeline/pipeline_with_parse_launch.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import traceback 3 | import argparse 4 | 5 | import gi 6 | gi.require_version('Gst', '1.0') 7 | from gi.repository import Gst, GObject # noqa:F401,F402 8 | 9 | 10 | # Initializes Gstreamer, it's variables, paths 11 | Gst.init(sys.argv) 12 | 13 | DEFAULT_PIPELINE = "videotestsrc num-buffers=100 ! autovideosink" 14 | 15 | ap = argparse.ArgumentParser() 16 | ap.add_argument("-p", "--pipeline", required=False, 17 | default=DEFAULT_PIPELINE, help="Gstreamer pipeline without gst-launch") 18 | 19 | args = vars(ap.parse_args()) 20 | 21 | 22 | def on_message(bus: Gst.Bus, message: Gst.Message, loop: GObject.MainLoop): 23 | mtype = message.type 24 | """ 25 | Gstreamer Message Types and how to parse 26 | https://lazka.github.io/pgi-docs/Gst-1.0/flags.html#Gst.MessageType 27 | """ 28 | if mtype == Gst.MessageType.EOS: 29 | print("End of stream") 30 | loop.quit() 31 | 32 | elif mtype == Gst.MessageType.ERROR: 33 | err, debug = message.parse_error() 34 | print(err, debug) 35 | loop.quit() 36 | 37 | elif mtype == Gst.MessageType.WARNING: 38 | err, debug = message.parse_warning() 39 | print(err, debug) 40 | 41 | return True 42 | 43 | 44 | command = args["pipeline"] 45 | 46 | # Gst.Pipeline https://lazka.github.io/pgi-docs/Gst-1.0/classes/Pipeline.html 47 | # https://lazka.github.io/pgi-docs/Gst-1.0/functions.html#Gst.parse_launch 48 | pipeline = Gst.parse_launch(command) 49 | 50 | # https://lazka.github.io/pgi-docs/Gst-1.0/classes/Bus.html 51 | bus = pipeline.get_bus() 52 | 53 | # allow bus to emit messages to main thread 54 | bus.add_signal_watch() 55 | 56 | # Start pipeline 57 | pipeline.set_state(Gst.State.PLAYING) 58 | 59 | # Init GObject loop to handle Gstreamer Bus Events 60 | loop = GObject.MainLoop() 61 | 62 | # Add handler to specific signal 63 | # https://lazka.github.io/pgi-docs/GObject-2.0/classes/Object.html#GObject.Object.connect 64 | bus.connect("message", on_message, loop) 65 | 66 | try: 67 | loop.run() 68 | except Exception: 69 | traceback.print_exc() 70 | loop.quit() 71 | 72 | # Stop Pipeline 73 | pipeline.set_state(Gst.State.NULL) 74 | -------------------------------------------------------------------------------- /launch_pipeline/run_appsink.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import traceback 3 | import argparse 4 | import typing as typ 5 | import time 6 | import attr 7 | 8 | import numpy as np 9 | 10 | from gstreamer import GstContext, GstPipeline, GstApp, Gst, GstVideo 11 | import gstreamer.utils as utils 12 | 13 | # Converts list of plugins to gst-launch string 14 | # ['plugin_1', 'plugin_2', 'plugin_3'] => plugin_1 ! plugin_2 ! 
plugin_3 15 | DEFAULT_PIPELINE = utils.to_gst_string([ 16 | "videotestsrc num-buffers=100", 17 | "capsfilter caps=video/x-raw,format=RGB,width=640,height=480", 18 | "queue", 19 | "appsink emit-signals=True" 20 | ]) 21 | 22 | ap = argparse.ArgumentParser() 23 | ap.add_argument("-p", "--pipeline", required=False, 24 | default=DEFAULT_PIPELINE, help="Gstreamer pipeline without gst-launch") 25 | 26 | args = vars(ap.parse_args()) 27 | 28 | command = args["pipeline"] 29 | 30 | 31 | def extract_buffer(sample: Gst.Sample) -> np.ndarray: 32 | """Extracts Gst.Buffer from Gst.Sample and converts to np.ndarray""" 33 | 34 | buffer = sample.get_buffer() # Gst.Buffer 35 | 36 | print(buffer.pts, buffer.dts, buffer.offset) 37 | 38 | caps_format = sample.get_caps().get_structure(0) # Gst.Structure 39 | 40 | # GstVideo.VideoFormat 41 | video_format = GstVideo.VideoFormat.from_string( 42 | caps_format.get_value('format')) 43 | 44 | w, h = caps_format.get_value('width'), caps_format.get_value('height') 45 | c = utils.get_num_channels(video_format) 46 | 47 | buffer_size = buffer.get_size() 48 | shape = (h, w, c) if (h * w * c == buffer_size) else buffer_size 49 | array = np.ndarray(shape=shape, buffer=buffer.extract_dup(0, buffer_size), 50 | dtype=utils.get_np_dtype(video_format)) 51 | 52 | return np.squeeze(array) # remove single dimension if exists 53 | 54 | 55 | def on_buffer(sink: GstApp.AppSink, data: typ.Any) -> Gst.FlowReturn: 56 | """Callback on 'new-sample' signal""" 57 | # Emit 'pull-sample' signal 58 | # https://lazka.github.io/pgi-docs/GstApp-1.0/classes/AppSink.html#GstApp.AppSink.signals.pull_sample 59 | 60 | sample = sink.emit("pull-sample") # Gst.Sample 61 | 62 | if isinstance(sample, Gst.Sample): 63 | array = extract_buffer(sample) 64 | print( 65 | "Received {type} with shape {shape} of type {dtype}".format(type=type(array), 66 | shape=array.shape, 67 | dtype=array.dtype)) 68 | return Gst.FlowReturn.OK 69 | 70 | return Gst.FlowReturn.ERROR 71 | 72 | 73 | with GstContext(): # create GstContext (hides MainLoop) 74 | # create GstPipeline (hides Gst.parse_launch) 75 | with GstPipeline(command) as pipeline: 76 | appsink = pipeline.get_by_cls(GstApp.AppSink)[0] # get AppSink 77 | # subscribe to signal 78 | appsink.connect("new-sample", on_buffer, None) 79 | while not pipeline.is_done: 80 | time.sleep(.1) 81 | -------------------------------------------------------------------------------- /launch_pipeline/run_appsrc.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import traceback 3 | import argparse 4 | import typing as typ 5 | import random 6 | import time 7 | from fractions import Fraction 8 | 9 | import numpy as np 10 | 11 | from gstreamer import GstContext, GstPipeline, GstApp, Gst, GstVideo, GLib, GstVideoSink 12 | import gstreamer.utils as utils 13 | 14 | VIDEO_FORMAT = "RGB" 15 | WIDTH, HEIGHT = 640, 480 16 | FPS = Fraction(30) 17 | GST_VIDEO_FORMAT = GstVideo.VideoFormat.from_string(VIDEO_FORMAT) 18 | 19 | 20 | def fraction_to_str(fraction: Fraction) -> str: 21 | """Converts fraction to str""" 22 | return '{}/{}'.format(fraction.numerator, fraction.denominator) 23 | 24 | 25 | def parse_caps(pipeline: str) -> dict: 26 | """Parses appsrc's caps from pipeline string into a dict 27 | 28 | :param pipeline: "appsrc caps=video/x-raw,format=RGB,width=640,height=480 ! videoconvert ! autovideosink" 29 | 30 | Result Example: 31 | { 32 | "width": "640", 33 | "height": "480" 34 | "format": "RGB", 35 | "fps": "30/1", 36 | ... 
        }
    """

    try:
        # typ.List[typ.Tuple[str, str]]
        caps = [prop for prop in pipeline.split("!")[0].split(" ")
                if "caps" in prop][0]
        return dict([p.split('=') for p in caps.split(',') if "=" in p])
    except IndexError:
        return None


FPS_STR = fraction_to_str(FPS)
DEFAULT_CAPS = "video/x-raw,format={VIDEO_FORMAT},width={WIDTH},height={HEIGHT},framerate={FPS_STR}".format(**locals())

# Converts list of plugins to gst-launch string
# ['plugin_1', 'plugin_2', 'plugin_3'] => plugin_1 ! plugin_2 ! plugin_3
DEFAULT_PIPELINE = utils.to_gst_string([
    "appsrc emit-signals=True is-live=True caps={DEFAULT_CAPS}".format(**locals()),
    "queue",
    "videoconvert",
    "autovideosink"
])


ap = argparse.ArgumentParser()
ap.add_argument("-p", "--pipeline", required=False,
                default=DEFAULT_PIPELINE,
                help="GStreamer pipeline description (without the gst-launch-1.0 prefix)")

ap.add_argument("-n", "--num_buffers", required=False,
                default=100, help="Number of buffers to push")

args = vars(ap.parse_args())

command = args["pipeline"]

args_caps = parse_caps(command)
NUM_BUFFERS = int(args['num_buffers'])

WIDTH = int(args_caps.get("width", WIDTH))
HEIGHT = int(args_caps.get("height", HEIGHT))
FPS = Fraction(args_caps.get("framerate", FPS))

GST_VIDEO_FORMAT = GstVideo.VideoFormat.from_string(
    args_caps.get("format", VIDEO_FORMAT))
CHANNELS = utils.get_num_channels(GST_VIDEO_FORMAT)
DTYPE = utils.get_np_dtype(GST_VIDEO_FORMAT)

FPS_STR = fraction_to_str(FPS)
CAPS = "video/x-raw,format={VIDEO_FORMAT},width={WIDTH},height={HEIGHT},framerate={FPS_STR}".format(**locals())

with GstContext():  # create GstContext (hides MainLoop)

    pipeline = GstPipeline(command)

    def on_pipeline_init(self):
        """Setup AppSrc element"""
        appsrc = self.get_by_cls(GstApp.AppSrc)[0]  # get AppSrc

        # instructs appsrc that we will be dealing with timed buffers
        appsrc.set_property("format", Gst.Format.TIME)

        # instructs appsrc to block pushing buffers until the queued ones are processed,
        # which avoids a huge internal queue building up in appsrc
        appsrc.set_property("block", True)

        # set input format (caps)
        appsrc.set_caps(Gst.Caps.from_string(CAPS))

    # override on_pipeline_init to set specific properties before launching pipeline
    pipeline._on_pipeline_init = on_pipeline_init.__get__(pipeline)

    try:
        pipeline.startup()
        appsrc = pipeline.get_by_cls(GstApp.AppSrc)[0]  # GstApp.AppSrc

        pts = 0  # buffers presentation timestamp
        duration = int(10**9 / (FPS.numerator / FPS.denominator))  # frame duration in nanoseconds
        for _ in range(NUM_BUFFERS):

            # create random np.ndarray
            array = np.random.randint(low=0, high=255,
                                      size=(HEIGHT, WIDTH, CHANNELS), dtype=DTYPE)

            # convert np.ndarray to Gst.Buffer
            gst_buffer = utils.ndarray_to_gst_buffer(array)

            # set pts and duration to be able to record video, calculate fps
            pts += duration  # Increase pts by duration
            gst_buffer.pts = pts
            gst_buffer.duration = duration

            # emit 'push-buffer' event with Gst.Buffer
            appsrc.emit("push-buffer", gst_buffer)

        # emit 'end-of-stream' event
        appsrc.emit("end-of-stream")

        while not pipeline.is_done:
            time.sleep(.1)
    except Exception as e:
        print("Error: ", e)
    finally:
        pipeline.shutdown()

--------------------------------------------------------------------------------
/launch_pipeline/run_rtsp.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding:utf-8 vi:ts=4:noexpandtab
# Simple RTSP server. Run as-is, or edit the generator / pipeline constants below to change the stream

import time
import abc
import numpy as np
import typing as typ
from fractions import Fraction
import functools

import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
gi.require_version('GstApp', '1.0')
gi.require_version('GstVideo', '1.0')

from gi.repository import Gst, GLib, GstRtspServer, GObject, GstApp, GstVideo  # noqa:F401,F402

import gstreamer as gst  # noqa:F401,F402

# Examples
# https://github.com/tamaggo/gstreamer-examples
# https://github.com/GStreamer/gst-rtsp-server/tree/master/examples
# https://stackoverflow.com/questions/47396372/write-opencv-frames-into-gstreamer-rtsp-server-pipeline


VIDEO_FORMAT = "RGB"
WIDTH, HEIGHT = 640, 480
FPS = Fraction(30)
GST_VIDEO_FORMAT = GstVideo.VideoFormat.from_string(VIDEO_FORMAT)


class GstBufferGenerator(metaclass=abc.ABCMeta):

    @abc.abstractmethod
    def get(self) -> Gst.Buffer:
        pass

    @property
    def caps(self) -> Gst.Caps:
        pass

    def __enter__(self):
        self.startup()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.shutdown()

    def startup(self):
        pass

    def shutdown(self):
        pass


class FakeGstBufferGenerator(GstBufferGenerator):

    def __init__(self, *, width: int, height: int,
                 fps: typ.Union[Fraction, int] = Fraction('30/1'),
                 video_type: gst.gst_tools.VideoType = gst.gst_tools.VideoType.VIDEO_RAW,
                 video_frmt: GstVideo.VideoFormat = GstVideo.VideoFormat.RGB):

        self._width = width
        self._height = height

        self._fps = Fraction(fps)

        self._pts = 0
        self._dts = GLib.MAXUINT64

        # frame duration in nanoseconds
        self._duration = int(Gst.SECOND / (self._fps.numerator / self._fps.denominator))
        self._video_frmt = video_frmt
        self._video_type = video_type

        # Gst.Caps
        self._caps = gst.gst_tools.gst_video_format_plugin(
            width=width, height=height, fps=self._fps,
            video_type=video_type, video_frmt=video_frmt
        )

    @property
    def caps(self) -> Gst.Caps:
        return Gst.Caps.from_string(self._caps)

    def get(self) -> Gst.Buffer:

        np_dtype = gst.utils.get_np_dtype(self._video_frmt)
        channels = gst.utils.get_num_channels(self._video_frmt)

        array = np.random.randint(low=0, high=255,
                                  size=(self._height, self._width, channels), dtype=np_dtype)

        self._pts += self._duration

        gst_buffer = gst.utils.ndarray_to_gst_buffer(array)  # Gst.Buffer

        gst_buffer.pts = self._pts
        gst_buffer.dts = self._dts
        gst_buffer.duration = self._duration
        gst_buffer.offset = self._pts // self._duration

        return gst_buffer


class GstBufferGeneratorFromPipeline(GstBufferGenerator):

    def __init__(self, gst_launch: str, loop: bool = False):
        self._loop = loop
        self._gst_launch = gst_launch
        self._num_loops = 0

        self._pipeline = None  # gst.GstVideoSource

    def startup(self):
        self._pipeline = gst.GstVideoSource(self._gst_launch, max_buffers_size=8)
        self._pipeline.startup()

        self._num_loops += 1
        print(f"Starting loop {self._num_loops}")

    def shutdown(self):
        if isinstance(self._pipeline, gst.GstVideoSource):
            self._pipeline.shutdown()

    @property
    def caps(self) -> Gst.Caps:
        appsink = self._pipeline.get_by_cls(GstApp.AppSink)[0]
        return appsink.sinkpad.get_current_caps()

    def get(self) -> Gst.Buffer:

        buffer = self._pipeline.pop()
        if not buffer:
            if self._pipeline.is_done and self._loop:
                self.shutdown()
                self.startup()
            return None

        gst_buffer = gst.utils.ndarray_to_gst_buffer(buffer.data)  # Gst.Buffer

        gst_buffer.pts = buffer.pts
        gst_buffer.dts = buffer.dts
        gst_buffer.duration = buffer.duration
        gst_buffer.offset = buffer.offset

        return gst_buffer

    def clone(self) -> 'GstBufferGeneratorFromPipeline':
        return GstBufferGeneratorFromPipeline(self._gst_launch, self._loop)


def get_child_by_cls(element: Gst.Element, cls: GObject.GType) -> typ.List[Gst.Element]:
    """ Get Gst.Element[] from pipeline by GType """
    return [e for e in element.iterate_elements() if isinstance(e, cls)]


# https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMediaFactory.html#gstrtspserver-rtspmediafactory
class RTSPMediaFactoryCustom(GstRtspServer.RTSPMediaFactory):

    def __init__(self, source: typ.Callable[..., GstBufferGenerator]):
        super().__init__()

        self._source = source
        self._sources = {}

    def do_create_element(self, url) -> Gst.Element:
        # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMediaFactory.html#GstRtspServer.RTSPMediaFactory.do_create_element

        src = "appsrc emit-signals=True is-live=True"
        encoder = "x264enc tune=zerolatency"  # pass=quant
        color_convert = "videoconvert n-threads=0 ! video/x-raw,format=I420"
        rtp = "rtph264pay config-interval=1 name=pay0 pt=96"
        pipeline = "{src} ! {color_convert} ! {encoder} ! queue max-size-buffers=8 ! {rtp}".format(**locals())
{rtp}".format(**locals()) 179 | print(f"gst-launch-1.0 {pipeline}") 180 | return Gst.parse_launch(pipeline) 181 | 182 | def on_need_data(self, src: GstApp.AppSrc, length: int): 183 | """ Callback on "need-data" signal 184 | 185 | Signal: 186 | https://lazka.github.io/pgi-docs/GstApp-1.0/classes/AppSrc.html#GstApp.AppSrc.signals.need_data 187 | :param length: amount of bytes needed 188 | """ 189 | 190 | buffer = None 191 | while not buffer: # looping pipeline 192 | buffer = self._sources[src.name].get() # Gst.Buffer 193 | time.sleep(.1) 194 | 195 | retval = src.emit('push-buffer', buffer) 196 | # print(f'Pushed buffer, frame {buffer.offset}, duration {Gst.TIME_ARGS(buffer.pts)}') 197 | if retval != Gst.FlowReturn.OK: 198 | print(retval) 199 | 200 | def do_configure(self, rtsp_media: GstRtspServer.RTSPMedia): 201 | # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMedia.html#GstRtspServer.RTSPMedia 202 | 203 | appsrc = get_child_by_cls(rtsp_media.get_element(), GstApp.AppSrc)[0] 204 | 205 | self._sources[appsrc.name] = self._source() 206 | self._sources[appsrc.name].startup() 207 | time.sleep(.5) # wait to start 208 | 209 | # this instructs appsrc that we will be dealing with timed buffer 210 | appsrc.set_property("format", Gst.Format.TIME) 211 | 212 | # instructs appsrc to block pushing buffers until ones in queue are preprocessed 213 | # allows to avoid huge queue internal queue size in appsrc 214 | appsrc.set_property("block", True) 215 | 216 | appsrc.set_property("caps", self._sources[appsrc.name].caps) 217 | 218 | appsrc.connect('need-data', self.on_need_data) 219 | 220 | def __del__(self): 221 | for source in self._sources.values(): 222 | source.shutdown() 223 | 224 | 225 | class GstServer(): 226 | def __init__(self, shared: bool = False): 227 | # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPServer.html 228 | self.server = GstRtspServer.RTSPServer() 229 | 230 | # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMediaFactory.html#GstRtspServer.RTSPMediaFactory.set_shared 231 | # f.set_shared(True) 232 | 233 | # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPServer.html#GstRtspServer.RTSPServer.get_mount_points 234 | # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMountPoints.html#GstRtspServer.RTSPMountPoints 235 | m = self.server.get_mount_points() 236 | 237 | generator = functools.partial(FakeGstBufferGenerator, width=WIDTH, height=HEIGHT, 238 | fps=FPS, video_frmt=GST_VIDEO_FORMAT) 239 | 240 | # pipeline 241 | # pipeline = "videotestsrc num-buffers=1000 ! capsfilter caps=video/x-raw,format=RGB,width=640,height=480 ! appsink emit-signals=True" 242 | 243 | # path = "/home/taras/coder/datai/production/sales_zone/data/videos/letoile/sales_zone_letoile.mp4" 244 | # pipeline = f"filesrc location={path} ! decodebin ! videoconvert n-threads=0 ! video/x-raw,format=RGB ! 

        # generator = functools.partial(
        #     GstBufferGeneratorFromPipeline, gst_launch=pipeline, loop=True
        # )

        # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPMountPoints.html#GstRtspServer.RTSPMountPoints.add_factory
        mount_point = "/stream.rtp"
        factory = RTSPMediaFactoryCustom(generator)
        factory.set_shared(shared)
        m.add_factory(mount_point, factory)  # adding streams

        port = self.server.get_property("service")
        print(f"rtsp://localhost:{port}{mount_point}")

        # https://lazka.github.io/pgi-docs/GstRtspServer-1.0/classes/RTSPServer.html#GstRtspServer.RTSPServer.attach
        self.server.attach(None)


if __name__ == '__main__':
    with gst.GstContext():
        s = GstServer(shared=True)

        while True:
            time.sleep(.1)

--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
gstreamer-python @ git+https://github.com/jackersson/gstreamer-python.git#egg=gstreamer-python
--------------------------------------------------------------------------------
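
`run_appsink.py` above relies on the `gstreamer-python` helpers (`GstContext`, `GstPipeline`, `utils`). For reference, here is a minimal sketch of the same frame-capture technique using only PyGObject. The element and signal names (`appsink`, `new-sample`, `pull-sample`) are standard GStreamer APIs; the fixed RGB caps, the `sink` element name, and the bare-bones error handling are choices of this sketch, not part of the repository.

```python
import sys
import numpy as np

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstApp', '1.0')
from gi.repository import Gst, GLib, GstApp  # noqa:F401

Gst.init(sys.argv)

# RGB frames only, so each buffer maps directly onto an HxWx3 uint8 array
PIPELINE = ("videotestsrc num-buffers=100 "
            "! video/x-raw,format=RGB,width=640,height=480 "
            "! appsink name=sink emit-signals=true")


def on_new_sample(appsink: GstApp.AppSink) -> Gst.FlowReturn:
    """Pull a Gst.Sample and view its buffer as an np.ndarray."""
    sample = appsink.emit("pull-sample")
    if sample is None:
        return Gst.FlowReturn.ERROR

    caps = sample.get_caps().get_structure(0)
    h, w = caps.get_value("height"), caps.get_value("width")

    buffer = sample.get_buffer()
    data = buffer.extract_dup(0, buffer.get_size())  # copy buffer contents as bytes
    frame = np.frombuffer(data, dtype=np.uint8).reshape(h, w, 3)  # RGB => 3 channels
    print("frame", frame.shape, frame.dtype)
    return Gst.FlowReturn.OK


pipeline = Gst.parse_launch(PIPELINE)
sink = pipeline.get_by_name("sink")
sink.connect("new-sample", on_new_sample)

loop = GLib.MainLoop()
bus = pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message::eos", lambda *_: loop.quit())
bus.connect("message::error", lambda *_: loop.quit())

pipeline.set_state(Gst.State.PLAYING)
try:
    loop.run()
finally:
    pipeline.set_state(Gst.State.NULL)
```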
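`utils.ndarray_to_gst_buffer` (used by `run_appsrc.py` and `run_rtsp.py`) comes from the `gstreamer-python` package. The sketch below shows a common way to wrap a NumPy frame into a `Gst.Buffer` with plain PyGObject; it is an assumption about what such a helper typically does, not the package's actual source.

```python
import numpy as np

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst


def ndarray_to_gst_buffer(array: np.ndarray) -> Gst.Buffer:
    """Wrap a copy of the ndarray's bytes into a Gst.Buffer.

    Gst.init() must have been called before creating buffers.
    """
    return Gst.Buffer.new_wrapped(array.tobytes())
```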
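To check the stream produced by `run_rtsp.py`, any RTSP client will do. A minimal test with `gst-launch-1.0`, assuming the default `GstRtspServer.RTSPServer` port 8554 and the `/stream.rtp` mount point configured above:

```bash
# plays the H.264 stream served by run_rtsp.py (the script prints the exact URL on startup)
gst-launch-1.0 playbin uri=rtsp://localhost:8554/stream.rtp
```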