├── .gitignore ├── LICENSE ├── README.md ├── package.json └── src ├── tutorial-1.js ├── tutorial-2.js ├── tutorial-3.js ├── tutorial-4.js ├── tutorial-5.js ├── tutorial-6.js ├── tutorial-7.js ├── tutorial-8.js ├── tutorial-9.js └── typefind-example.js /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 wotzlaff 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # node-gstreamer-tutorial 2 | In this repo we provide a Node.js version of the GStreamer "Basic tutorial" series, using the GObject Introspection bindings provided by [node-gtk](https://github.com/romgrk/node-gtk). Not every part of the GStreamer API works through these bindings yet, so some examples are still incomplete; stay tuned for a fully working version. 3 | 4 | ## Prerequisites 5 | Below we assume that you are using Debian or Ubuntu and the most recent version of `node-gtk`. 6 | 7 | Install the dependencies: 8 | * For the installation of Node.js refer to [nodesource/distributions](https://github.com/nodesource/distributions/blob/master/README.md#debinstall). 9 | * Install the required system packages: 10 | ```bash 11 | apt install git build-essential gobject-introspection libgstreamer1.0-0 gstreamer1.0-plugins-base gstreamer1.0-plugins-good gir1.2-gstreamer-1.0 gir1.2-gtk-3.0 libcairo2-dev libgirepository1.0-dev 12 | ``` 13 | * Clone and install this repository: 14 | ```bash 15 | git clone https://github.com/wotzlaff/node-gstreamer-tutorial 16 | cd node-gstreamer-tutorial 17 | npm i 18 | ``` 19 | 20 | *Note:* Some dependencies might still be missing from this list. 21 | 22 | ## Usage 23 | Run `node src/tutorial-x.js` to start example `x`, e.g. `node src/tutorial-1.js` for the first one. 24 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-gstreamer-examples", 3 | "version": "0.1.1", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "keywords": [ 10 | "gstreamer" 11 | ], 12 | "author": "Nico Strasdat ", 13 | "license": "MIT", 14 | "dependencies": { 15 | "node-gtk": "^0.12.0" 16 | } 17 | } -------------------------------------------------------------------------------- /src/tutorial-1.js: -------------------------------------------------------------------------------- 1 | const gi = require('node-gtk') 2 | const Gst = gi.require('Gst', '1.0') 3 | gi.startLoop() 4 | Gst.init() 5 | 6 | const pipeline = Gst.parseLaunch('playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm') 7 | 8 | pipeline.setState(Gst.State.PLAYING) 9 | 10 | const bus = pipeline.getBus() 11 | 12 | let done = false 13 | while (!done) { 14 | const msg = bus.timedPopFiltered(Gst.CLOCK_TIME_NONE, Gst.MessageType.ERROR | Gst.MessageType.EOS | Gst.MessageType.STATE_CHANGED) 15 | if (msg) { 16 | switch (msg.type) { 17 | case Gst.MessageType.ERROR: 18 | const [err, debug] = msg.parseError() 19 | console.error(`Error received from element ${msg.src.getName()}: ${err.message}`) 20 | done = true 21 | break 22 | case Gst.MessageType.EOS: 23 | console.log('End-Of-Stream reached.') 24 | done = true 25 | break 26 | case Gst.MessageType.STATE_CHANGED: 27 | const msgSrc = msg.src.getName() 28 | const [oldState, newState, pendingState] = msg.parseStateChanged() 29 | console.log(`State of ${msgSrc} changed: ${Gst.Element.stateGetName(oldState)} -> ${Gst.Element.stateGetName(newState)}.`) 30 | break 31 | default: 32 | console.error('Unexpected message received.') 33 | break 34 | } 35 | } else { 36 | console.log('Timeout?') 37 | } 38 | } 39 | 40 | 41 | pipeline.setState(Gst.State.NULL) 42 | -------------------------------------------------------------------------------- /src/tutorial-2.js: -------------------------------------------------------------------------------- 1 | const gi = require('node-gtk') 2 | const Gst = gi.require('Gst', '1.0') 3 | gi.startLoop() 4 | Gst.init() 5 | 6 | const pipeline = new Gst.Pipeline() 7 | const src = Gst.ElementFactory.make('videotestsrc') 8 | const sink = Gst.ElementFactory.make('autovideosink') 9 | if (!pipeline || !src || !sink) { 10 | throw new Error('Not all elements could be created.') 11 | } 12 | 13 | // In the original example `gst_bin_add_many` is used... 14 | pipeline.add(src) 15 | pipeline.add(sink) 16 | if (!src.link(sink)) { 17 | throw new Error('Elements could not be linked.') 18 | } 19 | 20 | src.pattern = 21 // = pinwheel 21 | src.kt = 50 22 | 23 | const ret = pipeline.setState(Gst.State.PLAYING) 24 | if (ret === Gst.StateChangeReturn.FAILURE) { 25 | throw new Error('Unable to set the pipeline to the playing state.') 26 | } 27 | 28 | const bus = pipeline.getBus() 29 | const msg = bus.timedPopFiltered(Gst.CLOCK_TIME_NONE, Gst.MessageType.ERROR | Gst.MessageType.EOS) 30 | if (msg) { 31 | if (msg.type === Gst.MessageType.ERROR) { 32 | console.log('Got error') 33 | const [err, debug] = msg.parseError() 34 | console.error(`Error received from element ${msg.src.getName()}: ${err.message}`) 35 | } else if (msg.type === Gst.MessageType.EOS) { 36 | console.log('End-Of-Stream reached.') 37 | } else { 38 | console.error('Unexpected message received.') 39 | } 40 | } 41 | 42 | pipeline.setState(Gst.State.NULL) 43 | 44 | -------------------------------------------------------------------------------- /src/tutorial-3.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/dynamic-pipelines.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | gi.startLoop() 6 | Gst.init() 7 | 8 | const pipeline = new Gst.Pipeline() 9 | const src = Gst.ElementFactory.make('uridecodebin') 10 | const convert = Gst.ElementFactory.make('audioconvert') 11 | const resample = Gst.ElementFactory.make('audioresample') 12 | const sink = Gst.ElementFactory.make('autoaudiosink') 13 | if (!pipeline || !src || !convert || !resample || !sink) { 14 | throw new Error('Not all elements could be created.') 15 | } 16 | 17 | pipeline.add(src) 18 | pipeline.add(convert) 19 | pipeline.add(resample) 20 | pipeline.add(sink) 21 | 22 | if (!convert.link(resample) || !resample.link(sink)) { 23 | throw new Error('Elements could not be linked.') 24 | } 25 | 26 | src.uri = 'https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm' 27 | 28 | src.on('pad-added', pad => { 29 | console.log(`Received new pad '${pad.getName()}' from '${src.getName()}':`) 30 | 31 | const sinkPad = convert.getStaticPad('sink') 32 | 33 | if (sinkPad.isLinked()) { 34 | console.log('We are already linked. Ignoring.') 35 | return 36 | } 37 | const caps = pad.getCurrentCaps() 38 | const struct = caps.getStructure(0) 39 | const type = struct.getName() 40 | if (!type.startsWith('audio/x-raw')) { 41 | console.log(`It has type '${type}' which is not raw audio.
Ignoring.`) 42 | return 43 | } 44 | 45 | if (pad.link(sinkPad) < Gst.PadLinkReturn.OK) { 46 | console.log(`Type is '${type}' but link failed.`) 47 | } else { 48 | console.log(`Link succeeded (type '${type}').`) 49 | } 50 | }) 51 | 52 | const ret = pipeline.setState(Gst.State.PLAYING) 53 | if (ret === Gst.StateChangeReturn.FAILURE) { 54 | throw new Error('Unable to set the pipeline to the playing state.') 55 | } 56 | 57 | let terminate = false 58 | const bus = pipeline.getBus() 59 | while (!terminate) { 60 | const msg = bus.timedPopFiltered(Gst.CLOCK_TIME_NONE, Gst.MessageType.ERROR | Gst.MessageType.EOS) 61 | switch (msg.type) { 62 | case Gst.MessageType.ERROR: 63 | console.log('Got error.') 64 | // TODO: parse error 65 | terminate = true 66 | break 67 | case Gst.MessageType.EOS: 68 | console.log('End-Of-Stream reached.') 69 | terminate = true 70 | break 71 | } 72 | } 73 | 74 | pipeline.setState(Gst.State.NULL) 75 | 76 | -------------------------------------------------------------------------------- /src/tutorial-4.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/time-management.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | gi.startLoop() 6 | 7 | let duration = Gst.CLOCK_TIME_NONE 8 | let terminate = false 9 | let playing = false 10 | 11 | let seekEnabled = false 12 | let seekDone = false 13 | 14 | let playbin = null 15 | 16 | // Initialize GStreamer 17 | Gst.init() 18 | 19 | function main () { 20 | // Create the elements 21 | playbin = Gst.ElementFactory.make('playbin') 22 | if (!playbin) { 23 | console.error('Could not create playbin') 24 | return 25 | } 26 | 27 | // Set the URI to play 28 | playbin.uri = 29 | 'https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm' 30 | 31 | // Start playing 32 | if (playbin.setState(Gst.State.PLAYING) === Gst.StateChangeReturn.FAILURE) { 33 | console.error('Unable to set the pipeline to the playing state.') 34 | return 35 | } 36 | 37 | // Listen to the bus 38 | const bus = playbin.getBus() 39 | while (!terminate) { 40 | const msg = bus.timedPopFiltered( 41 | 100 * Gst.MSECOND, 42 | Gst.MessageType.STATE_CHANGED | 43 | Gst.MessageType.ERROR | 44 | Gst.MessageType.EOS | 45 | Gst.MessageType.DURATION_CHANGED 46 | ) 47 | if (msg) { 48 | handleMessage(msg) 49 | } else { 50 | // We got no message, this means the timeout expired 51 | if (!playing) continue 52 | 53 | // Query the current position of the stream 54 | const [ok, current] = playbin.queryPosition(Gst.Format.TIME) 55 | if (!ok) { 56 | console.error('Could not query current position.') 57 | } 58 | 59 | // If we didn't know it yet, query the stream duration 60 | if (duration === Gst.CLOCK_TIME_NONE) { 61 | const [ok, durationValue] = playbin.queryDuration(Gst.Format.TIME) 62 | if (!ok) { 63 | console.error('Could not query current duration.') 64 | } 65 | duration = durationValue 66 | } 67 | 68 | // Print current position and total duration 69 | console.log(`Position ${current} / ${duration}`) 70 | 71 | // If seeking is enabled, we have not done it yet, and the time is right, seek 72 | if (seekEnabled && !seekDone && current > 10 * Gst.SECOND) { 73 | console.log('Reached 10s, performing seek...') 74 | playbin.seekSimple( 75 | Gst.Format.TIME, 76 | Gst.SeekFlags.FLUSH | Gst.SeekFlags.KEY_UNIT, 77 | 30 * Gst.SECOND 78 | ) 79 | seekDone = true 80 | } 81 | } 82 | } 83 | 84 | playbin.setState(Gst.State.NULL) 85 | } 86 | 87 | function handleMessage (msg) { 88 | switch (msg.type) { 89 | case Gst.MessageType.ERROR: 90 | console.log('Got error.') 91 | // TODO: parse error 92 | terminate = true 93 | break 94 | case Gst.MessageType.EOS: 95 | console.log('End-Of-Stream reached.') 96 | terminate = true 97 | break 98 | case Gst.MessageType.DURATION_CHANGED: 99 | duration = Gst.CLOCK_TIME_NONE 100 | console.log('Duration changed.') 101 | break 102 | case Gst.MessageType.STATE_CHANGED: 103 | if (msg.src === playbin) { 104 | const [oldState, newState, pendingState] = msg.parseStateChanged() 105 | console.log( 106 | `Pipeline state changed from ${Gst.Element.stateGetName( 107 | oldState 108 | )} to ${Gst.Element.stateGetName(newState)}.` 109 | ) 110 | playing = newState === Gst.State.PLAYING 111 | if (playing) { 112 | const query = Gst.Query.newSeeking(Gst.Format.TIME) 113 | if (playbin.query(query)) { 114 | const [_format, seekEnabledValue, startTime, endTime] = 115 | query.parseSeeking(null) 116 | seekEnabled = seekEnabledValue 117 | if (seekEnabled) { 118 | console.log(`Seeking is ENABLED from ${startTime} to ${endTime}`) 119 | } else { 120 | console.log('Seeking is DISABLED for this stream.') 121 | } 122 | } 123 | } 124 | } 125 | break 126 | default: 127 | console.error('Unknown message type:', msg) 128 | break 129 | } 130 | } 131 | 132 | main() 133 | -------------------------------------------------------------------------------- /src/tutorial-5.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/toolkit-integration.html 2 | 3 | const gi = require('node-gtk') 4 | const GstVideo = gi.require('GstVideo', '1.0') 5 | const Gst = gi.require('Gst', '1.0') 6 | const GLib = gi.require('GLib', '2.0') 7 | const GObject = gi.require('GObject', '2.0') 8 | const Gtk = gi.require('Gtk', '3.0') 9 | gi.startLoop() 10 | 11 | // Initialize GTK 12 | Gtk.init() 13 | // Initialize GStreamer 14 | Gst.init() 15 | 16 | function createUI(data) { 17 | const mainWindow = new Gtk.Window(Gtk.WindowType.TOPLEVEL) 18 | mainWindow.connect('delete-event', () => { 19 | 20 | }) 21 | const videoWindow = data.videoWindow = new Gtk.DrawingArea() 22 | videoWindow.setDoubleBuffered(false) 23 | videoWindow.connect('realize', () => { 24 | const window = videoWindow.getWindow() 25 | if (!window.ensureNative()) { 26 | console.error('Couldn\'t create native window needed for GstVideoOverlay!') 27 | } 28 | const overlay = Object.create(GstVideo.VideoOverlay.prototype, data.playbin) 29 | console.log(overlay) 30 | overlay.setWindowHandle(window) 31 | }) 32 | videoWindow.connect('draw', () => { 33 | 34 | }) 35 | 36 | const playButton = Gtk.Button.newFromIconName('media-playback-start', Gtk.IconSize.SMALL_TOOLBAR) 37 | playButton.connect('clicked', () => { 38 | data.playbin.setState(Gst.State.PLAYING) 39 | }) 40 | const pauseButton = Gtk.Button.newFromIconName('media-playback-pause', Gtk.IconSize.SMALL_TOOLBAR) 41 | pauseButton.connect('clicked', () => { 42 | data.playbin.setState(Gst.State.PAUSED) 43 | }) 44 | const stopButton = Gtk.Button.newFromIconName('media-playback-stop', Gtk.IconSize.SMALL_TOOLBAR) 45 | stopButton.connect('clicked', () => { 46 | data.playbin.setState(Gst.State.READY) 47 | }) 48 | 49 | const slider = data.slider = Gtk.Scale.newWithRange(Gtk.Orientation.HORIZONTAL, 0, 100, 1) 50 | slider.setDrawValue(0) 51 | data.sliderUpdateSignalId = slider.connect('value-changed', () => { 52 | const value = slider.getValue() 53 |
data.playbin.seekSimple(Gst.Format.TIME, Gst.SeekFlags.FLUSH | Gst.SeekFlags.KEY_UNIT, value * Gst.SECOND) 54 | }) 55 | 56 | const streamList = new Gtk.TextView() 57 | streamList.setEditable(false) 58 | 59 | const controls = new Gtk.Box(Gtk.Orientation.HORIZONTAL, 0) 60 | controls.packStart(playButton, false, false, 2) 61 | controls.packStart(pauseButton, false, false, 2) 62 | controls.packStart(stopButton, false, false, 2) 63 | controls.packStart(slider, true, true, 2) 64 | 65 | const mainHBox = new Gtk.Box(Gtk.Orientation.HORIZONTAL, 0) 66 | mainHBox.packStart(videoWindow, true, true, 0) 67 | mainHBox.packStart(streamList, false, false, 2) 68 | 69 | const mainBox = new Gtk.Box(Gtk.Orientation.VERTICAL, 0) 70 | mainBox.packStart(mainHBox, true, true, 0) 71 | mainBox.packStart(controls, false, false, 0) 72 | mainWindow.add(mainBox) 73 | mainWindow.setDefaultSize(640, 480) 74 | mainWindow.showAll() 75 | } 76 | 77 | function refreshUI(data) { 78 | if (data.state === Gst.State.PAUSED) return 79 | if (data.duration === Gst.CLOCK_TIME_NONE) { 80 | const [res, duration] = data.playbin.queryDuration(Gst.Format.TIME) 81 | if(!res) { 82 | console.error('Could not query current duration.') 83 | } else { 84 | data.duration = duration 85 | data.slider.setRange(0, duration / Gst.SECOND) 86 | } 87 | } 88 | 89 | const [res, current] = data.playbin.queryPosition(Gst.Format.TIME) 90 | if (res && data.slider) { 91 | // data.slider.signalHandlerBlock(data.sliderUpdateSignalId) 92 | // data.slider.setValue(current / Gst.SECOND) 93 | // data.slider.signalHandlerUnblock(data.sliderUpdateSignalId) 94 | } 95 | } 96 | 97 | function main() { 98 | // Create the elements 99 | const playbin = Gst.ElementFactory.make('playbin') 100 | function tagsCb() { 101 | const nVideo = gi._c.ObjectPropertyGetter(playbin, 'n-video') 102 | const nAudio = gi._c.ObjectPropertyGetter(playbin, 'n-audio') 103 | const nText = gi._c.ObjectPropertyGetter(playbin, 'n-text') 104 | 105 | for (let i = 0; i < nVideo; ++i) { 106 | const tags = playbin.emit('get-video-tags', i) 107 | if (!tags) continue 108 | const codec = tags.getString(Gst.TAG_VIDEO_CODEC) 109 | console.log(`video stream ${i}: codec ${codec}`) 110 | } 111 | } 112 | 113 | if (!playbin) { 114 | console.error('Not all elements could be created.') 115 | return 116 | } 117 | 118 | // Set the URI to play 119 | // TODO: use non-internal setters? 
120 | gi._c.ObjectPropertySetter(playbin, 'uri', 'file:///data/sintel_trailer-480p.webm') 121 | 122 | // Connect to interesting signals in playbin 123 | playbin.on('video-tags-changed', tagsCb) 124 | playbin.on('audio-tags-changed', tagsCb) 125 | playbin.on('text-tags-changed', tagsCb) 126 | 127 | // Create the GUI 128 | const data = {playbin} 129 | createUI(data) 130 | 131 | // Instruct the bus to emit signals for each received message, and connect to the interesting signals 132 | const bus = playbin.getBus() 133 | bus.addSignalWatch() 134 | bus.on('message::error', () => { 135 | console.log('message::error') 136 | Gtk.mainQuit() 137 | clearInterval(interval) 138 | }) 139 | bus.on('message::eos', () => { 140 | console.log('message::eos') 141 | Gtk.mainQuit() 142 | clearInterval(interval) 143 | }) 144 | bus.on('message::state-changed', () => { 145 | // console.log('message::state-changed') 146 | }) 147 | bus.on('message::application', () => { 148 | console.log('message::application') 149 | }) 150 | 151 | // Start playing 152 | if (playbin.setState(Gst.State.PLAYING) === Gst.StateChangeReturn.FAILURE) { 153 | console.error('Unable to set the pipeline to the playing state.') 154 | return 155 | } 156 | 157 | // Register a function that GLib will call every second 158 | GLib.timeoutAddSeconds(0, 1, () => { 159 | refreshUI(data) 160 | return true 161 | }) 162 | 163 | const interval = setInterval(() => {}, 1000) 164 | // Start the GTK main loop. We will not regain control until gtk_main_quit is called. 165 | Gtk.main() 166 | 167 | // Free resources 168 | playbin.setState(Gst.State.NULL) 169 | } 170 | 171 | main() 172 | -------------------------------------------------------------------------------- /src/tutorial-6.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/media-formats-and-pad-capabilities.html 2 | const gi = require('node-gtk') 3 | const Gst = gi.require('Gst', '1.0') 4 | const GLib = gi.require('GLib', '2.0') 5 | gi.startLoop() 6 | 7 | 8 | let terminate = false 9 | 10 | // Initialize GStreamer 11 | Gst.init() 12 | 13 | function main() { 14 | // Create the element factories 15 | const srcFactory = Gst.ElementFactory.find('audiotestsrc') 16 | const sinkFactory = Gst.ElementFactory.find('autoaudiosink') 17 | if (!srcFactory || !sinkFactory) { 18 | console.error('Not all element factories could be created.') 19 | return 20 | } 21 | 22 | // Print information about the pad templates of these factories 23 | printPadTemplateInformation(srcFactory) 24 | printPadTemplateInformation(sinkFactory) 25 | 26 | // Ask the factories to instantiate actual elements 27 | const src = srcFactory.create() 28 | const sink = sinkFactory.create() 29 | 30 | // Create the empty pipeline 31 | const pipeline = new Gst.Pipeline() 32 | 33 | if (!pipeline || !src || !sink) { 34 | console.error('Not all elements could be created.') 35 | return 36 | } 37 | 38 | // Build the pipeline 39 | pipeline.add(src) 40 | pipeline.add(sink) 41 | if (!src.link(sink)) { 42 | console.error('Elements could not be linked.') 43 | return 44 | } 45 | 46 | // Print initial negotiated caps (in NULL state) 47 | console.log('In NULL state:') 48 | printPadCapabilities(sink, 'sink') 49 | 50 | // Start playing 51 | if (pipeline.setState(Gst.State.PLAYING) === Gst.StateChangeReturn.FAILURE) { 52 | console.error('Unable to set the pipeline to the playing state (check the bus for error messages).') 53 | } 54 | 55 | // Listen to the bus
56 | const bus = pipeline.getBus() 57 | while (!terminate) { 58 | const msg = bus.timedPopFiltered(Gst.CLOCK_TIME_NONE, Gst.MessageType.STATE_CHANGED | Gst.MessageType.ERROR | Gst.MessageType.EOS) 59 | if (!msg) continue 60 | 61 | switch (msg.type) { 62 | case Gst.MessageType.ERROR: 63 | console.log('Got error.') 64 | // TODO: parse error 65 | terminate = true 66 | break 67 | case Gst.MessageType.EOS: 68 | console.log('End-Of-Stream reached.') 69 | terminate = true 70 | break 71 | case Gst.MessageType.STATE_CHANGED: 72 | if (msg.src === pipeline) { 73 | const [oldState, newState, pendingState] = msg.parseStateChanged() 74 | console.log(`Pipeline state changed from ${Gst.Element.stateGetName(oldState)} to ${Gst.Element.stateGetName(newState)}.`) 75 | printPadCapabilities(sink, 'sink') 76 | } 77 | break 78 | default: 79 | console.error('Unknown message type:', msg) 80 | break 81 | } 82 | } 83 | 84 | pipeline.setState(Gst.State.NULL) 85 | } 86 | 87 | function printPadTemplateInformation(factory) { 88 | console.log(`Pad Templates for ${factory.getName()}:`) 89 | if (factory.getNumPadTemplates() === 0) { 90 | console.log(' none') 91 | return 92 | } 93 | 94 | const templates = factory.getStaticPadTemplates() 95 | for (const template of templates) { 96 | switch (template.direction) { 97 | case Gst.PadDirection.SRC: 98 | console.log(` SRC template: '${template.nameTemplate}'`) 99 | break 100 | case Gst.PadDirection.SINK: 101 | console.log(` SINK template: '${template.nameTemplate}'`) 102 | break 103 | default: 104 | console.log(` UNKNOWN!!! template: '${template.nameTemplate}'`) 105 | break 106 | } 107 | switch (template.presence) { 108 | case Gst.PadPresence.ALWAYS: 109 | console.log(' Availability: Always') 110 | break 111 | case Gst.PadPresence.SOMETIMES: 112 | console.log(' Availability: Sometimes') 113 | break 114 | case Gst.PadPresence.REQUEST: 115 | console.log(' Availability: On request') 116 | break 117 | default: 118 | console.log(' Availability: UNKNOWN!!!') 119 | break 120 | } 121 | // TODO: this is not yet working 122 | // const staticCaps = template.staticCaps 123 | // if (staticCaps.string) { 124 | // console.log(' Capabilities:') 125 | // const caps = staticCaps.get() 126 | // printCaps(caps, ' ') 127 | // } 128 | 129 | // Instead we're doing this: 130 | const caps = template.getCaps() 131 | console.log(' Capabilities:') 132 | printCaps(caps, ' ') 133 | console.log('') 134 | } 135 | } 136 | 137 | function printCaps(caps, prefix) { 138 | if (!caps) return 139 | if (caps.isAny()) { 140 | console.log(prefix + 'ANY') 141 | return 142 | } 143 | if (caps.isEmpty()) { 144 | console.log(prefix + 'EMPTY') 145 | return 146 | } 147 | 148 | for (let i = 0; i < caps.getSize(); ++i) { 149 | const struct = caps.getStructure(i) 150 | console.log(prefix + struct.getName()) 151 | struct.foreach((fieldId, val) => { 152 | const str = Gst.valueSerialize(val) 153 | console.log(prefix + ' ' + GLib.quarkToString(fieldId).padStart(15) + ' ' + str) 154 | // We should free the string here...
155 | return true 156 | }) 157 | } 158 | } 159 | 160 | function printPadCapabilities(element, padName) { 161 | const pad = element.getStaticPad(padName) 162 | if (!pad) { 163 | console.error(`Could not retrieve pad '${padName}'`) 164 | return 165 | } 166 | 167 | // Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) 168 | let caps = pad.getCurrentCaps() 169 | if (!caps) { 170 | caps = pad.queryCaps(null) 171 | } 172 | 173 | // Print and free 174 | console.log(`Caps for the ${padName} pad:`) 175 | printCaps(caps, ' ') 176 | } 177 | 178 | main() 179 | -------------------------------------------------------------------------------- /src/tutorial-7.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/multithreading-and-pad-availability.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | gi.startLoop() 6 | 7 | // Initialize GStreamer 8 | Gst.init() 9 | 10 | function main () { 11 | // Create the elements 12 | const asrc = Gst.ElementFactory.make('audiotestsrc') 13 | const tee = Gst.ElementFactory.make('tee') 14 | const aqueue = Gst.ElementFactory.make('queue', 'audio_queue') 15 | const aconvert = Gst.ElementFactory.make('audioconvert') 16 | const aresample = Gst.ElementFactory.make('audioresample') 17 | const asink = Gst.ElementFactory.make('autoaudiosink') 18 | const vqueue = Gst.ElementFactory.make('queue', 'video_queue') 19 | const visual = Gst.ElementFactory.make('wavescope', 'visual') 20 | const vconvert = Gst.ElementFactory.make('videoconvert') 21 | const vsink = Gst.ElementFactory.make('autovideosink') 22 | 23 | // Create the empty pipeline 24 | const pipeline = new Gst.Pipeline() 25 | 26 | if ( 27 | !asrc || 28 | !tee || 29 | !aqueue || 30 | !aconvert || 31 | !aresample || 32 | !asink || 33 | !vqueue || 34 | !visual || 35 | !vconvert || 36 | !vsink || 37 | !pipeline 38 | ) { 39 | console.error('Not all elements could be created.') 40 | return 41 | } 42 | 43 | // Configure elements 44 | // TODO: use non-internal setters? 
45 | asrc.freq = 215.0 46 | visual.shader = 0 47 | visual.style = 1 48 | 49 | pipeline.add(asrc) 50 | pipeline.add(tee) 51 | pipeline.add(aqueue) 52 | pipeline.add(aconvert) 53 | pipeline.add(aresample) 54 | pipeline.add(asink) 55 | pipeline.add(vqueue) 56 | pipeline.add(visual) 57 | pipeline.add(vconvert) 58 | pipeline.add(vsink) 59 | 60 | // Link all elements that can be automatically linked because they have "Always" pads 61 | if ( 62 | !asrc.link(tee) || 63 | !aqueue.link(aconvert) || 64 | !aconvert.link(aresample) || 65 | !aresample.link(asink) || 66 | !vqueue.link(visual) || 67 | !visual.link(vconvert) || 68 | !vconvert.link(vsink) 69 | ) { 70 | console.error('Elements could not be linked.') 71 | return 72 | } 73 | 74 | // Manually link the Tee, which has "Request" pads 75 | const teeAudioPad = tee.getRequestPad('src_%u') 76 | console.log(`Obtained request pad ${teeAudioPad.getName()} for audio branch.`) 77 | const queueAudioPad = aqueue.getStaticPad('sink') 78 | const teeVideoPad = tee.getRequestPad('src_%u') 79 | console.log(`Obtained request pad ${teeVideoPad.getName()} for video branch.`) 80 | const queueVideoPad = vqueue.getStaticPad('sink') 81 | if ( 82 | teeAudioPad.link(queueAudioPad) !== Gst.PadLinkReturn.OK || 83 | teeVideoPad.link(queueVideoPad) !== Gst.PadLinkReturn.OK 84 | ) { 85 | console.error('Tee could not be linked.') 86 | return 87 | } 88 | 89 | // Start playing the pipeline 90 | pipeline.setState(Gst.State.PLAYING) 91 | 92 | // Wait until error or EOS 93 | const bus = pipeline.getBus() 94 | const msg = bus.timedPopFiltered( 95 | Gst.CLOCK_TIME_NONE, 96 | Gst.MessageType.ERROR | Gst.MessageType.EOS 97 | ) 98 | 99 | // Release the request pads from the tee 100 | tee.releaseRequestPad(teeAudioPad) 101 | tee.releaseRequestPad(teeVideoPad) 102 | 103 | pipeline.setState(Gst.State.NULL) 104 | } 105 | 106 | main() 107 | -------------------------------------------------------------------------------- /src/tutorial-8.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/short-cutting-the-pipeline.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | const GstAudio = gi.require('GstAudio', '1.0') 6 | const GLib = gi.require('GLib', '2.0') 7 | gi.startLoop() 8 | 9 | // Initialize GStreamer 10 | Gst.init() 11 | 12 | // global constants 13 | const CHUNK_SIZE = 1024 14 | const SAMPLE_RATE = 44100 15 | 16 | let sourceId = 0 17 | let numSamples = 0 18 | let data = { 19 | appsrc: undefined, 20 | a: 0, 21 | b: 1, 22 | c: 0, 23 | d: 1 24 | } 25 | 26 | function pushData () { 27 | const buf = Buffer.alloc(CHUNK_SIZE, 0) 28 | data.c += data.d 29 | data.d -= data.c / 1000 30 | const freq = 1100 + 1000 * data.d 31 | for (let i = 0; i < CHUNK_SIZE / 2; ++i) { 32 | data.a += data.b 33 | data.b -= data.a / freq 34 | buf.writeUInt16LE((500 * data.a) & 0xffff, i * 2) 35 | } 36 | // Create Gst.Buffer 37 | const buffer = Gst.Buffer.newWrapped(buf) 38 | buffer.dts = Gst.utilUint64Scale(numSamples, Gst.SECOND, SAMPLE_RATE) 39 | buffer.duration = Gst.utilUint64Scale(CHUNK_SIZE / 2, Gst.SECOND, SAMPLE_RATE) 40 | numSamples += CHUNK_SIZE / 2 41 | 42 | // Push the buffer into the appsrc 43 | const ret = data.appsrc.emit('push-buffer', buffer) 44 | 45 | if (ret !== Gst.FlowReturn.OK) { 46 | // We got some error, stop sending data 47 | return false 48 | } 49 | return true 50 | } 51 | 52 | function main () { 53 | // Create the elements 54 | const appsrc = 
(data.appsrc = Gst.ElementFactory.make('appsrc')) 55 | const tee = Gst.ElementFactory.make('tee') 56 | const aqueue = Gst.ElementFactory.make('queue', 'audio_queue') 57 | const aconvert1 = Gst.ElementFactory.make('audioconvert') 58 | const aresample = Gst.ElementFactory.make('audioresample') 59 | const asink = Gst.ElementFactory.make('autoaudiosink') 60 | const vqueue = Gst.ElementFactory.make('queue', 'video_queue') 61 | const aconvert2 = Gst.ElementFactory.make('audioconvert') 62 | const visual = Gst.ElementFactory.make('wavescope', 'visual') 63 | const vconvert = Gst.ElementFactory.make('videoconvert') 64 | const vsink = Gst.ElementFactory.make('autovideosink') 65 | const appqueue = Gst.ElementFactory.make('queue') 66 | const appsink = Gst.ElementFactory.make('appsink') 67 | 68 | // Create the empty pipeline 69 | const pipeline = new Gst.Pipeline() 70 | 71 | if ( 72 | !appsrc || 73 | !tee || 74 | !aqueue || 75 | !aconvert1 || 76 | !aresample || 77 | !asink || 78 | !vqueue || 79 | !aconvert2 || 80 | !visual || 81 | !vconvert || 82 | !vsink || 83 | !appqueue || 84 | !appsink || 85 | !pipeline 86 | ) { 87 | console.error('Not all elements could be created.') 88 | return 89 | } 90 | 91 | // Configure wavescope 92 | visual.shader = 0 93 | visual.style = 0 94 | 95 | // Configure appsrc 96 | const audioInfo = new GstAudio.AudioInfo() 97 | audioInfo.setFormat(GstAudio.AudioFormat.S16, SAMPLE_RATE, 1) 98 | const audioCaps = audioInfo.toCaps() 99 | console.log(audioCaps.toString()) 100 | appsrc.caps = audioCaps 101 | appsrc.format = Gst.Format.TIME 102 | appsrc.on('need-data', size => { 103 | if (sourceId !== 0) return 104 | console.log('Start feeding.') 105 | sourceId = GLib.idleAdd(GLib.PRIORITY_DEFAULT_IDLE, pushData) 106 | }) 107 | appsrc.on('enough-data', () => { 108 | if (sourceId === 0) return 109 | console.log('Stop feeding.') 110 | GLib.sourceRemove(sourceId) 111 | sourceId = 0 112 | }) 113 | 114 | // Configure appsink 115 | appsink.emitSignals = true 116 | appsink.caps = audioCaps 117 | appsink.on('new-sample', () => { 118 | const sample = appsink.emit('pull-sample') 119 | if (sample) { 120 | process.stdout.write('*') 121 | return Gst.FlowReturn.OK 122 | } 123 | return Gst.FlowReturn.ERROR 124 | }) 125 | 126 | pipeline.add(appsrc) 127 | pipeline.add(tee) 128 | pipeline.add(aqueue) 129 | pipeline.add(aconvert1) 130 | pipeline.add(aresample) 131 | pipeline.add(asink) 132 | pipeline.add(vqueue) 133 | pipeline.add(aconvert2) 134 | pipeline.add(visual) 135 | pipeline.add(vconvert) 136 | pipeline.add(vsink) 137 | pipeline.add(appqueue) 138 | pipeline.add(appsink) 139 | 140 | // Link all elements that can be automatically linked because they have "Always" pads 141 | const isLinked = 142 | appsrc.link(tee) && 143 | aqueue.link(aconvert1) && 144 | aconvert1.link(aresample) && 145 | aresample.link(asink) && 146 | vqueue.link(aconvert2) && 147 | aconvert2.link(visual) && 148 | visual.link(vconvert) && 149 | vconvert.link(vsink) && 150 | appqueue.link(appsink) 151 | if (!isLinked) { 152 | console.error('Elements could not be linked.') 153 | return 154 | } 155 | 156 | // Manually link the Tee, which has "Request" pads 157 | const teeAudioPad = tee.getRequestPad('src_%u') 158 | console.log(`Obtained request pad ${teeAudioPad.getName()} for audio branch.`) 159 | const queueAudioPad = aqueue.getStaticPad('sink') 160 | 161 | const teeVideoPad = tee.getRequestPad('src_%u') 162 | console.log(`Obtained request pad ${teeVideoPad.getName()} for video branch.`) 163 | const queueVideoPad = 
vqueue.getStaticPad('sink') 164 | 165 | const teeAppPad = tee.getRequestPad('src_%u') 166 | console.log(`Obtained request pad ${teeAppPad.getName()} for app branch.`) 167 | const queueAppPad = appqueue.getStaticPad('sink') 168 | 169 | const padsLinked = 170 | teeAudioPad.link(queueAudioPad) === Gst.PadLinkReturn.OK && 171 | teeVideoPad.link(queueVideoPad) === Gst.PadLinkReturn.OK && 172 | teeAppPad.link(queueAppPad) === Gst.PadLinkReturn.OK 173 | if (!padsLinked) { 174 | console.error('Tee could not be linked.') 175 | return 176 | } 177 | 178 | // Instruct the bus to emit signals for each received message, and connect to the interesting signals 179 | const bus = pipeline.getBus() 180 | bus.addSignalWatch() 181 | bus.on('message::error', msg => { 182 | // const [err, debugInfo] = msg.parseError() 183 | console.error('Got error') 184 | loop.quit() 185 | clearInterval(interval) 186 | }) 187 | 188 | // Start playing the pipeline 189 | pipeline.setState(Gst.State.PLAYING) 190 | 191 | // Create a GLib Main Loop and set it to run 192 | // FIXME: workaround to keep js code running 193 | const interval = setInterval(() => {}, 1000) 194 | const loop = new GLib.MainLoop(null, false) 195 | loop.run() 196 | 197 | // Release the request pads from the tee 198 | tee.releaseRequestPad(teeAudioPad) 199 | tee.releaseRequestPad(teeVideoPad) 200 | tee.releaseRequestPad(teeAppPad) 201 | 202 | // Free resources 203 | pipeline.setState(Gst.State.NULL) 204 | } 205 | 206 | main() 207 | -------------------------------------------------------------------------------- /src/tutorial-9.js: -------------------------------------------------------------------------------- 1 | // translation of https://gstreamer.freedesktop.org/documentation/tutorials/basic/media-information-gathering.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | const GstPbutils = gi.require('GstPbutils', '1.0') 6 | const GLib = gi.require('GLib', '2.0') 7 | gi.startLoop() 8 | 9 | // Initialize GStreamer 10 | Gst.init() 11 | 12 | 13 | function main() { 14 | // if a URI was provided, use it instead of the default one 15 | const uri = process.argv[2] || 'https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm' 16 | console.log(`Discovering '${uri}'`); 17 | 18 | // Instantiate the Discoverer 19 | const discoverer = new GstPbutils.Discoverer() 20 | 21 | // Connect to the interesting signals 22 | discoverer.on('discovered', () => { 23 | 24 | }) 25 | discoverer.on('finished', () => { 26 | loop.quit() // quit the main loop once discovery is finished 27 | }) 28 | 29 | // Start the discoverer process (nothing to do yet) 30 | discoverer.start() 31 | 32 | // Add a request to process asynchronously the URI passed through the command line 33 | 34 | if (!discoverer.discoverUriAsync(uri)) { 35 | console.error(`Failed to start discovering URI '${uri}'\n`); 36 | return 37 | } 38 | 39 | // Create a GLib Main Loop and set it to run, so we can wait for the signals 40 | const loop = new GLib.MainLoop(null, false) 41 | loop.run() 42 | 43 | // Stop the discoverer process 44 | discoverer.stop() 45 | } 46 | 47 | main() 48 | -------------------------------------------------------------------------------- /src/typefind-example.js: -------------------------------------------------------------------------------- 1 | // small typefind example, loosely based on https://gstreamer.freedesktop.org/documentation/tutorials/basic/dynamic-pipelines.html 2 | 3 | const gi = require('node-gtk') 4 | const Gst = gi.require('Gst', '1.0') 5 | const GLib = gi.require('GLib', '2.0') 6 | gi.startLoop() 7 | Gst.init() 8 | 9 | function
main() { 10 | // Create pipeline 11 | const pipeline = new Gst.Pipeline() 12 | const src = Gst.ElementFactory.make('videotestsrc') 13 | const typefind = Gst.ElementFactory.make('typefind') 14 | if (!pipeline || !src || !typefind) { 15 | console.error('Not all elements could be created.') 16 | return 17 | } 18 | pipeline.add(src) 19 | pipeline.add(typefind) 20 | 21 | // Link elements 22 | if (!src.link(typefind)) { 23 | console.error('Elements could not be linked.') 24 | return 25 | } 26 | 27 | typefind.once('have-type', (probability, caps) => { 28 | console.log(`Got type with probability ${probability}:`) 29 | console.log(caps.toString()) 30 | 31 | clearTimeout(timeout) 32 | }) 33 | 34 | const ret = pipeline.setState(Gst.State.PLAYING) 35 | if (ret === Gst.StateChangeReturn.FAILURE) { 36 | console.error('Unable to set the pipeline to the playing state.') 37 | return 38 | } 39 | 40 | // keep alive 41 | const timeout = setTimeout(() => {}, 5000) 42 | } 43 | 44 | main() 45 | --------------------------------------------------------------------------------