├── .npmignore
├── .gitignore
├── package.json
├── demo
│   ├── index.html
│   ├── wave-stream.js
│   └── index.js
├── README.md
├── capture.js
└── index.js
/.npmignore:
--------------------------------------------------------------------------------
1 | demo
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .DS_Store
3 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audio-stream",
3 | "version": "0.2.0",
4 | "description": "Stream raw audio data from a MediaStream",
5 | "main": "index.js",
6 | "scripts": {
7 | "demo-watch": "wzrd demo/index.js:demo/bundle.js",
8 | "demo-build": "browserify demo/index.js -o demo/bundle.js"
9 | },
10 | "repository": {
11 | "type": "git",
12 | "url": "git+https://github.com/kapetan/audio-stream.git"
13 | },
14 | "keywords": [
15 | "audio",
16 | "media",
17 | "stream",
18 | "webrtc"
19 | ],
20 | "license": "MIT",
21 | "bugs": {
22 | "url": "https://github.com/kapetan/audio-stream/issues"
23 | },
24 | "homepage": "https://github.com/kapetan/audio-stream#readme",
25 | "devDependencies": {
26 | "browserify": "^11.2.0",
27 | "pcm-stream": "^1.0.0",
28 | "wzrd": "^1.3.1"
29 | },
30 | "dependencies": {
31 | "debug": "^2.2.0",
32 | "once": "^1.3.2",
33 | "xtend": "^4.0.0"
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/demo/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 | <meta charset="utf-8">
5 | <title>Recorder</title>
6 | <!-- Markup reconstructed from the element ids used in demo/index.js; the original stylesheet is not preserved beyond the .hidden rule. -->
7 | <style>
8 | .hidden { display: none; }
9 | </style>
10 | </head>
11 | <body>
12 | <h1>Recorder</h1>
13 | <div>
14 | <label for="volume">Volume:</label>
15 | <input type="range" id="volume" min="0" max="100" value="50">
16 | </div>
17 | <div>
18 | <button id="record-button">Record</button>
19 | <button id="pause-button" disabled>Pause</button>
20 | <button id="stop-button" disabled>Stop</button>
21 | <span id="duration">00:00</span>
22 | </div>
23 | <audio id="player" controls></audio>
24 | <a id="download" class="hidden" download>Download</a>
25 | <script src="bundle.js"></script>
26 | </body>
27 | </html>
28 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # audio-stream
2 |
3 | Stream raw audio data from a MediaStream. Only works in modern browsers that support the Web Audio API.
4 |
5 | npm install audio-stream
6 |
7 | See the [live recorder demo](https://kapetan.github.io/audio-stream/demo/index.html).
8 |
9 | # Usage
10 |
11 | Use it together with a bundler such as browserify. The data is streamed as interleaved 32-bit floats ranging between -1 and 1 (see the decoding sketch after the example below).
12 |
13 | ```javascript
14 | var audio = require('audio-stream');
15 |
16 | navigator.getUserMedia({
17 | video: false,
18 | audio: true
19 | }, function(mediaStream) {
20 | var stream = audio(mediaStream, {
21 | channels: 1,
22 | volume: 0.5
23 | });
24 |
25 | stream.on('header', function(header) {
26 | // Wave header properties
27 | });
28 |
29 | stream.on('data', function(data) {
30 | // Data is a Buffer instance (Uint8Array)
31 | });
32 |
33 | stream.on('end', function() {
34 | // End is emitted when the media stream has ended
35 | });
36 |
37 | setTimeout(function() {
38 | mediaStream.getAudioTracks().forEach(function(track) { track.stop(); });
39 | }, 5000);
40 | }, function() {
41 | console.log('Failed to get media');
42 | });
43 | ```
44 |
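Each `data` chunk is a `Buffer` of interleaved little-endian 32-bit floats, so individual samples can be read back with `readFloatLE`. A minimal sketch:

```javascript
stream.on('data', function(data) {
  // 4 bytes per 32-bit float sample, channels interleaved
  for (var offset = 0; offset < data.length; offset += 4) {
    var sample = data.readFloatLE(offset); // value between -1 and 1
  }
});
```
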
45 | The constructor accepts the number of channels and the microphone volume as options.
46 |
47 | #### `stream.destroy([err])`
48 |
49 | Destroy the audio stream, releasing all associated resources. The media stream is not closed.
50 |
51 | #### `stream.suspend()`
52 |
53 | Suspend audio data capturing.
54 |
55 | #### `stream.restart()`
56 |
57 | Restart audio data capturing.
58 |
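As a quick sketch of how these fit together, `suspend` and `restart` can be wired to a pause toggle (`pauseButton` is a hypothetical DOM element, not part of the module):

```javascript
var paused = false;

// pauseButton is assumed to exist in the page, e.g. a <button> element
pauseButton.addEventListener('click', function() {
  if (paused) stream.restart();
  else stream.suspend();
  paused = !paused;
});
```
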
59 | # Limitations
60 |
61 | Currently Chrome lacks support for capturing a remote stream received over a peer connection; Firefox does support it.
62 |
63 | ```javascript
64 | // This only works in Firefox at the moment
65 | peerConnection.onaddstream = function(e) {
66 | var stream = audio(e.stream);
67 |
68 | stream.on('data', function(data) {
69 |
70 | });
71 | };
72 | ```
73 |
--------------------------------------------------------------------------------
/demo/wave-stream.js:
--------------------------------------------------------------------------------
1 | var stream = require('stream');
2 | var util = require('util');
3 |
4 | var HEADER_LENGTH = 44;
5 | var EMPTY_BUFFER = new Buffer(0);
6 | var HIGH_WATER_MARK = Math.pow(2, 14) * 16;
7 |
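// Builds the standard 44-byte RIFF/WAVE header for the given data length and format options.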
8 | var writeHeader = function(dataLength, options) {
9 | var header = new Buffer(HEADER_LENGTH);
10 |
11 | header.write('RIFF', 0, 4, 'ascii');
12 | header.writeUInt32LE(dataLength + HEADER_LENGTH - 8, 4);
13 | header.write('WAVE', 8, 4, 'ascii');
14 | header.write('fmt ', 12, 4, 'ascii');
15 | header.writeUInt32LE(16, 16);
16 | header.writeUInt16LE(options.audioFormat, 20);
17 | header.writeUInt16LE(options.channels, 22);
18 | header.writeUInt32LE(options.sampleRate, 24);
19 | header.writeUInt32LE(options.byteRate, 28);
20 | header.writeUInt16LE(options.blockAlign, 32);
21 | header.writeUInt16LE(options.bitDepth, 34);
22 | header.write('data', 36, 4, 'ascii');
23 | header.writeUInt32LE(dataLength, 40);
24 |
25 | return header;
26 | };
27 |
28 | var WaveStream = function() {
29 | if(!(this instanceof WaveStream)) return new WaveStream();
30 | stream.Writable.call(this, { highWaterMark: HIGH_WATER_MARK });
31 |
32 | var self = this;
33 |
34 | this._header = null;
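// Slot 0 is a placeholder; it is replaced with the real header once the total data length is known (on 'finish').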
35 | this._buffer = [EMPTY_BUFFER];
36 | this._length = 0;
37 |
38 | this.once('finish', function() {
39 | var buffer = self._buffer;
40 | buffer[0] = writeHeader(self._length, self._header);
41 |
42 | var blob = new Blob(buffer, { type: 'audio/wav' });
43 | var url = URL.createObjectURL(blob);
44 |
45 | self.emit('url', url);
46 | });
47 | };
48 |
49 | util.inherits(WaveStream, stream.Writable);
50 |
51 | WaveStream.prototype.setHeader = function(header) {
52 | this._header = header;
53 | };
54 |
55 | WaveStream.prototype._write = function(data, encoding, callback) {
56 | this._buffer.push(data);
57 | this._length += data.length;
58 | callback();
59 | };
60 |
61 | module.exports = WaveStream;
62 |
--------------------------------------------------------------------------------
/demo/index.js:
--------------------------------------------------------------------------------
1 | var audio = require('../');
2 | var pcm = require('pcm-stream');
3 | var wave = require('./wave-stream');
4 |
5 | var getUserMedia = navigator.getUserMedia ||
6 | navigator.webkitGetUserMedia ||
7 | navigator.mozGetUserMedia;
8 |
9 | var pad = function(n) {
10 | return n < 10 ? ('0' + n) : n;
11 | };
12 |
13 | var mediaStream = null;
14 | var sourceStream = null;
15 |
16 | var record = document.getElementById('record-button');
17 | var pause = document.getElementById('pause-button');
18 | var stop = document.getElementById('stop-button');
19 | var duration = document.getElementById('duration');
20 | var volume = document.getElementById('volume');
21 |
22 | var player = document.getElementById('player');
23 | var download = document.getElementById('download');
24 |
25 | record.addEventListener('click', function() {
26 | volume.setAttribute('disabled', 'disabled');
27 | record.setAttribute('disabled', 'disabled');
28 | pause.removeAttribute('disabled');
29 | stop.removeAttribute('disabled');
30 |
31 | setInterval(function() {
32 | if(sourceStream) {
33 | var seconds = Math.floor(sourceStream.duration);
34 | var minutes = Math.floor(seconds / 60);
35 |
36 | duration.innerHTML = pad(minutes) + ':' + pad(seconds - minutes * 60);
37 | }
38 | }, 500);
39 |
40 | if(sourceStream) {
41 | sourceStream.restart();
42 | } else {
43 | getUserMedia.call(navigator, {
44 | video: false,
45 | audio: true
46 | }, function(result) {
47 | var w = wave();
48 |
49 | mediaStream = window.ms = result;
50 | sourceStream = audio(mediaStream, { volume: volume.value / 100 });
51 |
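// pcm() (pcm-stream) is expected to convert the 32-bit float samples to the 16-bit PCM declared in the wave header below.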
52 | sourceStream
53 | .on('header', function(header) {
54 | var channels = header.channels;
55 | var sampleRate = header.sampleRate;
56 |
57 | w.setHeader({
58 | audioFormat: 1,
59 | channels: channels,
60 | sampleRate: sampleRate,
61 | byteRate: sampleRate * channels * 2,
62 | blockAlign: channels * 2,
63 | bitDepth: 16
64 | });
65 | })
66 | .pipe(pcm())
67 | .pipe(w)
68 | .on('url', function(url) {
69 | player.src = url;
70 | download.href = url;
71 | download.classList.remove('hidden');
72 | });
73 | }, function(err) {
74 | console.error(err);
75 | });
76 | }
77 | });
78 |
79 | pause.addEventListener('click', function() {
80 | record.removeAttribute('disabled');
81 | pause.setAttribute('disabled', 'disabled');
82 |
83 | sourceStream.suspend();
84 | });
85 |
86 | stop.addEventListener('click', function() {
87 | pause.setAttribute('disabled', 'disabled');
88 | stop.setAttribute('disabled', 'disabled');
89 |
90 | mediaStream.getAudioTracks().forEach(function(track) {
91 | track.stop();
92 | });
93 | });
94 |
--------------------------------------------------------------------------------
/capture.js:
--------------------------------------------------------------------------------
1 | var events = require('events');
2 | var once = require('once');
3 | var debug = require('debug')('audio-stream:capture');
4 |
5 | var isActive = function(media, track) {
6 | if(media.getAudioTracks) return !!media.getAudioTracks().length;
7 | if(track) return track.readyState !== 'ended';
8 | if('active' in media) return media.active;
9 | if('ended' in media) return !media.ended;
10 | return true;
11 | };
12 |
13 | var getAudioTrack = function(media) {
14 | var track = media.getAudioTracks ? media.getAudioTracks()[0] : null;
15 | return (track && track.readyState && ('onended' in track)) ? track : null;
16 | };
17 |
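// Bridges the ScriptProcessorNode to an EventEmitter: emits 'data' for each audioprocess event and
// 'end' when the media stream stops, with workarounds for browsers that never fire ended/inactive.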
18 | module.exports = function(media, processor) {
19 | var that = new events.EventEmitter();
20 | var track = getAudioTrack(media);
21 |
22 | var hasInactive = ('oninactive' in media);
23 | var hasEnded = ('onended' in media);
24 | var hasCurrentTime = ('currentTime' in media) && !hasInactive && !hasEnded;
25 |
26 | var currentTime = -1;
27 | var lastCall = -1;
28 | var timeout = null;
29 | var interval = null;
30 | var timeBuffer = [];
31 |
32 | var onaudioprocess = function(e) {
33 | lastCall = Date.now();
34 |
35 | if(hasCurrentTime) {
36 | // Current time is not updated in Firefox
37 | // when the media stream is stopped.
38 | if(currentTime === media.currentTime) {
39 | debug('current time unchanged', currentTime, !!timeout);
40 |
41 | // At the beginning it can take some time before the current time is updated.
42 | if(!timeout) timeout = setTimeout(onended, !currentTime ? 5000 : 1000);
43 | timeBuffer.push(e);
44 | return;
45 | }
46 | if(timeout) {
47 | debug('current time updated', currentTime, media.currentTime);
48 |
49 | clearTimeout(timeout);
50 | timeBuffer.forEach(function(entry) {
51 | that.emit('data', entry);
52 | });
53 |
54 | timeout = null;
55 | timeBuffer = [];
56 | }
57 |
58 | currentTime = media.currentTime;
59 | }
60 |
61 | that.emit('data', e);
62 | };
63 |
64 | var onended = once(function(e) {
65 | debug('onended', (e instanceof Event) ? [e.type, e.target] : null);
66 |
67 | suspend();
68 |
69 | if(track) track.removeEventListener('ended', onended, false);
70 |
71 | if(hasInactive) media.removeEventListener('inactive', onended, false);
72 | else if(hasEnded) media.removeEventListener('ended', onended, false);
73 |
74 | that.emit('end');
75 | });
76 |
77 | var scheduleInterval = function() {
78 | debug('schedule interval');
79 |
80 | // The processor listener is not called in
81 | // Firefox when the audio track is stopped.
82 | lastCall = Date.now();
83 | interval = setInterval(function() {
84 | if(Date.now() - lastCall > 10000) {
85 | debug('audio process timeout');
86 | onended();
87 | }
88 | }, 1000);
89 | };
90 |
91 | var suspend = function() {
92 | processor.removeEventListener('audioprocess', onaudioprocess, false);
93 | clearTimeout(timeout);
94 | clearInterval(interval);
95 | };
96 |
97 | var restart = function() {
98 | processor.addEventListener('audioprocess', onaudioprocess, false);
99 | scheduleInterval();
100 | };
101 |
102 | that.suspend = suspend;
103 | that.restart = restart;
104 | that.destroy = onended;
105 |
106 | if(track) track.addEventListener('ended', onended, false);
107 |
108 | if(hasInactive) media.addEventListener('inactive', onended, false);
109 | else if(hasEnded) media.addEventListener('ended', onended, false);
110 | else if(hasCurrentTime) currentTime = media.currentTime;
111 |
112 | restart();
113 |
114 | return that;
115 | };
116 |
117 | module.exports.ended = function(media) {
118 | var track = getAudioTrack(media);
119 | return !isActive(media, track);
120 | };
121 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | var stream = require('stream');
2 | var util = require('util');
3 | var once = require('once');
4 | var extend = require('xtend');
5 | var debug = require('debug')('audio-stream');
6 |
7 | var capture = require('./capture');
8 |
9 | var BIT_DEPTH = 32;
10 | var SAMPLE_RATE = 44100;
11 | var HIGH_WATER_MARK = Math.pow(2, 14) * 16;
12 |
13 | var AudioContext = window.AudioContext || window.webkitAudioContext;
14 | var noop = function() {};
15 |
16 | var AudioStream = function(media, options) {
17 | if(!(this instanceof AudioStream)) return new AudioStream(media, options);
18 | stream.Readable.call(this, { highWaterMark: HIGH_WATER_MARK });
19 |
20 | options = extend({
21 | buffer: 2048,
22 | channels: 2,
23 | volume: 1
24 | }, options);
25 |
26 | var self = this;
27 | var buffer = options.buffer;
28 | var channels = options.channels;
29 | var bytesPerSample = BIT_DEPTH / 8;
30 |
31 | this.duration = 0;
32 | this.samples = 0;
33 |
34 | this._destroyed = false;
35 | this._suspend = noop;
36 | this._restart = noop;
37 | this._stop = noop;
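// The audio graph is built lazily on the first _read() call; once() ensures it is only set up once.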
38 | this._record = once(function() {
39 | if(capture.ended(media)) {
40 | debug('ended before data');
41 |
42 | self._emitHeader(SAMPLE_RATE, channels);
43 | return self.push(null);
44 | }
45 |
46 | var context = options.context || new AudioContext();
47 | var source = (media instanceof Audio) ?
48 | context.createMediaElementSource(media) :
49 | context.createMediaStreamSource(media);
50 | var gain = context.createGain();
51 | var processor = context.createScriptProcessor(buffer, channels, channels);
52 |
53 | var that = capture(media, processor);
54 |
55 | gain.gain.value = options.volume;
56 |
57 | that.on('data', function(e) {
58 | var input = e.inputBuffer;
59 | var numberOfChannels = input.numberOfChannels;
60 | var numberOfSamples = input.length;
61 | var data = new Buffer(bytesPerSample * numberOfChannels * numberOfSamples);
62 |
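// Interleave each channel's Float32 samples into a single little-endian byte buffer.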
63 | for(var i = 0; i < numberOfChannels; i++) {
64 | var channel = input.getChannelData(i);
65 |
66 | for(var j = 0; j < numberOfSamples; j++) {
67 | var offset = bytesPerSample * (j * numberOfChannels + i);
68 | data.writeFloatLE(channel[j], offset);
69 | }
70 | }
71 |
72 | self.duration += input.duration;
73 | self.samples += numberOfSamples;
74 |
75 | self.push(data);
76 | });
77 |
78 | that.on('end', function() {
79 | self._stop();
80 | self.push(null);
81 | });
82 |
83 | self._suspend = function() {
84 | debug('suspend');
85 | that.suspend();
86 | };
87 |
88 | self._restart = function() {
89 | debug('restart');
90 | that.restart();
91 | };
92 |
93 | self._stop = function() {
94 | debug('stop');
95 |
96 | that.destroy();
97 |
98 | processor.disconnect();
99 | gain.disconnect();
100 | source.disconnect();
101 | if(!options.context) context.close().catch(function(e) {
102 | debug(e);
103 | });
104 | };
105 |
106 | self.on('end', function() {
107 | debug('end');
108 | });
109 |
110 | self.on('pause', function() {
111 | debug('pause');
112 | });
113 |
114 | self.on('resume', function() {
115 | debug('resume');
116 | });
117 |
118 | source.connect(gain);
119 | gain.connect(processor);
120 | processor.connect(context.destination);
121 |
122 | self._emitHeader(context.sampleRate, channels);
123 | });
124 | };
125 |
126 | util.inherits(AudioStream, stream.Readable);
127 |
128 | AudioStream.prototype.suspend = function() {
129 | this._suspend();
130 | };
131 |
132 | AudioStream.prototype.restart = function() {
133 | this._restart();
134 | };
135 |
136 | AudioStream.prototype.destroy = function(err) {
137 | debug('destroy', err);
138 |
139 | if(this._destroyed) return;
140 | this._destroyed = true;
141 |
142 | this._stop();
143 | if(err) this.emit('error', err);
144 | this.emit('close');
145 | };
146 |
147 | AudioStream.prototype._read = function() {
148 | this._record();
149 | };
150 |
151 | AudioStream.prototype._emitHeader = function(sampleRate, channels) {
152 | var bytesPerSample = BIT_DEPTH / 8;
153 |
154 | this.emit('header', {
155 | audioFormat: 3,
156 | channels: channels,
157 | sampleRate: sampleRate,
158 | byteRate: sampleRate * channels * bytesPerSample,
159 | blockAlign: channels * bytesPerSample,
160 | bitDepth: BIT_DEPTH
161 | });
162 | };
163 |
164 | module.exports = AudioStream;
165 |
--------------------------------------------------------------------------------