├── .eslintrc.js
├── .gitignore
├── README.md
├── package.json
└── src
    ├── index.js
    ├── ws-server.js
    └── public
        ├── css
        │   └── app.css
        ├── index.html
        ├── js
        │   └── app.js
        └── ping.wav
/.gitignore:
--------------------------------------------------------------------------------
1 | **/node_modules
2 | src/public/js/bundle.js
3 |
--------------------------------------------------------------------------------
/src/public/ping.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/websound/remote-performer/HEAD/src/public/ping.wav
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | extends: 'standard',
3 | plugins: [
4 | 'standard'
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | var attachWSS = require('./ws-server.js')
2 | var Hapi = require('hapi')
3 | var path = require('path')
4 |
5 | var server = new Hapi.Server({})
6 |
7 | server.connection({
8 | port: Number(process.env.PORT) || 9090
9 | })
10 |
11 | server.register(require('inert'), function (err) {
12 | if (err) {
13 | throw err
14 | }
15 |
16 | server.route({
17 | method: 'GET',
18 | path: '/{param*}',
19 | handler: {
20 | directory: {
21 | path: path.join(__dirname, '/public')
22 | }
23 | }
24 | })
25 |
26 | server.start(function (err) {
27 | if (err) {
28 | throw err
29 | }
30 |
31 | console.log('Server running at:', server.info.uri)
32 | attachWSS(server)
33 | })
34 | })
35 |
--------------------------------------------------------------------------------
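
index.js targets the hapi 13 callback API (`server.connection`, callback-style `register`/`start`), matching the `hapi@^13` pin in package.json. For readers on a current release, a rough equivalent on the async `@hapi/hapi` / `@hapi/inert` API might look like the sketch below; it is illustrative only and not part of this repository.

```js
// Sketch: the same static-file server on @hapi/hapi v17+ (async API).
// Assumes @hapi/hapi and @hapi/inert are installed; this project itself uses hapi 13.
'use strict'

const Hapi = require('@hapi/hapi')
const Inert = require('@hapi/inert')
const path = require('path')

const init = async () => {
  const server = Hapi.server({ port: Number(process.env.PORT) || 9090 })

  // Register the static-file handler plugin before defining the directory route.
  await server.register(Inert)

  // Serve everything under src/public, mirroring the original '/{param*}' route.
  server.route({
    method: 'GET',
    path: '/{param*}',
    handler: { directory: { path: path.join(__dirname, 'public') } }
  })

  await server.start()
  console.log('Server running at:', server.info.uri)
}

init()
```
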
/README.md:
--------------------------------------------------------------------------------
1 | Remote Performance example using WebMIDI and WebRTC
2 | ===================================================
3 |
4 | > WIP: Currently implemented as a WebSocket broadcast triggering chromatic notes via the Web Audio API
5 |
6 | ## Usage
7 |
8 | 1. Clone the repository: `$ git clone git@github.com:websound/RemotePerformer.git; cd RemotePerformer`
9 | 2. Install dependencies: `$ npm install`
10 | 3. Start server: `$ npm start`
11 | 4. Plug a MIDI controller into an available port
12 | 5. Point your browser to [http://localhost:9090](http://localhost:9090) (Google Chrome only)
13 | 6. Download [ngrok](https://ngrok.com/)
14 | 7. Move to the directory containing ngrok and expose your local web server on port 9090: `$ ./ngrok http 9090`
15 | 8. Share the ngrok-generated URL and play!
16 |
17 | ---
18 |
19 | _This code originated as a fork of [midisocket](https://github.com/vine77/midisocket)_
20 |
--------------------------------------------------------------------------------
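
As the README's note says, the current implementation is a thin broadcast relay: app.js sends each raw MIDI message over a WebSocket as a binary frame, ws-server.js rebroadcasts it to every connected client, and each client synthesizes the note locally with the Web Audio API. A minimal sketch of that wire format from the browser side (handler bodies here are illustrative, not copied from the repo):

```js
// Each WebSocket frame is one raw MIDI message: [status, pitch, velocity].
// 0x90 = Note On on channel 0, 60 = middle C, 127 = full velocity.
var socket = new WebSocket(location.origin.replace(/^http/, 'ws'))
socket.binaryType = 'arraybuffer'

socket.onopen = function () {
  var noteOn = new Uint8Array([0x90, 60, 127])
  socket.send(noteOn.buffer) // ws-server.js echoes this to every connected client
}

socket.onmessage = function (event) {
  var midi = new Uint8Array(event.data)
  if (midi[0] >> 4 === 9) { // top nibble 9 = Note On, the same check app.js uses
    console.log('Note On received for pitch', midi[1])
  }
}
```
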
/src/public/css/app.css:
--------------------------------------------------------------------------------
1 | html, body {
2 | height: 100%;
3 | }
4 |
5 | #wrap {
6 | min-height: 100%;
7 | height: auto;
8 | margin: 0 auto -60px;
9 | padding: 0 0 60px;
10 | }
11 |
12 | #footer {
13 | height: 60px;
14 | background-color: #f5f5f5;
15 | margin-top: 40px;
16 | }
17 |
18 | h1, h2, h3, h4, h5, h6 {
19 | font-family: 'Raleway';
20 | }
21 | .navbar-brand {
22 | font-family: 'Raleway';
23 | font-size: 20px;
24 | }
25 | #echo {
26 | display: none;
27 | }
28 | #console {
29 | height: 150px;
30 | overflow-y: scroll;
31 | }
32 |
33 |
34 | #wrap > .container {
35 | padding: 60px 15px 0;
36 | }
37 | .container .text-muted {
38 | margin: 20px 0;
39 | }
40 |
41 | #footer > .container {
42 | padding-left: 15px;
43 | padding-right: 15px;
44 | }
45 |
46 | code {
47 | font-size: 80%;
48 | }
49 |
50 | label {
51 | font-weight: normal;
52 | cursor: pointer;
53 | }
54 |
--------------------------------------------------------------------------------
/src/ws-server.js:
--------------------------------------------------------------------------------
1 | var WebSocketServer = require('ws').Server
2 |
3 | module.exports = function attachWSS (server) {
4 | // Websocket server
5 | var wss = new WebSocketServer({server: server.listener})
6 |
7 | wss.broadcast = function broadcast (data, flags) {
8 | wss.clients.forEach(function each (client) {
9 | client.send(data, flags)
10 | })
11 | }
12 |
13 | console.log('websocket server created')
14 | wss.on('connection', function (ws) {
15 | ws.broadcast = function broadcast (data, flags) {
16 | wss.clients.forEach(function bc (client) {
17 | if (client === ws) return
18 | client.send(data, flags)
19 | })
20 | }
21 |
22 | ws.on('message', function (data, flags) {
23 | if (flags.binary) { // If received binary message, i.e. MIDI
24 | console.log('MIDI:', data)
25 | wss.broadcast(data, {binary: true}) // Echo MIDI message back to client
26 | }
27 | })
28 |
29 | console.log('websocket connection open')
30 | ws.on('close', function () {
31 | console.log('websocket connection close')
32 | })
33 | })
34 | }
35 |
--------------------------------------------------------------------------------
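
Two details of ws-server.js are easy to miss: the per-connection `ws.broadcast` (which skips the sender) is defined but never called, and the message handler intentionally uses `wss.broadcast`, so the sender receives its own message back and app.js can compute round-trip latency from that echo. For a longer-running server it is also worth skipping sockets that are no longer open before sending; a defensive variant of the helper might look like this sketch (not code from the repo):

```js
// Broadcast only to clients whose socket is still OPEN (readyState === 1),
// so clients that disconnected mid-broadcast are skipped rather than erroring.
wss.broadcast = function broadcast (data) {
  wss.clients.forEach(function each (client) {
    if (client.readyState === 1) { // WebSocket.OPEN
      client.send(data, { binary: true }) // the relay only ever forwards binary MIDI
    }
  })
}
```
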
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "remote-performer",
3 | "version": "0.0.0",
4 | "description": "Real-time music collaboration for the web",
5 | "main": "src/index.js",
6 | "scripts": {
7 | "build": "browserify -t brfs src/public/js/app.js > src/public/js/bundle.js",
8 | "start": "npm run build && node src/index.js",
9 | "lint": "standard",
10 | "test": "exit 0"
11 | },
12 | "repository": {
13 | "type": "git",
14 | "url": "https://github.com/websound/RemotePerformer.git"
15 | },
16 | "author": "Nathan Ward",
17 | "license": "MIT",
18 | "bugs": {
19 | "url": "https://github.com/websound/RemotePerformer/issues"
20 | },
21 | "homepage": "https://github.com/websound/RemotePerformer",
22 | "dependencies": {
23 | "bootstrap": "~3.3.6",
24 | "brfs": "^1.4.1",
25 | "browserify": "^13.0.1",
26 | "eslint": "^2.8.0",
27 | "eslint-config-standard": "^5.1.0",
28 | "eslint-plugin-promise": "^1.1.0",
29 | "eslint-plugin-standard": "^1.3.2",
30 | "font-awesome": "~4.6.3",
31 | "hapi": "^13.4.1",
32 | "inert": "^4.0.0",
33 | "insert-css": "^0.2.0",
34 | "jquery": "^2.1.0",
35 | "process-nextick-args": "^1.0.7",
36 | "watchify": "^3.7.0",
37 | "ws": "^1.1.0"
38 | },
39 | "devDependencies": {
40 | "eslint-config-standard": "^5.1.0",
41 | "eslint-plugin-standard": "^1.3.2",
42 | "pre-commit": "^1.1.3",
43 | "standard": "^7.1.2"
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
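
The `start` script rebuilds the browser bundle with browserify before launching the server; the `brfs` transform is what lets app.js inline `bootstrap.css` via `fs.readFileSync` at build time. Since `watchify` is already a dependency, a rebuild-on-change script could be added along these lines (a suggested addition, not present in the file above):

```json
"scripts": {
  "build": "browserify -t brfs src/public/js/app.js > src/public/js/bundle.js",
  "watch": "watchify -t brfs src/public/js/app.js -o src/public/js/bundle.js -v",
  "start": "npm run build && node src/index.js",
  "lint": "standard",
  "test": "exit 0"
}
```
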
/src/public/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 | <head>
4 | <meta charset="utf-8">
5 | <meta name="viewport" content="width=device-width, initial-scale=1">
6 | <title>Remote Performer</title>
7 | <link href="https://fonts.googleapis.com/css?family=Raleway" rel="stylesheet">
8 | <link href="css/app.css" rel="stylesheet">
9 | </head>
10 | <body>
11 | <div id="wrap">
12 | <nav class="navbar navbar-default navbar-fixed-top">
13 | <div class="container">
14 | <a class="navbar-brand" href="/">Remote Performer</a>
15 | </div>
16 | </nav>
17 | <div class="container">
18 | <p>Note: Remote Performer is in an early stage of development.</p>
19 | <p>This interface aims to let a remote musician set the lead and master tempo for any accompanying performers playing along.</p>
20 | <p id="averageLatency"></p>
21 | <h4>Network Console:</h4>
22 | <div id="console"></div>
23 | <div id="echo"></div>
24 | </div>
25 | </div>
26 | <div id="footer">
27 | <div class="container">
28 | <p class="text-muted">Remote Performer</p>
29 | </div>
30 | </div>
31 | <script src="js/bundle.js"></script>
32 | </body>
33 | </html>
34 |
--------------------------------------------------------------------------------
/src/public/js/app.js:
--------------------------------------------------------------------------------
1 | /* globals location, WebSocket, $, AudioContext, XMLHttpRequest */
2 |
3 | window.jQuery = window.$ = require('jquery')
4 |
5 | var fs = require('fs')
6 | var insertCss = require('insert-css')
7 | var bootstrap = require('bootstrap/dist/js/bootstrap') // eslint-disable-line
8 | var bsStyle = fs.readFileSync('./node_modules/bootstrap/dist/css/bootstrap.css')
9 | insertCss(bsStyle)
10 |
11 | /* Global App Object */
12 | var App = {}
13 |
14 | /* App Declarations */
15 | App.audioContext = null
16 | App.midiAccess = null
17 | App.audioBuffer = null
18 | App.audioSources = {}
19 | App.activeNotes = []
20 | App.socket = null
21 | App.timestamp = 0
22 | App.sumLatencies = 0
23 | App.numberOfLatencies = 0
24 | App.source = null
25 | var context = new AudioContext()
26 |
27 | /*
28 | // Audio Globals, could be app globals(?)
29 | var channels = 2
30 | var bufferSampleSize = 22050
31 | var sampleRate = 44100
32 | var isOctaveKeyDown = false // Originally Redacted
33 | */
34 |
35 | /* WebSocket Connection */
36 | App.setupSocket = function () {
37 | var host = location.origin.replace(/^http/, 'ws')
38 | App.socket = new WebSocket(host)
39 | App.socket.binaryType = 'arraybuffer'
40 | App.socket.onmessage = function (event) {
41 | var midiMessage = new Uint8Array(event.data)
42 | if (midiMessage[0] >> 4 === 9) {
43 | App.handleMidi(midiMessage)
44 | // Log note on events from websockets
45 | $('#console').prepend('WS note: ' + midiMessage[1] + '<br>')
46 | var latency = window.performance.now() - App.timestamp
47 | App.sumLatencies = App.sumLatencies + latency
48 | App.numberOfLatencies++
49 | $('#console').prepend('WS roundtrip: ' + Math.ceil(latency) + ' ms<br>')
50 | $('#averageLatency').html('Latency: ' + Math.ceil(App.sumLatencies / App.numberOfLatencies) + ' ms')
51 | }
52 | }
53 | }
54 |
55 | /* Load App */
56 | App.load = function () {
57 | window.AudioContext = window.AudioContext || window.webkitAudioContext
58 | App.audioContext = new AudioContext()
59 | if (navigator.requestMIDIAccess) {
60 | navigator.requestMIDIAccess().then(function (midi) {
61 | App.midiAccess = midi // Required to prevent loss in MIDI input
62 | var inputs = App.midiAccess.inputs
63 | if (inputs.size > 0) {
64 | inputs.forEach(function (port, key) {
65 | port.onmidimessage = App.handleMidiEvent
66 | })
67 | } else {
68 | console.log('No MIDI devices detected. Please connect a MIDI device and reload the app.')
69 | }
70 | }, function () {
71 | window.alert('Your browser does not support MIDI input. Please use Google Chrome Canary.')
72 | })
73 | } else {
74 | window.alert('Your browser does not support MIDI input. Please use Google Chrome Canary.')
75 | }
76 | App.getPing()
77 | App.setupSocket()
78 | }
79 |
80 | /* TODO: Modularize ping play and note play, swappable testing components, then comment for just midi send */
81 | var audioContext = new AudioContext()
82 |
83 | App.getPing = function (url, cb) {
84 | var request = new XMLHttpRequest()
85 |
86 | request.open('GET', '../ping.wav', true)
87 | request.responseType = 'arraybuffer'
88 | request.onload = function () {
89 | var audioData = request.response
90 |
91 | // Wait 200ms for the sample to download and decode before starting playback.
92 | var startTime = audioContext.currentTime + 0.2
93 |
94 | App.audioContext.decodeAudioData(audioData, function (buffer) {
95 | App.source = App.audioContext.createBufferSource()
96 | App.source.buffer = buffer
97 | App.source.connect(App.audioContext.destination)
98 | App.source.start(startTime)
99 | },
100 | function (e) { console.log('Error with decoding audio data: ' + e.err) })
101 | }
102 | request.send()
103 | }
104 |
105 | /* AUDIO */
106 |
107 | // Chromatic Scale
108 | var octave = 0
109 | var notes = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
110 |
111 | App.noteToString = function (pitch) {
112 | if (!pitch) return null
113 | var octave = Math.floor(pitch / 12) - 1
114 | return notes[pitch % 12] + octave + '<br>'
115 | }
116 |
117 | // TODO: WHAT HAPPENS WHEN NOTES ARE PLAYED
118 | App.noteOn = function (pitch) {
119 | // Log note
120 | var note = App.noteToString(pitch)
121 | $('#console').prepend(note)
122 | App.activeNotes.push(pitch)
123 |
124 | /* Play audio */
125 | var frequency = 440 * Math.pow(2, (pitch - 69) / 12)
126 | var source = App.audioContext.createBufferSource() // no buffer is assigned yet; audible output comes from App.osc below
127 |
128 | source.playbackRate.value = frequency / 440
129 | source.loop = false
130 | source.connect(App.audioContext.destination)
131 | source.start(0)
132 | App.osc(frequency)
133 | App.audioSources[pitch] = source
134 | }
135 |
136 | App.osc = function (frequency) { // Play a short square-wave tone at the given frequency
137 | var osc = context.createOscillator()
138 | var gain = context.createGain()
139 |
140 | osc.type = 'square'
141 | osc.frequency.value = frequency
142 | // osc.frequency.setValueAtTime(frequency, 0.0)
143 | gain.gain.value = 0.5
144 |
145 | osc.connect(gain)
146 | gain.connect(context.destination)
147 |
148 | osc.start(context.currentTime)
149 | osc.stop(context.currentTime + 0.1)
150 | }
151 |
152 | // Note timing optimization
153 | App.noteOff = function (pitch) {
154 | var position = App.activeNotes.indexOf(pitch)
155 | if (position !== -1) {
156 | App.activeNotes.splice(position, 1)
157 | // App.audioSources[pitch].gain.setTargetAtTime(0.0, App.audioContext.currentTime, 0.1)
158 | }
159 | }
160 |
161 | /* MIDI */
162 |
163 | // TODO: MIDI event time optimization
164 | App.handleMidiEvent = function (event) {
165 | if (event.data[0] >> 4 === 9) App.timestamp = event.receivedTime
166 | App.handleMidi(event.data)
167 | App.socket.send(event.data.buffer)
168 | }
169 |
170 | // TODO: Optimize MIDI behavior
171 | App.handleMidi = function (midiMessage) {
172 | var type = midiMessage[0] >> 4
173 | // var channel = midiMessage[0] & 0x0F
174 | var pitch = midiMessage[1]
175 | var velocity = midiMessage[2]
176 | switch (type) {
177 | case 9:
178 | if (velocity !== 0) {
179 | App.noteOn(pitch)
180 | } else {
181 | // Note off
182 | App.noteOff(pitch)
183 | }
184 | break
185 | // Note off
186 | case 8:
187 | App.noteOff(pitch)
188 | break
189 | }
190 | }
191 |
192 | // TODO: All needed parameters?
193 | App.createMidiMessage = function (type, channel, pitch, velocity) {
194 | return {
195 | data: new Uint8Array([(type << 4) | channel, pitch, velocity]),
196 | receivedTime: window.performance.now()
197 | }
198 | }
199 |
200 | // TODO: Review key to MIDI mapping accuracy
201 | App.keyToMidi = function (key, isKeyDown) {
202 | // Start index is 56 for G# below middle C
203 | var keyToNote = [81, 65, 87, 83, 68, 82, 70, 84, 71, 72, 85, 74, 73, 75, 79, 76, 186, 219, 222, 221]
204 | if (keyToNote.indexOf(key) !== -1) {
205 | var pitch = keyToNote.indexOf(key) + 56
206 | pitch = Math.max(0, pitch + octave * 12)
207 | var type = (isKeyDown) ? 9 : 8
208 | var velocity = 127
209 | if (!isKeyDown || App.activeNotes.indexOf(pitch) === -1) {
210 | App.handleMidiEvent(App.createMidiMessage(type, 0, pitch, velocity))
211 | }
212 | }
213 | }
214 |
215 | // App-load event listener
216 | window.addEventListener('load', function () {
217 | App.load()
218 | })
219 |
220 | // Keyup/Keydown listener
221 | $(document).keydown(function (e) {
222 | if (e.which === 90) { // Z
223 | octave--
224 | } else if (e.which === 88) { // X
225 | octave++
226 | } else {
227 | App.keyToMidi(e.which, true)
228 | }
229 | })
230 | $(document).keyup(function (e) {
231 | App.keyToMidi(e.which, false)
232 | })
233 |
--------------------------------------------------------------------------------
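
The synthesis in `App.noteOn` and `App.osc` is driven by equal-tempered pitch math: MIDI note 69 is defined as A4 = 440 Hz, and each semitone multiplies the frequency by 2^(1/12). A small worked example of the formula used above:

```js
// f = 440 * 2^((pitch - 69) / 12), equal temperament anchored at A4 = 440 Hz.
function midiToFrequency (pitch) {
  return 440 * Math.pow(2, (pitch - 69) / 12)
}

console.log(midiToFrequency(69)) // 440      (A4)
console.log(midiToFrequency(60)) // ~261.63  (middle C)
console.log(midiToFrequency(81)) // 880      (A5, one octave above A4)
```
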