├── .gitignore
├── LICENSE.md
├── README.md
├── app.coffee
├── package.json
├── public
│   ├── css
│   │   └── app.css
│   └── javascripts
│       ├── app.js
│       ├── complex.js
│       └── simple.js
└── views
    ├── index.jade
    └── layout.jade
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .DS_Store
3 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 Keith Norman
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Simple WebRTC Demo
2 |
3 | WebRTC is a peer-to-peer standard proposal from Google allowing browsers to connect directly and transfer information without a central server. This allows browsers to share raw data as well as audio/video.
4 |
5 | This is a very simple video chat using WebRTC. I worked on it because I couldn't completely grok what was going on in [apprtc.appspot.com](https://apprtc.appspot.com) - Google's canonical example. My version uses websockets instead of Google App Engine Channels, so you can see what the socket responses are doing, and I think the front end code is a bit simpler too.
6 |
7 | # Get It Running
8 | git clone https://github.com/keithnorm/simple-webrtc-video-chat.git
9 | cd simple-webrtc-video-chat
10 | npm install
11 | ./node_modules/coffee-script/bin/coffee app.coffee
12 |
13 | Then open up two browser windows pointed to `localhost:3002/room_name`.
14 |
15 | You should see something like this:
16 |
17 | 
18 |
19 | # WebRTC in a nutshell
20 |
21 | Firstly read through the article on [HTML5 Rocks](http://www.html5rocks.com/en/tutorials/webrtc/basics/). Here are the steps to create a successful connection in high-level pseudo-code:
22 |
23 | pc = new PeerConnection
24 | ws = new WebSocket
25 |
26 | // gets called when connection is complete
27 | // this is when a remote peer can stream video
28 | // to your browser
29 | pc.onaddstream (event) ->
30 | remoteVid.src = event.stream
31 |
32 | // local peer
33 | pc.createOffer (description) ->
34 | pc.setLocalDescription(description)
35 | // over websockets
36 | ws.send description
37 |
38 | ws.on 'create_offer', (data) ->
39 | // now this acts on a remote peer
40 | pc.setRemoteDescription(data)
41 | pc.createAnswer (description) ->
42 | pc.setLocalDescription(description)
43 | ws.send description
44 |
45 | ws.on 'create_answer', (data) ->
46 | // back on local
47 | pc.setRemoteDescription(data)
48 |
49 | // called when handshake is complete
50 | pc.onicecandidate = (event) ->
51 | // forward to remote
52 | ws.send event.candidate
53 |
54 | ws.on 'ice_candidate', (data) ->
55 | pc.addIceCandidate(data)
56 |
57 | So this song and dance is mainly complicated by the need to talk to the remote host via some transport method (websockets in this case). Check out public/javascripts/simple.js for an example of connecting two peers within the same browser window for an example of the PeerConnection API without the transport layer.
58 |
59 | If you have any questions feel free to email me at keithnorm@gmail.com.
60 |
--------------------------------------------------------------------------------
/app.coffee:
--------------------------------------------------------------------------------
1 | express = require 'express'
2 | app = express()
3 | ws = require 'websocket.io'
4 | uuid = require 'node-uuid'
5 |
6 | app.use express.static './public'
7 |
8 | app.get '/:room', (req, res) ->
9 | res.render 'index.jade', params: req.query, room_count: io.clientsByRoom[req.params.room]?.length || 0
10 |
11 | server = app.listen 3002
12 |
13 | io = ws.attach server
14 |
15 | io.clientsById ||= {}
16 | io.clientsByRoom ||= {}
17 |
18 | io.on 'connection', (socket) ->
19 | room = /\/(.+)/.exec(socket.req.url)[1]
20 | socket.id = uuid.v1()
21 | socket.room = room
22 |
23 | if !room
24 | socket.close()
25 | return
26 |
27 | io.clientsByRoom[room] ||= []
28 | io.clientsByRoom[room].push socket
29 | io.clientsById[socket.id] = socket
30 |
31 | socket.send JSON.stringify
32 | type: 'assigned_id'
33 | id: socket.id
34 |
35 | socket.on 'message', (data) ->
36 | msg = JSON.parse(data)
37 |
38 | switch msg.type
39 | when 'received_offer', 'received_candidate', 'received_answer'
40 | # broadcast to all connected clients in the room
41 | # except for the socket that initiated this message
42 | for sock in io.clientsByRoom[socket.room]
43 | if sock.id != socket.id
44 | sock.send(JSON.stringify msg)
45 |
46 | when 'close'
47 | socket.close()
48 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Vid",
3 | "version": "0.0.0",
4 | "description": "A simple WebRTC video chat demo using websockets for signaling",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1"
8 | },
9 | "repository": "",
10 | "author": "",
11 | "license": "MIT",
12 | "dependencies": {
13 | "express": "3",
14 | "coffee-script": "*",
15 | "socket.io": "*",
16 | "websocket.io": "*",
17 | "jade": "*",
18 | "node-uuid": "*"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/public/css/app.css:
--------------------------------------------------------------------------------
1 | body {
2 | font: 18px/1 Helvetica, Arial, sans-serif;
3 | }
4 |
5 | h1 {
6 | margin-bottom: 10px;
7 | }
8 |
9 | video {
10 | width: 300px;
11 | height: 300px;
12 | margin-right: 20px;
13 | }
14 |
--------------------------------------------------------------------------------
/public/javascripts/app.js:
--------------------------------------------------------------------------------
1 | // the socket handles sending messages between peer connections while they are in the
2 | // process of connecting
3 | var socket = new WebSocket('ws://' + window.location.host + window.location.pathname);
4 |
5 | socket.onmessage = function(message) {
6 | var msg = JSON.parse(message.data);
7 |
8 | switch(msg.type) {
9 | case 'assigned_id' :
10 | socket.id = msg.id;
11 | break;
12 | case 'received_offer' :
13 | console.log('received offer', msg.data);
14 | pc.setRemoteDescription(new RTCSessionDescription(msg.data)).then(function() {
15 | console.log('create answer');
16 | pc.createAnswer().then(function(description) {
17 | console.log('sending answer');
18 | pc.setLocalDescription(description).then(function() {
19 | socket.send(JSON.stringify({
20 | type: 'received_answer',
21 | data: description
22 | }));
23 | })
24 | });
25 | });
26 | break;
27 | case 'received_answer' :
28 | console.log('received answer');
29 | if(!connected) {
30 | pc.setRemoteDescription(new RTCSessionDescription(msg.data));
31 | connected = true;
32 | }
33 | break;
34 |
35 | case 'received_candidate' :
36 | console.log('received candidate', msg.data);
37 | var candidate = new RTCIceCandidate({
38 | sdpMLineIndex: msg.data.label,
39 | candidate: msg.data.candidate
40 | });
41 | pc.addIceCandidate(candidate);
42 | break;
43 | }
44 | };
45 |
46 | var pc;
47 | var configuration = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]};
48 | var stream;
49 | var pc = new webkitRTCPeerConnection(configuration);
50 | var connected = false;
51 | var mediaConstraints = {
52 | 'mandatory': {
53 | 'OfferToReceiveAudio':true,
54 | 'OfferToReceiveVideo':true
55 | }
56 | };
57 |
58 | pc.onicecandidate = function(e) {
59 | if(e.candidate) {
60 | socket.send(JSON.stringify({
61 | type: 'received_candidate',
62 | data: {
63 | label: e.candidate.sdpMLineIndex,
64 | id: e.candidate.sdpMid,
65 | candidate: e.candidate.candidate
66 | }
67 | }));
68 | }
69 | };
70 |
71 | pc.onaddstream = function(e) {
72 | console.log('start remote video stream');
73 | vid2.src = webkitURL.createObjectURL(e.stream);
74 | vid2.play();
75 | };
76 |
77 | function broadcast() {
78 | // gets local video stream and renders to vid1
79 | navigator.webkitGetUserMedia({audio: true, video: true}, function(s) {
80 | stream = s;
81 | pc.addStream(s);
82 | vid1.src = webkitURL.createObjectURL(s);
83 | vid1.play();
84 | // initCall is set in views/index and is based on if there is another person in the room to connect to
85 | if(initCall)
86 | start();
87 | }, function (error) {
88 | try {
89 | console.error(error);
90 | } catch (e) {}
91 | });
92 | }
93 |
94 | function start() {
95 | // this initializes the peer connection
96 | pc.createOffer().then(function(description) {
97 | pc.setLocalDescription(description).then(function() {
98 | socket.send(JSON.stringify({
99 | type: 'received_offer',
100 | data: description
101 | }));
102 | });
103 | });
104 | }
105 |
106 | window.onload = function() {
107 | broadcast();
108 | };
109 |
110 | window.onbeforeunload = function() {
111 | socket.send(JSON.stringify({
112 | type: 'close'
113 | }));
114 | pc.close();
115 | pc = null;
116 | };
117 |
--------------------------------------------------------------------------------
/public/javascripts/complex.js:
--------------------------------------------------------------------------------
1 | var localVideo;
2 | var miniVideo;
3 | var remoteVideo;
4 | var localStream;
5 | var remoteStream;
6 | var channel;
7 | var channelReady = false;
8 | var pc;
9 | var socket;
10 | var initiator = 0;
11 | var started = false;
12 | // Set up audio and video regardless of what devices are present.
13 | var mediaConstraints = {'mandatory': {
14 | 'OfferToReceiveAudio':true,
15 | 'OfferToReceiveVideo':true }};
16 | var isVideoMuted = false;
17 | var isAudioMuted = false;
18 |
19 | function initialize() {
20 | console.log("Initializing; room=23666637.");
21 | card = document.getElementById("card");
22 | localVideo = document.getElementById("localVideo");
23 | miniVideo = document.getElementById("miniVideo");
24 | remoteVideo = document.getElementById("remoteVideo");
25 | resetStatus();
26 | openChannel('AHRlWronxgH2k_AR8MurZXhrcdeM0QsSh439GLHtK7-yisoZ0tFrYwJWVCejqEXsmLT8PDtc_Th_P0OhEPQmeWzrLwILreGuQtrV7fZpgx_7aKXcxnP4WxA');
27 | doGetUserMedia();
28 | }
29 |
30 | function openChannel(channelToken) {
31 | console.log("Opening channel.");
32 | var channel = new goog.appengine.Channel(channelToken);
33 | var handler = {
34 | 'onopen': onChannelOpened,
35 | 'onmessage': onChannelMessage,
36 | 'onerror': onChannelError,
37 | 'onclose': onChannelClosed
38 | };
39 | socket = channel.open(handler);
40 | }
41 |
42 | function resetStatus() {
43 | if (!initiator) {
44 | setStatus("Waiting for someone to join: https://apprtc.appspot.com/?r=23666637");
45 | } else {
46 | setStatus("Initializing...");
47 | }
48 | }
49 |
50 | function doGetUserMedia() {
51 | // Call into getUserMedia via the polyfill (adapter.js).
52 | var constraints = {"mandatory": {}, "optional": []};
53 | try {
54 | getUserMedia({'audio':true, 'video':constraints}, onUserMediaSuccess,
55 | onUserMediaError);
56 | console.log("Requested access to local media with mediaConstraints:\n" +
57 | " \"" + JSON.stringify(constraints) + "\"");
58 | } catch (e) {
59 | alert("getUserMedia() failed. Is this a WebRTC capable browser?");
60 | console.log("getUserMedia failed with exception: " + e.message);
61 | }
62 | }
63 |
64 | function createPeerConnection() {
65 | var pc_config = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]};
66 | try {
67 | // Create an RTCPeerConnection via the polyfill (adapter.js).
68 | pc = new RTCPeerConnection(pc_config);
69 | pc.onicecandidate = onIceCandidate;
70 | console.log("Created RTCPeerConnnection with config:\n" + " \"" +
71 | JSON.stringify(pc_config) + "\".");
72 | } catch (e) {
73 | console.log("Failed to create PeerConnection, exception: " + e.message);
74 | alert("Cannot create RTCPeerConnection object; WebRTC is not supported by this browser.");
75 | return;
76 | }
77 |
78 | pc.onconnecting = onSessionConnecting;
79 | pc.onopen = onSessionOpened;
80 | pc.onaddstream = onRemoteStreamAdded;
81 | pc.onremovestream = onRemoteStreamRemoved;
82 | }
83 |
84 | function maybeStart() {
85 | if (!started && localStream && channelReady) {
86 | setStatus("Connecting...");
87 | console.log("Creating PeerConnection.");
88 | createPeerConnection();
89 | console.log("Adding local stream.");
90 | pc.addStream(localStream);
91 | started = true;
92 | // Caller initiates offer to peer.
93 | if (initiator)
94 | doCall();
95 | }
96 | }
97 |
98 | function setStatus(state) {
99 | footer.innerHTML = state;
100 | }
101 |
102 | function doCall() {
103 | console.log("Sending offer to peer.");
104 | pc.createOffer(setLocalAndSendMessage, null, mediaConstraints);
105 | }
106 |
107 | function doAnswer() {
108 | console.log("Sending answer to peer.");
109 | pc.createAnswer(setLocalAndSendMessage, null, mediaConstraints);
110 | }
111 |
112 | function setLocalAndSendMessage(sessionDescription) {
113 | // Set Opus as the preferred codec in SDP if Opus is present.
114 | sessionDescription.sdp = preferOpus(sessionDescription.sdp);
115 | pc.setLocalDescription(sessionDescription);
116 | sendMessage(sessionDescription);
117 | }
118 |
119 | function sendMessage(message) {
120 | var msgString = JSON.stringify(message);
121 | console.log('C->S: ' + msgString);
122 | path = '/message?r=23666637' + '&u=65431484';
123 | var xhr = new XMLHttpRequest();
124 | xhr.open('POST', path, true);
125 | xhr.send(msgString);
126 | }
127 |
128 | function processSignalingMessage(message) {
129 | var msg = JSON.parse(message);
130 |
131 | if (msg.type === 'offer') {
132 | // Callee creates PeerConnection
133 | if (!initiator && !started)
134 | maybeStart();
135 |
136 | pc.setRemoteDescription(new RTCSessionDescription(msg));
137 | doAnswer();
138 | } else if (msg.type === 'answer' && started) {
139 | pc.setRemoteDescription(new RTCSessionDescription(msg));
140 | } else if (msg.type === 'candidate' && started) {
141 | var candidate = new RTCIceCandidate({sdpMLineIndex:msg.label,
142 | candidate:msg.candidate});
143 | pc.addIceCandidate(candidate);
144 | } else if (msg.type === 'bye' && started) {
145 | onRemoteHangup();
146 | }
147 | }
148 |
149 | function onChannelOpened() {
150 | console.log('Channel opened.');
151 | channelReady = true;
152 | if (initiator) maybeStart();
153 | }
154 | function onChannelMessage(message) {
155 | console.log('S->C: ' + message.data);
156 | processSignalingMessage(message.data);
157 | }
158 | function onChannelError() {
159 | console.log('Channel error.', arguments);
160 | }
161 | function onChannelClosed() {
162 | console.log('Channel closed.');
163 | }
164 |
165 | function onUserMediaSuccess(stream) {
166 | console.log("User has granted access to local media.");
167 | // Call the polyfill wrapper to attach the media stream to this element.
168 | attachMediaStream(localVideo, stream);
169 | localVideo.style.opacity = 1;
170 | localStream = stream;
171 | // Caller creates PeerConnection.
172 | if (initiator) maybeStart();
173 | }
174 |
175 | function onUserMediaError(error) {
176 | console.log("Failed to get access to local media. Error code was " + error.code);
177 | alert("Failed to get access to local media. Error code was " + error.code + ".");
178 | }
179 |
180 | function onIceCandidate(event) {
181 | if (event.candidate) {
182 | sendMessage({type: 'candidate',
183 | label: event.candidate.sdpMLineIndex,
184 | id: event.candidate.sdpMid,
185 | candidate: event.candidate.candidate});
186 | } else {
187 | console.log("End of candidates.");
188 | }
189 | }
190 |
191 | function onSessionConnecting(message) {
192 | console.log("Session connecting.");
193 | }
194 | function onSessionOpened(message) {
195 | console.log("Session opened.");
196 | }
197 |
198 | function onRemoteStreamAdded(event) {
199 | console.log("Remote stream added.");
200 | // TODO(ekr@rtfm.com): Copy the minivideo on Firefox
201 | miniVideo.src = localVideo.src;
202 | attachMediaStream(remoteVideo, event.stream);
203 | remoteStream = event.stream;
204 | waitForRemoteVideo();
205 | }
206 | function onRemoteStreamRemoved(event) {
207 | console.log("Remote stream removed.");
208 | }
209 |
210 | function onHangup() {
211 | console.log("Hanging up.");
212 | transitionToDone();
213 | stop();
214 | // will trigger BYE from server
215 | socket.close();
216 | }
217 |
218 | function onRemoteHangup() {
219 | console.log('Session terminated.');
220 | transitionToWaiting();
221 | stop();
222 | initiator = 0;
223 | }
224 |
225 | function stop() {
226 | started = false;
227 | isAudioMuted = false;
228 | isVideoMuted = false;
229 | pc.close();
230 | pc = null;
231 | }
232 |
233 | function waitForRemoteVideo() {
234 | if (remoteStream.videoTracks.length === 0 || remoteVideo.currentTime > 0) {
235 | transitionToActive();
236 | } else {
237 | setTimeout(waitForRemoteVideo, 100);
238 | }
239 | }
240 | function transitionToActive() {
241 | remoteVideo.style.opacity = 1;
242 | card.style.webkitTransform = "rotateY(180deg)";
243 | setTimeout(function() { localVideo.src = ""; }, 500);
244 | setTimeout(function() { miniVideo.style.opacity = 1; }, 1000);
245 | setStatus("");
246 | }
247 | function transitionToWaiting() {
248 | card.style.webkitTransform = "rotateY(0deg)";
249 | setTimeout(function() {
250 | localVideo.src = miniVideo.src;
251 | miniVideo.src = "";
252 | remoteVideo.src = ""; }, 500);
253 | miniVideo.style.opacity = 0;
254 | remoteVideo.style.opacity = 0;
255 | resetStatus();
256 | }
257 | function transitionToDone() {
258 | localVideo.style.opacity = 0;
259 | remoteVideo.style.opacity = 0;
260 | miniVideo.style.opacity = 0;
261 | setStatus("You have left the call. Click here to rejoin.");
262 | }
263 | function enterFullScreen() {
264 | container.webkitRequestFullScreen();
265 | }
266 |
267 | function toggleVideoMute() {
268 | if (localStream.videoTracks.length === 0) {
269 | console.log("No local video available.");
270 | return;
271 | }
272 |
273 | if (isVideoMuted) {
274 | for (i = 0; i < localStream.videoTracks.length; i++) {
275 | localStream.videoTracks[i].enabled = true;
276 | }
277 | console.log("Video unmuted.");
278 | } else {
279 | for (i = 0; i < localStream.videoTracks.length; i++) {
280 | localStream.videoTracks[i].enabled = false;
281 | }
282 | console.log("Video muted.");
283 | }
284 |
285 | isVideoMuted = !isVideoMuted;
286 | }
287 |
288 | function toggleAudioMute() {
289 | if (localStream.audioTracks.length === 0) {
290 | console.log("No local audio available.");
291 | return;
292 | }
293 |
294 | if (isAudioMuted) {
295 | for (i = 0; i < localStream.audioTracks.length; i++) {
296 | localStream.audioTracks[i].enabled = true;
297 | }
298 | console.log("Audio unmuted.");
299 | } else {
300 | for (i = 0; i < localStream.audioTracks.length; i++){
301 | localStream.audioTracks[i].enabled = false;
302 | }
303 | console.log("Audio muted.");
304 | }
305 |
306 | isAudioMuted = !isAudioMuted;
307 | }
308 |
309 | setTimeout(initialize, 1);
310 |
311 | // Send BYE on refreshing(or leaving) a demo page
312 | // to ensure the room is cleaned for next session.
313 | window.onbeforeunload = function() {
314 | sendMessage({type: 'bye'});
315 | };
316 |
317 | // Ctrl-D: toggle audio mute; Ctrl-E: toggle video mute.
318 | // On Mac, Command key is instead of Ctrl.
319 | // Return false to screen out original Chrome shortcuts.
320 | document.onkeydown = function() {
321 | if (navigator.appVersion.indexOf("Mac") != -1) {
322 | if (event.metaKey && event.keyCode === 68) {
323 | toggleAudioMute();
324 | return false;
325 | }
326 | if (event.metaKey && event.keyCode === 69) {
327 | toggleVideoMute();
328 | return false;
329 | }
330 | } else {
331 | if (event.ctrlKey && event.keyCode === 68) {
332 | toggleAudioMute();
333 | return false;
334 | }
335 | if (event.ctrlKey && event.keyCode === 69) {
336 | toggleVideoMute();
337 | return false;
338 | }
339 | }
340 | };
341 |
342 | // Set Opus as the default audio codec if it's present.
343 | function preferOpus(sdp) {
344 | var sdpLines = sdp.split('\r\n');
345 |
346 | // Search for m line.
347 | for (var i = 0; i < sdpLines.length; i++) {
348 | if (sdpLines[i].search('m=audio') !== -1) {
349 | var mLineIndex = i;
350 | break;
351 | }
352 | }
353 | if (mLineIndex === null)
354 | return sdp;
355 |
356 | // If Opus is available, set it as the default in m line.
357 | for (var i = 0; i < sdpLines.length; i++) {
358 | if (sdpLines[i].search('opus/48000') !== -1) {
359 | var opusPayload = extractSdp(sdpLines[i], /:(\d+) opus\/48000/i);
360 | if (opusPayload)
361 | sdpLines[mLineIndex] = setDefaultCodec(sdpLines[mLineIndex], opusPayload);
362 | break;
363 | }
364 | }
365 |
366 | // Remove CN in m line and sdp.
367 | sdpLines = removeCN(sdpLines, mLineIndex);
368 |
369 | sdp = sdpLines.join('\r\n');
370 | return sdp;
371 | }
372 |
373 | function extractSdp(sdpLine, pattern) {
374 | var result = sdpLine.match(pattern);
375 | return (result && result.length == 2)? result[1]: null;
376 | }
377 |
378 | // Set the selected codec to the first in m line.
379 | function setDefaultCodec(mLine, payload) {
380 | var elements = mLine.split(' ');
381 | var newLine = new Array();
382 | var index = 0;
383 | for (var i = 0; i < elements.length; i++) {
384 | if (index === 3) // Format of media starts from the fourth.
385 | newLine[index++] = payload; // Put target payload to the first.
386 | if (elements[i] !== payload)
387 | newLine[index++] = elements[i];
388 | }
389 | return newLine.join(' ');
390 | }
391 |
392 | // Strip CN from sdp before CN constraints is ready.
393 | function removeCN(sdpLines, mLineIndex) {
394 | var mLineElements = sdpLines[mLineIndex].split(' ');
395 | // Scan from end for the convenience of removing an item.
396 | for (var i = sdpLines.length-1; i >= 0; i--) {
397 | var payload = extractSdp(sdpLines[i], /a=rtpmap:(\d+) CN\/\d+/i);
398 | if (payload) {
399 | var cnPos = mLineElements.indexOf(payload);
400 | if (cnPos !== -1) {
401 | // Remove CN payload from m line.
402 | mLineElements.splice(cnPos, 1);
403 | }
404 | // Remove CN line in sdp
405 | sdpLines.splice(i, 1);
406 | }
407 | }
408 |
409 | sdpLines[mLineIndex] = mLineElements.join(' ');
410 | return sdpLines;
411 | }
412 |
413 |
--------------------------------------------------------------------------------
/public/javascripts/simple.js:
--------------------------------------------------------------------------------
1 | var vid1 = document.getElementById("vid1");
2 | var vid2 = document.getElementById("vid2");
3 | btn1.disabled = false;
4 | btn2.disabled = true;
5 | btn3.disabled = true;
6 | var pc1,pc2;
7 | var localstream;
8 |
9 | function trace(text) {
10 | // This function is used for logging.
11 | if (text[text.length - 1] == '\n') {
12 | text = text.substring(0, text.length - 1);
13 | }
14 | console.log((performance.now() / 1000).toFixed(3) + ": " + text);
15 | }
16 |
17 | function gotStream(stream){
18 | trace("Received local stream");
19 | vid1.src = webkitURL.createObjectURL(stream);
20 | vid1.play();
21 | localstream = stream;
22 | btn2.disabled = false;
23 | }
24 |
25 | function start() {
26 | trace("Requesting local stream");
27 | btn1.disabled = true;
28 | navigator.webkitGetUserMedia({audio:true, video:true},
29 | gotStream, function() {});
30 | }
31 |
32 | function call() {
33 | btn2.disabled = true;
34 | btn3.disabled = false;
35 | trace("Starting call");
36 | if (localstream.videoTracks.length > 0)
37 | trace('Using Video device: ' + localstream.videoTracks[0].label);
38 | if (localstream.audioTracks.length > 0)
39 | trace('Using Audio device: ' + localstream.audioTracks[0].label);
40 | var servers = null;
41 | pc1 = new webkitRTCPeerConnection(servers);
42 | trace("Created local peer connection object pc1");
43 | pc1.onicecandidate = iceCallback1;
44 | pc2 = new webkitRTCPeerConnection(servers);
45 | trace("Created remote peer connection object pc2");
46 | pc2.onicecandidate = iceCallback2;
47 | pc2.onaddstream = gotRemoteStream;
48 | //pc2.addStream(localstream);
49 |
50 | //pc1.onaddstream = function() {
51 | // console.log('pc 1 added sream');
52 | //};
53 | pc1.addStream(localstream);
54 | trace("Adding Local Stream to peer connection");
55 |
56 | pc1.createOffer(gotDescription1);
57 | }
58 |
59 | function gotDescription1(desc){
60 | pc1.setLocalDescription(desc);
61 | trace("Offer from pc1 \n" + desc.sdp);
62 | pc2.setRemoteDescription(desc);
63 | pc2.createAnswer(gotDescription2);
64 | }
65 |
66 | function gotDescription2(desc){
67 | pc2.setLocalDescription(desc);
68 | trace("Answer from pc2 \n" + desc.sdp);
69 | pc1.setRemoteDescription(desc);
70 | }
71 |
72 | function hangup() {
73 | trace("Ending call");
74 | pc1.close();
75 | pc2.close();
76 | pc1 = null;
77 | pc2 = null;
78 | btn3.disabled = true;
79 | btn2.disabled = false;
80 | }
81 |
82 | function gotRemoteStream(e){
83 | vid2.src = webkitURL.createObjectURL(e.stream);
84 | vid2.play();
85 | trace("Received remote stream");
86 | }
87 |
88 | function iceCallback1(event){
89 | if (event.candidate) {
90 | pc2.addIceCandidate(new RTCIceCandidate(event.candidate));
91 | trace("Local ICE candidate: \n" + event.candidate.candidate);
92 | }
93 | }
94 |
95 | function iceCallback2(event){
96 | if (event.candidate) {
97 | pc1.addIceCandidate(new RTCIceCandidate(event.candidate));
98 | trace("Remote ICE candidate: \n " + event.candidate.candidate);
99 | }
100 | }
101 |
102 |
--------------------------------------------------------------------------------
/views/index.jade:
--------------------------------------------------------------------------------
1 | extends layout
2 |
3 | block content
4 | script(type='text/javascript').
5 | var room = "#{ params.room }";
6 | var initCall = #{ room_count > 0 };
7 |
8 | video#vid1(width='100%', height='100%')
9 | video#vid2(width='100%', height='100%')
10 |
11 |
--------------------------------------------------------------------------------
/views/layout.jade:
--------------------------------------------------------------------------------
1 | html
2 | head
3 | title WebRTC Chat
4 | link(rel='stylesheet', type='text/css', href='/css/app.css')
5 |
6 | body
7 | h1 WebRTC Chat
8 | block content
9 | script(src='/javascripts/app.js')
10 |
--------------------------------------------------------------------------------