├── work
│   ├── js
│   │   ├── main.js
│   │   └── lib
│   │       └── adapter.js
│   ├── css
│   │   └── main.css
│   └── index.html
├── step-01
│   ├── css
│   │   └── main.css
│   ├── index.html
│   └── js
│       └── main.js
├── step-02
│   ├── css
│   │   └── main.css
│   ├── index.html
│   └── js
│       └── main.js
├── step-03
│   ├── css
│   │   └── main.css
│   ├── index.html
│   └── js
│       └── main.js
├── step-04
│   ├── css
│   │   └── main.css
│   ├── package.json
│   ├── index.html
│   ├── js
│   │   └── main.js
│   └── index.js
├── step-05
│   ├── css
│   │   └── main.css
│   ├── package.json
│   ├── index.html
│   ├── index.js
│   └── js
│       └── main.js
├── step-06
│   ├── css
│   │   └── main.css
│   ├── package.json
│   ├── index.html
│   ├── index.js
│   └── js
│       └── main.js
├── .gitignore
├── README.md
├── CONTRIBUTING.md
├── index.html
├── js
│   └── lib
│       └── adapter.js
└── LICENSE
/work/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 |
--------------------------------------------------------------------------------
/work/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-01/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-02/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-03/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-04/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-05/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | video {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
--------------------------------------------------------------------------------
/step-06/css/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | }
4 |
5 | canvas {
6 | max-width: 100%;
7 | width: 320px;
8 | }
9 |
10 | video {
11 | max-width: 100%;
12 | width: 320px;
13 | }
14 |
--------------------------------------------------------------------------------
/step-04/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webrtc-codelab",
3 | "version": "0.0.1",
4 | "description": "WebRTC codelab",
5 | "dependencies": {
6 | "node-static": "^0.7.10",
7 | "socket.io": "^2.0.4"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/step-05/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webrtc-codelab",
3 | "version": "0.0.1",
4 | "description": "WebRTC codelab",
5 | "dependencies": {
6 | "node-static": "^0.7.10",
7 | "socket.io": "^2.0.4"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/step-06/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "webrtc-codelab",
3 | "version": "0.0.1",
4 | "description": "WebRTC codelab",
5 | "dependencies": {
6 | "node-static": "^0.7.10",
7 | "socket.io": "^2.0.4"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/work/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the page title and heading "Realtime communication with WebRTC" (lines 6 and 14)]
--------------------------------------------------------------------------------
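A minimal sketch of what /work/index.html presumably contains, inferred from the surviving title text and its sibling files (css/main.css, js/main.js, js/lib/adapter.js); the exact markup is an assumption:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <!-- work/js/main.js is the file edited during the codelab;
       js/lib/adapter.js is also present in work/ and may be included here. -->
  <script src="js/main.js"></script>
</body>
</html>
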
/step-01/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the title and heading "Realtime communication with WebRTC" (lines 4 and 8)]
--------------------------------------------------------------------------------
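A sketch of what /step-01/index.html presumably contains: step-01/js/main.js attaches the camera stream to document.querySelector('video'), so a single video element is required; the attributes and surrounding layout below are assumptions:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <!-- main.js sets this element's srcObject to the getUserMedia stream;
       autoplay/playsinline are assumed so playback starts without a user gesture. -->
  <video autoplay playsinline></video>
  <script src="js/main.js"></script>
</body>
</html>
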
/step-04/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the title and heading "Realtime communication with WebRTC" (lines 6 and 14)]
--------------------------------------------------------------------------------
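step-04/js/main.js calls io.connect() and only exercises the signaling channel, so the page mainly needs the Socket.IO client served by the index.js server plus js/main.js; the rest of this sketch is an assumption:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <!-- The Socket.IO server started in index.js serves its client at this path. -->
  <script src="/socket.io/socket.io.js"></script>
  <script src="js/main.js"></script>
</body>
</html>
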
/step-02/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the title and heading "Realtime communication with WebRTC" (lines 5 and 10)]
--------------------------------------------------------------------------------
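step-02/js/main.js looks up localVideo, remoteVideo, startButton, callButton and hangupButton by id, so those elements must exist in /step-02/index.html; everything else in this sketch (labels, wrappers, video attributes) is an assumption:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <!-- Local capture, and the "remote" stream received over the in-page peer connection. -->
  <video id="localVideo" autoplay playsinline></video>
  <video id="remoteVideo" autoplay playsinline></video>
  <div>
    <button id="startButton">Start</button>
    <button id="callButton">Call</button>
    <button id="hangupButton">Hang Up</button>
  </div>
  <script src="js/main.js"></script>
</body>
</html>
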
/step-05/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the title and heading "Realtime communication with WebRTC" (lines 6 and 14)]
--------------------------------------------------------------------------------
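step-05/js/main.js expects #localVideo and #remoteVideo elements and a Socket.IO connection to the signaling server in index.js; a sketch of /step-05/index.html under those assumptions:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <video id="localVideo" autoplay playsinline></video>
  <video id="remoteVideo" autoplay playsinline></video>
  <script src="/socket.io/socket.io.js"></script>
  <script src="js/main.js"></script>
</body>
</html>
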
/step-03/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; the only visible text is the title and heading "Realtime communication with WebRTC" (lines 6 and 14)]
--------------------------------------------------------------------------------
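step-03/js/main.js wires up textarea#dataChannelSend, textarea#dataChannelReceive and the startButton/sendButton/closeButton buttons, so those ids and tag names are required in /step-03/index.html; the initial disabled states, placeholder wording and labels in this sketch are assumptions:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <!-- main.js clears this placeholder when a connection starts. -->
  <textarea id="dataChannelSend" placeholder="Press Start, enter some text, then press Send."></textarea>
  <textarea id="dataChannelReceive" disabled></textarea>
  <div>
    <button id="startButton">Start</button>
    <button id="sendButton" disabled>Send</button>
    <button id="closeButton" disabled>Stop</button>
  </div>
  <script src="js/main.js"></script>
</body>
</html>
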
/step-04/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var isInitiator;
4 |
5 | window.room = prompt("Enter room name:");
6 |
7 | var socket = io.connect();
8 |
9 | if (room !== "") {
10 | console.log('Message from client: Asking to join room ' + room);
11 | socket.emit('create or join', room);
12 | }
13 |
14 | socket.on('created', function(room, clientId) {
15 | isInitiator = true;
16 | });
17 |
18 | socket.on('full', function(room) {
19 | console.log('Message from client: Room ' + room + ' is full :^(');
20 | });
21 |
22 | socket.on('ipaddr', function(ipaddr) {
23 | console.log('Message from client: Server IP address is ' + ipaddr);
24 | });
25 |
26 | socket.on('joined', function(room, clientId) {
27 | isInitiator = false;
28 | });
29 |
30 | socket.on('log', function(array) {
31 | console.log.apply(console, array);
32 | });
33 |
--------------------------------------------------------------------------------
/step-01/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | // In this codelab, you will be streaming only video (video: true).
4 | const mediaStreamConstraints = {
5 | video: true,
6 | };
7 |
8 | // Video element where stream will be placed.
9 | const localVideo = document.querySelector('video');
10 |
11 | // Local stream that will be displayed in the video element.
12 | let localStream;
13 |
14 | // Handles success by adding the MediaStream to the video element.
15 | function gotLocalMediaStream(mediaStream) {
16 | localStream = mediaStream;
17 | localVideo.srcObject = mediaStream;
18 | }
19 |
20 | // Handles error by logging a message to the console with the error message.
21 | function handleLocalMediaStreamError(error) {
22 | console.log('navigator.getUserMedia error: ', error);
23 | }
24 |
25 | // Initializes media stream.
26 | navigator.mediaDevices.getUserMedia(mediaStreamConstraints)
27 | .then(gotLocalMediaStream).catch(handleLocalMediaStreamError);
28 |
--------------------------------------------------------------------------------
/step-06/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; visible text: the title and heading "Realtime communication with WebRTC" (lines 6 and 14), the words "then" (line 22) and "or" (line 23), and the heading "Incoming photos" (line 28)]
--------------------------------------------------------------------------------
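step-06/js/main.js is not included in this dump, so this sketch of /step-06/index.html is looser: the step-06 stylesheet adds a canvas rule, and the surviving fragments ("then", "or", "Incoming photos") suggest snapshot buttons and an incoming-photo area. All element ids below are hypothetical:

<!DOCTYPE html>
<html>
<head>
  <title>Realtime communication with WebRTC</title>
  <link rel="stylesheet" href="css/main.css" />
</head>
<body>
  <h1>Realtime communication with WebRTC</h1>
  <video autoplay playsinline></video>
  <!-- Canvas used to capture a frame from the video (hypothetical id). -->
  <canvas id="photo"></canvas>
  <div>
    <!-- Hypothetical ids; the words "then" and "or" survive from the original markup. -->
    <button id="snap">Snap</button> then <button id="send">Send</button>
    or <button id="snapAndSend">Snap &amp; Send</button>
  </div>
  <h2>Incoming photos</h2>
  <div id="trail"></div>
  <script src="/socket.io/socket.io.js"></script>
  <script src="js/main.js"></script>
</body>
</html>
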
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 |
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 | *.pid.lock
11 |
12 | # Directory for instrumented libs generated by jscoverage/JSCover
13 | lib-cov
14 |
15 | # Coverage directory used by tools like istanbul
16 | coverage
17 |
18 | # nyc test coverage
19 | .nyc_output
20 |
21 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
22 | .grunt
23 |
24 | # node-waf configuration
25 | .lock-wscript
26 |
27 | # Compiled binary addons (http://nodejs.org/api/addons.html)
28 | build/Release
29 |
30 | # Dependency directories
31 | node_modules
32 | */node_modules
33 | jspm_packages
34 | package-lock.json
35 | .DS_Store
36 |
37 | # Optional npm cache directory
38 | .npm
39 |
40 | # Optional eslint cache
41 | .eslintcache
42 |
43 | # Optional REPL history
44 | .node_repl_history
45 |
46 | # Output of 'npm pack'
47 | *.tgz
48 |
49 | # Yarn Integrity file
50 | .yarn-integrity
51 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Realtime communication with WebRTC
2 |
3 | This repository contains the resources you need for the codelab [Realtime communication with WebRTC](https://codelabs.developers.google.com/codelabs/webrtc-web/#0).
4 |
5 | This is a work in progress. If you find a mistake or have a suggestion, please [file an issue](https://github.com/googlecodelabs/webrtc-web/issues). Thanks!
6 |
7 | ## What you'll learn
8 | * Get video from your webcam
9 | * Stream video with RTCPeerConnection
10 | * Stream data with RTCDataChannel
11 | * Set up a signaling service to exchange messages
12 | * Combine peer connection and signaling
13 | * Take a photo and share it via a data channel
14 |
15 |
16 | ## What you'll need
17 | * Chrome 47 or above.
18 | * Web Server for Chrome, or a web server of your choice.
19 | * The sample code.
20 | * A text editor.
21 | * Basic knowledge of HTML, CSS, JavaScript and Node.js.
22 |
23 |
24 | ## For 'step-04', 'step-05', 'step-06'
25 |
26 | Run `npm install` before running the code.
--------------------------------------------------------------------------------
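A typical way to run steps 04 through 06 (an assumption; the package.json files define no npm scripts): change into the step's directory, run `npm install`, start the combined static/Socket.IO server with `node index.js`, and open `http://localhost:8080` (the port set in index.js) in two browser tabs or on two devices.
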
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to become a contributor and submit your own code
2 |
3 | ## Contributor License Agreements
4 |
5 | We'd love to accept your patches! Before we can take them, we
6 | have to jump a couple of legal hurdles.
7 |
8 | ### Before you contribute
9 | Before we can use your code, you must sign the
10 | [Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual)
11 | (CLA), which you can do online. The CLA is necessary mainly because you own the
12 | copyright to your changes, even after your contribution becomes part of our
13 | codebase, so we need your permission to use and distribute your code. We also
14 | need to be sure of various other things—for instance that you'll tell us if you
15 | know that your code infringes on other people's patents. You don't have to sign
16 | the CLA until after you've submitted your code for review and a member has
17 | approved it, but you must do it before we can put your code into our codebase.
18 | Before you start working on a larger contribution, you should get in touch with
19 | us first through the issue tracker with your idea so that we can help out and
20 | possibly guide you. Coordinating up front makes it much easier to avoid
21 | frustration later on.
22 |
23 | ### Code reviews
24 | All submissions, including submissions by project members, require review. We
25 | use GitHub pull requests for this purpose.
26 |
27 | ### The small print
28 | Contributions made by corporations are covered by a different agreement than
29 | the one above, the
30 | [Software Grant and Corporate Contributor License Agreement](https://cla.developers.google.com/about/google-corporate).
31 |
--------------------------------------------------------------------------------
/step-04/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var os = require('os');
4 | var nodeStatic = require('node-static');
5 | var http = require('http');
6 | var socketIO = require('socket.io');
7 |
8 | var fileServer = new(nodeStatic.Server)();
9 | var app = http.createServer(function(req, res) {
10 | fileServer.serve(req, res);
11 | }).listen(8080);
12 |
13 | var io = socketIO.listen(app);
14 | io.sockets.on('connection', function(socket) {
15 |
16 | // convenience function to log server messages on the client
17 | function log() {
18 | var array = ['Message from server:'];
19 | array.push.apply(array, arguments);
20 | socket.emit('log', array);
21 | }
22 |
23 | socket.on('message', function(message) {
24 | log('Client said: ', message);
25 | // for a real app, would be room-only (not broadcast)
26 | socket.broadcast.emit('message', message);
27 | });
28 |
29 | socket.on('create or join', function(room) {
30 | log('Received request to create or join room ' + room);
31 |
32 | var clientsInRoom = io.sockets.adapter.rooms[room];
33 | var numClients = clientsInRoom ? Object.keys(clientsInRoom.sockets).length : 0;
34 | log('Room ' + room + ' now has ' + numClients + ' client(s)');
35 |
36 | if (numClients === 0) {
37 | socket.join(room);
38 | log('Client ID ' + socket.id + ' created room ' + room);
39 | socket.emit('created', room, socket.id);
40 |
41 | } else if (numClients === 1) {
42 | log('Client ID ' + socket.id + ' joined room ' + room);
43 | io.sockets.in(room).emit('join', room);
44 | socket.join(room);
45 | socket.emit('joined', room, socket.id);
46 | io.sockets.in(room).emit('ready');
47 | } else { // max two clients
48 | socket.emit('full', room);
49 | }
50 | });
51 |
52 | socket.on('ipaddr', function() {
53 | var ifaces = os.networkInterfaces();
54 | for (var dev in ifaces) {
55 | ifaces[dev].forEach(function(details) {
56 | if (details.family === 'IPv4' && details.address !== '127.0.0.1') {
57 | socket.emit('ipaddr', details.address);
58 | }
59 | });
60 | }
61 | });
62 |
63 | });
64 |
--------------------------------------------------------------------------------
/step-05/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var os = require('os');
4 | var nodeStatic = require('node-static');
5 | var http = require('http');
6 | var socketIO = require('socket.io');
7 |
8 | var fileServer = new(nodeStatic.Server)();
9 | var app = http.createServer(function(req, res) {
10 | fileServer.serve(req, res);
11 | }).listen(8080);
12 |
13 | var io = socketIO.listen(app);
14 | io.sockets.on('connection', function(socket) {
15 |
16 | // convenience function to log server messages on the client
17 | function log() {
18 | var array = ['Message from server:'];
19 | array.push.apply(array, arguments);
20 | socket.emit('log', array);
21 | }
22 |
23 | socket.on('message', function(message) {
24 | log('Client said: ', message);
25 | // for a real app, would be room-only (not broadcast)
26 | socket.broadcast.emit('message', message);
27 | });
28 |
29 | socket.on('create or join', function(room) {
30 | log('Received request to create or join room ' + room);
31 |
32 | var clientsInRoom = io.sockets.adapter.rooms[room];
33 | var numClients = clientsInRoom ? Object.keys(clientsInRoom.sockets).length : 0;
34 | log('Room ' + room + ' now has ' + numClients + ' client(s)');
35 |
36 | if (numClients === 0) {
37 | socket.join(room);
38 | log('Client ID ' + socket.id + ' created room ' + room);
39 | socket.emit('created', room, socket.id);
40 |
41 | } else if (numClients === 1) {
42 | log('Client ID ' + socket.id + ' joined room ' + room);
43 | io.sockets.in(room).emit('join', room);
44 | socket.join(room);
45 | socket.emit('joined', room, socket.id);
46 | io.sockets.in(room).emit('ready');
47 | } else { // max two clients
48 | socket.emit('full', room);
49 | }
50 | });
51 |
52 | socket.on('ipaddr', function() {
53 | var ifaces = os.networkInterfaces();
54 | for (var dev in ifaces) {
55 | ifaces[dev].forEach(function(details) {
56 | if (details.family === 'IPv4' && details.address !== '127.0.0.1') {
57 | socket.emit('ipaddr', details.address);
58 | }
59 | });
60 | }
61 | });
62 |
63 | socket.on('bye', function(){
64 | console.log('received bye');
65 | });
66 |
67 | });
68 |
--------------------------------------------------------------------------------
/step-06/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var os = require('os');
4 | var nodeStatic = require('node-static');
5 | var http = require('http');
6 | var socketIO = require('socket.io');
7 |
8 | var fileServer = new(nodeStatic.Server)();
9 | var app = http.createServer(function(req, res) {
10 | fileServer.serve(req, res);
11 | }).listen(8080);
12 |
13 | var io = socketIO.listen(app);
14 | io.sockets.on('connection', function(socket) {
15 |
16 | // convenience function to log server messages on the client
17 | function log() {
18 | var array = ['Message from server:'];
19 | array.push.apply(array, arguments);
20 | socket.emit('log', array);
21 | }
22 |
23 | socket.on('message', function(message) {
24 | log('Client said: ', message);
25 | // for a real app, would be room-only (not broadcast)
26 | socket.broadcast.emit('message', message);
27 | });
28 |
29 | socket.on('create or join', function(room) {
30 | log('Received request to create or join room ' + room);
31 |
32 | var clientsInRoom = io.sockets.adapter.rooms[room];
33 | var numClients = clientsInRoom ? Object.keys(clientsInRoom.sockets).length : 0;
34 | log('Room ' + room + ' now has ' + numClients + ' client(s)');
35 |
36 | if (numClients === 0) {
37 | socket.join(room);
38 | log('Client ID ' + socket.id + ' created room ' + room);
39 | socket.emit('created', room, socket.id);
40 | } else if (numClients === 1) {
41 | log('Client ID ' + socket.id + ' joined room ' + room);
42 | // io.sockets.in(room).emit('join', room);
43 | socket.join(room);
44 | socket.emit('joined', room, socket.id);
45 | io.sockets.in(room).emit('ready', room);
46 | socket.broadcast.emit('ready', room);
47 | } else { // max two clients
48 | socket.emit('full', room);
49 | }
50 | });
51 |
52 | socket.on('ipaddr', function() {
53 | var ifaces = os.networkInterfaces();
54 | for (var dev in ifaces) {
55 | ifaces[dev].forEach(function(details) {
56 | if (details.family === 'IPv4' && details.address !== '127.0.0.1') {
57 | socket.emit('ipaddr', details.address);
58 | }
59 | });
60 | }
61 | });
62 |
63 | socket.on('disconnect', function(reason) {
64 | console.log(`Peer or server disconnected. Reason: ${reason}.`);
65 | socket.broadcast.emit('bye');
66 | });
67 |
68 | socket.on('bye', function(room) {
69 | console.log(`Peer said bye on room ${room}.`);
70 | });
71 | });
72 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
[markup stripped by extraction; visible text: the page title "Google codelab: WebRTC" (line 30), the heading "Google codelab: Realtime communication with WebRTC" (line 173), "Code for this codelab is available from github.com/googlecodelabs/webrtc-web." (line 175), "This is a work in progress. If you find a mistake or have a suggestion, please file an issue." (line 177), and "Thanks!" (line 179)]
--------------------------------------------------------------------------------
/step-03/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var localConnection;
4 | var remoteConnection;
5 | var sendChannel;
6 | var receiveChannel;
7 | var pcConstraint;
8 | var dataConstraint;
9 | var dataChannelSend = document.querySelector('textarea#dataChannelSend');
10 | var dataChannelReceive = document.querySelector('textarea#dataChannelReceive');
11 | var startButton = document.querySelector('button#startButton');
12 | var sendButton = document.querySelector('button#sendButton');
13 | var closeButton = document.querySelector('button#closeButton');
14 |
15 | startButton.onclick = createConnection;
16 | sendButton.onclick = sendData;
17 | closeButton.onclick = closeDataChannels;
18 |
19 | function enableStartButton() {
20 | startButton.disabled = false;
21 | }
22 |
23 | function disableSendButton() {
24 | sendButton.disabled = true;
25 | }
26 |
27 | function createConnection() {
28 | dataChannelSend.placeholder = '';
29 | var servers = null;
30 | pcConstraint = null;
31 | dataConstraint = null;
32 | trace('Using SCTP based data channels');
33 | // For SCTP, reliable and ordered delivery is true by default.
34 | // Add localConnection to global scope to make it visible
35 | // from the browser console.
36 | window.localConnection = localConnection =
37 | new RTCPeerConnection(servers, pcConstraint);
38 | trace('Created local peer connection object localConnection');
39 |
40 | sendChannel = localConnection.createDataChannel('sendDataChannel',
41 | dataConstraint);
42 | trace('Created send data channel');
43 |
44 | localConnection.onicecandidate = iceCallback1;
45 | sendChannel.onopen = onSendChannelStateChange;
46 | sendChannel.onclose = onSendChannelStateChange;
47 |
48 | // Add remoteConnection to global scope to make it visible
49 | // from the browser console.
50 | window.remoteConnection = remoteConnection =
51 | new RTCPeerConnection(servers, pcConstraint);
52 | trace('Created remote peer connection object remoteConnection');
53 |
54 | remoteConnection.onicecandidate = iceCallback2;
55 | remoteConnection.ondatachannel = receiveChannelCallback;
56 |
57 | localConnection.createOffer().then(
58 | gotDescription1,
59 | onCreateSessionDescriptionError
60 | );
61 | startButton.disabled = true;
62 | closeButton.disabled = false;
63 | }
64 |
65 | function onCreateSessionDescriptionError(error) {
66 | trace('Failed to create session description: ' + error.toString());
67 | }
68 |
69 | function sendData() {
70 | var data = dataChannelSend.value;
71 | sendChannel.send(data);
72 | trace('Sent Data: ' + data);
73 | }
74 |
75 | function closeDataChannels() {
76 | trace('Closing data channels');
77 | sendChannel.close();
78 | trace('Closed data channel with label: ' + sendChannel.label);
79 | receiveChannel.close();
80 | trace('Closed data channel with label: ' + receiveChannel.label);
81 | localConnection.close();
82 | remoteConnection.close();
83 | localConnection = null;
84 | remoteConnection = null;
85 | trace('Closed peer connections');
86 | startButton.disabled = false;
87 | sendButton.disabled = true;
88 | closeButton.disabled = true;
89 | dataChannelSend.value = '';
90 | dataChannelReceive.value = '';
91 | dataChannelSend.disabled = true;
92 | disableSendButton();
93 | enableStartButton();
94 | }
95 |
96 | function gotDescription1(desc) {
97 | localConnection.setLocalDescription(desc);
98 | trace('Offer from localConnection \n' + desc.sdp);
99 | remoteConnection.setRemoteDescription(desc);
100 | remoteConnection.createAnswer().then(
101 | gotDescription2,
102 | onCreateSessionDescriptionError
103 | );
104 | }
105 |
106 | function gotDescription2(desc) {
107 | remoteConnection.setLocalDescription(desc);
108 | trace('Answer from remoteConnection \n' + desc.sdp);
109 | localConnection.setRemoteDescription(desc);
110 | }
111 |
112 | function iceCallback1(event) {
113 | trace('local ice callback');
114 | if (event.candidate) {
115 | remoteConnection.addIceCandidate(
116 | event.candidate
117 | ).then(
118 | onAddIceCandidateSuccess,
119 | onAddIceCandidateError
120 | );
121 | trace('Local ICE candidate: \n' + event.candidate.candidate);
122 | }
123 | }
124 |
125 | function iceCallback2(event) {
126 | trace('remote ice callback');
127 | if (event.candidate) {
128 | localConnection.addIceCandidate(
129 | event.candidate
130 | ).then(
131 | onAddIceCandidateSuccess,
132 | onAddIceCandidateError
133 | );
134 | trace('Remote ICE candidate: \n ' + event.candidate.candidate);
135 | }
136 | }
137 |
138 | function onAddIceCandidateSuccess() {
139 | trace('AddIceCandidate success.');
140 | }
141 |
142 | function onAddIceCandidateError(error) {
143 | trace('Failed to add Ice Candidate: ' + error.toString());
144 | }
145 |
146 | function receiveChannelCallback(event) {
147 | trace('Receive Channel Callback');
148 | receiveChannel = event.channel;
149 | receiveChannel.onmessage = onReceiveMessageCallback;
150 | receiveChannel.onopen = onReceiveChannelStateChange;
151 | receiveChannel.onclose = onReceiveChannelStateChange;
152 | }
153 |
154 | function onReceiveMessageCallback(event) {
155 | trace('Received Message');
156 | dataChannelReceive.value = event.data;
157 | }
158 |
159 | function onSendChannelStateChange() {
160 | var readyState = sendChannel.readyState;
161 | trace('Send channel state is: ' + readyState);
162 | if (readyState === 'open') {
163 | dataChannelSend.disabled = false;
164 | dataChannelSend.focus();
165 | sendButton.disabled = false;
166 | closeButton.disabled = false;
167 | } else {
168 | dataChannelSend.disabled = true;
169 | sendButton.disabled = true;
170 | closeButton.disabled = true;
171 | }
172 | }
173 |
174 | function onReceiveChannelStateChange() {
175 | var readyState = receiveChannel.readyState;
176 | trace('Receive channel state is: ' + readyState);
177 | }
178 |
179 | function trace(text) {
180 | if (text[text.length - 1] === '\n') {
181 | text = text.substring(0, text.length - 1);
182 | }
183 | if (window.performance) {
184 | var now = (window.performance.now() / 1000).toFixed(3);
185 | console.log(now + ': ' + text);
186 | } else {
187 | console.log(text);
188 | }
189 | }
190 |
--------------------------------------------------------------------------------
/js/lib/adapter.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 |
9 | /*
10 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
11 | *
12 | * Use of this source code is governed by a BSD-style license
13 | * that can be found in the LICENSE file in the root of the source
14 | * tree.
15 | */
16 |
17 | 'use strict';
18 |
19 | var RTCPeerConnection = null;
20 | var getUserMedia = null;
21 | var attachMediaStream = null;
22 | var reattachMediaStream = null;
23 | var webrtcDetectedBrowser = null;
24 | var webrtcDetectedVersion = null;
25 |
26 | function maybeFixConfiguration(pcConfig) {
27 | if (!pcConfig) {
28 | return;
29 | }
30 | for (var i = 0; i < pcConfig.iceServers.length; i++) {
31 | if (pcConfig.iceServers[i].hasOwnProperty('urls')) {
32 | pcConfig.iceServers[i].url = pcConfig.iceServers[i].urls;
33 | delete pcConfig.iceServers[i].urls;
34 | }
35 | }
36 | }
37 |
38 | if (navigator.mozGetUserMedia) {
39 | console.log('This appears to be Firefox');
40 |
41 | window.webrtcDetectedBrowser = 'firefox';
42 |
43 | window.webrtcDetectedVersion =
44 | parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
45 |
46 | // The RTCPeerConnection object.
47 | RTCPeerConnection = function(pcConfig, pcConstraints) {
48 | // .urls is not supported in FF yet.
49 | maybeFixConfiguration(pcConfig);
50 | return new mozRTCPeerConnection(pcConfig, pcConstraints);
51 | };
52 |
53 | // The RTCSessionDescription object.
54 | RTCSessionDescription = mozRTCSessionDescription;
55 |
56 | // The RTCIceCandidate object.
57 | RTCIceCandidate = mozRTCIceCandidate;
58 |
59 | // Get UserMedia (only difference is the prefix).
60 | // Code from Adam Barth.
61 | window.getUserMedia = navigator.mozGetUserMedia.bind(navigator);
62 | navigator.getUserMedia = getUserMedia;
63 |
64 | // Creates iceServer from the url for FF.
65 | window.createIceServer = function(url, username, password) {
66 | var iceServer = null;
67 | var urlParts = url.split(':');
68 | if (urlParts[0].indexOf('stun') === 0) {
69 | // Create iceServer with stun url.
70 | iceServer = {
71 | 'url': url
72 | };
73 | } else if (urlParts[0].indexOf('turn') === 0) {
74 | if (webrtcDetectedVersion < 27) {
75 | // Create iceServer with turn url.
76 | // Ignore the transport parameter from TURN url for FF version <=27.
77 | var turnUrlParts = url.split('?');
78 | // Return null for createIceServer if transport=tcp.
79 | if (turnUrlParts.length === 1 ||
80 | turnUrlParts[1].indexOf('transport=udp') === 0) {
81 | iceServer = {
82 | 'url': turnUrlParts[0],
83 | 'credential': password,
84 | 'username': username
85 | };
86 | }
87 | } else {
88 | // FF 27 and above supports transport parameters in TURN url,
89 | // So passing in the full url to create iceServer.
90 | iceServer = {
91 | 'url': url,
92 | 'credential': password,
93 | 'username': username
94 | };
95 | }
96 | }
97 | return iceServer;
98 | };
99 |
100 | window.createIceServers = function(urls, username, password) {
101 | var iceServers = [];
102 | // Use .url for FireFox.
103 | for (var i = 0; i < urls.length; i++) {
104 | var iceServer = createIceServer(urls[i],
105 | username,
106 | password);
107 | if (iceServer !== null) {
108 | iceServers.push(iceServer);
109 | }
110 | }
111 | return iceServers;
112 | };
113 |
114 | // Attach a media stream to an element.
115 | window.attachMediaStream = function(element, stream) {
116 | console.log('Attaching media stream');
117 | element.mozSrcObject = stream;
118 | element.play();
119 | };
120 |
121 | window.reattachMediaStream = function(to, from) {
122 | console.log('Reattaching media stream');
123 | to.mozSrcObject = from.mozSrcObject;
124 | to.play();
125 | };
126 |
127 | } else if (navigator.webkitGetUserMedia) {
128 | console.log('This appears to be Chrome');
129 |
130 | window.webrtcDetectedBrowser = 'chrome';
131 | // Temporary fix until crbug/374263 is fixed.
132 | // Setting Chrome version to 999, if version is unavailable.
133 | var result = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);
134 | if (result !== null) {
135 | window.webrtcDetectedVersion = parseInt(result[2], 10);
136 | } else {
137 | window.webrtcDetectedVersion = 999;
138 | }
139 |
140 | // Creates iceServer from the url for Chrome M33 and earlier.
141 | window.createIceServer = function(url, username, password) {
142 | var iceServer = null;
143 | var urlParts = url.split(':');
144 | if (urlParts[0].indexOf('stun') === 0) {
145 | // Create iceServer with stun url.
146 | iceServer = {
147 | 'url': url
148 | };
149 | } else if (urlParts[0].indexOf('turn') === 0) {
150 | // Chrome M28 & above uses below TURN format.
151 | iceServer = {
152 | 'url': url,
153 | 'credential': password,
154 | 'username': username
155 | };
156 | }
157 | return iceServer;
158 | };
159 |
160 | // Creates iceServers from the urls for Chrome M34 and above.
161 | window.createIceServers = function(urls, username, password) {
162 | var iceServers = [];
163 | if (webrtcDetectedVersion >= 34) {
164 | // .urls is supported since Chrome M34.
165 | iceServers = {
166 | 'urls': urls,
167 | 'credential': password,
168 | 'username': username
169 | };
170 | } else {
171 | for (var i = 0; i < urls.length; i++) {
172 | var iceServer = createIceServer(urls[i],
173 | username,
174 | password);
175 | if (iceServer !== null) {
176 | iceServers.push(iceServer);
177 | }
178 | }
179 | }
180 | return iceServers;
181 | };
182 |
183 | // The RTCPeerConnection object.
184 | RTCPeerConnection = function(pcConfig, pcConstraints) {
185 | // .urls is supported since Chrome M34.
186 | if (webrtcDetectedVersion < 34) {
187 | maybeFixConfiguration(pcConfig);
188 | }
189 | return new webkitRTCPeerConnection(pcConfig, pcConstraints);
190 | };
191 |
192 | // Get UserMedia (only difference is the prefix).
193 | // Code from Adam Barth.
194 | window.getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
195 | navigator.getUserMedia = getUserMedia;
196 |
197 | // Attach a media stream to an element.
198 | window.attachMediaStream = function(element, stream) {
199 | if (typeof element.srcObject !== 'undefined') {
200 | element.srcObject = stream;
201 | } else if (typeof element.mozSrcObject !== 'undefined') {
202 | element.mozSrcObject = stream;
203 | } else if (typeof element.src !== 'undefined') {
204 | element.src = URL.createObjectURL(stream);
205 | } else {
206 | console.log('Error attaching stream to element.');
207 | }
208 | };
209 |
210 | window.reattachMediaStream = function(to, from) {
211 | to.src = from.src;
212 | };
213 | } else {
214 | console.log('Browser does not appear to be WebRTC-capable');
215 | }
216 |
--------------------------------------------------------------------------------
/step-05/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | var isChannelReady = false;
4 | var isInitiator = false;
5 | var isStarted = false;
6 | var localStream;
7 | var pc;
8 | var remoteStream;
9 | var turnReady;
10 |
11 | var pcConfig = {
12 | 'iceServers': [{
13 | 'urls': 'stun:stun.l.google.com:19302'
14 | }]
15 | };
16 |
17 | // Set up audio and video regardless of what devices are present.
18 | var sdpConstraints = {
19 | offerToReceiveAudio: true,
20 | offerToReceiveVideo: true
21 | };
22 |
23 | /////////////////////////////////////////////
24 |
25 | var room = 'foo';
26 | // Could prompt for room name:
27 | // room = prompt('Enter room name:');
28 |
29 | var socket = io.connect();
30 |
31 | if (room !== '') {
32 | socket.emit('create or join', room);
33 | console.log('Attempted to create or join room', room);
34 | }
35 |
36 | socket.on('created', function(room) {
37 | console.log('Created room ' + room);
38 | isInitiator = true;
39 | });
40 |
41 | socket.on('full', function(room) {
42 | console.log('Room ' + room + ' is full');
43 | });
44 |
45 | socket.on('join', function (room){
46 | console.log('Another peer made a request to join room ' + room);
47 | console.log('This peer is the initiator of room ' + room + '!');
48 | isChannelReady = true;
49 | });
50 |
51 | socket.on('joined', function(room) {
52 | console.log('joined: ' + room);
53 | isChannelReady = true;
54 | });
55 |
56 | socket.on('log', function(array) {
57 | console.log.apply(console, array);
58 | });
59 |
60 | ////////////////////////////////////////////////
61 |
62 | function sendMessage(message) {
63 | console.log('Client sending message: ', message);
64 | socket.emit('message', message);
65 | }
66 |
67 | // This client receives a message
68 | socket.on('message', function(message) {
69 | console.log('Client received message:', message);
70 | if (message === 'got user media') {
71 | maybeStart();
72 | } else if (message.type === 'offer') {
73 | if (!isInitiator && !isStarted) {
74 | maybeStart();
75 | }
76 | pc.setRemoteDescription(new RTCSessionDescription(message));
77 | doAnswer();
78 | } else if (message.type === 'answer' && isStarted) {
79 | pc.setRemoteDescription(new RTCSessionDescription(message));
80 | } else if (message.type === 'candidate' && isStarted) {
81 | var candidate = new RTCIceCandidate({
82 | sdpMLineIndex: message.label,
83 | candidate: message.candidate
84 | });
85 | pc.addIceCandidate(candidate);
86 | } else if (message === 'bye' && isStarted) {
87 | handleRemoteHangup();
88 | }
89 | });
90 |
91 | ////////////////////////////////////////////////////
92 |
93 | var localVideo = document.querySelector('#localVideo');
94 | var remoteVideo = document.querySelector('#remoteVideo');
95 |
96 | navigator.mediaDevices.getUserMedia({
97 | audio: false,
98 | video: true
99 | })
100 | .then(gotStream)
101 | .catch(function(e) {
102 | alert('getUserMedia() error: ' + e.name);
103 | });
104 |
105 | function gotStream(stream) {
106 | console.log('Adding local stream.');
107 | localStream = stream;
108 | localVideo.srcObject = stream;
109 | sendMessage('got user media');
110 | if (isInitiator) {
111 | maybeStart();
112 | }
113 | }
114 |
115 | var constraints = {
116 | video: true
117 | };
118 |
119 | console.log('Getting user media with constraints', constraints);
120 |
121 | if (location.hostname !== 'localhost') {
122 | requestTurn(
123 | 'https://computeengineondemand.appspot.com/turn?username=41784574&key=4080218913'
124 | );
125 | }
126 |
127 | function maybeStart() {
128 | console.log('>>>>>>> maybeStart() ', isStarted, localStream, isChannelReady);
129 | if (!isStarted && typeof localStream !== 'undefined' && isChannelReady) {
130 | console.log('>>>>>> creating peer connection');
131 | createPeerConnection();
132 | pc.addStream(localStream);
133 | isStarted = true;
134 | console.log('isInitiator', isInitiator);
135 | if (isInitiator) {
136 | doCall();
137 | }
138 | }
139 | }
140 |
141 | window.onbeforeunload = function() {
142 | sendMessage('bye');
143 | };
144 |
145 | /////////////////////////////////////////////////////////
146 |
147 | function createPeerConnection() {
148 | try {
149 | pc = new RTCPeerConnection(null);
150 | pc.onicecandidate = handleIceCandidate;
151 | pc.onaddstream = handleRemoteStreamAdded;
152 | pc.onremovestream = handleRemoteStreamRemoved;
153 | console.log('Created RTCPeerConnnection');
154 | } catch (e) {
155 | console.log('Failed to create PeerConnection, exception: ' + e.message);
156 | alert('Cannot create RTCPeerConnection object.');
157 | return;
158 | }
159 | }
160 |
161 | function handleIceCandidate(event) {
162 | console.log('icecandidate event: ', event);
163 | if (event.candidate) {
164 | sendMessage({
165 | type: 'candidate',
166 | label: event.candidate.sdpMLineIndex,
167 | id: event.candidate.sdpMid,
168 | candidate: event.candidate.candidate
169 | });
170 | } else {
171 | console.log('End of candidates.');
172 | }
173 | }
174 |
175 | function handleCreateOfferError(event) {
176 | console.log('createOffer() error: ', event);
177 | }
178 |
179 | function doCall() {
180 | console.log('Sending offer to peer');
181 | pc.createOffer(setLocalAndSendMessage, handleCreateOfferError);
182 | }
183 |
184 | function doAnswer() {
185 | console.log('Sending answer to peer.');
186 | pc.createAnswer().then(
187 | setLocalAndSendMessage,
188 | onCreateSessionDescriptionError
189 | );
190 | }
191 |
192 | function setLocalAndSendMessage(sessionDescription) {
193 | pc.setLocalDescription(sessionDescription);
194 | console.log('setLocalAndSendMessage sending message', sessionDescription);
195 | sendMessage(sessionDescription);
196 | }
197 |
198 | function onCreateSessionDescriptionError(error) {
199 | console.log('Failed to create session description: ' + error.toString());
200 | }
201 |
202 | function requestTurn(turnURL) {
203 | var turnExists = false;
204 | for (var i in pcConfig.iceServers) {
205 | if (pcConfig.iceServers[i].urls.substr(0, 5) === 'turn:') {
206 | turnExists = true;
207 | turnReady = true;
208 | break;
209 | }
210 | }
211 | if (!turnExists) {
212 | console.log('Getting TURN server from ', turnURL);
213 | // No TURN server. Get one from computeengineondemand.appspot.com:
214 | var xhr = new XMLHttpRequest();
215 | xhr.onreadystatechange = function() {
216 | if (xhr.readyState === 4 && xhr.status === 200) {
217 | var turnServer = JSON.parse(xhr.responseText);
218 | console.log('Got TURN server: ', turnServer);
219 | pcConfig.iceServers.push({
220 | 'urls': 'turn:' + turnServer.username + '@' + turnServer.turn,
221 | 'credential': turnServer.password
222 | });
223 | turnReady = true;
224 | }
225 | };
226 | xhr.open('GET', turnURL, true);
227 | xhr.send();
228 | }
229 | }
230 |
231 | function handleRemoteStreamAdded(event) {
232 | console.log('Remote stream added.');
233 | remoteStream = event.stream;
234 | remoteVideo.srcObject = remoteStream;
235 | }
236 |
237 | function handleRemoteStreamRemoved(event) {
238 | console.log('Remote stream removed. Event: ', event);
239 | }
240 |
241 | function hangup() {
242 | console.log('Hanging up.');
243 | stop();
244 | sendMessage('bye');
245 | }
246 |
247 | function handleRemoteHangup() {
248 | console.log('Session terminated.');
249 | stop();
250 | isInitiator = false;
251 | }
252 |
253 | function stop() {
254 | isStarted = false;
255 | pc.close();
256 | pc = null;
257 | }
258 |
--------------------------------------------------------------------------------
/step-02/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | // Set up media stream constant and parameters.
4 |
5 | // In this codelab, you will be streaming video only: "video: true".
6 | // Audio is not requested because the constraints omit it (equivalent to "audio: false").
7 | const mediaStreamConstraints = {
8 | video: true,
9 | };
10 |
11 | // Set up to exchange only video.
12 | const offerOptions = {
13 | offerToReceiveVideo: 1,
14 | };
15 |
16 | // Define initial start time of the call (defined as connection between peers).
17 | let startTime = null;
18 |
19 | // Define peer connections, streams and video elements.
20 | const localVideo = document.getElementById('localVideo');
21 | const remoteVideo = document.getElementById('remoteVideo');
22 |
23 | let localStream;
24 | let remoteStream;
25 |
26 | let localPeerConnection;
27 | let remotePeerConnection;
28 |
29 |
30 | // Define MediaStreams callbacks.
31 |
32 | // Sets the MediaStream as the video element src.
33 | function gotLocalMediaStream(mediaStream) {
34 | localVideo.srcObject = mediaStream;
35 | localStream = mediaStream;
36 | trace('Received local stream.');
37 | callButton.disabled = false; // Enable call button.
38 | }
39 |
40 | // Handles error by logging a message to the console.
41 | function handleLocalMediaStreamError(error) {
42 | trace(`navigator.getUserMedia error: ${error.toString()}.`);
43 | }
44 |
45 | // Handles remote MediaStream success by adding it as the remoteVideo src.
46 | function gotRemoteMediaStream(event) {
47 | const mediaStream = event.stream;
48 | remoteVideo.srcObject = mediaStream;
49 | remoteStream = mediaStream;
50 | trace('Remote peer connection received remote stream.');
51 | }
52 |
53 |
54 | // Add behavior for video streams.
55 |
56 | // Logs a message with the id and size of a video element.
57 | function logVideoLoaded(event) {
58 | const video = event.target;
59 | trace(`${video.id} videoWidth: ${video.videoWidth}px, ` +
60 | `videoHeight: ${video.videoHeight}px.`);
61 | }
62 |
63 | // Logs a message with the id and size of a video element.
64 | // This event is fired when video begins streaming.
65 | function logResizedVideo(event) {
66 | logVideoLoaded(event);
67 |
68 | if (startTime) {
69 | const elapsedTime = window.performance.now() - startTime;
70 | startTime = null;
71 | trace(`Setup time: ${elapsedTime.toFixed(3)}ms.`);
72 | }
73 | }
74 |
75 | localVideo.addEventListener('loadedmetadata', logVideoLoaded);
76 | remoteVideo.addEventListener('loadedmetadata', logVideoLoaded);
77 | remoteVideo.addEventListener('resize', logResizedVideo);
78 |
79 |
80 | // Define RTC peer connection behavior.
81 |
82 | // Connects with new peer candidate.
83 | function handleConnection(event) {
84 | const peerConnection = event.target;
85 | const iceCandidate = event.candidate;
86 |
87 | if (iceCandidate) {
88 | const newIceCandidate = new RTCIceCandidate(iceCandidate);
89 | const otherPeer = getOtherPeer(peerConnection);
90 |
91 | otherPeer.addIceCandidate(newIceCandidate)
92 | .then(() => {
93 | handleConnectionSuccess(peerConnection);
94 | }).catch((error) => {
95 | handleConnectionFailure(peerConnection, error);
96 | });
97 |
98 | trace(`${getPeerName(peerConnection)} ICE candidate:\n` +
99 | `${event.candidate.candidate}.`);
100 | }
101 | }
102 |
103 | // Logs that the connection succeeded.
104 | function handleConnectionSuccess(peerConnection) {
105 | trace(`${getPeerName(peerConnection)} addIceCandidate success.`);
106 | };
107 |
108 | // Logs that the connection failed.
109 | function handleConnectionFailure(peerConnection, error) {
110 | trace(`${getPeerName(peerConnection)} failed to add ICE Candidate:\n`+
111 | `${error.toString()}.`);
112 | }
113 |
114 | // Logs changes to the connection state.
115 | function handleConnectionChange(event) {
116 | const peerConnection = event.target;
117 | console.log('ICE state change event: ', event);
118 | trace(`${getPeerName(peerConnection)} ICE state: ` +
119 | `${peerConnection.iceConnectionState}.`);
120 | }
121 |
122 | // Logs error when setting session description fails.
123 | function setSessionDescriptionError(error) {
124 | trace(`Failed to create session description: ${error.toString()}.`);
125 | }
126 |
127 | // Logs success when setting session description.
128 | function setDescriptionSuccess(peerConnection, functionName) {
129 | const peerName = getPeerName(peerConnection);
130 | trace(`${peerName} ${functionName} complete.`);
131 | }
132 |
133 | // Logs success when localDescription is set.
134 | function setLocalDescriptionSuccess(peerConnection) {
135 | setDescriptionSuccess(peerConnection, 'setLocalDescription');
136 | }
137 |
138 | // Logs success when remoteDescription is set.
139 | function setRemoteDescriptionSuccess(peerConnection) {
140 | setDescriptionSuccess(peerConnection, 'setRemoteDescription');
141 | }
142 |
143 | // Logs offer creation and sets peer connection session descriptions.
144 | function createdOffer(description) {
145 | trace(`Offer from localPeerConnection:\n${description.sdp}`);
146 |
147 | trace('localPeerConnection setLocalDescription start.');
148 | localPeerConnection.setLocalDescription(description)
149 | .then(() => {
150 | setLocalDescriptionSuccess(localPeerConnection);
151 | }).catch(setSessionDescriptionError);
152 |
153 | trace('remotePeerConnection setRemoteDescription start.');
154 | remotePeerConnection.setRemoteDescription(description)
155 | .then(() => {
156 | setRemoteDescriptionSuccess(remotePeerConnection);
157 | }).catch(setSessionDescriptionError);
158 |
159 | trace('remotePeerConnection createAnswer start.');
160 | remotePeerConnection.createAnswer()
161 | .then(createdAnswer)
162 | .catch(setSessionDescriptionError);
163 | }
164 |
165 | // Logs answer to offer creation and sets peer connection session descriptions.
166 | function createdAnswer(description) {
167 | trace(`Answer from remotePeerConnection:\n${description.sdp}.`);
168 |
169 | trace('remotePeerConnection setLocalDescription start.');
170 | remotePeerConnection.setLocalDescription(description)
171 | .then(() => {
172 | setLocalDescriptionSuccess(remotePeerConnection);
173 | }).catch(setSessionDescriptionError);
174 |
175 | trace('localPeerConnection setRemoteDescription start.');
176 | localPeerConnection.setRemoteDescription(description)
177 | .then(() => {
178 | setRemoteDescriptionSuccess(localPeerConnection);
179 | }).catch(setSessionDescriptionError);
180 | }
181 |
182 |
183 | // Define and add behavior to buttons.
184 |
185 | // Define action buttons.
186 | const startButton = document.getElementById('startButton');
187 | const callButton = document.getElementById('callButton');
188 | const hangupButton = document.getElementById('hangupButton');
189 |
190 | // Set up initial action buttons status: disable call and hangup.
191 | callButton.disabled = true;
192 | hangupButton.disabled = true;
193 |
194 |
195 | // Handles start button action: creates local MediaStream.
196 | function startAction() {
197 | startButton.disabled = true;
198 | navigator.mediaDevices.getUserMedia(mediaStreamConstraints)
199 | .then(gotLocalMediaStream).catch(handleLocalMediaStreamError);
200 | trace('Requesting local stream.');
201 | }
202 |
203 | // Handles call button action: creates peer connection.
204 | function callAction() {
205 | callButton.disabled = true;
206 | hangupButton.disabled = false;
207 |
208 | trace('Starting call.');
209 | startTime = window.performance.now();
210 |
211 | // Get local media stream tracks.
212 | const videoTracks = localStream.getVideoTracks();
213 | const audioTracks = localStream.getAudioTracks();
214 | if (videoTracks.length > 0) {
215 | trace(`Using video device: ${videoTracks[0].label}.`);
216 | }
217 | if (audioTracks.length > 0) {
218 | trace(`Using audio device: ${audioTracks[0].label}.`);
219 | }
220 |
221 | const servers = null; // Allows for RTC server configuration.
222 |
223 | // Create peer connections and add behavior.
224 | localPeerConnection = new RTCPeerConnection(servers);
225 | trace('Created local peer connection object localPeerConnection.');
226 |
227 | localPeerConnection.addEventListener('icecandidate', handleConnection);
228 | localPeerConnection.addEventListener(
229 | 'iceconnectionstatechange', handleConnectionChange);
230 |
231 | remotePeerConnection = new RTCPeerConnection(servers);
232 | trace('Created remote peer connection object remotePeerConnection.');
233 |
234 | remotePeerConnection.addEventListener('icecandidate', handleConnection);
235 | remotePeerConnection.addEventListener(
236 | 'iceconnectionstatechange', handleConnectionChange);
237 | remotePeerConnection.addEventListener('addstream', gotRemoteMediaStream);
238 |
239 | // Add local stream to connection and create offer to connect.
240 | localPeerConnection.addStream(localStream);
241 | trace('Added local stream to localPeerConnection.');
242 |
243 | trace('localPeerConnection createOffer start.');
244 | localPeerConnection.createOffer(offerOptions)
245 | .then(createdOffer).catch(setSessionDescriptionError);
246 | }
247 |
248 | // Handles hangup action: ends the call, closes connections and resets peers.
249 | function hangupAction() {
250 | localPeerConnection.close();
251 | remotePeerConnection.close();
252 | localPeerConnection = null;
253 | remotePeerConnection = null;
254 | hangupButton.disabled = true;
255 | callButton.disabled = false;
256 | trace('Ending call.');
257 | }
258 |
259 | // Add click event handlers for buttons.
260 | startButton.addEventListener('click', startAction);
261 | callButton.addEventListener('click', callAction);
262 | hangupButton.addEventListener('click', hangupAction);
263 |
264 |
265 | // Define helper functions.
266 |
267 | // Gets the "other" peer connection.
268 | function getOtherPeer(peerConnection) {
269 | return (peerConnection === localPeerConnection) ?
270 | remotePeerConnection : localPeerConnection;
271 | }
272 |
273 | // Gets the name of a certain peer connection.
274 | function getPeerName(peerConnection) {
275 | return (peerConnection === localPeerConnection) ?
276 | 'localPeerConnection' : 'remotePeerConnection';
277 | }
278 |
279 | // Logs an action (text) and the time when it happened on the console.
280 | function trace(text) {
281 | text = text.trim();
282 | const now = (window.performance.now() / 1000).toFixed(3);
283 |
284 | console.log(now, text);
285 | }
286 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/step-06/js/main.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | /****************************************************************************
4 | * Initial setup
5 | ****************************************************************************/
6 |
7 | // var configuration = {
8 | // 'iceServers': [{
9 | // 'urls': 'stun:stun.l.google.com:19302'
10 | // }]
11 | // };
12 |
13 | var configuration = null;
14 |
15 | // var roomURL = document.getElementById('url');
16 | var video = document.querySelector('video');
17 | var photo = document.getElementById('photo');
18 | var photoContext = photo.getContext('2d');
19 | var trail = document.getElementById('trail');
20 | var snapBtn = document.getElementById('snap');
21 | var sendBtn = document.getElementById('send');
22 | var snapAndSendBtn = document.getElementById('snapAndSend');
23 |
24 | var photoContextW;
25 | var photoContextH;
26 |
27 | // Attach event handlers
28 | snapBtn.addEventListener('click', snapPhoto);
29 | sendBtn.addEventListener('click', sendPhoto);
30 | snapAndSendBtn.addEventListener('click', snapAndSend);
31 |
32 | // Disable send buttons by default.
33 | sendBtn.disabled = true;
34 | snapAndSendBtn.disabled = true;
35 |
36 | // Create a random room if not already present in the URL.
37 | var isInitiator;
38 | var room = window.location.hash.substring(1);
39 | if (!room) {
40 | room = window.location.hash = randomToken();
41 | }
42 |
43 |
44 | /****************************************************************************
45 | * Signaling server
46 | ****************************************************************************/
47 |
48 | // Connect to the signaling server
49 | var socket = io.connect();
50 |
51 | socket.on('ipaddr', function(ipaddr) {
52 | console.log('Server IP address is: ' + ipaddr);
53 | // updateRoomURL(ipaddr);
54 | });
55 |
56 | socket.on('created', function(room, clientId) {
57 | console.log('Created room', room, '- my client ID is', clientId);
58 | isInitiator = true;
59 | grabWebCamVideo();
60 | });
61 |
62 | socket.on('joined', function(room, clientId) {
63 | console.log('This peer has joined room', room, 'with client ID', clientId);
64 | isInitiator = false;
65 | createPeerConnection(isInitiator, configuration);
66 | grabWebCamVideo();
67 | });
68 |
69 | socket.on('full', function(room) {
70 | alert('Room ' + room + ' is full. We will create a new room for you.');
71 | window.location.hash = '';
72 | window.location.reload();
73 | });
74 |
75 | socket.on('ready', function() {
76 | console.log('Socket is ready');
77 | createPeerConnection(isInitiator, configuration);
78 | });
79 |
80 | socket.on('log', function(array) {
81 | console.log.apply(console, array);
82 | });
83 |
84 | socket.on('message', function(message) {
85 | console.log('Client received message:', message);
86 | signalingMessageCallback(message);
87 | });
88 |
89 | // Joining a room.
90 | socket.emit('create or join', room);
91 |
92 | if (location.hostname.match(/localhost|127\.0\.0/)) {
93 | socket.emit('ipaddr');
94 | }
95 |
96 | // Leaving rooms and disconnecting from peers.
97 | socket.on('disconnect', function(reason) {
98 | console.log(`Disconnected: ${reason}.`);
99 | sendBtn.disabled = true;
100 | snapAndSendBtn.disabled = true;
101 | });
102 |
103 | socket.on('bye', function(room) {
104 | console.log(`Peer leaving room ${room}.`);
105 | sendBtn.disabled = true;
106 | snapAndSendBtn.disabled = true;
107 | // If peer did not create the room, re-enter to be creator.
108 | if (!isInitiator) {
109 | window.location.reload();
110 | }
111 | });
112 |
113 | window.addEventListener('unload', function() {
114 | console.log(`Unloading window. Notifying peers in ${room}.`);
115 | socket.emit('bye', room);
116 | });
117 |
118 |
119 | /**
120 | * Send message to signaling server
121 | */
122 | function sendMessage(message) {
123 | console.log('Client sending message: ', message);
124 | socket.emit('message', message);
125 | }
126 |
127 | /**
128 | * Updates URL on the page so that users can copy&paste it to their peers.
129 | */
130 | // function updateRoomURL(ipaddr) {
131 | // var url;
132 | // if (!ipaddr) {
133 | // url = location.href;
134 | // } else {
135 | // url = location.protocol + '//' + ipaddr + ':2013/#' + room;
136 | // }
137 | // roomURL.innerHTML = url;
138 | // }
139 |
140 | /****************************************************************************
141 | * User media (webcam)
142 | ****************************************************************************/
143 |
144 | function grabWebCamVideo() {
145 | console.log('Getting user media (video) ...');
146 | navigator.mediaDevices.getUserMedia({
147 | audio: false,
148 | video: true
149 | })
150 | .then(gotStream)
151 | .catch(function(e) {
152 | alert('getUserMedia() error: ' + e.name);
153 | });
154 | }
155 |
156 | function gotStream(stream) {
157 | console.log('getUserMedia video stream URL:', stream);
158 | window.stream = stream; // stream available to console
159 | video.srcObject = stream;
160 | video.onloadedmetadata = function() {
161 | photo.width = photoContextW = video.videoWidth;
162 | photo.height = photoContextH = video.videoHeight;
163 | console.log('gotStream with width and height:', photoContextW, photoContextH);
164 | };
165 | show(snapBtn);
166 | }
167 |
168 | /****************************************************************************
169 | * WebRTC peer connection and data channel
170 | ****************************************************************************/
171 |
172 | var peerConn;
173 | var dataChannel;
174 |
175 | function signalingMessageCallback(message) {
176 | if (message.type === 'offer') {
177 | console.log('Got offer. Sending answer to peer.');
178 | peerConn.setRemoteDescription(new RTCSessionDescription(message), function() {},
179 | logError);
180 | peerConn.createAnswer(onLocalSessionCreated, logError);
181 |
182 | } else if (message.type === 'answer') {
183 | console.log('Got answer.');
184 | peerConn.setRemoteDescription(new RTCSessionDescription(message), function() {},
185 | logError);
186 |
187 | } else if (message.type === 'candidate') {
188 | peerConn.addIceCandidate(new RTCIceCandidate({
189 | candidate: message.candidate,
190 | sdpMLineIndex: message.label,
191 | sdpMid: message.id
192 | }));
193 |
194 | }
195 | }
196 |
197 | function createPeerConnection(isInitiator, config) {
198 | console.log('Creating Peer connection as initiator?', isInitiator, 'config:',
199 | config);
200 | peerConn = new RTCPeerConnection(config);
201 |
202 | // send any ice candidates to the other peer
203 | peerConn.onicecandidate = function(event) {
204 | console.log('icecandidate event:', event);
205 | if (event.candidate) {
206 | sendMessage({
207 | type: 'candidate',
208 | label: event.candidate.sdpMLineIndex,
209 | id: event.candidate.sdpMid,
210 | candidate: event.candidate.candidate
211 | });
212 | } else {
213 | console.log('End of candidates.');
214 | }
215 | };
216 |
217 | if (isInitiator) {
218 | console.log('Creating Data Channel');
219 | dataChannel = peerConn.createDataChannel('photos');
220 | onDataChannelCreated(dataChannel);
221 |
222 | console.log('Creating an offer');
223 | peerConn.createOffer().then(function(offer) {
224 | return peerConn.setLocalDescription(offer);
225 | })
226 | .then(() => {
227 | console.log('sending local desc:', peerConn.localDescription);
228 | sendMessage(peerConn.localDescription);
229 | })
230 | .catch(logError);
231 |
232 | } else {
233 | peerConn.ondatachannel = function(event) {
234 | console.log('ondatachannel:', event.channel);
235 | dataChannel = event.channel;
236 | onDataChannelCreated(dataChannel);
237 | };
238 | }
239 | }
240 |
241 | function onLocalSessionCreated(desc) {
242 | console.log('local session created:', desc);
243 | peerConn.setLocalDescription(desc).then(function() {
244 | console.log('sending local desc:', peerConn.localDescription);
245 | sendMessage(peerConn.localDescription);
246 | }).catch(logError);
247 | }
248 |
249 | function onDataChannelCreated(channel) {
250 | console.log('onDataChannelCreated:', channel);
251 |
252 | channel.onopen = function() {
253 | console.log('CHANNEL opened!!!');
254 | sendBtn.disabled = false;
255 | snapAndSendBtn.disabled = false;
256 | };
257 |
258 | channel.onclose = function () {
259 | console.log('Channel closed.');
260 | sendBtn.disabled = true;
261 | snapAndSendBtn.disabled = true;
262 | };
263 |
264 | channel.onmessage = (adapter.browserDetails.browser === 'firefox') ?
265 | receiveDataFirefoxFactory() : receiveDataChromeFactory();
266 | }
267 |
268 | function receiveDataChromeFactory() {
269 | var buf, count;
270 |
271 | return function onmessage(event) {
272 | if (typeof event.data === 'string') {
273 | buf = window.buf = new Uint8ClampedArray(parseInt(event.data));
274 | count = 0;
275 | console.log('Expecting a total of ' + buf.byteLength + ' bytes');
276 | return;
277 | }
278 |
279 | var data = new Uint8ClampedArray(event.data);
280 | buf.set(data, count);
281 |
282 | count += data.byteLength;
283 | console.log('count: ' + count);
284 |
285 | if (count === buf.byteLength) {
286 | // we're done: all data chunks have been received
287 | console.log('Done. Rendering photo.');
288 | renderPhoto(buf);
289 | }
290 | };
291 | }
292 |
293 | function receiveDataFirefoxFactory() {
294 | var count, total, parts;
295 |
296 | return function onmessage(event) {
297 | if (typeof event.data === 'string') {
298 | total = parseInt(event.data);
299 | parts = [];
300 | count = 0;
301 | console.log('Expecting a total of ' + total + ' bytes');
302 | return;
303 | }
304 |
305 | parts.push(event.data);
306 | count += event.data.size;
307 | console.log('Got ' + event.data.size + ' byte(s), ' + (total - count) +
308 | ' to go.');
309 |
310 | if (count === total) {
311 | console.log('Assembling payload');
312 | var buf = new Uint8ClampedArray(total);
313 | var compose = function(i, pos) {
314 | var reader = new FileReader();
315 | reader.onload = function() {
316 | buf.set(new Uint8ClampedArray(this.result), pos);
317 | if (i + 1 === parts.length) {
318 | console.log('Done. Rendering photo.');
319 | renderPhoto(buf);
320 | } else {
321 | compose(i + 1, pos + this.result.byteLength);
322 | }
323 | };
324 | reader.readAsArrayBuffer(parts[i]);
325 | };
326 | compose(0, 0);
327 | }
328 | };
329 | }
330 |
331 |
332 | /****************************************************************************
333 | * Aux functions, mostly UI-related
334 | ****************************************************************************/
335 |
336 | function snapPhoto() {
337 | photoContext.drawImage(video, 0, 0, photo.width, photo.height);
338 | show(photo, sendBtn);
339 | }
340 |
341 | function sendPhoto() {
342 | // Split data channel message in chunks of this byte length.
343 | var CHUNK_LEN = 64000;
344 | console.log('width and height ', photoContextW, photoContextH);
345 | var img = photoContext.getImageData(0, 0, photoContextW, photoContextH),
346 | len = img.data.byteLength,
347 | n = len / CHUNK_LEN | 0;
348 |
349 | console.log('Sending a total of ' + len + ' byte(s)');
350 |
351 | if (!dataChannel) {
352 | logError('Connection has not been initiated. ' +
353 | 'Get two peers in the same room first');
354 | return;
355 | } else if (dataChannel.readyState === 'closed') {
356 | logError('Connection was lost. Peer closed the connection.');
357 | return;
358 | }
359 |
360 | dataChannel.send(len);
361 |
362 | // split the photo and send in chunks of about 64KB
363 | for (var i = 0; i < n; i++) {
364 | var start = i * CHUNK_LEN,
365 | end = (i + 1) * CHUNK_LEN;
366 | console.log(start + ' - ' + (end - 1));
367 | dataChannel.send(img.data.subarray(start, end));
368 | }
369 |
370 | // send the remainder, if any
371 | if (len % CHUNK_LEN) {
372 | console.log('last ' + len % CHUNK_LEN + ' byte(s)');
373 | dataChannel.send(img.data.subarray(n * CHUNK_LEN));
374 | }
375 | }
376 |
377 | function snapAndSend() {
378 | snapPhoto();
379 | sendPhoto();
380 | }
381 |
382 | function renderPhoto(data) {
383 | var canvas = document.createElement('canvas');
384 | canvas.width = photoContextW;
385 | canvas.height = photoContextH;
386 | canvas.classList.add('incomingPhoto');
387 | // trail is the element holding the incoming images
388 | trail.insertBefore(canvas, trail.firstChild);
389 |
390 | var context = canvas.getContext('2d');
391 | var img = context.createImageData(photoContextW, photoContextH);
392 | img.data.set(data);
393 | context.putImageData(img, 0, 0);
394 | }
395 |
396 | function show() {
397 | Array.prototype.forEach.call(arguments, function(elem) {
398 | elem.style.display = null;
399 | });
400 | }
401 |
402 | function hide() {
403 | Array.prototype.forEach.call(arguments, function(elem) {
404 | elem.style.display = 'none';
405 | });
406 | }
407 |
408 | function randomToken() {
409 | return Math.floor((1 + Math.random()) * 1e16).toString(16).substring(1);
410 | }
411 |
412 | function logError(err) {
413 | if (!err) return;
414 | if (typeof err === 'string') {
415 | console.warn(err);
416 | } else {
417 | console.warn(err.toString(), err);
418 | }
419 | }
420 |
--------------------------------------------------------------------------------
/work/js/lib/adapter.js:
--------------------------------------------------------------------------------
1 | (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.adapter = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
204 | if (arguments.length > 0 && typeof selector === 'function') {
205 | return origGetStats(selector, successCallback);
206 | }
207 |
208 | var fixChromeStats_ = function(response) {
209 | var standardReport = {};
210 | var reports = response.result();
211 | reports.forEach(function(report) {
212 | var standardStats = {
213 | id: report.id,
214 | timestamp: report.timestamp,
215 | type: report.type
216 | };
217 | report.names().forEach(function(name) {
218 | standardStats[name] = report.stat(name);
219 | });
220 | standardReport[standardStats.id] = standardStats;
221 | });
222 |
223 | return standardReport;
224 | };
225 |
226 | if (arguments.length >= 2) {
227 | var successCallbackWrapper_ = function(response) {
228 | args[1](fixChromeStats_(response));
229 | };
230 |
231 | return origGetStats.apply(this, [successCallbackWrapper_,
232 | arguments[0]]);
233 | }
234 |
235 | // promise-support
236 | return new Promise(function(resolve, reject) {
237 | if (args.length === 1 && typeof selector === 'object') {
238 | origGetStats.apply(self,
239 | [function(response) {
240 | resolve.apply(null, [fixChromeStats_(response)]);
241 | }, reject]);
242 | } else {
243 | origGetStats.apply(self, [resolve, reject]);
244 | }
245 | });
246 | };
247 |
248 | return pc;
249 | };
250 | window.RTCPeerConnection.prototype = webkitRTCPeerConnection.prototype;
251 |
252 | // wrap static methods. Currently just generateCertificate.
253 | if (webkitRTCPeerConnection.generateCertificate) {
254 | Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
255 | get: function() {
256 | return webkitRTCPeerConnection.generateCertificate;
257 | }
258 | });
259 | }
260 |
261 | // add promise support
262 | ['createOffer', 'createAnswer'].forEach(function(method) {
263 | var nativeMethod = webkitRTCPeerConnection.prototype[method];
264 | webkitRTCPeerConnection.prototype[method] = function() {
265 | var self = this;
266 | if (arguments.length < 1 || (arguments.length === 1 &&
267 | typeof(arguments[0]) === 'object')) {
268 | var opts = arguments.length === 1 ? arguments[0] : undefined;
269 | return new Promise(function(resolve, reject) {
270 | nativeMethod.apply(self, [resolve, reject, opts]);
271 | });
272 | }
273 | return nativeMethod.apply(this, arguments);
274 | };
275 | });
276 |
277 | ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
278 | .forEach(function(method) {
279 | var nativeMethod = webkitRTCPeerConnection.prototype[method];
280 | webkitRTCPeerConnection.prototype[method] = function() {
281 | var args = arguments;
282 | var self = this;
283 | args[0] = new ((method === 'addIceCandidate')?
284 | RTCIceCandidate : RTCSessionDescription)(args[0]);
285 | return new Promise(function(resolve, reject) {
286 | nativeMethod.apply(self, [args[0],
287 | function() {
288 | resolve();
289 | if (args.length >= 2) {
290 | args[1].apply(null, []);
291 | }
292 | },
293 | function(err) {
294 | reject(err);
295 | if (args.length >= 3) {
296 | args[2].apply(null, [err]);
297 | }
298 | }]
299 | );
300 | });
301 | };
302 | });
303 | },
304 |
305 | // Attach a media stream to an element.
306 | attachMediaStream: function(element, stream) {
307 | logging('DEPRECATED, attachMediaStream will soon be removed.');
308 | if (browserDetails.version >= 43) {
309 | element.srcObject = stream;
310 | } else if (typeof element.src !== 'undefined') {
311 | element.src = URL.createObjectURL(stream);
312 | } else {
313 | logging('Error attaching stream to element.');
314 | }
315 | },
316 |
317 | reattachMediaStream: function(to, from) {
318 | logging('DEPRECATED, reattachMediaStream will soon be removed.');
319 | if (browserDetails.version >= 43) {
320 | to.srcObject = from.srcObject;
321 | } else {
322 | to.src = from.src;
323 | }
324 | }
325 | };
326 |
327 |
328 | // Expose public methods.
329 | module.exports = {
330 | shimOnTrack: chromeShim.shimOnTrack,
331 | shimSourceObject: chromeShim.shimSourceObject,
332 | shimPeerConnection: chromeShim.shimPeerConnection,
333 | shimGetUserMedia: require('./getusermedia'),
334 | attachMediaStream: chromeShim.attachMediaStream,
335 | reattachMediaStream: chromeShim.reattachMediaStream
336 | };
337 |
338 | },{"../utils.js":9,"./getusermedia":3}],3:[function(require,module,exports){
339 | /*
340 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
341 | *
342 | * Use of this source code is governed by a BSD-style license
343 | * that can be found in the LICENSE file in the root of the source
344 | * tree.
345 | */
346 | /* eslint-env node */
347 | 'use strict';
348 | var logging = require('../utils.js').log;
349 |
350 | // Expose public methods.
351 | module.exports = function() {
352 | var constraintsToChrome_ = function(c) {
353 | if (typeof c !== 'object' || c.mandatory || c.optional) {
354 | return c;
355 | }
356 | var cc = {};
357 | Object.keys(c).forEach(function(key) {
358 | if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
359 | return;
360 | }
361 | var r = (typeof c[key] === 'object') ? c[key] : {ideal: c[key]};
362 | if (r.exact !== undefined && typeof r.exact === 'number') {
363 | r.min = r.max = r.exact;
364 | }
365 | var oldname_ = function(prefix, name) {
366 | if (prefix) {
367 | return prefix + name.charAt(0).toUpperCase() + name.slice(1);
368 | }
369 | return (name === 'deviceId') ? 'sourceId' : name;
370 | };
371 | if (r.ideal !== undefined) {
372 | cc.optional = cc.optional || [];
373 | var oc = {};
374 | if (typeof r.ideal === 'number') {
375 | oc[oldname_('min', key)] = r.ideal;
376 | cc.optional.push(oc);
377 | oc = {};
378 | oc[oldname_('max', key)] = r.ideal;
379 | cc.optional.push(oc);
380 | } else {
381 | oc[oldname_('', key)] = r.ideal;
382 | cc.optional.push(oc);
383 | }
384 | }
385 | if (r.exact !== undefined && typeof r.exact !== 'number') {
386 | cc.mandatory = cc.mandatory || {};
387 | cc.mandatory[oldname_('', key)] = r.exact;
388 | } else {
389 | ['min', 'max'].forEach(function(mix) {
390 | if (r[mix] !== undefined) {
391 | cc.mandatory = cc.mandatory || {};
392 | cc.mandatory[oldname_(mix, key)] = r[mix];
393 | }
394 | });
395 | }
396 | });
397 | if (c.advanced) {
398 | cc.optional = (cc.optional || []).concat(c.advanced);
399 | }
400 | return cc;
401 | };
402 |
403 | var getUserMedia_ = function(constraints, onSuccess, onError) {
404 | constraints = JSON.parse(JSON.stringify(constraints));
405 | if (constraints.audio) {
406 | constraints.audio = constraintsToChrome_(constraints.audio);
407 | }
408 | if (constraints.video) {
409 | constraints.video = constraintsToChrome_(constraints.video);
410 | }
411 | logging('chrome: ' + JSON.stringify(constraints));
412 | return navigator.webkitGetUserMedia(constraints, onSuccess, onError);
413 | };
414 | navigator.getUserMedia = getUserMedia_;
415 |
416 | // Returns the result of getUserMedia as a Promise.
417 | var getUserMediaPromise_ = function(constraints) {
418 | return new Promise(function(resolve, reject) {
419 | navigator.getUserMedia(constraints, resolve, reject);
420 | });
421 | };
422 |
423 | if (!navigator.mediaDevices) {
424 | navigator.mediaDevices = {
425 | getUserMedia: getUserMediaPromise_,
426 | enumerateDevices: function() {
427 | return new Promise(function(resolve) {
428 | var kinds = {audio: 'audioinput', video: 'videoinput'};
429 | return MediaStreamTrack.getSources(function(devices) {
430 | resolve(devices.map(function(device) {
431 | return {label: device.label,
432 | kind: kinds[device.kind],
433 | deviceId: device.id,
434 | groupId: ''};
435 | }));
436 | });
437 | });
438 | }
439 | };
440 | }
441 |
442 | // A shim for getUserMedia method on the mediaDevices object.
443 | // TODO(KaptenJansson) remove once implemented in Chrome stable.
444 | if (!navigator.mediaDevices.getUserMedia) {
445 | navigator.mediaDevices.getUserMedia = function(constraints) {
446 | return getUserMediaPromise_(constraints);
447 | };
448 | } else {
449 | // Even though Chrome 45 has navigator.mediaDevices and a getUserMedia
450 | // function which returns a Promise, it does not accept spec-style
451 | // constraints.
452 | var origGetUserMedia = navigator.mediaDevices.getUserMedia.
453 | bind(navigator.mediaDevices);
454 | navigator.mediaDevices.getUserMedia = function(c) {
455 | if (c) {
456 | logging('spec: ' + JSON.stringify(c)); // whitespace for alignment
457 | c.audio = constraintsToChrome_(c.audio);
458 | c.video = constraintsToChrome_(c.video);
459 | logging('chrome: ' + JSON.stringify(c));
460 | }
461 | return origGetUserMedia(c);
462 | }.bind(this);
463 | }
464 |
465 | // Dummy devicechange event methods.
466 | // TODO(KaptenJansson) remove once implemented in Chrome stable.
467 | if (typeof navigator.mediaDevices.addEventListener === 'undefined') {
468 | navigator.mediaDevices.addEventListener = function() {
469 | logging('Dummy mediaDevices.addEventListener called.');
470 | };
471 | }
472 | if (typeof navigator.mediaDevices.removeEventListener === 'undefined') {
473 | navigator.mediaDevices.removeEventListener = function() {
474 | logging('Dummy mediaDevices.removeEventListener called.');
475 | };
476 | }
477 | };
478 |
479 | },{"../utils.js":9}],4:[function(require,module,exports){
480 | /*
481 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
482 | *
483 | * Use of this source code is governed by a BSD-style license
484 | * that can be found in the LICENSE file in the root of the source
485 | * tree.
486 | */
487 | /* eslint-env node */
488 | 'use strict';
489 |
490 | // SDP helpers.
491 | var SDPUtils = {};
492 |
493 | // Generate an alphanumeric identifier for cname or mids.
494 | // TODO: use UUIDs instead? https://gist.github.com/jed/982883
495 | SDPUtils.generateIdentifier = function() {
496 | return Math.random().toString(36).substr(2, 10);
497 | };
498 |
499 | // The RTCP CNAME used by all peerconnections from the same JS.
500 | SDPUtils.localCName = SDPUtils.generateIdentifier();
501 |
502 | // Splits SDP into lines, dealing with both CRLF and LF.
503 | SDPUtils.splitLines = function(blob) {
504 | return blob.trim().split('\n').map(function(line) {
505 | return line.trim();
506 | });
507 | };
508 | // Splits SDP into sessionpart and mediasections. Ensures CRLF.
509 | SDPUtils.splitSections = function(blob) {
510 | var parts = blob.split('\nm=');
511 | return parts.map(function(part, index) {
512 | return (index > 0 ? 'm=' + part : part).trim() + '\r\n';
513 | });
514 | };
515 |
516 | // Returns lines that start with a certain prefix.
517 | SDPUtils.matchPrefix = function(blob, prefix) {
518 | return SDPUtils.splitLines(blob).filter(function(line) {
519 | return line.indexOf(prefix) === 0;
520 | });
521 | };
522 |
523 | // Parses an ICE candidate line. Sample input:
524 | // candidate:702786350 2 udp 41819902 8.8.8.8 60769 typ relay raddr 8.8.8.8
525 | // rport 55996
526 | SDPUtils.parseCandidate = function(line) {
527 | var parts;
528 | // Parse both variants.
529 | if (line.indexOf('a=candidate:') === 0) {
530 | parts = line.substring(12).split(' ');
531 | } else {
532 | parts = line.substring(10).split(' ');
533 | }
534 |
535 | var candidate = {
536 | foundation: parts[0],
537 | component: parts[1],
538 | protocol: parts[2].toLowerCase(),
539 | priority: parseInt(parts[3], 10),
540 | ip: parts[4],
541 | port: parseInt(parts[5], 10),
542 | // skip parts[6] == 'typ'
543 | type: parts[7]
544 | };
545 |
546 | for (var i = 8; i < parts.length; i += 2) {
547 | switch (parts[i]) {
548 | case 'raddr':
549 | candidate.relatedAddress = parts[i + 1];
550 | break;
551 | case 'rport':
552 | candidate.relatedPort = parseInt(parts[i + 1], 10);
553 | break;
554 | case 'tcptype':
555 | candidate.tcpType = parts[i + 1];
556 | break;
557 | default: // Unknown extensions are silently ignored.
558 | break;
559 | }
560 | }
561 | return candidate;
562 | };
563 |
564 | // Translates a candidate object into SDP candidate attribute.
565 | SDPUtils.writeCandidate = function(candidate) {
566 | var sdp = [];
567 | sdp.push(candidate.foundation);
568 | sdp.push(candidate.component);
569 | sdp.push(candidate.protocol.toUpperCase());
570 | sdp.push(candidate.priority);
571 | sdp.push(candidate.ip);
572 | sdp.push(candidate.port);
573 |
574 | var type = candidate.type;
575 | sdp.push('typ');
576 | sdp.push(type);
577 | if (type !== 'host' && candidate.relatedAddress &&
578 | candidate.relatedPort) {
579 | sdp.push('raddr');
580 | sdp.push(candidate.relatedAddress); // was: relAddr
581 | sdp.push('rport');
582 | sdp.push(candidate.relatedPort); // was: relPort
583 | }
584 | if (candidate.tcpType && candidate.protocol.toLowerCase() === 'tcp') {
585 | sdp.push('tcptype');
586 | sdp.push(candidate.tcpType);
587 | }
588 | return 'candidate:' + sdp.join(' ');
589 | };
590 |
591 | // Parses an rtpmap line, returns RTCRtpCodecParameters. Sample input:
592 | // a=rtpmap:111 opus/48000/2
593 | SDPUtils.parseRtpMap = function(line) {
594 | var parts = line.substr(9).split(' ');
595 | var parsed = {
596 | payloadType: parseInt(parts.shift(), 10) // was: id
597 | };
598 |
599 | parts = parts[0].split('/');
600 |
601 | parsed.name = parts[0];
602 | parsed.clockRate = parseInt(parts[1], 10); // was: clockrate
603 | // was: channels
604 | parsed.numChannels = parts.length === 3 ? parseInt(parts[2], 10) : 1;
605 | return parsed;
606 | };
607 |
608 | // Generate an a=rtpmap line from RTCRtpCodecCapability or
609 | // RTCRtpCodecParameters.
610 | SDPUtils.writeRtpMap = function(codec) {
611 | var pt = codec.payloadType;
612 | if (codec.preferredPayloadType !== undefined) {
613 | pt = codec.preferredPayloadType;
614 | }
615 | return 'a=rtpmap:' + pt + ' ' + codec.name + '/' + codec.clockRate +
616 | (codec.numChannels !== 1 ? '/' + codec.numChannels : '') + '\r\n';
617 | };
618 |
619 | // Parses an a=extmap line (headerextension from RFC 5285). Sample input:
620 | // a=extmap:2 urn:ietf:params:rtp-hdrext:toffset
621 | SDPUtils.parseExtmap = function(line) {
622 | var parts = line.substr(9).split(' ');
623 | return {
624 | id: parseInt(parts[0], 10),
625 | uri: parts[1]
626 | };
627 | };
628 |
629 | // Generates a=extmap line from RTCRtpHeaderExtensionParameters or
630 | // RTCRtpHeaderExtension.
631 | SDPUtils.writeExtmap = function(headerExtension) {
632 | return 'a=extmap:' + (headerExtension.id || headerExtension.preferredId) +
633 | ' ' + headerExtension.uri + '\r\n';
634 | };
635 |
636 | // Parses an fmtp line, returns dictionary. Sample input:
637 | // a=fmtp:96 vbr=on;cng=on
638 | // Also deals with vbr=on; cng=on
639 | SDPUtils.parseFmtp = function(line) {
640 | var parsed = {};
641 | var kv;
642 | var parts = line.substr(line.indexOf(' ') + 1).split(';');
643 | for (var j = 0; j < parts.length; j++) {
644 | kv = parts[j].trim().split('=');
645 | parsed[kv[0].trim()] = kv[1];
646 | }
647 | return parsed;
648 | };
649 |
650 | // Generates an a=fmtp line from RTCRtpCodecCapability or RTCRtpCodecParameters.
651 | SDPUtils.writeFmtp = function(codec) {
652 | var line = '';
653 | var pt = codec.payloadType;
654 | if (codec.preferredPayloadType !== undefined) {
655 | pt = codec.preferredPayloadType;
656 | }
657 | if (codec.parameters && Object.keys(codec.parameters).length) {
658 | var params = [];
659 | Object.keys(codec.parameters).forEach(function(param) {
660 | params.push(param + '=' + codec.parameters[param]);
661 | });
662 | line += 'a=fmtp:' + pt + ' ' + params.join(';') + '\r\n';
663 | }
664 | return line;
665 | };
666 |
667 | // Parses an rtcp-fb line, returns RTCPRtcpFeedback object. Sample input:
668 | // a=rtcp-fb:98 nack rpsi
669 | SDPUtils.parseRtcpFb = function(line) {
670 | var parts = line.substr(line.indexOf(' ') + 1).split(' ');
671 | return {
672 | type: parts.shift(),
673 | parameter: parts.join(' ')
674 | };
675 | };
676 | // Generate a=rtcp-fb lines from RTCRtpCodecCapability or RTCRtpCodecParameters.
677 | SDPUtils.writeRtcpFb = function(codec) {
678 | var lines = '';
679 | var pt = codec.payloadType;
680 | if (codec.preferredPayloadType !== undefined) {
681 | pt = codec.preferredPayloadType;
682 | }
683 | if (codec.rtcpFeedback && codec.rtcpFeedback.length) {
684 | // FIXME: special handling for trr-int?
685 | codec.rtcpFeedback.forEach(function(fb) {
686 | lines += 'a=rtcp-fb:' + pt + ' ' + fb.type + ' ' + fb.parameter +
687 | '\r\n';
688 | });
689 | }
690 | return lines;
691 | };
692 |
693 | // Parses an RFC 5576 ssrc media attribute. Sample input:
694 | // a=ssrc:3735928559 cname:something
695 | SDPUtils.parseSsrcMedia = function(line) {
696 | var sp = line.indexOf(' ');
697 | var parts = {
698 | ssrc: parseInt(line.substr(7, sp - 7), 10)
699 | };
700 | var colon = line.indexOf(':', sp);
701 | if (colon > -1) {
702 | parts.attribute = line.substr(sp + 1, colon - sp - 1);
703 | parts.value = line.substr(colon + 1);
704 | } else {
705 | parts.attribute = line.substr(sp + 1);
706 | }
707 | return parts;
708 | };
709 |
710 | // Extracts DTLS parameters from SDP media section or sessionpart.
711 | // FIXME: for consistency with other functions this should only
712 | // get the fingerprint line as input. See also getIceParameters.
713 | SDPUtils.getDtlsParameters = function(mediaSection, sessionpart) {
714 | var lines = SDPUtils.splitLines(mediaSection);
715 | // Search in session part, too.
716 | lines = lines.concat(SDPUtils.splitLines(sessionpart));
717 | var fpLine = lines.filter(function(line) {
718 | return line.indexOf('a=fingerprint:') === 0;
719 | })[0].substr(14);
720 | // Note: a=setup line is ignored since we use the 'auto' role.
721 | var dtlsParameters = {
722 | role: 'auto',
723 | fingerprints: [{
724 | algorithm: fpLine.split(' ')[0],
725 | value: fpLine.split(' ')[1]
726 | }]
727 | };
728 | return dtlsParameters;
729 | };
730 |
731 | // Serializes DTLS parameters to SDP.
732 | SDPUtils.writeDtlsParameters = function(params, setupType) {
733 | var sdp = 'a=setup:' + setupType + '\r\n';
734 | params.fingerprints.forEach(function(fp) {
735 | sdp += 'a=fingerprint:' + fp.algorithm + ' ' + fp.value + '\r\n';
736 | });
737 | return sdp;
738 | };
739 | // Parses ICE information from SDP media section or sessionpart.
740 | // FIXME: for consistency with other functions this should only
741 | // get the ice-ufrag and ice-pwd lines as input.
742 | SDPUtils.getIceParameters = function(mediaSection, sessionpart) {
743 | var lines = SDPUtils.splitLines(mediaSection);
744 | // Search in session part, too.
745 | lines = lines.concat(SDPUtils.splitLines(sessionpart));
746 | var iceParameters = {
747 | usernameFragment: lines.filter(function(line) {
748 | return line.indexOf('a=ice-ufrag:') === 0;
749 | })[0].substr(12),
750 | password: lines.filter(function(line) {
751 | return line.indexOf('a=ice-pwd:') === 0;
752 | })[0].substr(10)
753 | };
754 | return iceParameters;
755 | };
756 |
757 | // Serializes ICE parameters to SDP.
758 | SDPUtils.writeIceParameters = function(params) {
759 | return 'a=ice-ufrag:' + params.usernameFragment + '\r\n' +
760 | 'a=ice-pwd:' + params.password + '\r\n';
761 | };
762 |
763 | // Parses the SDP media section and returns RTCRtpParameters.
764 | SDPUtils.parseRtpParameters = function(mediaSection) {
765 | var description = {
766 | codecs: [],
767 | headerExtensions: [],
768 | fecMechanisms: [],
769 | rtcp: []
770 | };
771 | var lines = SDPUtils.splitLines(mediaSection);
772 | var mline = lines[0].split(' ');
773 | for (var i = 3; i < mline.length; i++) { // find all codecs from mline[3..]
774 | var pt = mline[i];
775 | var rtpmapline = SDPUtils.matchPrefix(
776 | mediaSection, 'a=rtpmap:' + pt + ' ')[0];
777 | if (rtpmapline) {
778 | var codec = SDPUtils.parseRtpMap(rtpmapline);
779 | var fmtps = SDPUtils.matchPrefix(
780 | mediaSection, 'a=fmtp:' + pt + ' ');
781 | // Only the first a=fmtp: is considered.
782 | codec.parameters = fmtps.length ? SDPUtils.parseFmtp(fmtps[0]) : {};
783 | codec.rtcpFeedback = SDPUtils.matchPrefix(
784 | mediaSection, 'a=rtcp-fb:' + pt + ' ')
785 | .map(SDPUtils.parseRtcpFb);
786 | description.codecs.push(codec);
787 | // parse FEC mechanisms from rtpmap lines.
788 | switch (codec.name.toUpperCase()) {
789 | case 'RED':
790 | case 'ULPFEC':
791 | description.fecMechanisms.push(codec.name.toUpperCase());
792 | break;
793 | default: // only RED and ULPFEC are recognized as FEC mechanisms.
794 | break;
795 | }
796 | }
797 | }
798 | SDPUtils.matchPrefix(mediaSection, 'a=extmap:').forEach(function(line) {
799 | description.headerExtensions.push(SDPUtils.parseExtmap(line));
800 | });
801 | // FIXME: parse rtcp.
802 | return description;
803 | };
804 |
805 | // Generates parts of the SDP media section describing the capabilities /
806 | // parameters.
807 | SDPUtils.writeRtpDescription = function(kind, caps) {
808 | var sdp = '';
809 |
810 | // Build the mline.
811 | sdp += 'm=' + kind + ' ';
812 | sdp += caps.codecs.length > 0 ? '9' : '0'; // reject if no codecs.
813 | sdp += ' UDP/TLS/RTP/SAVPF ';
814 | sdp += caps.codecs.map(function(codec) {
815 | if (codec.preferredPayloadType !== undefined) {
816 | return codec.preferredPayloadType;
817 | }
818 | return codec.payloadType;
819 | }).join(' ') + '\r\n';
820 |
821 | sdp += 'c=IN IP4 0.0.0.0\r\n';
822 | sdp += 'a=rtcp:9 IN IP4 0.0.0.0\r\n';
823 |
824 | // Add a=rtpmap lines for each codec. Also fmtp and rtcp-fb.
825 | caps.codecs.forEach(function(codec) {
826 | sdp += SDPUtils.writeRtpMap(codec);
827 | sdp += SDPUtils.writeFmtp(codec);
828 | sdp += SDPUtils.writeRtcpFb(codec);
829 | });
830 | // FIXME: add headerExtensions, fecMechanisms and rtcp.
831 | sdp += 'a=rtcp-mux\r\n';
832 | return sdp;
833 | };
834 |
835 | // Parses the SDP media section and returns an array of
836 | // RTCRtpEncodingParameters.
837 | SDPUtils.parseRtpEncodingParameters = function(mediaSection) {
838 | var encodingParameters = [];
839 | var description = SDPUtils.parseRtpParameters(mediaSection);
840 | var hasRed = description.fecMechanisms.indexOf('RED') !== -1;
841 | var hasUlpfec = description.fecMechanisms.indexOf('ULPFEC') !== -1;
842 |
843 | // filter a=ssrc:... cname:, ignore PlanB-msid
844 | var ssrcs = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
845 | .map(function(line) {
846 | return SDPUtils.parseSsrcMedia(line);
847 | })
848 | .filter(function(parts) {
849 | return parts.attribute === 'cname';
850 | });
851 | var primarySsrc = ssrcs.length > 0 && ssrcs[0].ssrc;
852 | var secondarySsrc;
853 |
854 | var flows = SDPUtils.matchPrefix(mediaSection, 'a=ssrc-group:FID')
855 | .map(function(line) {
856 | var parts = line.split(' ');
857 | parts.shift();
858 | return parts.map(function(part) {
859 | return parseInt(part, 10);
860 | });
861 | });
862 | if (flows.length > 0 && flows[0].length > 1 && flows[0][0] === primarySsrc) {
863 | secondarySsrc = flows[0][1];
864 | }
865 |
866 | description.codecs.forEach(function(codec) {
867 | if (codec.name.toUpperCase() === 'RTX' && codec.parameters.apt) {
868 | var encParam = {
869 | ssrc: primarySsrc,
870 | codecPayloadType: parseInt(codec.parameters.apt, 10),
871 | rtx: {
872 | ssrc: secondarySsrc
873 | }
874 | };
875 | encodingParameters.push(encParam);
876 | if (hasRed) {
877 | encParam = JSON.parse(JSON.stringify(encParam));
878 | encParam.fec = {
879 | ssrc: secondarySsrc,
880 | mechanism: hasUlpfec ? 'red+ulpfec' : 'red'
881 | };
882 | encodingParameters.push(encParam);
883 | }
884 | }
885 | });
886 | if (encodingParameters.length === 0 && primarySsrc) {
887 | encodingParameters.push({
888 | ssrc: primarySsrc
889 | });
890 | }
891 |
892 | // we support both b=AS and b=TIAS but interpret AS as TIAS.
893 | var bandwidth = SDPUtils.matchPrefix(mediaSection, 'b=');
894 | if (bandwidth.length) {
895 | if (bandwidth[0].indexOf('b=TIAS:') === 0) {
896 | bandwidth = parseInt(bandwidth[0].substr(7), 10);
897 | } else if (bandwidth[0].indexOf('b=AS:') === 0) {
898 | bandwidth = parseInt(bandwidth[0].substr(5), 10);
899 | }
900 | encodingParameters.forEach(function(params) {
901 | params.maxBitrate = bandwidth;
902 | });
903 | }
904 | return encodingParameters;
905 | };
906 |
907 | SDPUtils.writeSessionBoilerplate = function() {
908 | // FIXME: sess-id should be an NTP timestamp.
909 | return 'v=0\r\n' +
910 | 'o=thisisadapterortc 8169639915646943137 2 IN IP4 127.0.0.1\r\n' +
911 | 's=-\r\n' +
912 | 't=0 0\r\n';
913 | };
914 |
915 | SDPUtils.writeMediaSection = function(transceiver, caps, type, stream) {
916 | var sdp = SDPUtils.writeRtpDescription(transceiver.kind, caps);
917 |
918 | // Map ICE parameters (ufrag, pwd) to SDP.
919 | sdp += SDPUtils.writeIceParameters(
920 | transceiver.iceGatherer.getLocalParameters());
921 |
922 | // Map DTLS parameters to SDP.
923 | sdp += SDPUtils.writeDtlsParameters(
924 | transceiver.dtlsTransport.getLocalParameters(),
925 | type === 'offer' ? 'actpass' : 'active');
926 |
927 | sdp += 'a=mid:' + transceiver.mid + '\r\n';
928 |
929 | if (transceiver.rtpSender && transceiver.rtpReceiver) {
930 | sdp += 'a=sendrecv\r\n';
931 | } else if (transceiver.rtpSender) {
932 | sdp += 'a=sendonly\r\n';
933 | } else if (transceiver.rtpReceiver) {
934 | sdp += 'a=recvonly\r\n';
935 | } else {
936 | sdp += 'a=inactive\r\n';
937 | }
938 |
939 | // FIXME: for RTX there might be multiple SSRCs. Not implemented in Edge yet.
940 | if (transceiver.rtpSender) {
941 | var msid = 'msid:' + stream.id + ' ' +
942 | transceiver.rtpSender.track.id + '\r\n';
943 | sdp += 'a=' + msid;
944 | sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
945 | ' ' + msid;
946 | }
947 | // FIXME: this should be written by writeRtpDescription.
948 | sdp += 'a=ssrc:' + transceiver.sendEncodingParameters[0].ssrc +
949 | ' cname:' + SDPUtils.localCName + '\r\n';
950 | return sdp;
951 | };
952 |
953 | // Gets the direction from the mediaSection or the sessionpart.
954 | SDPUtils.getDirection = function(mediaSection, sessionpart) {
955 | // Look for sendrecv, sendonly, recvonly, inactive, default to sendrecv.
956 | var lines = SDPUtils.splitLines(mediaSection);
957 | for (var i = 0; i < lines.length; i++) {
958 | switch (lines[i]) {
959 | case 'a=sendrecv':
960 | case 'a=sendonly':
961 | case 'a=recvonly':
962 | case 'a=inactive':
963 | return lines[i].substr(2);
964 | default:
965 | // FIXME: What should happen here?
966 | }
967 | }
968 | if (sessionpart) {
969 | return SDPUtils.getDirection(sessionpart);
970 | }
971 | return 'sendrecv';
972 | };
973 |
974 | // Expose public methods.
975 | module.exports = SDPUtils;
976 |
977 | },{}],5:[function(require,module,exports){
978 | /*
979 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
980 | *
981 | * Use of this source code is governed by a BSD-style license
982 | * that can be found in the LICENSE file in the root of the source
983 | * tree.
984 | */
985 | /* eslint-env node */
986 | 'use strict';
987 |
988 | var SDPUtils = require('./edge_sdp');
989 | var logging = require('../utils').log;
990 |
991 | var edgeShim = {
992 | shimPeerConnection: function() {
993 | if (window.RTCIceGatherer) {
994 | // ORTC defines an RTCIceCandidate object but no constructor.
995 | // Not implemented in Edge.
996 | if (!window.RTCIceCandidate) {
997 | window.RTCIceCandidate = function(args) {
998 | return args;
999 | };
1000 | }
1001 | // ORTC does not have a session description object but
1002 | // other browsers (i.e. Chrome) that will support both PC and ORTC
1003 | // in the future might have this defined already.
1004 | if (!window.RTCSessionDescription) {
1005 | window.RTCSessionDescription = function(args) {
1006 | return args;
1007 | };
1008 | }
1009 | }
1010 |
1011 | window.RTCPeerConnection = function(config) {
1012 | var self = this;
1013 |
1014 | var _eventTarget = document.createDocumentFragment();
1015 | ['addEventListener', 'removeEventListener', 'dispatchEvent']
1016 | .forEach(function(method) {
1017 | self[method] = _eventTarget[method].bind(_eventTarget);
1018 | });
1019 |
1020 | this.onicecandidate = null;
1021 | this.onaddstream = null;
1022 | this.ontrack = null;
1023 | this.onremovestream = null;
1024 | this.onsignalingstatechange = null;
1025 | this.oniceconnectionstatechange = null;
1026 | this.onnegotiationneeded = null;
1027 | this.ondatachannel = null;
1028 |
1029 | this.localStreams = [];
1030 | this.remoteStreams = [];
1031 | this.getLocalStreams = function() {
1032 | return self.localStreams;
1033 | };
1034 | this.getRemoteStreams = function() {
1035 | return self.remoteStreams;
1036 | };
1037 |
1038 | this.localDescription = new RTCSessionDescription({
1039 | type: '',
1040 | sdp: ''
1041 | });
1042 | this.remoteDescription = new RTCSessionDescription({
1043 | type: '',
1044 | sdp: ''
1045 | });
1046 | this.signalingState = 'stable';
1047 | this.iceConnectionState = 'new';
1048 | this.iceGatheringState = 'new';
1049 |
1050 | this.iceOptions = {
1051 | gatherPolicy: 'all',
1052 | iceServers: []
1053 | };
1054 | if (config && config.iceTransportPolicy) {
1055 | switch (config.iceTransportPolicy) {
1056 | case 'all':
1057 | case 'relay':
1058 | this.iceOptions.gatherPolicy = config.iceTransportPolicy;
1059 | break;
1060 | case 'none':
1061 | // FIXME: remove once implementation and spec have added this.
1062 | throw new TypeError('iceTransportPolicy "none" not supported');
1063 | default:
1064 | // don't set iceTransportPolicy.
1065 | break;
1066 | }
1067 | }
1068 | if (config && config.iceServers) {
1069 | // Edge does not like
1070 | // 1) stun:
1071 | // 2) turn: that does not have all of turn:host:port?transport=udp
1072 | this.iceOptions.iceServers = config.iceServers.filter(function(server) {
1073 | if (server && server.urls) {
1074 | server.urls = server.urls.filter(function(url) {
1075 | return url.indexOf('turn:') === 0 &&
1076 | url.indexOf('transport=udp') !== -1;
1077 | })[0];
1078 | return !!server.urls;
1079 | }
1080 | return false;
1081 | });
1082 | }
1083 |
1084 | // per-track iceGathers, iceTransports, dtlsTransports, rtpSenders, ...
1085 | // everything that is needed to describe a SDP m-line.
1086 | this.transceivers = [];
1087 |
1088 | // since the iceGatherer is currently created in createOffer but we
1089 | // must not emit candidates until after setLocalDescription we buffer
1090 | // them in this array.
1091 | this._localIceCandidatesBuffer = [];
1092 | };
1093 |
1094 | window.RTCPeerConnection.prototype._emitBufferedCandidates = function() {
1095 | var self = this;
1096 | var sections = SDPUtils.splitSections(self.localDescription.sdp);
1097 | // FIXME: need to apply ice candidates in a way which is async but
1098 | // in-order
1099 | this._localIceCandidatesBuffer.forEach(function(event) {
1100 | var end = !event.candidate || Object.keys(event.candidate).length === 0;
1101 | if (end) {
1102 | for (var j = 1; j < sections.length; j++) {
1103 | if (sections[j].indexOf('\r\na=end-of-candidates\r\n') === -1) {
1104 | sections[j] += 'a=end-of-candidates\r\n';
1105 | }
1106 | }
1107 | } else if (event.candidate.candidate.indexOf('typ endOfCandidates')
1108 | === -1) {
1109 | sections[event.candidate.sdpMLineIndex + 1] +=
1110 | 'a=' + event.candidate.candidate + '\r\n';
1111 | }
1112 | self.localDescription.sdp = sections.join('');
1113 | self.dispatchEvent(event);
1114 | if (self.onicecandidate !== null) {
1115 | self.onicecandidate(event);
1116 | }
1117 | if (!event.candidate && self.iceGatheringState !== 'complete') {
1118 | var complete = self.transceivers.every(function(transceiver) {
1119 | return transceiver.iceGatherer &&
1120 | transceiver.iceGatherer.state === 'completed';
1121 | });
1122 | if (complete) {
1123 | self.iceGatheringState = 'complete';
1124 | }
1125 | }
1126 | });
1127 | this._localIceCandidatesBuffer = [];
1128 | };
1129 |
1130 | window.RTCPeerConnection.prototype.addStream = function(stream) {
1131 | // Clone is necessary for local demos mostly, attaching directly
1132 | // to two different senders does not work (build 10547).
1133 | this.localStreams.push(stream.clone());
1134 | this._maybeFireNegotiationNeeded();
1135 | };
1136 |
1137 | window.RTCPeerConnection.prototype.removeStream = function(stream) {
1138 | var idx = this.localStreams.indexOf(stream);
1139 | if (idx > -1) {
1140 | this.localStreams.splice(idx, 1);
1141 | this._maybeFireNegotiationNeeded();
1142 | }
1143 | };
1144 |
1145 | // Determines the intersection of local and remote capabilities.
1146 | window.RTCPeerConnection.prototype._getCommonCapabilities =
1147 | function(localCapabilities, remoteCapabilities) {
1148 | var commonCapabilities = {
1149 | codecs: [],
1150 | headerExtensions: [],
1151 | fecMechanisms: []
1152 | };
1153 | localCapabilities.codecs.forEach(function(lCodec) {
1154 | for (var i = 0; i < remoteCapabilities.codecs.length; i++) {
1155 | var rCodec = remoteCapabilities.codecs[i];
1156 | if (lCodec.name.toLowerCase() === rCodec.name.toLowerCase() &&
1157 | lCodec.clockRate === rCodec.clockRate &&
1158 | lCodec.numChannels === rCodec.numChannels) {
1159 | // push rCodec so we reply with offerer payload type
1160 | commonCapabilities.codecs.push(rCodec);
1161 |
1162 | // FIXME: also need to determine intersection between
1163 | // .rtcpFeedback and .parameters
1164 | break;
1165 | }
1166 | }
1167 | });
1168 |
1169 | localCapabilities.headerExtensions
1170 | .forEach(function(lHeaderExtension) {
1171 | for (var i = 0; i < remoteCapabilities.headerExtensions.length;
1172 | i++) {
1173 | var rHeaderExtension = remoteCapabilities.headerExtensions[i];
1174 | if (lHeaderExtension.uri === rHeaderExtension.uri) {
1175 | commonCapabilities.headerExtensions.push(rHeaderExtension);
1176 | break;
1177 | }
1178 | }
1179 | });
1180 |
1181 | // FIXME: fecMechanisms
1182 | return commonCapabilities;
1183 | };
1184 |
1185 | // Create ICE gatherer, ICE transport and DTLS transport.
1186 | window.RTCPeerConnection.prototype._createIceAndDtlsTransports =
1187 | function(mid, sdpMLineIndex) {
1188 | var self = this;
1189 | var iceGatherer = new RTCIceGatherer(self.iceOptions);
1190 | var iceTransport = new RTCIceTransport(iceGatherer);
1191 | iceGatherer.onlocalcandidate = function(evt) {
1192 | var event = new Event('icecandidate');
1193 | event.candidate = {sdpMid: mid, sdpMLineIndex: sdpMLineIndex};
1194 |
1195 | var cand = evt.candidate;
1196 | var end = !cand || Object.keys(cand).length === 0;
1197 | // Edge emits an empty object for RTCIceCandidateComplete‥
1198 | if (end) {
1199 | // polyfill since RTCIceGatherer.state is not implemented in
1200 | // Edge 10547 yet.
1201 | if (iceGatherer.state === undefined) {
1202 | iceGatherer.state = 'completed';
1203 | }
1204 |
1205 | // Emit a candidate with type endOfCandidates to make the samples
1206 | // work. Edge requires addIceCandidate with this empty candidate
1207 | // to start checking. The real solution is to signal
1208 | // end-of-candidates to the other side when getting the null
1209 | // candidate but some apps (like the samples) don't do that.
1210 | event.candidate.candidate =
1211 | 'candidate:1 1 udp 1 0.0.0.0 9 typ endOfCandidates';
1212 | } else {
1213 | // RTCIceCandidate doesn't have a component, needs to be added
1214 | cand.component = iceTransport.component === 'RTCP' ? 2 : 1;
1215 | event.candidate.candidate = SDPUtils.writeCandidate(cand);
1216 | }
1217 |
1218 | var complete = self.transceivers.every(function(transceiver) {
1219 | return transceiver.iceGatherer &&
1220 | transceiver.iceGatherer.state === 'completed';
1221 | });
1222 |
1223 | // Emit candidate if localDescription is set.
1224 | // Also emits null candidate when all gatherers are complete.
1225 | switch (self.iceGatheringState) {
1226 | case 'new':
1227 | self._localIceCandidatesBuffer.push(event);
1228 | if (end && complete) {
1229 | self._localIceCandidatesBuffer.push(
1230 | new Event('icecandidate'));
1231 | }
1232 | break;
1233 | case 'gathering':
1234 | self._emitBufferedCandidates();
1235 | self.dispatchEvent(event);
1236 | if (self.onicecandidate !== null) {
1237 | self.onicecandidate(event);
1238 | }
1239 | if (complete) {
1240 | self.dispatchEvent(new Event('icecandidate'));
1241 | if (self.onicecandidate !== null) {
1242 | self.onicecandidate(new Event('icecandidate'));
1243 | }
1244 | self.iceGatheringState = 'complete';
1245 | }
1246 | break;
1247 | case 'complete':
1248 | // should not happen... currently!
1249 | break;
1250 | default: // no-op.
1251 | break;
1252 | }
1253 | };
1254 | iceTransport.onicestatechange = function() {
1255 | self._updateConnectionState();
1256 | };
1257 |
1258 | var dtlsTransport = new RTCDtlsTransport(iceTransport);
1259 | dtlsTransport.ondtlsstatechange = function() {
1260 | self._updateConnectionState();
1261 | };
1262 | dtlsTransport.onerror = function() {
1263 | // onerror does not set state to failed by itself.
1264 | dtlsTransport.state = 'failed';
1265 | self._updateConnectionState();
1266 | };
1267 |
1268 | return {
1269 | iceGatherer: iceGatherer,
1270 | iceTransport: iceTransport,
1271 | dtlsTransport: dtlsTransport
1272 | };
1273 | };
1274 |
1275 | // Start the RTP Sender and Receiver for a transceiver.
1276 | window.RTCPeerConnection.prototype._transceive = function(transceiver,
1277 | send, recv) {
1278 | var params = this._getCommonCapabilities(transceiver.localCapabilities,
1279 | transceiver.remoteCapabilities);
1280 | if (send && transceiver.rtpSender) {
1281 | params.encodings = transceiver.sendEncodingParameters;
1282 | params.rtcp = {
1283 | cname: SDPUtils.localCName
1284 | };
1285 | if (transceiver.recvEncodingParameters.length) {
1286 | params.rtcp.ssrc = transceiver.recvEncodingParameters[0].ssrc;
1287 | }
1288 | transceiver.rtpSender.send(params);
1289 | }
1290 | if (recv && transceiver.rtpReceiver) {
1291 | params.encodings = transceiver.recvEncodingParameters;
1292 | params.rtcp = {
1293 | cname: transceiver.cname
1294 | };
1295 | if (transceiver.sendEncodingParameters.length) {
1296 | params.rtcp.ssrc = transceiver.sendEncodingParameters[0].ssrc;
1297 | }
1298 | transceiver.rtpReceiver.receive(params);
1299 | }
1300 | };
1301 |
1302 | window.RTCPeerConnection.prototype.setLocalDescription =
1303 | function(description) {
1304 | var self = this;
1305 | var sections;
1306 | var sessionpart;
1307 | if (description.type === 'offer') {
1308 |     // FIXME: the upstream code had an empty "if (!this._pendingOffer)"
1309 |     // branch here; only the pending-offer case needs handling, so the
1310 |     // check is inverted below.
1311 | if (this._pendingOffer) {
1312 | // VERY limited support for SDP munging. Limited to:
1313 | // * changing the order of codecs
1314 | sections = SDPUtils.splitSections(description.sdp);
1315 | sessionpart = sections.shift();
1316 | sections.forEach(function(mediaSection, sdpMLineIndex) {
1317 | var caps = SDPUtils.parseRtpParameters(mediaSection);
1318 | self._pendingOffer[sdpMLineIndex].localCapabilities = caps;
1319 | });
1320 | this.transceivers = this._pendingOffer;
1321 | delete this._pendingOffer;
1322 | }
1323 | } else if (description.type === 'answer') {
1324 | sections = SDPUtils.splitSections(self.remoteDescription.sdp);
1325 | sessionpart = sections.shift();
1326 | sections.forEach(function(mediaSection, sdpMLineIndex) {
1327 | var transceiver = self.transceivers[sdpMLineIndex];
1328 | var iceGatherer = transceiver.iceGatherer;
1329 | var iceTransport = transceiver.iceTransport;
1330 | var dtlsTransport = transceiver.dtlsTransport;
1331 | var localCapabilities = transceiver.localCapabilities;
1332 | var remoteCapabilities = transceiver.remoteCapabilities;
1333 | var rejected = mediaSection.split('\n', 1)[0]
1334 | .split(' ', 2)[1] === '0';
1335 |
1336 | if (!rejected) {
1337 | var remoteIceParameters = SDPUtils.getIceParameters(
1338 | mediaSection, sessionpart);
1339 | iceTransport.start(iceGatherer, remoteIceParameters,
1340 | 'controlled');
1341 |
1342 | var remoteDtlsParameters = SDPUtils.getDtlsParameters(
1343 | mediaSection, sessionpart);
1344 | dtlsTransport.start(remoteDtlsParameters);
1345 |
1346 | // Calculate intersection of capabilities.
1347 | var params = self._getCommonCapabilities(localCapabilities,
1348 | remoteCapabilities);
1349 |
1350 | // Start the RTCRtpSender. The RTCRtpReceiver for this
1351 | // transceiver has already been started in setRemoteDescription.
1352 | self._transceive(transceiver,
1353 | params.codecs.length > 0,
1354 | false);
1355 | }
1356 | });
1357 | }
1358 |
1359 | this.localDescription = {
1360 | type: description.type,
1361 | sdp: description.sdp
1362 | };
1363 | switch (description.type) {
1364 | case 'offer':
1365 | this._updateSignalingState('have-local-offer');
1366 | break;
1367 | case 'answer':
1368 | this._updateSignalingState('stable');
1369 | break;
1370 | default:
1371 | throw new TypeError('unsupported type "' + description.type +
1372 | '"');
1373 | }
1374 |
1375 |   // If a success callback was provided, emit ICE candidates after it
1376 |   // has been executed. Otherwise, emit them once the Promise has
1377 |   // resolved.
1378 | var hasCallback = arguments.length > 1 &&
1379 | typeof arguments[1] === 'function';
1380 | if (hasCallback) {
1381 | var cb = arguments[1];
1382 | window.setTimeout(function() {
1383 | cb();
1384 | if (self.iceGatheringState === 'new') {
1385 | self.iceGatheringState = 'gathering';
1386 | }
1387 | self._emitBufferedCandidates();
1388 | }, 0);
1389 | }
1390 | var p = Promise.resolve();
1391 | p.then(function() {
1392 | if (!hasCallback) {
1393 | if (self.iceGatheringState === 'new') {
1394 | self.iceGatheringState = 'gathering';
1395 | }
1396 | // Usually candidates will be emitted earlier.
1397 | window.setTimeout(self._emitBufferedCandidates.bind(self), 500);
1398 | }
1399 | });
1400 | return p;
1401 | };
1402 |
1403 | window.RTCPeerConnection.prototype.setRemoteDescription =
1404 | function(description) {
1405 | var self = this;
1406 | var stream = new MediaStream();
1407 | var receiverList = [];
1408 | var sections = SDPUtils.splitSections(description.sdp);
1409 | var sessionpart = sections.shift();
1410 | sections.forEach(function(mediaSection, sdpMLineIndex) {
1411 | var lines = SDPUtils.splitLines(mediaSection);
1412 | var mline = lines[0].substr(2).split(' ');
1413 | var kind = mline[0];
1414 | var rejected = mline[1] === '0';
1415 | var direction = SDPUtils.getDirection(mediaSection, sessionpart);
1416 |
1417 | var transceiver;
1418 | var iceGatherer;
1419 | var iceTransport;
1420 | var dtlsTransport;
1421 | var rtpSender;
1422 | var rtpReceiver;
1423 | var sendEncodingParameters;
1424 | var recvEncodingParameters;
1425 | var localCapabilities;
1426 |
1427 | var track;
1428 | // FIXME: ensure the mediaSection has rtcp-mux set.
1429 | var remoteCapabilities = SDPUtils.parseRtpParameters(mediaSection);
1430 | var remoteIceParameters;
1431 | var remoteDtlsParameters;
1432 | if (!rejected) {
1433 | remoteIceParameters = SDPUtils.getIceParameters(mediaSection,
1434 | sessionpart);
1435 | remoteDtlsParameters = SDPUtils.getDtlsParameters(mediaSection,
1436 | sessionpart);
1437 | }
1438 | recvEncodingParameters =
1439 | SDPUtils.parseRtpEncodingParameters(mediaSection);
1440 |
1441 | var mid = SDPUtils.matchPrefix(mediaSection, 'a=mid:');
1442 | if (mid.length) {
1443 | mid = mid[0].substr(6);
1444 | } else {
1445 | mid = SDPUtils.generateIdentifier();
1446 | }
1447 |
1448 | var cname;
1449 | // Gets the first SSRC. Note that with RTX there might be multiple
1450 | // SSRCs.
1451 | var remoteSsrc = SDPUtils.matchPrefix(mediaSection, 'a=ssrc:')
1452 | .map(function(line) {
1453 | return SDPUtils.parseSsrcMedia(line);
1454 | })
1455 | .filter(function(obj) {
1456 | return obj.attribute === 'cname';
1457 | })[0];
1458 | if (remoteSsrc) {
1459 | cname = remoteSsrc.value;
1460 | }
1461 |
1462 | var isComplete = SDPUtils.matchPrefix(mediaSection,
1463 | 'a=end-of-candidates').length > 0;
1464 | var cands = SDPUtils.matchPrefix(mediaSection, 'a=candidate:')
1465 | .map(function(cand) {
1466 | return SDPUtils.parseCandidate(cand);
1467 | })
1468 | .filter(function(cand) {
1469 | return cand.component === '1';
1470 | });
1471 | if (description.type === 'offer' && !rejected) {
1472 | var transports = self._createIceAndDtlsTransports(mid,
1473 | sdpMLineIndex);
1474 | if (isComplete) {
1475 | transports.iceTransport.setRemoteCandidates(cands);
1476 | }
1477 |
1478 | localCapabilities = RTCRtpReceiver.getCapabilities(kind);
1479 | sendEncodingParameters = [{
1480 | ssrc: (2 * sdpMLineIndex + 2) * 1001
1481 | }];
1482 |
1483 | rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
1484 |
1485 | track = rtpReceiver.track;
1486 | receiverList.push([track, rtpReceiver]);
1487 | // FIXME: not correct when there are multiple streams but that is
1488 | // not currently supported in this shim.
1489 | stream.addTrack(track);
1490 |
1491 | // FIXME: look at direction.
1492 | if (self.localStreams.length > 0 &&
1493 |           self.localStreams[0].getTracks().length > sdpMLineIndex) {
1494 | // FIXME: actually more complicated, needs to match types etc
1495 | var localtrack = self.localStreams[0]
1496 | .getTracks()[sdpMLineIndex];
1497 | rtpSender = new RTCRtpSender(localtrack,
1498 | transports.dtlsTransport);
1499 | }
1500 |
1501 | self.transceivers[sdpMLineIndex] = {
1502 | iceGatherer: transports.iceGatherer,
1503 | iceTransport: transports.iceTransport,
1504 | dtlsTransport: transports.dtlsTransport,
1505 | localCapabilities: localCapabilities,
1506 | remoteCapabilities: remoteCapabilities,
1507 | rtpSender: rtpSender,
1508 | rtpReceiver: rtpReceiver,
1509 | kind: kind,
1510 | mid: mid,
1511 | cname: cname,
1512 | sendEncodingParameters: sendEncodingParameters,
1513 | recvEncodingParameters: recvEncodingParameters
1514 | };
1515 |       // Start the RTCRtpReceiver now. The RTCRtpSender is started in
1516 |       // setLocalDescription.
1517 | self._transceive(self.transceivers[sdpMLineIndex],
1518 | false,
1519 | direction === 'sendrecv' || direction === 'sendonly');
1520 | } else if (description.type === 'answer' && !rejected) {
1521 | transceiver = self.transceivers[sdpMLineIndex];
1522 | iceGatherer = transceiver.iceGatherer;
1523 | iceTransport = transceiver.iceTransport;
1524 | dtlsTransport = transceiver.dtlsTransport;
1525 | rtpSender = transceiver.rtpSender;
1526 | rtpReceiver = transceiver.rtpReceiver;
1527 | sendEncodingParameters = transceiver.sendEncodingParameters;
1528 | localCapabilities = transceiver.localCapabilities;
1529 |
1530 | self.transceivers[sdpMLineIndex].recvEncodingParameters =
1531 | recvEncodingParameters;
1532 | self.transceivers[sdpMLineIndex].remoteCapabilities =
1533 | remoteCapabilities;
1534 | self.transceivers[sdpMLineIndex].cname = cname;
1535 |
1536 | if (isComplete) {
1537 | iceTransport.setRemoteCandidates(cands);
1538 | }
1539 | iceTransport.start(iceGatherer, remoteIceParameters,
1540 | 'controlling');
1541 | dtlsTransport.start(remoteDtlsParameters);
1542 |
1543 | self._transceive(transceiver,
1544 | direction === 'sendrecv' || direction === 'recvonly',
1545 | direction === 'sendrecv' || direction === 'sendonly');
1546 |
1547 | if (rtpReceiver &&
1548 | (direction === 'sendrecv' || direction === 'sendonly')) {
1549 | track = rtpReceiver.track;
1550 | receiverList.push([track, rtpReceiver]);
1551 | stream.addTrack(track);
1552 | } else {
1553 | // FIXME: actually the receiver should be created later.
1554 | delete transceiver.rtpReceiver;
1555 | }
1556 | }
1557 | });
1558 |
1559 | this.remoteDescription = {
1560 | type: description.type,
1561 | sdp: description.sdp
1562 | };
1563 | switch (description.type) {
1564 | case 'offer':
1565 | this._updateSignalingState('have-remote-offer');
1566 | break;
1567 | case 'answer':
1568 | this._updateSignalingState('stable');
1569 | break;
1570 | default:
1571 | throw new TypeError('unsupported type "' + description.type +
1572 | '"');
1573 | }
1574 | if (stream.getTracks().length) {
1575 | self.remoteStreams.push(stream);
1576 | window.setTimeout(function() {
1577 | var event = new Event('addstream');
1578 | event.stream = stream;
1579 | self.dispatchEvent(event);
1580 | if (self.onaddstream !== null) {
1581 | window.setTimeout(function() {
1582 | self.onaddstream(event);
1583 | }, 0);
1584 | }
1585 |
1586 | receiverList.forEach(function(item) {
1587 | var track = item[0];
1588 | var receiver = item[1];
1589 | var trackEvent = new Event('track');
1590 | trackEvent.track = track;
1591 | trackEvent.receiver = receiver;
1592 | trackEvent.streams = [stream];
1593 |         self.dispatchEvent(trackEvent);
1594 | if (self.ontrack !== null) {
1595 | window.setTimeout(function() {
1596 | self.ontrack(trackEvent);
1597 | }, 0);
1598 | }
1599 | });
1600 | }, 0);
1601 | }
1602 | if (arguments.length > 1 && typeof arguments[1] === 'function') {
1603 | window.setTimeout(arguments[1], 0);
1604 | }
1605 | return Promise.resolve();
1606 | };
1607 |
1608 | window.RTCPeerConnection.prototype.close = function() {
1609 | this.transceivers.forEach(function(transceiver) {
1610 | /* not yet
1611 | if (transceiver.iceGatherer) {
1612 | transceiver.iceGatherer.close();
1613 | }
1614 | */
1615 | if (transceiver.iceTransport) {
1616 | transceiver.iceTransport.stop();
1617 | }
1618 | if (transceiver.dtlsTransport) {
1619 | transceiver.dtlsTransport.stop();
1620 | }
1621 | if (transceiver.rtpSender) {
1622 | transceiver.rtpSender.stop();
1623 | }
1624 | if (transceiver.rtpReceiver) {
1625 | transceiver.rtpReceiver.stop();
1626 | }
1627 | });
1628 | // FIXME: clean up tracks, local streams, remote streams, etc
1629 | this._updateSignalingState('closed');
1630 | };
1631 |
1632 | // Update the signaling state.
1633 | window.RTCPeerConnection.prototype._updateSignalingState =
1634 | function(newState) {
1635 | this.signalingState = newState;
1636 | var event = new Event('signalingstatechange');
1637 | this.dispatchEvent(event);
1638 | if (this.onsignalingstatechange !== null) {
1639 | this.onsignalingstatechange(event);
1640 | }
1641 | };
1642 |
1643 | // Determine whether to fire the negotiationneeded event.
1644 | window.RTCPeerConnection.prototype._maybeFireNegotiationNeeded =
1645 | function() {
1646 | // Fire away (for now).
1647 | var event = new Event('negotiationneeded');
1648 | this.dispatchEvent(event);
1649 | if (this.onnegotiationneeded !== null) {
1650 | this.onnegotiationneeded(event);
1651 | }
1652 | };
1653 |
1654 | // Update the connection state.
1655 | window.RTCPeerConnection.prototype._updateConnectionState = function() {
1656 | var self = this;
1657 | var newState;
1658 | var states = {
1659 | 'new': 0,
1660 | closed: 0,
1661 | connecting: 0,
1662 |     checking: 0, disconnected: 0,
1663 | connected: 0,
1664 | completed: 0,
1665 | failed: 0
1666 | };
1667 | this.transceivers.forEach(function(transceiver) {
1668 | states[transceiver.iceTransport.state]++;
1669 | states[transceiver.dtlsTransport.state]++;
1670 | });
1671 | // ICETransport.completed and connected are the same for this purpose.
1672 | states.connected += states.completed;
1673 |
1674 | newState = 'new';
1675 | if (states.failed > 0) {
1676 | newState = 'failed';
1677 | } else if (states.connecting > 0 || states.checking > 0) {
1678 | newState = 'connecting';
1679 | } else if (states.disconnected > 0) {
1680 | newState = 'disconnected';
1681 | } else if (states.new > 0) {
1682 | newState = 'new';
1683 | } else if (states.connected > 0 || states.completed > 0) {
1684 | newState = 'connected';
1685 | }
1686 |
1687 | if (newState !== self.iceConnectionState) {
1688 | self.iceConnectionState = newState;
1689 | var event = new Event('iceconnectionstatechange');
1690 | this.dispatchEvent(event);
1691 | if (this.oniceconnectionstatechange !== null) {
1692 | this.oniceconnectionstatechange(event);
1693 | }
1694 | }
1695 | };
1696 |
1697 | window.RTCPeerConnection.prototype.createOffer = function() {
1698 | var self = this;
1699 | if (this._pendingOffer) {
1700 | throw new Error('createOffer called while there is a pending offer.');
1701 | }
1702 | var offerOptions;
1703 | if (arguments.length === 1 && typeof arguments[0] !== 'function') {
1704 | offerOptions = arguments[0];
1705 | } else if (arguments.length === 3) {
1706 | offerOptions = arguments[2];
1707 | }
1708 |
1709 | var tracks = [];
1710 | var numAudioTracks = 0;
1711 | var numVideoTracks = 0;
1712 | // Default to sendrecv.
1713 | if (this.localStreams.length) {
1714 | numAudioTracks = this.localStreams[0].getAudioTracks().length;
1715 | numVideoTracks = this.localStreams[0].getVideoTracks().length;
1716 | }
1717 | // Determine number of audio and video tracks we need to send/recv.
1718 | if (offerOptions) {
1719 | // Reject Chrome legacy constraints.
1720 | if (offerOptions.mandatory || offerOptions.optional) {
1721 | throw new TypeError(
1722 | 'Legacy mandatory/optional constraints not supported.');
1723 | }
1724 | if (offerOptions.offerToReceiveAudio !== undefined) {
1725 | numAudioTracks = offerOptions.offerToReceiveAudio;
1726 | }
1727 | if (offerOptions.offerToReceiveVideo !== undefined) {
1728 | numVideoTracks = offerOptions.offerToReceiveVideo;
1729 | }
1730 | }
1731 | if (this.localStreams.length) {
1732 | // Push local streams.
1733 | this.localStreams[0].getTracks().forEach(function(track) {
1734 | tracks.push({
1735 | kind: track.kind,
1736 | track: track,
1737 | wantReceive: track.kind === 'audio' ?
1738 | numAudioTracks > 0 : numVideoTracks > 0
1739 | });
1740 | if (track.kind === 'audio') {
1741 | numAudioTracks--;
1742 | } else if (track.kind === 'video') {
1743 | numVideoTracks--;
1744 | }
1745 | });
1746 | }
1747 | // Create M-lines for recvonly streams.
1748 | while (numAudioTracks > 0 || numVideoTracks > 0) {
1749 | if (numAudioTracks > 0) {
1750 | tracks.push({
1751 | kind: 'audio',
1752 | wantReceive: true
1753 | });
1754 | numAudioTracks--;
1755 | }
1756 | if (numVideoTracks > 0) {
1757 | tracks.push({
1758 | kind: 'video',
1759 | wantReceive: true
1760 | });
1761 | numVideoTracks--;
1762 | }
1763 | }
1764 |
1765 | var sdp = SDPUtils.writeSessionBoilerplate();
1766 | var transceivers = [];
1767 | tracks.forEach(function(mline, sdpMLineIndex) {
1768 | // For each track, create an ice gatherer, ice transport,
1769 | // dtls transport, potentially rtpsender and rtpreceiver.
1770 | var track = mline.track;
1771 | var kind = mline.kind;
1772 | var mid = SDPUtils.generateIdentifier();
1773 |
1774 | var transports = self._createIceAndDtlsTransports(mid, sdpMLineIndex);
1775 |
1776 | var localCapabilities = RTCRtpSender.getCapabilities(kind);
1777 | var rtpSender;
1778 | var rtpReceiver;
1779 |
1780 | // generate an ssrc now, to be used later in rtpSender.send
1781 | var sendEncodingParameters = [{
1782 | ssrc: (2 * sdpMLineIndex + 1) * 1001
1783 | }];
1784 | if (track) {
1785 | rtpSender = new RTCRtpSender(track, transports.dtlsTransport);
1786 | }
1787 |
1788 | if (mline.wantReceive) {
1789 | rtpReceiver = new RTCRtpReceiver(transports.dtlsTransport, kind);
1790 | }
1791 |
1792 | transceivers[sdpMLineIndex] = {
1793 | iceGatherer: transports.iceGatherer,
1794 | iceTransport: transports.iceTransport,
1795 | dtlsTransport: transports.dtlsTransport,
1796 | localCapabilities: localCapabilities,
1797 | remoteCapabilities: null,
1798 | rtpSender: rtpSender,
1799 | rtpReceiver: rtpReceiver,
1800 | kind: kind,
1801 | mid: mid,
1802 | sendEncodingParameters: sendEncodingParameters,
1803 | recvEncodingParameters: null
1804 | };
1805 | var transceiver = transceivers[sdpMLineIndex];
1806 | sdp += SDPUtils.writeMediaSection(transceiver,
1807 | transceiver.localCapabilities, 'offer', self.localStreams[0]);
1808 | });
1809 |
1810 | this._pendingOffer = transceivers;
1811 | var desc = new RTCSessionDescription({
1812 | type: 'offer',
1813 | sdp: sdp
1814 | });
1815 | if (arguments.length && typeof arguments[0] === 'function') {
1816 | window.setTimeout(arguments[0], 0, desc);
1817 | }
1818 | return Promise.resolve(desc);
1819 | };
1820 |
1821 | window.RTCPeerConnection.prototype.createAnswer = function() {
1822 | var self = this;
1823 |
1824 | var sdp = SDPUtils.writeSessionBoilerplate();
1825 | this.transceivers.forEach(function(transceiver) {
1826 | // Calculate intersection of capabilities.
1827 | var commonCapabilities = self._getCommonCapabilities(
1828 | transceiver.localCapabilities,
1829 | transceiver.remoteCapabilities);
1830 |
1831 | sdp += SDPUtils.writeMediaSection(transceiver, commonCapabilities,
1832 | 'answer', self.localStreams[0]);
1833 | });
1834 |
1835 | var desc = new RTCSessionDescription({
1836 | type: 'answer',
1837 | sdp: sdp
1838 | });
1839 | if (arguments.length && typeof arguments[0] === 'function') {
1840 | window.setTimeout(arguments[0], 0, desc);
1841 | }
1842 | return Promise.resolve(desc);
1843 | };
1844 |
1845 | window.RTCPeerConnection.prototype.addIceCandidate = function(candidate) {
1846 | var mLineIndex = candidate.sdpMLineIndex;
1847 | if (candidate.sdpMid) {
1848 | for (var i = 0; i < this.transceivers.length; i++) {
1849 | if (this.transceivers[i].mid === candidate.sdpMid) {
1850 | mLineIndex = i;
1851 | break;
1852 | }
1853 | }
1854 | }
1855 | var transceiver = this.transceivers[mLineIndex];
1856 | if (transceiver) {
1857 | var cand = Object.keys(candidate.candidate).length > 0 ?
1858 | SDPUtils.parseCandidate(candidate.candidate) : {};
1859 | // Ignore Chrome's invalid candidates since Edge does not like them.
1860 | if (cand.protocol === 'tcp' && cand.port === 0) {
1861 | return;
1862 | }
1863 |     // Ignore RTCP candidates; we assume RTCP-MUX.
1864 | if (cand.component !== '1') {
1865 | return;
1866 | }
1867 | // A dirty hack to make samples work.
1868 | if (cand.type === 'endOfCandidates') {
1869 | cand = {};
1870 | }
1871 | transceiver.iceTransport.addRemoteCandidate(cand);
1872 |
1873 | // update the remoteDescription.
1874 | var sections = SDPUtils.splitSections(this.remoteDescription.sdp);
1875 | sections[mLineIndex + 1] += (cand.type ? candidate.candidate.trim()
1876 | : 'a=end-of-candidates') + '\r\n';
1877 | this.remoteDescription.sdp = sections.join('');
1878 | }
1879 | if (arguments.length > 1 && typeof arguments[1] === 'function') {
1880 | window.setTimeout(arguments[1], 0);
1881 | }
1882 | return Promise.resolve();
1883 | };
1884 |
1885 | window.RTCPeerConnection.prototype.getStats = function() {
1886 | var promises = [];
1887 | this.transceivers.forEach(function(transceiver) {
1888 | ['rtpSender', 'rtpReceiver', 'iceGatherer', 'iceTransport',
1889 | 'dtlsTransport'].forEach(function(method) {
1890 | if (transceiver[method]) {
1891 | promises.push(transceiver[method].getStats());
1892 | }
1893 | });
1894 | });
1895 | var cb = arguments.length > 1 && typeof arguments[1] === 'function' &&
1896 | arguments[1];
1897 | return new Promise(function(resolve) {
1898 | var results = {};
1899 | Promise.all(promises).then(function(res) {
1900 | res.forEach(function(result) {
1901 | Object.keys(result).forEach(function(id) {
1902 | results[id] = result[id];
1903 | });
1904 | });
1905 | if (cb) {
1906 | window.setTimeout(cb, 0, results);
1907 | }
1908 | resolve(results);
1909 | });
1910 | });
1911 | };
1912 | },
1913 |
1914 | // Attach a media stream to an element.
1915 | attachMediaStream: function(element, stream) {
1916 | logging('DEPRECATED, attachMediaStream will soon be removed.');
1917 | element.srcObject = stream;
1918 | },
1919 |
1920 | reattachMediaStream: function(to, from) {
1921 | logging('DEPRECATED, reattachMediaStream will soon be removed.');
1922 | to.srcObject = from.srcObject;
1923 | }
1924 | };
1925 |
1926 | // Expose public methods.
1927 | module.exports = {
1928 | shimPeerConnection: edgeShim.shimPeerConnection,
1929 | attachMediaStream: edgeShim.attachMediaStream,
1930 | reattachMediaStream: edgeShim.reattachMediaStream
1931 | };
1932 |
1933 | },{"../utils":9,"./edge_sdp":4}],6:[function(require,module,exports){
1934 | /*
1935 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
1936 | *
1937 | * Use of this source code is governed by a BSD-style license
1938 | * that can be found in the LICENSE file in the root of the source
1939 | * tree.
1940 | */
1941 | /* eslint-env node */
1942 | 'use strict';
1943 |
1944 | var logging = require('../utils').log;
1945 | var browserDetails = require('../utils').browserDetails;
1946 |
1947 | var firefoxShim = {
1948 | shimOnTrack: function() {
1949 | if (typeof window === 'object' && window.RTCPeerConnection && !('ontrack' in
1950 | window.RTCPeerConnection.prototype)) {
1951 | Object.defineProperty(window.RTCPeerConnection.prototype, 'ontrack', {
1952 | get: function() {
1953 | return this._ontrack;
1954 | },
1955 | set: function(f) {
1956 | if (this._ontrack) {
1957 | this.removeEventListener('track', this._ontrack);
1958 | this.removeEventListener('addstream', this._ontrackpoly);
1959 | }
1960 | this.addEventListener('track', this._ontrack = f);
1961 | this.addEventListener('addstream', this._ontrackpoly = function(e) {
1962 | e.stream.getTracks().forEach(function(track) {
1963 | var event = new Event('track');
1964 | event.track = track;
1965 | event.receiver = {track: track};
1966 | event.streams = [e.stream];
1967 | this.dispatchEvent(event);
1968 | }.bind(this));
1969 | }.bind(this));
1970 | }
1971 | });
1972 | }
1973 | },
1974 |
1975 | shimSourceObject: function() {
1976 | // Firefox has supported mozSrcObject since FF22, unprefixed in 42.
1977 | if (typeof window === 'object') {
1978 | if (window.HTMLMediaElement &&
1979 | !('srcObject' in window.HTMLMediaElement.prototype)) {
1980 | // Shim the srcObject property, once, when HTMLMediaElement is found.
1981 | Object.defineProperty(window.HTMLMediaElement.prototype, 'srcObject', {
1982 | get: function() {
1983 | return this.mozSrcObject;
1984 | },
1985 | set: function(stream) {
1986 | this.mozSrcObject = stream;
1987 | }
1988 | });
1989 | }
1990 | }
1991 | },
1992 |
1993 | shimPeerConnection: function() {
1994 | // The RTCPeerConnection object.
1995 | if (!window.RTCPeerConnection) {
1996 | window.RTCPeerConnection = function(pcConfig, pcConstraints) {
1997 | if (browserDetails.version < 38) {
1998 | // .urls is not supported in FF < 38.
1999 | // create RTCIceServers with a single url.
2000 | if (pcConfig && pcConfig.iceServers) {
2001 | var newIceServers = [];
2002 | for (var i = 0; i < pcConfig.iceServers.length; i++) {
2003 | var server = pcConfig.iceServers[i];
2004 | if (server.hasOwnProperty('urls')) {
2005 | for (var j = 0; j < server.urls.length; j++) {
2006 | var newServer = {
2007 | url: server.urls[j]
2008 | };
2009 | if (server.urls[j].indexOf('turn') === 0) {
2010 | newServer.username = server.username;
2011 | newServer.credential = server.credential;
2012 | }
2013 | newIceServers.push(newServer);
2014 | }
2015 | } else {
2016 | newIceServers.push(pcConfig.iceServers[i]);
2017 | }
2018 | }
2019 | pcConfig.iceServers = newIceServers;
2020 | }
2021 | }
2022 | return new mozRTCPeerConnection(pcConfig, pcConstraints);
2023 | };
2024 | window.RTCPeerConnection.prototype = mozRTCPeerConnection.prototype;
2025 |
2026 | // wrap static methods. Currently just generateCertificate.
2027 | if (mozRTCPeerConnection.generateCertificate) {
2028 | Object.defineProperty(window.RTCPeerConnection, 'generateCertificate', {
2029 | get: function() {
2030 | return mozRTCPeerConnection.generateCertificate;
2031 | }
2032 | });
2033 | }
2034 |
2035 | window.RTCSessionDescription = mozRTCSessionDescription;
2036 | window.RTCIceCandidate = mozRTCIceCandidate;
2037 | }
2038 |
2039 | // shim away need for obsolete RTCIceCandidate/RTCSessionDescription.
2040 | ['setLocalDescription', 'setRemoteDescription', 'addIceCandidate']
2041 | .forEach(function(method) {
2042 | var nativeMethod = RTCPeerConnection.prototype[method];
2043 | RTCPeerConnection.prototype[method] = function() {
2044 |           arguments[0] = new ((method === 'addIceCandidate') ?
2045 | RTCIceCandidate : RTCSessionDescription)(arguments[0]);
2046 | return nativeMethod.apply(this, arguments);
2047 | };
2048 | });
2049 | },
2050 |
2051 | shimGetUserMedia: function() {
2052 | // getUserMedia constraints shim.
2053 | var getUserMedia_ = function(constraints, onSuccess, onError) {
2054 | var constraintsToFF37_ = function(c) {
2055 | if (typeof c !== 'object' || c.require) {
2056 | return c;
2057 | }
2058 | var require = [];
2059 | Object.keys(c).forEach(function(key) {
2060 | if (key === 'require' || key === 'advanced' ||
2061 | key === 'mediaSource') {
2062 | return;
2063 | }
2064 | var r = c[key] = (typeof c[key] === 'object') ?
2065 | c[key] : {ideal: c[key]};
2066 | if (r.min !== undefined ||
2067 | r.max !== undefined || r.exact !== undefined) {
2068 | require.push(key);
2069 | }
2070 | if (r.exact !== undefined) {
2071 | if (typeof r.exact === 'number') {
2072 |             r.min = r.max = r.exact;
2073 | } else {
2074 | c[key] = r.exact;
2075 | }
2076 | delete r.exact;
2077 | }
2078 | if (r.ideal !== undefined) {
2079 | c.advanced = c.advanced || [];
2080 | var oc = {};
2081 | if (typeof r.ideal === 'number') {
2082 | oc[key] = {min: r.ideal, max: r.ideal};
2083 | } else {
2084 | oc[key] = r.ideal;
2085 | }
2086 | c.advanced.push(oc);
2087 | delete r.ideal;
2088 | if (!Object.keys(r).length) {
2089 | delete c[key];
2090 | }
2091 | }
2092 | });
2093 | if (require.length) {
2094 | c.require = require;
2095 | }
2096 | return c;
2097 | };
2098 | constraints = JSON.parse(JSON.stringify(constraints));
2099 | if (browserDetails.version < 38) {
2100 | logging('spec: ' + JSON.stringify(constraints));
2101 | if (constraints.audio) {
2102 | constraints.audio = constraintsToFF37_(constraints.audio);
2103 | }
2104 | if (constraints.video) {
2105 | constraints.video = constraintsToFF37_(constraints.video);
2106 | }
2107 | logging('ff37: ' + JSON.stringify(constraints));
2108 | }
2109 | return navigator.mozGetUserMedia(constraints, onSuccess, onError);
2110 | };
2111 |
2112 | navigator.getUserMedia = getUserMedia_;
2113 |
2114 | // Returns the result of getUserMedia as a Promise.
2115 | var getUserMediaPromise_ = function(constraints) {
2116 | return new Promise(function(resolve, reject) {
2117 | navigator.getUserMedia(constraints, resolve, reject);
2118 | });
2119 | };
2120 |
2121 | // Shim for mediaDevices on older versions.
2122 | if (!navigator.mediaDevices) {
2123 | navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
2124 | addEventListener: function() { },
2125 | removeEventListener: function() { }
2126 | };
2127 | }
2128 | navigator.mediaDevices.enumerateDevices =
2129 | navigator.mediaDevices.enumerateDevices || function() {
2130 | return new Promise(function(resolve) {
2131 | var infos = [
2132 | {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
2133 | {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
2134 | ];
2135 | resolve(infos);
2136 | });
2137 | };
2138 |
2139 | if (browserDetails.version < 41) {
2140 | // Work around http://bugzil.la/1169665
2141 | var orgEnumerateDevices =
2142 | navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
2143 | navigator.mediaDevices.enumerateDevices = function() {
2144 | return orgEnumerateDevices().then(undefined, function(e) {
2145 | if (e.name === 'NotFoundError') {
2146 | return [];
2147 | }
2148 | throw e;
2149 | });
2150 | };
2151 | }
2152 | },
2153 |
2154 | // Attach a media stream to an element.
2155 | attachMediaStream: function(element, stream) {
2156 | logging('DEPRECATED, attachMediaStream will soon be removed.');
2157 | element.srcObject = stream;
2158 | },
2159 |
2160 | reattachMediaStream: function(to, from) {
2161 | logging('DEPRECATED, reattachMediaStream will soon be removed.');
2162 | to.srcObject = from.srcObject;
2163 | }
2164 | };
2165 |
2166 | // Expose public methods.
2167 | module.exports = {
2168 | shimOnTrack: firefoxShim.shimOnTrack,
2169 | shimSourceObject: firefoxShim.shimSourceObject,
2170 | shimPeerConnection: firefoxShim.shimPeerConnection,
2171 | shimGetUserMedia: require('./getusermedia'),
2172 | attachMediaStream: firefoxShim.attachMediaStream,
2173 | reattachMediaStream: firefoxShim.reattachMediaStream
2174 | };
2175 |
2176 | },{"../utils":9,"./getusermedia":7}],7:[function(require,module,exports){
2177 | /*
2178 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
2179 | *
2180 | * Use of this source code is governed by a BSD-style license
2181 | * that can be found in the LICENSE file in the root of the source
2182 | * tree.
2183 | */
2184 | /* eslint-env node */
2185 | 'use strict';
2186 |
2187 | var logging = require('../utils').log;
2188 | var browserDetails = require('../utils').browserDetails;
2189 |
2190 | // Expose public methods.
2191 | module.exports = function() {
2192 | // getUserMedia constraints shim.
2193 | var getUserMedia_ = function(constraints, onSuccess, onError) {
2194 | var constraintsToFF37_ = function(c) {
2195 | if (typeof c !== 'object' || c.require) {
2196 | return c;
2197 | }
2198 | var require = [];
2199 | Object.keys(c).forEach(function(key) {
2200 | if (key === 'require' || key === 'advanced' || key === 'mediaSource') {
2201 | return;
2202 | }
2203 | var r = c[key] = (typeof c[key] === 'object') ?
2204 | c[key] : {ideal: c[key]};
2205 | if (r.min !== undefined ||
2206 | r.max !== undefined || r.exact !== undefined) {
2207 | require.push(key);
2208 | }
2209 | if (r.exact !== undefined) {
2210 | if (typeof r.exact === 'number') {
2211 |           r.min = r.max = r.exact;
2212 | } else {
2213 | c[key] = r.exact;
2214 | }
2215 | delete r.exact;
2216 | }
2217 | if (r.ideal !== undefined) {
2218 | c.advanced = c.advanced || [];
2219 | var oc = {};
2220 | if (typeof r.ideal === 'number') {
2221 | oc[key] = {min: r.ideal, max: r.ideal};
2222 | } else {
2223 | oc[key] = r.ideal;
2224 | }
2225 | c.advanced.push(oc);
2226 | delete r.ideal;
2227 | if (!Object.keys(r).length) {
2228 | delete c[key];
2229 | }
2230 | }
2231 | });
2232 | if (require.length) {
2233 | c.require = require;
2234 | }
2235 | return c;
2236 | };
2237 | constraints = JSON.parse(JSON.stringify(constraints));
2238 | if (browserDetails.version < 38) {
2239 | logging('spec: ' + JSON.stringify(constraints));
2240 | if (constraints.audio) {
2241 | constraints.audio = constraintsToFF37_(constraints.audio);
2242 | }
2243 | if (constraints.video) {
2244 | constraints.video = constraintsToFF37_(constraints.video);
2245 | }
2246 | logging('ff37: ' + JSON.stringify(constraints));
2247 | }
2248 | return navigator.mozGetUserMedia(constraints, onSuccess, onError);
2249 | };
2250 |
2251 | navigator.getUserMedia = getUserMedia_;
2252 |
2253 | // Returns the result of getUserMedia as a Promise.
2254 | var getUserMediaPromise_ = function(constraints) {
2255 | return new Promise(function(resolve, reject) {
2256 | navigator.getUserMedia(constraints, resolve, reject);
2257 | });
2258 | };
2259 |
2260 | // Shim for mediaDevices on older versions.
2261 | if (!navigator.mediaDevices) {
2262 | navigator.mediaDevices = {getUserMedia: getUserMediaPromise_,
2263 | addEventListener: function() { },
2264 | removeEventListener: function() { }
2265 | };
2266 | }
2267 | navigator.mediaDevices.enumerateDevices =
2268 | navigator.mediaDevices.enumerateDevices || function() {
2269 | return new Promise(function(resolve) {
2270 | var infos = [
2271 | {kind: 'audioinput', deviceId: 'default', label: '', groupId: ''},
2272 | {kind: 'videoinput', deviceId: 'default', label: '', groupId: ''}
2273 | ];
2274 | resolve(infos);
2275 | });
2276 | };
2277 |
2278 | if (browserDetails.version < 41) {
2279 | // Work around http://bugzil.la/1169665
2280 | var orgEnumerateDevices =
2281 | navigator.mediaDevices.enumerateDevices.bind(navigator.mediaDevices);
2282 | navigator.mediaDevices.enumerateDevices = function() {
2283 | return orgEnumerateDevices().then(undefined, function(e) {
2284 | if (e.name === 'NotFoundError') {
2285 | return [];
2286 | }
2287 | throw e;
2288 | });
2289 | };
2290 | }
2291 | };
2292 |
2293 | },{"../utils":9}],8:[function(require,module,exports){
2294 | /*
2295 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
2296 | *
2297 | * Use of this source code is governed by a BSD-style license
2298 | * that can be found in the LICENSE file in the root of the source
2299 | * tree.
2300 | */
2301 | 'use strict';
2302 | var safariShim = {
2303 | // TODO: DrAlex, should be here, double check against LayoutTests
2304 | // shimOnTrack: function() { },
2305 |
2306 | // TODO: DrAlex
2307 | // attachMediaStream: function(element, stream) { },
2308 | // reattachMediaStream: function(to, from) { },
2309 |
2310 | // TODO: once the back-end for the mac port is done, add.
2311 | // TODO: check for webkitGTK+
2312 | // shimPeerConnection: function() { },
2313 |
2314 | shimGetUserMedia: function() {
2315 | navigator.getUserMedia = navigator.webkitGetUserMedia;
2316 | }
2317 | };
2318 |
2319 | // Expose public methods.
2320 | module.exports = {
2321 | shimGetUserMedia: safariShim.shimGetUserMedia
2322 | // TODO
2323 | // shimOnTrack: safariShim.shimOnTrack,
2324 | // shimPeerConnection: safariShim.shimPeerConnection,
2325 | // attachMediaStream: safariShim.attachMediaStream,
2326 | // reattachMediaStream: safariShim.reattachMediaStream
2327 | };
2328 |
2329 | },{}],9:[function(require,module,exports){
2330 | /*
2331 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
2332 | *
2333 | * Use of this source code is governed by a BSD-style license
2334 | * that can be found in the LICENSE file in the root of the source
2335 | * tree.
2336 | */
2337 | /* eslint-env node */
2338 | 'use strict';
2339 |
2340 | var logDisabled_ = false;
2341 |
2342 | // Utility methods.
2343 | var utils = {
2344 | disableLog: function(bool) {
2345 | if (typeof bool !== 'boolean') {
2346 | return new Error('Argument type: ' + typeof bool +
2347 | '. Please use a boolean.');
2348 | }
2349 | logDisabled_ = bool;
2350 | return (bool) ? 'adapter.js logging disabled' :
2351 | 'adapter.js logging enabled';
2352 | },
2353 |
2354 | log: function() {
2355 | if (typeof window === 'object') {
2356 | if (logDisabled_) {
2357 | return;
2358 | }
2359 | if (typeof console !== 'undefined' && typeof console.log === 'function') {
2360 | console.log.apply(console, arguments);
2361 | }
2362 | }
2363 | },
2364 |
2365 | /**
2366 | * Extract browser version out of the provided user agent string.
2367 | *
2368 | * @param {!string} uastring userAgent string.
2369 | * @param {!string} expr Regular expression used as match criteria.
2370 | * @param {!number} pos position in the version string to be returned.
2371 | * @return {!number} browser version.
2372 | */
2373 | extractVersion: function(uastring, expr, pos) {
2374 | var match = uastring.match(expr);
2375 | return match && match.length >= pos && parseInt(match[pos], 10);
2376 | },
2377 |
2378 | /**
2379 | * Browser detector.
2380 | *
2381 | * @return {object} result containing browser, version and minVersion
2382 | * properties.
2383 | */
2384 | detectBrowser: function() {
2385 | // Returned result object.
2386 | var result = {};
2387 | result.browser = null;
2388 | result.version = null;
2389 | result.minVersion = null;
2390 |
2391 | // Fail early if it's not a browser
2392 | if (typeof window === 'undefined' || !window.navigator) {
2393 | result.browser = 'Not a browser.';
2394 | return result;
2395 | }
2396 |
2397 | // Firefox.
2398 | if (navigator.mozGetUserMedia) {
2399 | result.browser = 'firefox';
2400 | result.version = this.extractVersion(navigator.userAgent,
2401 | /Firefox\/([0-9]+)\./, 1);
2402 | result.minVersion = 31;
2403 |
2404 | // all webkit-based browsers
2405 | } else if (navigator.webkitGetUserMedia) {
2406 |       // Chrome, Chromium, WebView and Opera all use the chrome shim for now
2407 | if (window.webkitRTCPeerConnection) {
2408 | result.browser = 'chrome';
2409 | result.version = this.extractVersion(navigator.userAgent,
2410 | /Chrom(e|ium)\/([0-9]+)\./, 2);
2411 | result.minVersion = 38;
2412 |
2413 |       // Safari or unknown webkit-based browser
2414 |       // for the time being Safari supports MediaStreams but not WebRTC
2415 | } else {
2416 | // Safari UA substrings of interest for reference:
2417 | // - webkit version: AppleWebKit/602.1.25 (also used in Op,Cr)
2418 | // - safari UI version: Version/9.0.3 (unique to Safari)
2419 | // - safari UI webkit version: Safari/601.4.4 (also used in Op,Cr)
2420 | //
2421 |         // if the webkit version and the safari UI webkit version are
2422 |         // equal, this is a stable release version.
2423 |         //
2424 |         // only the internal webkit version matters today for knowing
2425 |         // whether media streams are supported
2426 | //
2427 | if (navigator.userAgent.match(/Version\/(\d+).(\d+)/)) {
2428 | result.browser = 'safari';
2429 | result.version = this.extractVersion(navigator.userAgent,
2430 | /AppleWebKit\/([0-9]+)\./, 1);
2431 | result.minVersion = 602;
2432 |
2433 | // unknown webkit-based browser
2434 | } else {
2435 | result.browser = 'Unsupported webkit-based browser ' +
2436 | 'with GUM support but no WebRTC support.';
2437 | return result;
2438 | }
2439 | }
2440 |
2441 | // Edge.
2442 | } else if (navigator.mediaDevices &&
2443 | navigator.userAgent.match(/Edge\/(\d+).(\d+)$/)) {
2444 | result.browser = 'edge';
2445 | result.version = this.extractVersion(navigator.userAgent,
2446 | /Edge\/(\d+).(\d+)$/, 2);
2447 | result.minVersion = 10547;
2448 |
2449 | // Default fallthrough: not supported.
2450 | } else {
2451 | result.browser = 'Not a supported browser.';
2452 | return result;
2453 | }
2454 |
2455 | // Warn if version is less than minVersion.
2456 | if (result.version < result.minVersion) {
2457 | utils.log('Browser: ' + result.browser + ' Version: ' + result.version +
2458 | ' < minimum supported version: ' + result.minVersion +
2459 | '\n some things might not work!');
2460 | }
2461 |
2462 | return result;
2463 | }
2464 | };
2465 |
2466 | // Export.
2467 | module.exports = {
2468 | log: utils.log,
2469 | disableLog: utils.disableLog,
2470 | browserDetails: utils.detectBrowser(),
2471 | extractVersion: utils.extractVersion
2472 | };
2473 |
2474 | },{}]},{},[1])(1)
2475 | });
--------------------------------------------------------------------------------