├── .gitignore
├── images
└── favicon.ico
├── src
├── peer2peer-video
│ ├── output.mp4
│ ├── index.html
│ └── js
│ │ └── main.js
├── peer2peer
│ ├── extension
│ │ ├── src
│ │ │ ├── icon.png
│ │ │ ├── manifest.json
│ │ │ ├── content-script.js
│ │ │ └── background.js
│ │ └── screencapture.zip
│ ├── help.html
│ ├── index.html
│ └── js
│ │ └── main.js
├── peer2peer-iframe
│ └── index.html
├── pause-play
│ ├── index.html
│ └── js
│ │ └── main.js
├── iframe-video
│ └── index.html
├── canvas-capture
│ ├── index.html
│ └── js
│ │ └── main.js
├── replaceTrack
│ ├── css
│ │ └── main.css
│ ├── index.html
│ └── js
│ │ └── main.js
├── codec_constraints
│ ├── css
│ │ └── main.css
│ ├── index.html
│ └── js
│ │ └── main.js
├── iframe-apprtc
│ └── index.html
├── multiple-video-devices
│ ├── index.html
│ └── js
│ │ └── main.js
├── multiple-peerconnections
│ ├── index.html
│ └── js
│ │ └── main.js
├── single-audio
│ └── index.html
├── single-video
│ └── index.html
├── audio-and-video
│ └── index.html
├── multiple-audio
│ └── index.html
├── multiple-video
│ └── index.html
└── css
│ └── main.css
├── .csslintrc
├── .travis.yml
├── CONTRIBUTING.md
├── web_server
└── server.js
├── package.json
├── README.md
├── Gruntfile.js
├── .eslintrc
├── css
└── landing_page.css
└── index.html
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .DS_Store
3 | .eslintcache
4 |
--------------------------------------------------------------------------------
/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/webrtc/test-pages/HEAD/images/favicon.ico
--------------------------------------------------------------------------------
/src/peer2peer-video/output.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/webrtc/test-pages/HEAD/src/peer2peer-video/output.mp4
--------------------------------------------------------------------------------
/src/peer2peer/extension/src/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/webrtc/test-pages/HEAD/src/peer2peer/extension/src/icon.png
--------------------------------------------------------------------------------
/src/peer2peer/extension/screencapture.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/webrtc/test-pages/HEAD/src/peer2peer/extension/screencapture.zip
--------------------------------------------------------------------------------
/.csslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "box-model": false,
3 | "ids": false,
4 | "known-properties": false,
5 | "overqualified-elements": false,
6 | "unique-headings": false
7 | }
8 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: trusty
2 | language: node_js
3 | cache:
4 | - node_modules
5 | node_js:
6 | - 6
7 |
8 | branches:
9 | only:
10 | - gh-pages
11 |
12 | script:
13 | - grunt
14 |
--------------------------------------------------------------------------------
/src/peer2peer-iframe/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 | WebRTC IFRAME peer2peer test page
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/pause-play/index.html:
--------------------------------------------------------------------------------
1 |
2 |
7 |
8 |
9 | Pause Play Test
10 |
11 |
12 | Pause Play Test
13 | Status: not-started
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | WebRTC welcomes patches/pulls for features and bug fixes.
2 |
3 | For contributors external to Google, follow the instructions given in the [Google Individual Contributor License Agreement](https://cla.developers.google.com/about/google-individual).
4 |
5 | In all cases, contributors must sign a contributor license agreement before a contribution can be accepted. Please complete the agreement for an [individual](https://developers.google.com/open-source/cla/individual) or a [corporation](https://developers.google.com/open-source/cla/corporate) as appropriate.
6 |
7 |
--------------------------------------------------------------------------------
/src/peer2peer/extension/src/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Screensharing Extension",
3 | "description": "Screensharing Extension for my app",
4 | "version": "1.0.0",
5 | "manifest_version": 2,
6 | "icons": {
7 | "128": "icon.png"
8 | },
9 | "background": {
10 | "scripts": ["background.js"]
11 | },
12 | "content_scripts": [
13 | {
14 | "matches": ["https://webrtc.github.io/test-pages/src/peer2peer/*"],
15 | "js": ["content-script.js"]
16 | }
17 | ],
18 | "permissions": [
19 | "desktopCapture",
20 | "https://webrtc.github.io/test-pages/src/peer2peer/*"
21 | ]
22 | }
23 |
--------------------------------------------------------------------------------
/src/iframe-video/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | IFRAME Single Local Preview (Video Only)
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/web_server/server.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | /* eslint-env node */
9 |
10 | 'use strict';
11 |
12 | var express = require('express');
13 | var https = require('https');
14 | var pem = require('pem');
15 |
16 | pem.createCertificate({days: 1, selfSigned: true}, function(err, keys) {
17 | var options = {
18 | key: keys.serviceKey,
19 | cert: keys.certificate
20 | };
21 |
22 | var app = express();
23 |
24 | app.use(express.static('../'));
25 |
26 | // Create an HTTPS service.
27 | https.createServer(options, app).listen(8080);
28 |
29 | console.log('serving on https://localhost:8080');
30 | });
31 |
--------------------------------------------------------------------------------
/src/peer2peer-video/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 | Video to peerConnection
11 |
12 |
13 |
14 |
15 |
Stream from video to peerConnection
16 |
17 |
18 |
19 |
20 |
21 | Start test
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/canvas-capture/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 | Canvas capture stream to peerConnection
11 |
12 |
13 |
14 |
15 |
Canvas capture stream to peerConnection
16 |
17 |
18 |
19 |
20 |
21 | Start test
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/replaceTrack/css/main.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | button {
9 | margin: 0 20px 0 0;
10 | width: 83px;
11 | }
12 |
13 | button#hangupButton {
14 | margin: 0;
15 | }
16 |
17 | video {
18 | height: 225px;
19 | margin: 0 0 20px 0;
20 | vertical-align: top;
21 | width: calc(50% - 12px);
22 | }
23 |
24 | video#localVideo {
25 | margin: 0 20px 20px 0;
26 | }
27 |
28 | @media screen and (max-width: 400px) {
29 | button {
30 | width: 83px;
31 | }
32 |
33 | button {
34 | margin: 0 11px 10px 0;
35 | }
36 |
37 |
38 | video {
39 | height: 90px;
40 | margin: 0 0 10px 0;
41 | width: calc(50% - 7px);
42 | }
43 | video#localVideo {
44 | margin: 0 10px 20px 0;
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/codec_constraints/css/main.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | button {
9 | margin: 0 20px 0 0;
10 | width: 83px;
11 | }
12 |
13 | button#hangupButton {
14 | margin: 0;
15 | }
16 |
17 | video {
18 | height: 225px;
19 | margin: 0 0 20px 0;
20 | vertical-align: top;
21 | width: calc(50% - 12px);
22 | }
23 |
24 | video#localVideo {
25 | margin: 0 20px 20px 0;
26 | }
27 |
28 | @media screen and (max-width: 400px) {
29 | button {
30 | width: 83px;
31 | }
32 |
33 | button {
34 | margin: 0 11px 10px 0;
35 | }
36 |
37 |
38 | video {
39 | height: 90px;
40 | margin: 0 0 10px 0;
41 | width: calc(50% - 7px);
42 | }
43 | video#localVideo {
44 | margin: 0 10px 20px 0;
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "test-pages",
3 | "description": "Test pages",
4 | "keywords": [
5 | "webrtc",
6 | "demos",
7 | "samples",
8 | "javascript"
9 | ],
10 | "homepage": "https://github.com/webrtc/test-pages",
11 | "bugs": {
12 | "url": "https://github.com/webrtc/test-pages/issues"
13 | },
14 | "license": "BSD-3-Clause",
15 | "author": "The WebRTC project authors",
16 | "main": "index.html",
17 | "repository": {
18 | "type": "git",
19 | "url": "https://github.com/webrtc/test-pages.git"
20 | },
21 | "scripts": {
22 | "test": "grunt",
23 | "postinstall": "grunt githooks"
24 | },
25 | "devDependencies": {
26 | "eslint-config-webrtc": ">=1.0.0",
27 | "express": "^4.14.1",
28 | "grunt": ">=0.4.5",
29 | "grunt-cli": ">=0.1.9",
30 | "grunt-contrib-csslint": ">=0.3.1",
31 | "grunt-eslint": ">=17.2.0",
32 | "grunt-githooks": ">=0.3.1",
33 | "grunt-htmlhint": ">=0.9.12",
34 | "pem": ">=1.9.4"
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [](https://travis-ci.org/webrtc/test-pages)
2 |
3 | # Intro #
4 | Collection of test pages used for WebRTC development
5 |
6 |
7 | ## Development ##
8 | Detailed information on developing in the [webrtc](https://github.com/webrtc) GitHub repo can be found in the [WebRTC GitHub repo developer's guide](https://docs.google.com/document/d/1tn1t6LW2ffzGuYTK3366w1fhTkkzsSvHsBnOHoDfRzY/edit?pli=1#heading=h.e3366rrgmkdk).
9 |
10 |
11 | #### Clone the repo in desired folder
12 | ```bash
13 | git clone https://github.com/webrtc/test-pages.git
14 | ```
15 |
16 | #### Install npm dependencies (also adds linting to precommit githooks)
17 | ```bash
18 | npm install
19 | ```
20 |
21 | ### Start web server for development
22 | From the root of the checkout do `cd test` then run `node server.js` and finally navigate your browser to `https://localhost:8080`.
23 |
24 | #### Linting
25 | Runs grunt which currently only does linting.
26 | ```bash
27 | npm test
28 | ```
29 |
--------------------------------------------------------------------------------
/src/replaceTrack/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 | ReplaceTrack
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 | Start
23 | Call
24 | Restart video with replaceTrack
25 | toggle audio with replaceTrack
26 | Hang Up
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/iframe-apprtc/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | AppRTC web app in an IFRAME
14 |
15 |
16 |
17 |
18 |
19 | AppRTC in an <iframe> element: To be deprecated in M64 (~Jan 2018)
20 | To test website still uses cross-origin iframes
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/multiple-video-devices/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Multiple device test (Video Only)
14 |
20 |
21 |
22 | Opens all cameras available and attaches the video stream to indiviual video elements.
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/src/multiple-peerconnections/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
12 | Multiple peerconnections
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
PeerConnection
24 |
25 | Number of peer connections
26 |
27 |
28 |
29 | Enable googCpuOveruseDetection
30 |
31 |
32 |
33 | Start Test
34 |
35 |
36 |
37 |
38 |
Remote Streams
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/Gruntfile.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | /* globals module */
4 |
5 | module.exports = function(grunt) {
6 | // configure project
7 | grunt.initConfig({
8 | // make node configurations available
9 | pkg: grunt.file.readJSON('package.json'),
10 | csslint: {
11 | options: {
12 | csslintrc: '.csslintrc'
13 | },
14 | src: ['css/*.css', 'src/css/*.css']
15 | },
16 | eslint: {
17 | options: {
18 | configFile: '.eslintrc',
19 | cache: true
20 | },
21 | target: ['src/**/*.js']
22 | },
23 | // Runs the npm test command which has access to the grunt path.
24 | githooks: {
25 | all: {
26 | options: {
27 | command: 'npm',
28 | },
29 | 'pre-commit': 'test'
30 | }
31 | },
32 | htmlhint: {
33 | html1: {
34 | src: ['src/**/*.html']
35 | },
36 | html2: {
37 | src: ['index.html']
38 | }
39 | },
40 | });
41 |
42 | // enable plugins
43 | grunt.loadNpmTasks('grunt-contrib-csslint');
44 | grunt.loadNpmTasks('grunt-eslint');
45 | grunt.loadNpmTasks('grunt-githooks');
46 | grunt.loadNpmTasks('grunt-htmlhint');
47 |
48 | // set default tasks to run when grunt is called without parameters
49 | grunt.registerTask('default', ['csslint', 'htmlhint', 'eslint']);
50 | };
51 |
--------------------------------------------------------------------------------
/src/peer2peer/extension/src/content-script.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
2 | // Use of this source code is governed by a BSD-style license
3 | // that can be found in the LICENSE file in the root of the source
4 | // tree.
5 |
6 | 'use strict';
7 |
8 | // https://goo.gl/7p5VrQ
9 | // - 'content_script' and execution env are isolated from each other
10 | // - In order to communicate we use the DOM (window.postMessage)
11 | //
12 | // app.js | |content-script.js | |background.js
13 | // window.postMessage|------->|port.postMessage |----->| port.onMessage
14 | // | window | | port |
15 | // webkitGetUserMedia|<------ |window.postMessage|<-----| port.postMessage
16 | //
17 |
18 | var port = chrome.runtime.connect(chrome.runtime.id);
19 |
20 | port.onMessage.addListener(function(msg) {
21 | window.postMessage(msg, '*');
22 | });
23 |
24 | window.addEventListener('message', function(event) {
25 | // We only accept messages from ourselves
26 | if (event.source !== window) {
27 | return;
28 | }
29 |
30 | if (event.data.type && ((event.data.type === 'SS_UI_REQUEST') ||
31 | (event.data.type === 'SS_UI_CANCEL'))) {
32 | port.postMessage(event.data);
33 | }
34 | }, false);
35 |
36 | window.postMessage({type: 'SS_PING', text: 'start'}, '*');
37 |
--------------------------------------------------------------------------------
/src/single-audio/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Single Local Preview (Audio Only)
14 |
15 |
16 |
17 |
31 |
32 |
33 |
34 |
35 | Sound test
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/src/single-video/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Single Local Preview (Video Only)
14 |
15 |
16 |
17 |
31 |
32 |
33 |
34 |
35 | Local Preview
36 |
37 |
38 |
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "rules": {
3 | "array-bracket-spacing": 2,
4 | "block-spacing": [2, "never"],
5 | "brace-style": [2, "1tbs", { "allowSingleLine": false }],
6 | "camelcase": [2, {"properties": "always"}],
7 | "curly": 2,
8 | "default-case": 2,
9 | "dot-notation": 2,
10 | "eqeqeq": 2,
11 | "indent": [
12 | 2,
13 | 2
14 | ],
15 | "key-spacing": [2, {"beforeColon": false, "afterColon": true}],
16 | "max-len": [2, 80, 2, {"ignoreUrls": true}],
17 | "new-cap": 2,
18 | "no-console": 0,
19 | "no-else-return": 2,
20 | "no-eval": 2,
21 | "no-multi-spaces": 2,
22 | "no-multiple-empty-lines": [2, {"max": 2}],
23 | "no-shadow": 2,
24 | "no-trailing-spaces": 2,
25 | "no-unused-expressions": 2,
26 | "no-unused-vars": [2, {"args": "none"}],
27 | "object-curly-spacing": [2, "never"],
28 | "padded-blocks": [2, "never"],
29 | "quotes": [
30 | 2,
31 | "single"
32 | ],
33 | "semi": [
34 | 2,
35 | "always"
36 | ],
37 | "space-before-blocks": 2,
38 | "space-before-function-paren": [2, "never"],
39 | "spaced-comment": 2,
40 | "valid-typeof": 2
41 | },
42 | "env": {
43 | "es6": true,
44 | "browser": true,
45 | "node": false
46 | },
47 | "extends": ["eslint:recommended", "webrtc"],
48 | "globals": {
49 | "adapter": true,
50 | "audioContext": true,
51 | "browserSupportsIPHandlingPolicy": true,
52 | "browserSupportsNonProxiedUdpBoolean": true,
53 | "chrome": true,
54 | "ga": true,
55 | "getPolicyFromBooleans": true,
56 | "trace": true,
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/src/audio-and-video/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Single Local Preview (Video and Audio)
14 |
15 |
16 |
17 |
32 |
33 |
34 |
35 |
36 | Local Preview
37 |
38 |
39 |
41 |
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/src/peer2peer/extension/src/background.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
2 | // Use of this source code is governed by a BSD-style license
3 | // that can be found in the LICENSE file in the root of the source
4 | // tree.
5 |
6 | 'use strict';
7 |
8 | var dataSources = ['screen', 'window'];
9 | if (getChromeVersion() >= 50) {
10 | dataSources.push('tab');
11 | dataSources.push('audio');
12 | }
13 | var desktopMediaRequestId = '';
14 |
15 | chrome.runtime.onConnect.addListener(function(port) {
16 | port.onMessage.addListener(function(msg) {
17 | if (msg.type === 'SS_UI_REQUEST') {
18 | requestScreenSharing(port, msg);
19 | }
20 |
21 | if (msg.type === 'SS_UI_CANCEL') {
22 | cancelScreenSharing(msg);
23 | }
24 | });
25 | });
26 |
27 | function requestScreenSharing(port, msg) {
28 | // https://developer.chrome.com/extensions/desktopCapture
29 | // params:
30 | // - 'dataSources' Set of sources that should be shown to the user.
31 | // - 'targetTab' Tab for which the stream is created.
32 | // - 'streamId' String that can be passed to getUserMedia() API
33 | desktopMediaRequestId =
34 | chrome.desktopCapture.chooseDesktopMedia(dataSources, port.sender.tab,
35 | function(streamId, options) {
36 | if (streamId) {
37 | msg.type = 'SS_DIALOG_SUCCESS';
38 | msg.streamId = streamId;
39 | msg.requestAudio = options && options.canRequestAudioTrack;
40 | } else {
41 | msg.type = 'SS_DIALOG_CANCEL';
42 | }
43 | port.postMessage(msg);
44 | });
45 | }
46 |
47 | function cancelScreenSharing() {
48 | if (desktopMediaRequestId) {
49 | chrome.desktopCapture.cancelChooseDesktopMedia(desktopMediaRequestId);
50 | }
51 | }
52 |
53 | function getChromeVersion() {
54 | var raw = navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./);
55 | return raw ? parseInt(raw[2], 10) : -1;
56 | }
57 |
--------------------------------------------------------------------------------
/css/landing_page.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | a {
9 | color: #6fa8dc;
10 | font-weight: 300;
11 | text-decoration: none;
12 | }
13 |
14 | a:hover {
15 | color: #3d85c6;
16 | text-decoration: underline;
17 | }
18 |
19 | a#viewSource {
20 | border-top: 1px solid #999;
21 | display: block;
22 | margin: 1.3em 0 .0 0;
23 | padding: 1em 0 0 0;
24 | }
25 |
26 | body {
27 | font-family: 'Roboto', sans-serif;
28 | font-weight: 300;
29 | margin: 0;
30 | padding: 1em;
31 | word-break: break-word;
32 | }
33 |
34 | div#container {
35 | margin: 0 auto 0 auto;
36 | max-width: 40em;
37 | padding: 1em 1.5em 1.3em 1.5em;
38 | }
39 |
40 | h1 {
41 | border-bottom: 1px solid #ccc;
42 | font-family: 'Roboto', sans-serif;
43 | font-weight: 500;
44 | margin: 0 0 0.8em 0;
45 | padding: 0 0 0.2em 0;
46 | }
47 |
48 | h2 {
49 | color: #444;
50 | font-size: 1em;
51 | font-weight: 500;
52 | line-height: 1.2em;
53 | margin: 0 0 0.8em 0;
54 | }
55 |
56 | h3 {
57 | border-top: 1px solid #eee;
58 | color: #666;
59 | font-size: 0.9em;
60 | font-weight: 500;
61 | margin: 20px 0 10px 0;
62 | padding: 10px 0 0 0;
63 | white-space: nowrap;
64 | }
65 |
66 | p {
67 | color: #444;
68 | font-weight: 300;
69 | line-height: 1.6em;
70 | }
71 |
72 | section p:last-of-type {
73 | margin: 0;
74 | }
75 |
76 | section {
77 | border-bottom: 1px solid #eee;
78 | margin: 0 0 30px 0;
79 | padding: 0 0 20px 0;
80 | }
81 |
82 | section:last-of-type {
83 | border-bottom: none;
84 | padding: 0 0 1em 0;
85 | }
86 |
87 | @media screen and (max-width: 650px) {
88 | h1 {
89 | font-size: 24px;
90 | }
91 | }
92 |
93 | @media screen and (max-width: 550px) {
94 | h1 {
95 | font-size: 22px;
96 | }
97 | }
98 |
99 | @media screen and (max-width: 450px) {
100 | h1 {
101 | font-size: 20px;
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/src/multiple-audio/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Multiple Local Preview (Audio Only)
14 |
15 |
16 |
17 |
34 |
35 |
36 |
37 |
38 | Sound test
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
--------------------------------------------------------------------------------
/src/multiple-video/index.html:
--------------------------------------------------------------------------------
1 |
2 |
11 |
12 |
13 | Multiple Local Preview (Video Only)
14 |
15 |
16 |
17 |
18 |
19 | Local Preview
20 |
21 |
22 |
24 |
26 |
28 |
29 |
30 |
32 |
34 |
36 |
37 |
38 |
40 |
42 |
44 |
45 |
46 |
48 |
49 |
50 |
51 |
68 |
69 |
70 |
--------------------------------------------------------------------------------
/src/multiple-video-devices/js/main.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
2 |
3 | // Use of this source code is governed by a BSD-style license
4 | // that can be found in the LICENSE file in the root of the source
5 | // tree. An additional intellectual property rights grant can be found
6 | // in the file PATENTS. All contributing project authors may
7 | // be found in the AUTHORS file in the root of the source tree.
8 |
9 | 'use strict';
10 |
11 | var deviceList = [];
12 | var counter = 0;
13 |
14 | window.onload = function() {
15 | getSources_();
16 | };
17 |
18 | function getSources_() {
19 | navigator.mediaDevices.enumerateDevices().then(function(devices) {
20 | for (var i = 0; i < devices.length; i++) {
21 | if (devices[i].kind === 'videoinput') {
22 | deviceList[i] = devices[i];
23 | requestVideo_(deviceList[i].deviceId);
24 | }
25 | }
26 | });
27 | }
28 |
29 | function requestVideo_(id) {
30 | navigator.mediaDevices.getUserMedia({
31 | video: {deviceId: {exact: id}},
32 | audio: false}).then(
33 | function(stream) {
34 | getUserMediaOkCallback_(stream);
35 | },
36 | getUserMediaFailedCallback_
37 | );
38 | }
39 |
40 | function getUserMediaFailedCallback_(error) {
41 | alert('User media request denied with error: ' + error.name);
42 | }
43 |
44 | function getUserMediaOkCallback_(stream) {
45 | var videoArea = document.getElementById('videoArea');
46 | var video = document.createElement('video');
47 | var div = document.createElement('div');
48 | div.style.float = 'left';
49 | video.setAttribute('id', 'view' + counter);
50 | video.width = 320;
51 | video.height = 240;
52 | video.autoplay = true;
53 | div.appendChild(video);
54 | videoArea.appendChild(div);
55 | if (typeof stream.getVideoTracks()[0].label !== 'undefined') {
56 | var deviceLabel = document.createElement('p');
57 | deviceLabel.innerHTML = stream.getVideoTracks()[0].label;
58 | div.appendChild(deviceLabel);
59 | }
60 | stream.getVideoTracks()[0].addEventListener('ended', errorMessage_);
61 | document.getElementById('view' + counter).srcObject = stream;
62 | counter++;
63 | }
64 |
65 | var errorMessage_ = function(event) {
66 | var message = 'getUserMedia successful but ' + event.type + ' event fired ' +
67 | 'from camera. Most likely too many cameras on the same USB ' +
68 | 'bus/hub. Verify this by disconnecting one of the cameras ' +
69 | 'and try again.';
70 | document.getElementById('messages').innerHTML += event.target.label + ': ' +
71 | message + ' ';
72 | };
73 |
--------------------------------------------------------------------------------
/src/peer2peer-video/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 |
9 | /* jshint esversion: 6 */
10 |
11 | 'use strict';
12 |
13 | var localVideo = document.getElementById('localVideo');
14 | var remoteVideo = document.getElementById('remoteVideo');
15 | var startButton = document.getElementById('startButton');
16 | startButton.onclick = start;
17 |
18 | var pc1;
19 | var pc2;
20 | var stream;
21 |
22 | function logError(err) {
23 | console.error(err);
24 | }
25 |
26 | function maybeCreateStream() {
27 | if (stream) {
28 | return;
29 | }
30 | if (localVideo.captureStream) {
31 | stream = localVideo.captureStream();
32 | } else if (localVideo.mozCaptureStream) {
33 | stream = localVideo.mozCaptureStream();
34 | } else {
35 | console.error('captureStream() not supported');
36 | }
37 | }
38 |
39 | function start() {
40 | startButton.onclick = hangup;
41 | startButton.className = 'red';
42 | startButton.innerHTML = 'Stop test';
43 | if (localVideo.readyState >= 3) { // HAVE_FUTURE_DATA
44 | // Video is already ready to play, call maybeCreateStream in case oncanplay
45 | // fired before we registered the event handler.
46 | maybeCreateStream();
47 | }
48 | localVideo.play();
49 | call();
50 | }
51 |
52 | function call() {
53 | var servers = null;
54 | pc1 = new RTCPeerConnection(servers);
55 | pc1.onicecandidate = (event) => {
56 | if (event.candidate) {
57 | pc2.addIceCandidate(event.candidate);
58 | }
59 | };
60 |
61 | pc2 = new RTCPeerConnection(servers);
62 | pc2.onicecandidate = (event) => {
63 | if (event.candidate) {
64 | pc1.addIceCandidate(event.candidate);
65 | }
66 | };
67 | pc2.onaddstream = (event) => {
68 | remoteVideo.srcObject = event.stream;
69 | };
70 |
71 | pc1.addStream(stream);
72 | pc1.createOffer({
73 | offerToReceiveAudio: 1,
74 | offerToReceiveVideo: 1
75 | }).then(onCreateOfferSuccess, logError);
76 | }
77 |
78 | function onCreateOfferSuccess(desc) {
79 | pc1.setLocalDescription(desc);
80 | pc2.setRemoteDescription(desc);
81 | pc2.createAnswer().then(onCreateAnswerSuccess, logError);
82 | }
83 |
84 | function onCreateAnswerSuccess(desc) {
85 | pc2.setLocalDescription(desc);
86 | pc1.setRemoteDescription(desc);
87 | }
88 |
89 | function hangup() {
90 | pc1.close();
91 | pc2.close();
92 | pc1 = null;
93 | pc2 = null;
94 | startButton.onclick = start;
95 | startButton.className = 'green';
96 | startButton.innerHTML = 'Start test';
97 | localVideo.pause();
98 | }
99 |
--------------------------------------------------------------------------------
/src/canvas-capture/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 |
9 | 'use strict';
10 |
11 | const DEFAULT_FRAME_RATE = 30;
12 |
13 | var canvas = document.getElementById('canvas');
14 | var context = canvas.getContext('2d');
15 |
16 | var remoteVideo = document.getElementById('remoteVideo');
17 | var startButton = document.getElementById('startButton');
18 | startButton.onclick = start;
19 |
20 | var pc1;
21 | var pc2;
22 | var stream;
23 |
24 | function logError(err) {
25 | console.error(err);
26 | }
27 |
28 | // This function draws a red rectangle on the canvas using
29 | // requestAnimationFrame().
30 | function draw() {
31 | window.requestAnimationFrame(draw);
32 | context.rect(0, 0, canvas.clientWidth, canvas.clientHeight);
33 | var randomNumber = Math.random();
34 | var hue;
35 | if (randomNumber < 0.33) {
36 | hue = 'red';
37 | } else if (randomNumber < 0.66) {
38 | hue = 'green';
39 | } else {
40 | hue = 'blue';
41 | }
42 | context.fillStyle = hue;
43 | context.fill();
44 | }
45 |
46 | function start() {
47 | startButton.onclick = hangup;
48 | startButton.className = 'red';
49 | startButton.innerHTML = 'Stop test';
50 | draw();
51 | stream = canvas.captureStream(DEFAULT_FRAME_RATE);
52 | call();
53 | }
54 |
55 | function call() {
56 | var servers = null;
57 | pc1 = new RTCPeerConnection(servers);
58 | pc1.onicecandidate = (event) => {
59 | if (event.candidate) {
60 | pc2.addIceCandidate(event.candidate);
61 | }
62 | };
63 |
64 | pc2 = new RTCPeerConnection(servers);
65 | pc2.onicecandidate = (event) => {
66 | if (event.candidate) {
67 | pc1.addIceCandidate(event.candidate);
68 | }
69 | };
70 | pc2.onaddstream = (event) => {
71 | remoteVideo.srcObject = event.stream;
72 | };
73 |
74 | pc1.addStream(stream);
75 | pc1.createOffer({
76 | offerToReceiveAudio: 1,
77 | offerToReceiveVideo: 1
78 | }).then(onCreateOfferSuccess, logError);
79 | }
80 |
81 | function onCreateOfferSuccess(desc) {
82 | pc1.setLocalDescription(desc);
83 | pc2.setRemoteDescription(desc);
84 | pc2.createAnswer().then(onCreateAnswerSuccess, logError);
85 | }
86 |
// Answer created on pc2: apply it on both ends to complete negotiation.
function onCreateAnswerSuccess(desc) {
  pc2.setLocalDescription(desc);
  pc1.setRemoteDescription(desc);
}
91 |
/**
 * Tear down both peer connections and restore the button to its
 * initial "Start test" state.
 */
function hangup() {
  [pc1, pc2].forEach((pc) => pc.close());
  pc1 = null;
  pc2 = null;
  startButton.onclick = start;
  startButton.className = 'green';
  startButton.innerHTML = 'Start test';
}
101 |
--------------------------------------------------------------------------------
/src/codec_constraints/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | Video codec constraints
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
39 |
40 |
WebRTC samples Peer connection with codec constraints
41 |
42 |
43 |
44 |
45 |
46 |
Video Codec Constraints
47 | H264
48 | VP8
49 | VP9
50 |
51 |
52 | Start
53 | Call
54 | Hang Up
55 |
56 |
57 |
View the console to see logging. The MediaStream object localStream, and the RTCPeerConnection objects pc1 and pc2 are in global scope, so you can inspect them in the console as well.
58 |
59 |
For more information about RTCPeerConnection, see Getting Started With WebRTC .
60 |
61 |
62 |
View source on GitHub
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | WebRTC test pages
24 |
25 |
26 |
27 |
28 |
37 |
38 |
39 |
40 |
41 |
WebRTC test pages
42 |
43 |
44 |
45 | This is a collection of WebRTC test pages.
46 |
47 | Patches and issues welcome! See
48 | CONTRIBUTING.md
49 | for instructions. The Developer's Guide
50 | for this repo has more information about code style, structure and validation.
51 |
52 |
53 |
54 |
55 |
86 |
87 |
github.com/webrtc/test-pages/tree/gh-pages
88 |
89 |
90 |
91 |
92 |
--------------------------------------------------------------------------------
/src/multiple-peerconnections/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 |
9 | /* jshint esversion: 6 */
10 |
11 | 'use strict';
12 |
// Shorthand for document.getElementById.
var $ = document.getElementById.bind(document);

// Page controls: table holding the remote <video> feeds and the inputs
// that configure the test run.
var testTable = $('test-table');
var nPeerConnectionsInput = $('num-peerconnections');
var startTestButton = $('start-test');
var cpuOveruseDetectionCheckbox = $('cpuoveruse-detection');

startTestButton.onclick = startTest;
21 |
/**
 * Shared rejection handler that reports failures on the console.
 * @param {Error} error - Error from getUserMedia or a PeerConnection call.
 */
function logError(error) {
  console.error(error);
}
25 |
/**
 * Append a table row containing an autoplaying <video> element and
 * return that element so a remote stream can be attached to it.
 * @return {HTMLVideoElement} The newly created video element.
 */
function addNewVideoElement() {
  var cell = testTable.insertRow(-1).insertCell(-1);
  var videoElement = document.createElement('video');
  videoElement.autoplay = true;
  cell.appendChild(videoElement);
  return videoElement;
}
34 |
/**
 * One self-contained loopback call: a local RTCPeerConnection sending
 * camera/microphone media to a remote RTCPeerConnection whose stream is
 * rendered in a newly added <video> element.
 * @param {number} id - Index of this connection.
 * @param {boolean} cpuOveruseDetection - Value for the legacy
 *     'googCpuOveruseDetection' constraint on both connections.
 * @constructor
 */
function PeerConnection(id, cpuOveruseDetection) {
  this.id = id;
  this.cpuOveruseDetection = cpuOveruseDetection;

  this.localConnection = null;
  this.remoteConnection = null;

  // Each connection gets its own <video> element in the test table.
  this.remoteView = addNewVideoElement();

  // Request mic+camera; on success the call is built in
  // onGetUserMediaSuccess.
  this.start = function() {
    var onGetUserMediaSuccess = this.onGetUserMediaSuccess.bind(this);
    navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true
    })
    .then(onGetUserMediaSuccess)
    .catch(logError);
  };

  this.onGetUserMediaSuccess = function(stream) {
    // Create local peer connection.
    // NOTE(review): 'googCpuOveruseDetection' is a legacy Chrome-style
    // optional constraint; other/modern browsers may ignore it — confirm.
    this.localConnection = new RTCPeerConnection(null, {
      'optional': [{
        'googCpuOveruseDetection': this.cpuOveruseDetection
      }]
    });
    // Trickle ICE: forward candidates straight to the other connection.
    this.localConnection.onicecandidate = (event) => {
      this.onIceCandidate(this.remoteConnection, event);
    };
    this.localConnection.addStream(stream);

    // Create remote peer connection.
    this.remoteConnection = new RTCPeerConnection(null, {
      'optional': [{
        'googCpuOveruseDetection': this.cpuOveruseDetection
      }]
    });
    this.remoteConnection.onicecandidate = (event) => {
      this.onIceCandidate(this.localConnection, event);
    };
    // Legacy stream-based API: render whatever arrives remotely.
    this.remoteConnection.onaddstream = (e) => {
      this.remoteView.srcObject = e.stream;
    };

    // Initiate call.
    var onCreateOfferSuccess = this.onCreateOfferSuccess.bind(this);
    this.localConnection.createOffer({
      offerToReceiveAudio: 1,
      offerToReceiveVideo: 1
    })
    .then(onCreateOfferSuccess, logError);
  };

  // Offer created: apply on both ends, then have the remote side answer.
  this.onCreateOfferSuccess = function(desc) {
    this.localConnection.setLocalDescription(desc);
    this.remoteConnection.setRemoteDescription(desc);

    var onCreateAnswerSuccess = this.onCreateAnswerSuccess.bind(this);
    this.remoteConnection.createAnswer()
    .then(onCreateAnswerSuccess, logError);
  };

  // Answer created: apply on both ends to finish negotiation.
  this.onCreateAnswerSuccess = function(desc) {
    this.remoteConnection.setLocalDescription(desc);
    this.localConnection.setRemoteDescription(desc);
  };

  // Relay a trickled ICE candidate to the other connection.
  this.onIceCandidate = function(connection, event) {
    if (event.candidate) {
      connection.addIceCandidate(new RTCIceCandidate(event.candidate));
    }
  };
}
108 |
// Read the UI controls and kick off the requested number of loopback
// peer connections.
function startTest() {
  var cpuOveruseDetection = cpuOveruseDetectionCheckbox.checked;
  // <input>.value is a string; parse it explicitly (radix 10) instead of
  // relying on implicit numeric coercion in the loop condition.
  var nPeerConnections = parseInt(nPeerConnectionsInput.value, 10);
  for (var i = 0; i < nPeerConnections; ++i) {
    new PeerConnection(i, cpuOveruseDetection).start();
  }
}
116 |
--------------------------------------------------------------------------------
/src/css/main.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | .drop-down {
9 | font-size: 10px;
10 | white-space: nowrap;
11 | width: 65%;
12 | }
13 |
14 | .constraints {
15 | display: none;
16 | height: auto;
17 | overflow: scroll;
18 | position: absolute;
19 | width: 75%;
20 | }
21 |
22 | .float-left {
23 | float: left;
24 | width: 100%;
25 | }
26 |
27 | .float-clear-left {
28 | clear: left;
29 | float: left;
30 | width: 100%;
31 | }
32 |
33 | .top-border {
34 | border-top: 4px solid grey;
35 | }
36 |
37 | .bottom-border {
38 | border-bottom: 4px solid grey;
39 | }
40 |
41 | #messages {
42 | font-size: 0.7em;
43 | white-space: pre-wrap;
44 | word-wrap: break-word;
45 | }
46 |
47 | #audio-source {
48 | float: left;
49 | width: 50%;
50 | }
51 |
52 | #video-source {
53 | margin-left: 50%;
54 | width: 50%;
55 | }
56 |
57 | #video-res {
58 | width: 30%;
59 | }
60 |
61 | #signal-server {
62 | float: left;
63 | width: 60%;
64 | }
65 |
66 | #pc-server {
67 | margin-left: 0.1em;
68 | margin-top: 0.1em;
69 | width: 98%;
70 | }
71 |
72 | #peer-id-container {
73 | height: 1.5em;
74 | margin-left: 60%;
75 | }
76 |
77 | #peer-id {
78 | margin-top: 0.1em;
79 | width: 7em;
80 | }
81 |
82 | #pc-server-label {
83 | width: 15%;
84 | }
85 |
86 | #pc-server-container {
87 | height: 1.5em;
88 | margin: 0 0 0 12%;
89 | overflow: hidden;
90 | position: absolute;
91 | width: 40%;
92 | }
93 |
94 | #pc-constraints-left {
95 | margin: 0.7em 0 0 0;
96 | width: 60%;
97 | }
98 |
99 | #call {
100 | float: left;
101 | margin: 0.7em 0 0 0;
102 | }
103 |
/*
 * Duplicate .float-left and .float-clear-left rules removed: identical
 * definitions already appear near the top of this stylesheet.
 */
114 |
115 | .small-input {
116 | width: 3em;
117 | }
118 |
119 | .medium-input {
120 | width: 6em;
121 | }
122 |
123 | #screencapture-info {
124 | margin: 1% auto; /* 15% from the top and centered */
125 | width: 100%; /* Could be more or less, depending on screen size */
126 | }
127 |
128 | a {
129 | color: lightBlue;
130 | font-weight: 300;
131 | text-decoration: none;
132 | }
133 |
134 | a:hover {
135 | color: blue;
136 | text-decoration: underline;
137 | }
138 |
139 | body {
140 | font-family: 'Roboto', sans-serif;
141 | margin: 0;
142 | padding: 1em;
143 | word-wrap: break-word;
144 | }
145 |
146 | button {
147 | background-color: grey;
148 | border: none;
149 | border-radius: 1px;
150 | color: white;
151 | font-family: 'Roboto', sans-serif;
152 | font-size: 0.8em;
153 | margin: 0 0 1em 0;
154 | padding: 0.2em;
155 | }
156 |
157 | button:hover {
158 | background-color: darkGrey;
159 | }
160 |
161 | button.green {
162 | background: darkGreen;
163 | color: white;
164 | }
165 |
166 | button.green:hover {
167 | background: forestGreen;
168 | color: white;
169 | }
170 |
171 | button.red {
172 | background: darkRed;
173 | color: white;
174 | }
175 |
176 | button.red:hover {
177 | background: fireBrick;
178 | }
179 |
180 | button.pressed {
181 | background-color: black;
182 | }
183 |
184 | div#container {
185 | margin: 0 auto 0 auto;
186 | max-width: 40em;
187 | padding: 0 1.5em 1.3em 1.5em;
188 | position: relative;
189 | z-index: 2;
190 | }
191 |
192 | h2 {
193 | color: black;
194 | font-size: 1em;
195 | font-weight: 700;
196 | line-height: 1.2em;
197 | margin: 0 0 0.8em 0;
198 | }
199 |
200 | table, td, th {
201 | border: 1px solid black;
202 | }
203 |
204 | table {
205 | border-collapse: collapse;
206 | width: 100%;
207 | }
208 |
209 | td {
210 | height: 50px;
211 | vertical-align: bottom;
212 | }
213 |
214 | div {
215 | background: white;
216 | }
217 |
218 | html {
219 | /* avoid annoying page width change
220 | when moving from the home page.*/
221 | overflow-y: scroll;
222 | }
223 |
224 | select {
225 | margin: 0 1em 1em 0;
226 | position: relative;
227 | top: -1px;
228 | }
229 |
230 | video {
231 | background: black;
232 | width: 100%;
233 | }
234 |
235 | #log {
236 | float: left;
237 | left: 0;
238 | overflow: auto;
239 | padding: 16px;
240 | position: fixed;
241 | top: 0;
242 | width: 20%;
243 | word-wrap: break-word;
244 | z-index: 1;
245 | }
246 |
247 | @media screen and (max-width: 1200px) {
248 | div#log {
249 | float: none;
250 | padding: 0;
251 | position: inherit;
252 | width: 100%;
253 | }
254 | button {
255 | padding: 0.7em;
256 | }
257 | button:active {
258 | background: black;
259 | }
260 | #pc-server-label {
261 | margin-top: 5px;
262 | }
263 | #pc-server-container {
264 | margin-top: 5px;
265 | }
266 | }
267 |
--------------------------------------------------------------------------------
/src/peer2peer/help.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 | WebRTC PeerConnection Manual Test Help Page
12 |
13 |
14 |
15 |
16 |
17 | WebRTC PeerConnection Manual Test Help Page
18 |
19 | The test page is intended for testing WebRTC calls.
20 |
21 | This is how you set up a normal call:
22 |
23 |
24 | Open this page in two tabs.
25 | Start the peerconnection server. Click on the question mark next
26 | to the 'server' field for instruction on how to do that. The easiest
27 | thing is to start it on localhost, but you can start it on any
28 | machine you like and connect to hostname:8888.
29 | Click the Connect button in both tabs.
30 | Click the Call:Negotiate button in one of the tabs. You should see a bunch
31 | of printouts when this happens. Note that no streams are sent to
32 | begin with (although you could run steps 5-6 before this step to get streams
33 | even in the initial call).
34 | Grant media access using the checkboxes and Request button.
35 | Add the local stream by clicking the "Add" button, in both tabs.
36 | Now you must re-negotiate the call by clicking on Negotiate again.
37 | You should now have a call up and both sides should be receiving
38 | media data (depending on what access you granted on the respective
39 | pages).
40 | You can now choose to stop, re-request, re-send or disable streams
41 | in any way you like, or hang up and re-start the call. You don't
42 | need to disconnect: that's done automatically when you close the
43 | page. Hanging up is NOT done automatically though.
44 |
45 |
46 |
47 | To create a data channel:
48 |
49 |
50 | Make sure Chrome is started with the --enable-data-channels flag.
51 | Follow the instructions above to connect two tabs to a
52 | peerconnection_server.
53 | Click the Data channel: Create button in one tab. Notice the status
54 | changes to "connecting".
55 | Click the Call:Negotiate button. You should see the status change to
56 | "open" in both tabs.
57 | Enter text in the textbox next to the Send data button and then click Send
58 | data. Notice the text is received in the remote tab in the Received on data
59 | channel text box. Data can be sent in both directions.
60 | To close the channel press the Close button followed by Negotiate. Notice
61 | the status changes to "closed".
62 |
63 |
64 | Detailed descriptions:
65 |
66 | Connect - once a connection is established, you generally won't
67 | need to click this button again. Connecting really isn't something
68 | related to WebRTC as such, it's just the signalling solution.
69 | Note that if more than two users/machines have established a
70 | connection to the same PC server, you will get an error when
71 | pressing this button. The test is hard-coded to only allow 2 peers
72 | on the server at the same time.
73 | Pressing the Add button for local streams will in effect add
74 | the current local stream, such as it is, to the current
75 | peerconnection.
76 | If you request user media again, it will overwrite the current
77 | local stream with the new one. This means that pressing Add will
78 | add the stream you just got from the request. The code will not
79 | attempt to stop or remove the previous stream from the
80 | peerconnection, so depending on peerconnection's semantics the old
81 | stream will remain with the peerconnection (perhaps the streams will
82 | be sent simultaneously?)
83 | Hang Up will clear away peer connections on both sides, and a new
84 | call can be started if desired. The peers remain connected to the
85 | peerconnection server.
86 | The Toggle buttons will set the .enabled properties on the first
87 | video and audio track for the local or remote stream, respectively.
88 | This is effectively a temporary "mute" for the streams.
89 | Stop terminates a stream, which means it will no longer send any
90 | more data.
91 | Remove will remove the current local stream from the current
92 | peerconnection. For instance, you should be able to send a stream,
93 | remove it, re-request a new stream and send that within the same
94 | call. Note that re-requesting user media overwrites the current
95 | media stream, so the reverse is not possible.
96 | The PeerConnection constraints field can pass in constraints for the
97 | peerconnection to be established. The code will attempt to eval the code
98 | you write in and pass it whenever the code asks for constraints.
99 | [experimental]
100 | The Force Opus checkbox will remove all codecs except OPUS for all
101 | outgoing messages sent by this page. Note that this ONLY means that
102 | we are guaranteed to send Opus to the other side; it does NOT mean
103 | that the other side will necessarily send Opus to us. To do that,
104 | you need to check the box on the other side too. You can either
105 | check the box before the call, or check the box and then re-send the
106 | local stream.
107 |
108 |
109 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/src/pause-play/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 The Chromium Authors. All rights reserved.
3 | * Use of this source code is governed by a BSD-style license that can be
4 | * found in the LICENSE file.
5 | */
6 | /* exported startTest */
7 |
8 | 'use strict';
9 |
10 | const $ = document.getElementById.bind(document);
11 |
/**
 * Shared rejection handler that reports failures on the console.
 * @param {Error} error - The error to report.
 */
function logError(error) {
  console.error(error);
}
15 |
/**
 * FeedTable lays out all media elements in a table, `numCols` per row.
 */
class FeedTable {
  constructor() {
    this.numCols = 5;
    this.col = 0;
    // Consistency fix: use the file-wide `$` alias for getElementById.
    this.testTable = $('test-table');
    this.row = this.testTable.insertRow(-1);
  }

  /**
   * Create an element of the given type in the next free cell, starting
   * a new row whenever the current one is full.
   * @param {string} elementType - 'audio' or 'video'.
   * @return {HTMLMediaElement} The newly created element.
   */
  addNewCell(elementType) {
    if (this.col === this.numCols) {
      this.row = this.testTable.insertRow(-1);
      this.col = 0;
    }
    const newCell = this.row.insertCell(-1);
    const element = document.createElement(elementType);
    // Playback is driven explicitly by the pause/play test loop.
    element.autoplay = false;
    newCell.appendChild(element);
    this.col++;
    return element;
  }
}
40 |
/**
 * A simple loopback connection;
 * - localConnection is fed video from local camera
 * - localConnection is linked to remoteConnection
 * - remoteConnection is displayed in the given videoElement
 */
class PeerConnection {
  /**
   * @param {Object} element - An 'audio' or 'video' element.
   * @param {Object} constraints - The constraints for the peer connection.
   */
  constructor(element, constraints) {
    this.localConnection = null;
    this.remoteConnection = null;
    this.remoteView = element;
    this.constraints = constraints;
  }

  /**
   * Request user media and, on success, build the loopback call.
   * @return {!Promise} Resolves once the offer exchange is kicked off.
   */
  start() {
    return navigator.mediaDevices
      .getUserMedia(this.constraints)
      .then((stream) => {
        this.onGetUserMediaSuccess(stream);
      });
  }

  // Wire the two RTCPeerConnections together and start negotiation.
  onGetUserMediaSuccess(stream) {
    this.localConnection = new RTCPeerConnection(null);
    // Trickle ICE: forward candidates straight to the other connection.
    this.localConnection.onicecandidate = (event) => {
      this.onIceCandidate(this.remoteConnection, event);
    };
    this.localConnection.addStream(stream);

    this.remoteConnection = new RTCPeerConnection(null);
    this.remoteConnection.onicecandidate = (event) => {
      this.onIceCandidate(this.localConnection, event);
    };
    // Legacy stream-based API: render whatever arrives remotely.
    this.remoteConnection.onaddstream = (e) => {
      this.remoteView.srcObject = e.stream;
    };

    this.localConnection
      .createOffer({offerToReceiveAudio: 1, offerToReceiveVideo: 1})
      .then((offerDesc) => {
        this.onCreateOfferSuccess(offerDesc);
      }, logError);
  }

  // Offer created: apply on both ends, then answer from the remote side.
  onCreateOfferSuccess(offerDesc) {
    this.localConnection.setLocalDescription(offerDesc);
    this.remoteConnection.setRemoteDescription(offerDesc);

    this.remoteConnection.createAnswer().then(
      (answerDesc) => {
        this.onCreateAnswerSuccess(answerDesc);
      }, logError);
  }

  // Answer created: apply on both ends to finish negotiation.
  onCreateAnswerSuccess(answerDesc) {
    this.remoteConnection.setLocalDescription(answerDesc);
    this.localConnection.setRemoteDescription(answerDesc);
  }

  // Relay a trickled ICE candidate to the other connection.
  onIceCandidate(connection, event) {
    if (event.candidate) {
      connection.addIceCandidate(new RTCIceCandidate(event.candidate));
    }
  }
}
110 |
/**
 * Drives the pause/play stress test: owns all PeerConnections and their
 * media elements, and randomly pauses/plays every feed on a timer until
 * `runtimeSeconds` has elapsed.
 */
class TestRunner {
  /**
   * @param {number} runtimeSeconds - How long the loop should run.
   * @param {number} pausePlayIterationDelayMillis - Delay between iterations.
   */
  constructor(runtimeSeconds, pausePlayIterationDelayMillis) {
    this.runtimeSeconds = runtimeSeconds;
    this.pausePlayIterationDelayMillis = pausePlayIterationDelayMillis;
    this.elements = [];
    this.peerConnections = [];
    this.feedTable = new FeedTable();
    this.iteration = 0;
    this.startTime = null;
    this.lastIterationTime = null;
  }

  /**
   * Create a media element of the given type and a loopback
   * PeerConnection that renders into it.
   * @param {string} elementType - 'audio' or 'video'.
   */
  addPeerConnection(elementType) {
    const element = this.feedTable.addNewCell(elementType);
    const constraints = {audio: true};
    if (elementType === 'video') {
      constraints.video = {
        width: {exact: 300}
      };
    } else if (elementType === 'audio') {
      constraints.video = false;
    } else {
      throw new Error('elementType must be one of "audio" or "video"');
    }
    this.elements.push(element);
    this.peerConnections.push(new PeerConnection(element, constraints));
  }

  /** Start all connections, then begin the pause/play loop. */
  startTest() {
    this.startTime = Date.now();
    // Bug fix: iterate over `this.peerConnections` rather than the global
    // `testRunner`, so the method works on any instance regardless of how
    // it is stored.
    const promises = this.peerConnections.map((conn) => conn.start());
    Promise.all(promises)
        .then(() => {
          // Reset the clock once all streams are up, so setup time is not
          // counted against runtimeSeconds.
          this.startTime = Date.now();
          this.pauseAndPlayLoop();
        })
        .catch((e) => {
          throw e;
        });
  }

  /**
   * One iteration: randomly play or pause every feed, publish the status
   * string, and reschedule until the runtime is exhausted.
   */
  pauseAndPlayLoop() {
    this.iteration++;
    this.elements.forEach((feed) => {
      if (Math.random() >= 0.5) {
        feed.play();
      } else {
        feed.pause();
      }
    });
    const status = this.getStatus();
    this.lastIterationTime = Date.now();
    $('status').textContent = status;
    if (status !== 'ok-done') {
      setTimeout(
          () => {
            this.pauseAndPlayLoop();
          }, this.pausePlayIterationDelayMillis);
    } else { // We're done. Pause all feeds.
      this.elements.forEach((feed) => {
        feed.pause();
      });
    }
  }

  /**
   * @return {string} 'not-started', 'ok-done', or a running status
   *     including the current iteration count.
   */
  getStatus() {
    if (this.iteration === 0) {
      return 'not-started';
    }
    const timeSpent = Date.now() - this.startTime;
    if (timeSpent >= this.runtimeSeconds * 1000) {
      return 'ok-done';
    }
    return `running, iteration: ${this.iteration}`;
  }

  /** @return {!Object} Total run time (last iteration minus start). */
  getResults() {
    const runTimeMillis = this.lastIterationTime - this.startTime;
    return {'runTimeSeconds': runTimeMillis / 1000};
  }
}
192 |
// Global handle so the running test can be inspected from the console.
let testRunner;

// Entry point invoked by the host page: build `numPeerConnections`
// loopback connections of the given element type and run the pause/play
// loop for `runtimeSeconds`.
function startTest(
    runtimeSeconds, numPeerConnections, pausePlayIterationDelayMillis,
    elementType) {
  testRunner = new TestRunner(
      runtimeSeconds, pausePlayIterationDelayMillis);
  for (let i = 0; i < numPeerConnections; i++) {
    testRunner.addPeerConnection(elementType);
  }
  testRunner.startTest();
}
205 |
--------------------------------------------------------------------------------
/src/peer2peer/index.html:
--------------------------------------------------------------------------------
1 |
2 |
9 |
10 |
11 |
12 | WebRTC peer2peer test page
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
Remote Video
26 |
27 |
28 |
29 | Stream size
31 |
32 | 640x360
33 |
34 |
35 |
Local Preview
36 |
38 |
39 | Stream size
41 |
42 |
43 | 320x180
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
GetUserMedia
52 |
53 |
55 | Audio
57 | Video
59 | Screen capture
60 |
61 | Edit constraints
62 | Request GetUserMedia
64 |
65 |
66 |
67 | Audio source
68 |
69 |
70 |
72 |
73 |
Auto
74 |
Get devices
75 |
76 |
77 |
78 | Video source
79 |
80 |
81 |
83 |
84 |
85 | HD
86 | VGA
87 | QVGA
88 |
89 |
90 |
91 |
92 |
93 |
94 |
PeerConnection
95 |
96 |
97 |
Server [? ]:
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 | Peer ID:
106 |
107 |
108 | Connect
109 |
110 |
111 |
112 |
113 | Constraints:
114 |
115 | Connection
116 |
117 |
118 | createOffer
119 |
121 |
122 | createAnswer
123 |
124 |
125 | Call:
126 | Negotiate
127 |
128 | Hang up
129 |
130 |
131 |
132 |
133 |
Media streams
134 |
135 | Local Stream:
136 | Add
137 |
138 | Remove
139 |
140 | Stop
141 | Toggle Video
142 | Toggle Audio
143 |
144 |
145 | Remote Stream:
146 | Toggle Video
147 | Toggle Audio
148 |
149 |
172 |
173 |
174 |
175 |
DTMF Sender
176 | Create
177 | tones:
178 |
179 | dur(ms):
180 |
182 | gap(ms):
183 |
184 | Send
185 |
186 |
187 |
188 |
Options
189 | Force iSAC
190 |
191 | CPU overuse
192 |
194 |
195 |
196 |
197 |
198 |
Log
199 |
Clear logs
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
--------------------------------------------------------------------------------
/src/replaceTrack/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 | 'use strict';
9 |
/**
 * Log a value prefixed with a seconds-resolution page timestamp.
 * @param {*} arg - The value to log.
 */
function trace(arg) {
  var timestamp = (window.performance.now() / 1000).toFixed(3);
  console.log(timestamp + ': ', arg);
}
14 |
// Page controls.
var startButton = document.getElementById('startButton');
var callButton = document.getElementById('callButton');
var hangupButton = document.getElementById('hangupButton');
var restartButton = document.getElementById('restartButton');
var muteButton = document.querySelector('button#muteButton');
// Only Start is usable until a local stream / call exists.
callButton.disabled = true;
hangupButton.disabled = true;
restartButton.disabled = true;
startButton.onclick = start;
callButton.onclick = call;
hangupButton.onclick = hangup;
restartButton.onclick = restartVideo;
muteButton.onclick = toggleMute;

// Feature detection: restart/mute rely on RTCRtpSender.replaceTrack().
var supportsReplaceTrack =('RTCRtpSender' in window &&
  'replaceTrack' in RTCRtpSender.prototype);

// Timestamp taken when the call starts; cleared after setup is measured.
var startTime;
var localVideo = document.getElementById('localVideo');
var remoteVideo = document.getElementById('remoteVideo');

// Log the intrinsic size of each video once metadata is known.
localVideo.addEventListener('loadedmetadata', function() {
  trace('Local video videoWidth: ' + this.videoWidth +
    'px, videoHeight: ' + this.videoHeight + 'px');
});

remoteVideo.addEventListener('loadedmetadata', function() {
  trace('Remote video videoWidth: ' + this.videoWidth +
    'px, videoHeight: ' + this.videoHeight + 'px');
});

remoteVideo.onresize = function() {
  trace('Remote video size changed to ' +
    remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
  // We'll use the first onsize callback as an indication that video has started
  // playing out.
  if (startTime) {
    var elapsedTime = window.performance.now() - startTime;
    trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
    startTime = null;
  }
};

// Loopback call state.
var localStream;
var pc1;
var pc2;
// Legacy offer options: request to receive both audio and video.
var offerOptions = {
  offerToReceiveAudio: 1,
  offerToReceiveVideo: 1
};
65 |
// Human-readable label for a peer connection ('pc1' or 'pc2').
function getName(pc) {
  if (pc === pc1) {
    return 'pc1';
  }
  return 'pc2';
}
69 |
// Return the peer connection at the other end of the loopback pair.
function getOtherPc(pc) {
  if (pc === pc1) {
    return pc2;
  }
  return pc1;
}
73 |
// getUserMedia success: show the local preview, keep the stream for
// call(), and enable the Call button.
function gotStream(stream) {
  trace('Received local stream');
  localVideo.srcObject = stream;
  localStream = stream;
  callButton.disabled = false;
}
80 |
/**
 * Request a local audio+video stream; gotStream() enables the Call
 * button on success.
 */
function start() {
  trace('Requesting local stream');
  startButton.disabled = true;
  var constraints = {
    audio: true,
    video: true
  };
  navigator.mediaDevices.getUserMedia(constraints)
    .then(gotStream)
    .catch(function(e) {
      alert('getUserMedia() error: ' + e.name);
    });
}
93 |
// Set up the loopback call: pc1 sends the local tracks, pc2 receives
// them via its ontrack handler.
function call() {
  callButton.disabled = true;
  hangupButton.disabled = false;
  // Restart/mute only work when RTCRtpSender.replaceTrack is available.
  restartButton.disabled = !supportsReplaceTrack;
  muteButton.disabled = !supportsReplaceTrack;

  startTime = window.performance.now();
  var videoTracks = localStream.getVideoTracks();
  var audioTracks = localStream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using audio device: ' + audioTracks[0].label);
  }
  var servers = null;  // No ICE servers needed for an in-page loopback.
  pc1 = new RTCPeerConnection(servers);
  trace('Created local peer connection object pc1');
  pc1.onicecandidate = function(e) {
    onIceCandidate(pc1, e);
  };
  pc2 = new RTCPeerConnection(servers);
  trace('Created remote peer connection object pc2');
  pc2.onicecandidate = function(e) {
    onIceCandidate(pc2, e);
  };
  pc1.oniceconnectionstatechange = function(e) {
    onIceStateChange(pc1, e);
  };
  pc2.oniceconnectionstatechange = function(e) {
    onIceStateChange(pc2, e);
  };
  pc2.ontrack = gotRemoteStream;

  // Track-based API (addTrack) so replaceTrack can swap senders later.
  localStream.getTracks().forEach(
    function(track) {
      pc1.addTrack(
        track,
        localStream
      );
    }
  );
  trace('Added local stream to pc1');

  trace('pc1 createOffer start');
  pc1.createOffer(
    offerOptions
  ).then(
    onCreateOfferSuccess,
    onCreateSessionDescriptionError
  );
}
146 |
// Report a failed createOffer()/createAnswer().
function onCreateSessionDescriptionError(error) {
  var message = 'Failed to create session description: ' + error.toString();
  trace(message);
}
150 |
// Offer created on pc1: apply it locally on pc1 and remotely on pc2,
// then ask pc2 for an answer. Each step logs success or failure.
function onCreateOfferSuccess(desc) {
  trace('Offer from pc1\n' + desc.sdp);
  trace('pc1 setLocalDescription start');
  pc1.setLocalDescription(desc).then(
    function() {
      onSetLocalSuccess(pc1);
    },
    onSetSessionDescriptionError
  );
  trace('pc2 setRemoteDescription start');
  pc2.setRemoteDescription(desc).then(
    function() {
      onSetRemoteSuccess(pc2);
    },
    onSetSessionDescriptionError
  );
  trace('pc2 createAnswer start');
  // Since the 'remote' side has no media stream we need
  // to pass in the right constraints in order for it to
  // accept the incoming offer of audio and video.
  pc2.createAnswer().then(
    onCreateAnswerSuccess,
    onCreateSessionDescriptionError
  );
}
176 |
// Log completion of setLocalDescription for the given connection.
function onSetLocalSuccess(pc) {
  var name = getName(pc);
  trace(name + ' setLocalDescription complete');
}
180 |
// Log completion of setRemoteDescription for the given connection.
function onSetRemoteSuccess(pc) {
  var name = getName(pc);
  trace(name + ' setRemoteDescription complete');
}
184 |
// Report a failed setLocalDescription()/setRemoteDescription().
function onSetSessionDescriptionError(error) {
  var message = 'Failed to set session description: ' + error.toString();
  trace(message);
}
188 |
// pc2 ontrack handler: attach the incoming stream to the remote video
// element. The guard avoids reassigning the same stream once per track.
function gotRemoteStream(e) {
  if (remoteVideo.srcObject !== e.streams[0]) {
    remoteVideo.srcObject = e.streams[0];
    trace('pc2 received remote stream');
  }
}
195 |
// Answer created on pc2: apply it locally on pc2 and remotely on pc1 to
// complete the offer/answer exchange.
function onCreateAnswerSuccess(desc) {
  trace('Answer from pc2:\n' + desc.sdp);
  trace('pc2 setLocalDescription start');
  pc2.setLocalDescription(desc).then(
    function() {
      onSetLocalSuccess(pc2);
    },
    onSetSessionDescriptionError
  );
  trace('pc1 setRemoteDescription start');
  pc1.setRemoteDescription(desc).then(
    function() {
      onSetRemoteSuccess(pc1);
    },
    onSetSessionDescriptionError
  );
}
213 |
// Relay an ICE candidate from one connection to the other. Note that the
// final null candidate (end-of-candidates) is relayed as well.
function onIceCandidate(pc, event) {
  getOtherPc(pc).addIceCandidate(event.candidate)
    .then(
      function() {
        onAddIceCandidateSuccess(pc);
      },
      function(err) {
        onAddIceCandidateError(pc, err);
      }
    );
  trace(getName(pc) + ' ICE candidate: \n' + (event.candidate ?
    event.candidate.candidate : '(null)'));
}
227 |
// Log a successfully added ICE candidate.
function onAddIceCandidateSuccess(pc) {
  var name = getName(pc);
  trace(name + ' addIceCandidate success');
}
231 |
// Log a failed addIceCandidate() call.
function onAddIceCandidateError(pc, error) {
  var name = getName(pc);
  trace(name + ' failed to add ICE Candidate: ' + error.toString());
}
235 |
// Log ICE connection state transitions for the given connection.
function onIceStateChange(pc, event) {
  if (!pc) {
    return;
  }
  trace(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
  console.log('ICE state change event: ', event);
}
242 |
/**
 * Close both peer connections and reset the button states.
 */
function hangup() {
  trace('Ending call');
  [pc1, pc2].forEach(function(pc) {
    pc.close();
  });
  pc1 = null;
  pc2 = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
}
252 |
// Stops and restarts the video with replaceTrack.
// The current camera track is stopped and removed immediately; after a
// 5 second pause a fresh track is requested and swapped in via
// RTCRtpSender.replaceTrack() (no renegotiation needed).
function restartVideo() {
  localStream.getVideoTracks()[0].stop();
  localStream.removeTrack(localStream.getVideoTracks()[0]);
  window.setTimeout(function() {
    navigator.mediaDevices.getUserMedia({video: true})
      .then(function(stream) {
        // Keep localStream in sync with what is actually being sent.
        localStream.addTrack(stream.getVideoTracks()[0]);
        var sender = pc1.getSenders().find(function(s) {
          return s.track && s.track.kind === 'video';
        });
        return sender.replaceTrack(stream.getVideoTracks()[0]);
      })
      .then(function() {
        console.log('Replaced video track');
      })
      .catch(function(err) {
        console.error(err);
      });
  }, 5000);
}
274 |
// Toggles audio mute with replaceTrack(null/track)
// NOTE(review): assumes pc1.getSenders()[0] is the audio sender — this
// depends on the order tracks were added in call(); confirm before
// relying on it.
function toggleMute() {
  var sender = pc1.getSenders()[0];
  var p;
  if (!sender.track) {
    // Currently muted (track replaced with null): restore the audio track.
    trace('re-adding audio track');
    p = sender.replaceTrack(localStream.getAudioTracks()[0]);
  } else {
    // Mute by sending nothing on this sender.
    trace('replacing audio track with null');
    p = sender.replaceTrack(null);
  }
  p.then(function() {
    console.log('replaced track');
  }).catch(function(err) {
    console.error('during replaceTrack', err);
  });
}
292 |
--------------------------------------------------------------------------------
/src/codec_constraints/js/main.js:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 | *
4 | * Use of this source code is governed by a BSD-style license
5 | * that can be found in the LICENSE file in the root of the source
6 | * tree.
7 | */
8 |
'use strict';

// Control buttons: call and hang-up stay disabled until a local stream
// has been acquired via the start button.
var startButton = document.getElementById('startButton');
var callButton = document.getElementById('callButton');
var hangupButton = document.getElementById('hangupButton');
callButton.disabled = true;
hangupButton.disabled = true;
startButton.onclick = start;
callButton.onclick = startTest;
hangupButton.onclick = hangup;

// Timestamp taken when the call starts; used to report setup time.
var startTime;
var localVideo = document.getElementById('localVideo');
var remoteVideo = document.getElementById('remoteVideo');
23 |
// Logs |arg| to the console, prefixed with a seconds timestamp relative to
// page load (millisecond precision).
function trace(arg) {
  var timestamp = (window.performance.now() / 1000).toFixed(3);
  console.log(timestamp + ': ', arg);
}
28 |
// Returns the value of the checked codec radio button. Falls back to the
// VP9 element's value when neither H264 nor VP8 is checked.
function getSelectedVideoCodec() {
  if (document.getElementById('H264').checked) {
    return document.getElementById('H264').value;
  }
  if (document.getElementById('VP8').checked) {
    return document.getElementById('VP8').value;
  }
  return document.getElementById('VP9').value;
}
40 |
/**
 * Error type thrown by the SDP helpers in this file.
 * Callers in this file invoke it as `new Failure(location, message)` (see
 * setSdpDefaultCodec and isRtpmapLine); previously the second argument was
 * silently dropped, losing the actual error detail. Single-argument calls
 * keep their old behavior.
 * @param {string} location - Failing function name, or the whole message
 *     when no second argument is given.
 * @param {string=} message - Optional detail message.
 */
function Failure(location, message) {
  this.message =
      (message === undefined) ? location : location + ': ' + message;
  this.name = 'Failure';
}
45 |
// Log the local video's intrinsic dimensions once its metadata is known.
localVideo.addEventListener('loadedmetadata', function() {
  trace('Local video videoWidth: ' + this.videoWidth +
    'px, videoHeight: ' + this.videoHeight + 'px');
});

// Log the remote video's intrinsic dimensions once its metadata is known.
remoteVideo.addEventListener('loadedmetadata', function() {
  trace('Remote video videoWidth: ' + this.videoWidth +
    'px, videoHeight: ' + this.videoHeight + 'px');
});

remoteVideo.onresize = function() {
  trace('Remote video size changed to ' +
    remoteVideo.videoWidth + 'x' + remoteVideo.videoHeight);
  // We'll use the first onsize callback as an indication that video has started
  // playing out.
  if (startTime) {
    var elapsedTime = window.performance.now() - startTime;
    trace('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
    startTime = null;
  }
};
67 |
// Call state: the local capture stream and the two in-page peer connections.
var localStream;
var pc1;
var pc2;
// Request both audio and video from the answering side as well.
var offerOptions = {
  offerToReceiveAudio: 1,
  offerToReceiveVideo: 1
};
75 |
// Returns a human-readable label ('pc1' or 'pc2') for the given connection.
function getName(pc) {
  if (pc === pc1) {
    return 'pc1';
  }
  return 'pc2';
}
79 |
// Returns the peer connection opposite to |pc| in this loopback call.
function getOtherPc(pc) {
  if (pc === pc1) {
    return pc2;
  }
  return pc1;
}
83 |
// getUserMedia success callback: shows the local preview, stores the stream,
// and enables the call button.
function gotStream(stream) {
  trace('Received local stream');
  localVideo.srcObject = stream;
  localStream = stream;
  callButton.disabled = false;
}
90 |
// Requests camera and microphone access; on success the call button is
// enabled via gotStream(), on failure the error name is shown in an alert.
function start() {
  trace('Requesting local stream');
  startButton.disabled = true;
  var constraints = {
    audio: true,
    video: true
  };
  navigator.mediaDevices.getUserMedia(constraints)
      .then(gotStream)
      .catch(function(e) {
        alert('getUserMedia() error: ' + e.name);
      });
}
103 |
// Sets up the in-page loopback call: creates pc1 (caller) and pc2 (callee),
// wires their ICE/track callbacks, adds the local tracks to pc1, and starts
// the offer/answer exchange.
function call() {
  callButton.disabled = true;
  hangupButton.disabled = false;
  trace('Starting call');
  startTime = window.performance.now();
  var videoTracks = localStream.getVideoTracks();
  var audioTracks = localStream.getAudioTracks();
  if (videoTracks.length > 0) {
    trace('Using video device: ' + videoTracks[0].label);
  }
  if (audioTracks.length > 0) {
    trace('Using audio device: ' + audioTracks[0].label);
  }
  // No STUN/TURN servers: both ends live in this page, host candidates suffice.
  var servers = null;
  pc1 = new RTCPeerConnection(servers);
  trace('Created local peer connection object pc1');
  pc1.onicecandidate = function(e) {
    onIceCandidate(pc1, e);
  };
  pc2 = new RTCPeerConnection(servers);
  trace('Created remote peer connection object pc2');
  pc2.onicecandidate = function(e) {
    onIceCandidate(pc2, e);
  };
  pc1.oniceconnectionstatechange = function(e) {
    onIceStateChange(pc1, e);
  };
  pc2.oniceconnectionstatechange = function(e) {
    onIceStateChange(pc2, e);
  };
  pc2.ontrack = gotRemoteStream;

  localStream.getTracks().forEach(
    function(track) {
      pc1.addTrack(
        track,
        localStream
      );
    }
  );
  trace('Added local stream to pc1');

  trace('pc1 createOffer start');
  pc1.createOffer(
    offerOptions
  ).then(
    onCreateOfferSuccess,
    onCreateSessionDescriptionError
  );
}
154 |
// Logs a createOffer/createAnswer failure.
function onCreateSessionDescriptionError(error) {
  var message = 'Failed to create session description: ' + error.toString();
  trace(message);
}
158 |
159 | /**
160 | * See |setSdpDefaultCodec|.
161 | */
162 | function setSdpDefaultVideoCodec(sdp, codec, preferHwCodec) {
163 | return setSdpDefaultCodec(sdp, 'video', codec, preferHwCodec);
164 | }
165 |
166 | /**
167 | * Returns a modified version of |sdp| where the |codec| has been promoted to be
168 | * the default codec, i.e. the codec whose ID is first in the list of codecs on
169 | * the 'm=|type|' line, where |type| is 'audio' or 'video'. If |preferHwCodec|
170 | * is true, it will select the last codec with the given name, and if false, it
171 | * will select the first codec with the given name, because HW codecs are listed
172 | * after SW codecs in the SDP list.
173 | * @private
174 | */
175 | function setSdpDefaultCodec(sdp, type, codec, preferHwCodec) {
176 | var sdpLines = splitSdpLines(sdp);
177 |
178 | // Find codec ID, e.g. 100 for 'VP8' if 'a=rtpmap:100 VP8/9000'.
179 | var codecId = findRtpmapId(sdpLines, codec, preferHwCodec);
180 | if (codecId === null) {
181 | throw new Failure('setSdpDefaultCodec',
182 | 'Unknown ID for |codec| = \'' + codec + '\'.');
183 | }
184 |
185 | // Find 'm=|type|' line, e.g. 'm=video 9 UDP/TLS/RTP/SAVPF 100 101 107 116'.
186 | var mLineNo = findLine(sdpLines, 'm=' + type);
187 | if (mLineNo === null) {
188 | throw new Failure('setSdpDefaultCodec',
189 | '\'m=' + type + '\' line missing from |sdp|.');
190 | }
191 |
192 | // Modify video line to use the desired codec as the default.
193 | sdpLines[mLineNo] = setMLineDefaultCodec(sdpLines[mLineNo], codecId);
194 | return mergeSdpLines(sdpLines);
195 | }
196 |
197 | /**
198 | * Searches through all |sdpLines| for the 'a=rtpmap:' line for the codec of
199 | * the specified name, returning its ID as an int if found, or null otherwise.
200 | * |codec| is the case-sensitive name of the codec. If |lastInstance|
201 | * is true, it will return the last such ID, and if false, it will return the
202 | * first such ID.
203 | * For example, if |sdpLines| contains 'a=rtpmap:100 VP8/9000' and |codec| is
204 | * 'VP8', this function returns 100.
205 | * @private
206 | */
207 | function findRtpmapId(sdpLines, codec, lastInstance) {
208 | var lineNo = findRtpmapLine(sdpLines, codec, lastInstance);
209 | if (lineNo === null) {
210 | return null;
211 | }
212 | // Parse from 'a=rtpmap: /'.
213 | var id = sdpLines[lineNo].substring(9, sdpLines[lineNo].indexOf(' '));
214 | return parseInt(id);
215 | }
216 |
217 | /**
218 | * Finds a 'a=rtpmap:' line from |sdpLines| that contains |contains| and returns
219 | * its line index, or null if no such line was found. |contains| may be the
220 | * codec ID, codec name or bitrate. If |lastInstance| is true, it will return
221 | * the last such line index, and if false, it will return the first such line
222 | * index.
223 | * An 'a=rtpmap:' line looks like this: 'a=rtpmap: /'.
224 | */
225 | function findRtpmapLine(sdpLines, contains, lastInstance) {
226 | if (lastInstance === true) {
227 | for (var i = sdpLines.length - 1; i >= 0 ; i--) {
228 | if (isRtpmapLine(sdpLines[i], contains)) {
229 | return i;
230 | }
231 | }
232 | } else {
233 | for (i = 0; i < sdpLines.length; i++) {
234 | if (isRtpmapLine(sdpLines[i], contains)) {
235 | return i;
236 | }
237 | }
238 | }
239 | return null;
240 | }
241 |
242 | /**
243 | * Returns true if |sdpLine| contains |contains| and is of pattern
244 | * 'a=rtpmap: /'.
245 | */
246 | function isRtpmapLine(sdpLine, contains) {
247 | // Is 'a=rtpmap:' line containing |contains| string?
248 | if (sdpLine.startsWith('a=rtpmap:') &&
249 | sdpLine.indexOf(contains) !== -1) {
250 | // Expecting pattern 'a=rtpmap: /'.
251 | var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
252 | if (!sdpLine.match(pattern)) {
253 | throw new Failure('isRtpmapLine', 'Unexpected "a=rtpmap:" pattern.');
254 | }
255 | return true;
256 | }
257 | return false;
258 | }
259 |
260 | /**
261 | * Returns a modified version of |mLine| that has |codecId| first in the list of
262 | * codec IDs. For example, setMLineDefaultCodec(
263 | * 'm=video 9 UDP/TLS/RTP/SAVPF 100 101 107 116 117 96', 107)
264 | * Returns:
265 | * 'm=video 9 UDP/TLS/RTP/SAVPF 107 100 101 116 117 96'
266 | * @private
267 | */
268 | function setMLineDefaultCodec(mLine, codecId) {
269 | var elements = mLine.split(' ');
270 |
271 | // Copy first three elements, codec order starts on fourth.
272 | var newLine = elements.slice(0, 3);
273 |
274 | // Put target |codecId| first and copy the rest.
275 | newLine.push(codecId);
276 | for (var i = 3; i < elements.length; i++) {
277 | if (elements[i] !== codecId) {
278 | newLine.push(elements[i]);
279 | }
280 | }
281 |
282 | return newLine.join(' ');
283 | }
284 |
/**
 * Splits an SDP blob into its individual CRLF-delimited lines.
 * @private
 */
function splitSdpLines(sdp) {
  var lineBreak = '\r\n';
  return sdp.split(lineBreak);
}
289 |
/**
 * Joins SDP lines back into a single CRLF-delimited blob.
 * @private
 */
function mergeSdpLines(sdpLines) {
  var lineBreak = '\r\n';
  return sdpLines.join(lineBreak);
}
294 |
/**
 * Returns the index of the first line in |lines|, at or after |startingLine|,
 * that starts with |lineStartsWith|, or null if no such line exists.
 * @private
 */
function findLine(lines, lineStartsWith, startingLine = 0) {
  var total = lines.length;
  for (var lineNo = startingLine; lineNo < total; lineNo++) {
    if (lines[lineNo].startsWith(lineStartsWith)) {
      return lineNo;
    }
  }
  return null;
}
304 |
// createOffer success callback: rewrites the offer SDP to make the selected
// codec the default, then drives the local/remote description exchange.
function onCreateOfferSuccess(desc) {
  var videoCodec = getSelectedVideoCodec();
  // NOTE(review): the third argument is |preferHwCodec| (a boolean), but the
  // codec name string is passed here; any non-empty string is truthy, so this
  // always picks the last matching rtpmap entry — confirm this is intended.
  desc.sdp = setSdpDefaultVideoCodec(desc.sdp, videoCodec, videoCodec);
  trace('Offer from pc1\n' + desc.sdp);
  trace('Ok-' + JSON.stringify(desc));
  trace('pc1 setLocalDescription start');
  pc1.setLocalDescription(desc).then(
    function() {
      onSetLocalSuccess(pc1);
    },
    onSetSessionDescriptionError
  );
  trace('pc2 setRemoteDescription start');
  pc2.setRemoteDescription(desc).then(
    function() {
      onSetRemoteSuccess(pc2);
    },
    onSetSessionDescriptionError
  );
  trace('pc2 createAnswer start');
  // Since the 'remote' side has no media stream we need
  // to pass in the right constraints in order for it to
  // accept the incoming offer of audio and video.
  pc2.createAnswer().then(
    onCreateAnswerSuccess,
    onCreateSessionDescriptionError
  );
}
333 |
// Logs a completed setLocalDescription for |pc|.
function onSetLocalSuccess(pc) {
  trace(getName(pc) + ' setLocalDescription complete');
}

// Logs a completed setRemoteDescription for |pc|.
function onSetRemoteSuccess(pc) {
  trace(getName(pc) + ' setRemoteDescription complete');
}

// Logs a failed setLocalDescription/setRemoteDescription.
function onSetSessionDescriptionError(error) {
  trace('Failed to set session description: ' + error.toString());
}
345 |
// Call-button handler: starts the call and polls pc1 stats every 10 seconds.
function startTest() {
  call();
  // NOTE(review): this uses the legacy callback form of getStats(), and the
  // interval id is discarded, so polling keeps running after hangup() —
  // consider storing the id and clearing it on hangup.
  setInterval(() => {
    pc1.getStats((response) => {
      trace(response);
    });
  }, 10 * 1000);
}
354 |
// pc2 track callback: attaches the first remote stream to the remote video
// element, skipping the update if it is already attached.
function gotRemoteStream(e) {
  var stream = e.streams[0];
  if (remoteVideo.srcObject === stream) {
    return;
  }
  remoteVideo.srcObject = stream;
  trace('pc2 received remote stream');
}
361 |
// createAnswer success callback: applies the answer as pc2's local and pc1's
// remote description, completing the offer/answer exchange.
function onCreateAnswerSuccess(desc) {
  trace('Answer from pc2:\n' + desc.sdp);
  trace('pc2 setLocalDescription start');
  pc2.setLocalDescription(desc).then(
    function() {
      onSetLocalSuccess(pc2);
    },
    onSetSessionDescriptionError
  );
  trace('pc1 setRemoteDescription start');
  pc1.setRemoteDescription(desc).then(
    function() {
      onSetRemoteSuccess(pc1);
    },
    onSetSessionDescriptionError
  );
}
379 |
// Relays an ICE candidate gathered by one connection to the other one.
// A null |event.candidate| marks end-of-candidates and is passed through.
function onIceCandidate(pc, event) {
  getOtherPc(pc).addIceCandidate(event.candidate)
  .then(
    function() {
      onAddIceCandidateSuccess(pc);
    },
    function(err) {
      onAddIceCandidateError(pc, err);
    }
  );
  trace(getName(pc) + ' ICE candidate: \n' + (event.candidate ?
    event.candidate.candidate : '(null)'));
}
393 |
// Logs that |pc| successfully added a relayed ICE candidate.
function onAddIceCandidateSuccess(pc) {
  trace(getName(pc) + ' addIceCandidate success');
}

// Logs that |pc| failed to add a relayed ICE candidate, with the reason.
function onAddIceCandidateError(pc, error) {
  trace(getName(pc) + ' failed to add ICE Candidate: ' + error.toString());
}

// Logs ICE connection state transitions for |pc|.
function onIceStateChange(pc, event) {
  if (pc) {
    trace(getName(pc) + ' ICE state: ' + pc.iceConnectionState);
    console.log('ICE state change event: ', event);
  }
}
408 |
// Tears down the call: closes both peer connections, drops the references,
// and restores the call/hang-up buttons to their idle state.
function hangup() {
  trace('Ending call');
  pc1.close();
  pc1 = null;
  pc2.close();
  pc2 = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
}
418 |
--------------------------------------------------------------------------------
/src/peer2peer/js/main.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
2 | // Use of this source code is governed by a BSD-style license
3 | // that can be found in the LICENSE file in the root of the source
4 | // tree.
5 |
6 | // See http://dev.w3.org/2011/webrtc/editor/getusermedia.html for more
7 | // information on getUserMedia. See
8 | // http://dev.w3.org/2011/webrtc/editor/webrtc.html for more information on
9 | // peerconnection and webrtc in general.
10 |
'use strict';

// TODO(jansson) rewrite to classes.
// Global namespace object.
var global = {};
// Identity transform applied to outgoing SDP unless overridden via
// setOutgoingSdpTransform().
global.transformOutgoingSdp = function(sdp) {
  return sdp;
};
// Default getUserMedia video resolution.
global.videoWidth = 1280;
global.videoHeight = 720;

// We need a STUN server for some API calls.
var STUN_SERVER = 'stun.l.google.com:19302';
25 |
// Used as a shortcut for finding DOM elements by ID.
// @param {string} id is a case-sensitive string representing the unique ID of
// the element being sought.
// @return {object} the element with that ID, or null when no such element
// exists in the document.
var $ = function(id) {
  return document.getElementById(id);
};
33 |
// Prepopulate constraints from JS to the UI. Enumerate devices available
// via getUserMedia, register elements to be used for local storage.
window.onload = function() {
  hookupDataChannelCallbacks_();
  hookupDtmfSenderCallback_();
  updateGetUserMediaConstraints();
  setupLocalStorageFieldValues();
  acceptIncomingCalls();
  setPeerConnectionConstraints();
  // Enumerate devices right away when the persisted checkbox says so.
  if ($('get-devices-onload').checked === true) {
    getDevices();
  }
  // Checks if the mobile UI should be used.
  registerResButtonsEvents();
  screenCaptureExtensionHandler_();
};

// Disconnect before the tab is closed.
window.onbeforeunload = function() {
  disconnect_();
};
55 |
// Handles the resolution button events: clicking one of the QVGA/VGA/HD
// buttons updates the global capture resolution, highlights the pressed
// button, and regenerates the getUserMedia constraints text box.
function registerResButtonsEvents() {
  var lastResButtonPressed;
  // [element id, width, height] triples (16:9 variants).
  var elementIdAndResolutions = [
    ['video-qvga', 320, 180],
    ['video-vga', 640, 360],
    ['video-hd', 1280, 720]
  ];

  // Binds a click listener on the button described by |elementAndRes|.
  function setResolution(elementAndRes) {
    $(elementAndRes[0]).addEventListener('click', function() {
      global.videoWidth = elementAndRes[1];
      global.videoHeight = elementAndRes[2];
      $(elementAndRes[0]).className = 'pressed';
      if (typeof lastResButtonPressed !== 'undefined') {
        lastResButtonPressed.className = '';
      }
      lastResButtonPressed = $(elementAndRes[0]);
      updateGetUserMediaConstraints();
    }, false);
  }

  // Iterate with forEach instead of for..in: for..in enumerates array
  // indices as strings plus any inherited enumerable properties, which is
  // fragile on arrays.
  elementIdAndResolutions.forEach(setResolution);
}
82 |
// TODO (jansson) Setup events using addEventListener, applies in general.
// A list of element id's to be registered for local storage.
// Persists the server-address field and the enumerate-devices-on-load
// checkbox across page reloads.
function setupLocalStorageFieldValues() {
  registerLocalStorage_('pc-server');
  registerLocalStorage_('get-devices-onload');
}
89 |
// Public HTML functions

// The *Here functions are called from peer2peer.html and will make calls
// into our underlying JavaScript library with the values from the page
// (have to be named differently to avoid name clashes with existing functions).
/* exported getUserMediaFromHere */
// Requests media using the constraints JSON from the UI text box.
function getUserMediaFromHere() {
  var constraints = $('getusermedia-constraints').value;
  try {
    doGetUserMedia_(constraints);
  } catch (exception) {
    print_('getUserMedia says: ' + exception);
  }
}
/* exported editConstraints */
// Expands the given constraints textarea for editing; collapses it on blur.
function editConstraints(elementId) {
  $(elementId).style.display = 'inline';
  $(elementId).style.height = '400px';
  $(elementId).style.zIndex = '9';
  $(elementId).focus();
  $(elementId).onblur = function() {
    $(elementId).style.display = 'none';
  };
}

/* exported connectFromHere */
// Connects to the signaling server, generating a random peer name when the
// user has not provided one.
function connectFromHere() {
  var server = $('pc-server').value;
  if ($('peer-id').value === '') {
    // Generate a random name to distinguish us from other tabs:
    $('peer-id').value = 'peer_' + Math.floor(Math.random() * 10000);
    print_('Our name from now on will be ' + $('peer-id').value);
  }
  connect(server, $('peer-id').value);
}

/* exported negotiateCallFromHere */
// Reads offer/answer constraints from the UI and (re)negotiates the call.
function negotiateCallFromHere() {
  // Set the global variables with values from our UI.
  setCreateOfferConstraints(getEvaluatedJavaScript_(
      $('createoffer-constraints').value));
  setCreateAnswerConstraints(getEvaluatedJavaScript_(
      $('createanswer-constraints').value));

  ensureHasPeerConnection_();
  negotiateCall_();
}

/* exported addLocalStreamFromHere */
// Adds the local media stream to the peer connection (created if needed).
function addLocalStreamFromHere() {
  ensureHasPeerConnection_();
  addLocalStream();
}

/* exported removeLocalStreamFromHere */
function removeLocalStreamFromHere() {
  removeLocalStream();
}

/* exported hangUpFromHere */
// Ends the current call and resumes listening for incoming calls.
function hangUpFromHere() {
  hangUp();
  acceptIncomingCalls();
}
154 |
/* exported toggleRemoteVideoFromHere */
// Toggles the enabled state of the first remote video track.
function toggleRemoteVideoFromHere() {
  toggleRemoteStream(function(remoteStream) {
    return remoteStream.getVideoTracks()[0];
  }, 'video');
}

/* exported toggleRemoteAudioFromHere */
// Toggles the enabled state of the first remote audio track.
function toggleRemoteAudioFromHere() {
  toggleRemoteStream(function(remoteStream) {
    return remoteStream.getAudioTracks()[0];
  }, 'audio');
}
/* exported toggleLocalVideoFromHere */
// Toggles the enabled state of the first local video track.
function toggleLocalVideoFromHere() {
  toggleLocalStream(function(localStream) {
    return localStream.getVideoTracks()[0];
  }, 'video');
}

/* exported toggleLocalAudioFromHere */
// Toggles the enabled state of the first local audio track.
function toggleLocalAudioFromHere() {
  toggleLocalStream(function(localStream) {
    return localStream.getAudioTracks()[0];
  }, 'audio');
}

/* exported stopLocalFromHere */
function stopLocalFromHere() {
  stopLocalStream();
}

/* exported createDataChannelFromHere */
function createDataChannelFromHere() {
  ensureHasPeerConnection_();
  createDataChannelOnPeerConnection();
}

/* exported closeDataChannelFromHere */
function closeDataChannelFromHere() {
  ensureHasPeerConnection_();
  closeDataChannelOnPeerConnection();
}

/* exported sendDataFromHere */
// Sends the contents of the data-channel text box to the remote peer.
function sendDataFromHere() {
  var data = $('data-channel-send').value;
  sendDataOnChannel(data);
}

/* exported createDtmfSenderFromHere */
function createDtmfSenderFromHere() {
  ensureHasPeerConnection_();
  createDtmfSenderOnPeerConnection();
}

/* exported insertDtmfFromHere */
// Sends the DTMF tones configured in the UI on the active DTMF sender.
function insertDtmfFromHere() {
  var tones = $('dtmf-tones').value;
  var duration = $('dtmf-tones-duration').value;
  var gap = $('dtmf-tones-gap').value;
  insertDtmfOnSender(tones, duration, gap);
}

/* exported forceIsacChanged */
// Applies or removes the force-iSAC SDP transform based on the checkbox.
function forceIsacChanged() {
  var forceIsac = $('force-isac').checked;
  if (forceIsac) {
    forceIsac_();
  } else {
    dontTouchSdp_();
  }
}
228 |
// Updates the constraints in the getusermedia-constraints text box with a
// MediaStreamConstraints string. This string is created based on the state
// of the 'audiosrc' and 'videosrc' checkboxes.
// If device enumeration is supported and device source id's are not undefined
// they will be added to the constraints string.
function updateGetUserMediaConstraints() {
  var selectedAudioDevice = $('audiosrc');
  var selectedVideoDevice = $('videosrc');
  global.constraints = {audio: $('audio').checked,
    video: $('video').checked
  };

  if ($('video').checked) {
    global.constraints.video = {height: global.videoHeight,
      width: global.videoWidth};
  }

  // Bug fix: the second operand previously re-tested the audio dropdown
  // (`!selectedAudioDevice.disabled` twice), so a disabled video dropdown
  // was never taken into account.
  if (!selectedAudioDevice.disabled && !selectedVideoDevice.disabled) {
    var devices = getSourcesFromField_(selectedAudioDevice,
        selectedVideoDevice);

    if ($('audio').checked) {
      if (devices.audioId !== null) {
        global.constraints.audio = {deviceId: devices.audioId};
      }
    }

    if ($('video').checked) {
      if (devices.videoId !== null) {
        global.constraints.video = {deviceId: devices.videoId};
      }
    }
  }

  $('getusermedia-constraints').value = JSON.stringify(global.constraints,
      null, ' ');
  // NOTE(review): this registers a fresh 'change' listener on every call, so
  // listeners accumulate; they all parse the same value, but consider moving
  // this registration to window.onload.
  $('getusermedia-constraints').addEventListener('change', function() {
    global.constraints = JSON.parse($('getusermedia-constraints').value);
  }, false);
  $('local-res').innerHTML = global.videoWidth + 'x' + global.videoHeight;
}
270 |
/* exported showServerHelp */
// Explains how to obtain and run a peerconnection_server instance.
function showServerHelp() {
  alert('You need to build and run a peerconnection_server on some ' +
        'suitable machine. To build it in chrome, just run make/ninja ' +
        'peerconnection_server. Otherwise, read in https://code.google' +
        '.com/searchframe#xSWYf0NTG_Q/trunk/peerconnection/README&q=REA' +
        'DME%20package:webrtc%5C.googlecode%5C.com.');
}

/* exported clearLog */
// Empties the on-page message log.
function clearLog() {
  $('messages').innerHTML = '';
}
284 |
// Stops every track of the local stream, if media access has been granted;
// otherwise prints a warning and does nothing.
function stopLocalStream() {
  if (typeof global.localStream === 'undefined') {
    warning_('Tried to stop local stream, ' +
        'but media access is not granted.');
    return;
  }
  removeVideoTrackEvents(global.localStream);
  var tracks = global.localStream.getTracks();
  for (var i = 0; i < tracks.length; i++) {
    tracks[i].stop();
  }
}
297 |
// Adds the current local media stream to a peer connection.
// @param {RTCPeerConnection} peerConnection
function addLocalStreamToPeerConnection(peerConnection) {
  if (typeof global.localStream === 'undefined') {
    error_('Tried to add local stream to peer connection, but there is no ' +
      'stream yet.');
    // NOTE(review): no early return here — presumably error_() throws;
    // confirm, otherwise addStream(undefined) below is reached.
  }
  try {
    peerConnection.addStream(global.localStream, global.addStreamConstraints);
  } catch (exception) {
    error_('Failed to add stream with constraints ' +
      global.addStreamConstraints + ': ' + exception);
  }
  print_('Added local stream.');
}

// Removes the local stream from the peer connection.
// @param {rtcpeerconnection} peerConnection
function removeLocalStreamFromPeerConnection(peerConnection) {
  if (typeof global.localStream === 'undefined') {
    error_('Tried to remove local stream from peer connection, but there is ' +
      'no stream yet.');
    // NOTE(review): same pattern as above — relies on error_() throwing.
  }
  try {
    peerConnection.removeStream(global.localStream);
  } catch (exception) {
    error_('Could not remove stream: ' + exception);
  }
  print_('Removed local stream.');
}
328 |
// Enumerates the audio and video devices available in Chrome and adds the
// devices to the HTML elements with Id 'audiosrc' and 'videosrc'.
// Checks if device enumeration is supported and if the 'audiosrc' + 'videosrc'
// elements exists, if not a debug printout will be displayed.
// If the device label is empty, audio/video + sequence number will be used to
// populate the name. Also makes sure the children has been loaded in order
// to update the constraints.
function getDevices() {
  var selectedAudioDevice = $('audiosrc');
  var selectedVideoDevice = $('videosrc');
  selectedAudioDevice.innerHTML = '';
  selectedVideoDevice.innerHTML = '';

  if (typeof navigator.mediaDevices.enumerateDevices === 'undefined') {
    selectedAudioDevice.disabled = true;
    selectedVideoDevice.disabled = true;
    $('get-devices').disabled = true;
    $('get-devices-onload').disabled = true;
    updateGetUserMediaConstraints();
    error_('enumerateDevices not found, device enumeration not supported');
    // Bug fix: previously fell through and called enumerateDevices() anyway,
    // which would throw. (If error_() already throws, this is a no-op.)
    return;
  }

  navigator.mediaDevices.enumerateDevices().then(function(devices) {
    for (var i = 0; i < devices.length; i++) {
      var option = document.createElement('option');
      option.value = devices[i].deviceId;
      option.text = devices[i].label;

      if (devices[i].kind === 'audioinput') {
        // Fall back to the device id when the label is not exposed yet.
        if (option.text === '') {
          option.text = devices[i].deviceId;
        }
        selectedAudioDevice.appendChild(option);
      } else if (devices[i].kind === 'videoinput') {
        if (option.text === '') {
          option.text = devices[i].deviceId;
        }
        selectedVideoDevice.appendChild(option);
      } else if (devices[i].kind === 'audiooutput') {
        // TODO: Add output device selection.
        // Bug fix: a 'return' here aborted the whole enumeration, silently
        // dropping every device listed after the first audio output; skip
        // just this device instead.
        continue;
      } else {
        error_('Device type ' + devices[i].kind + ' not recognized, ' +
            'cannot enumerate device. Currently only device types' +
            '\'audio\' and \'video\' are supported');
        updateGetUserMediaConstraints();
      }
    }
  }).catch(function(error) {
    error_('Could not enumerateDevices: ' + error);
  });

  checkIfDeviceDropdownsArePopulated_();
}
383 |
// Shows installation instructions for the screen-capture extension. Creates
// the info div on first use and re-shows it on subsequent calls; clicking the
// div hides it again.
function displayScreenCaptureInfo() {
  if ($('screencapture-info')) {
    $('screencapture-info').style.display = 'block';
    return;
  }
  var message = 'Please install the screen capture extension: ' +
    '1. Go to chrome://extensions ' +
    '2. Check: "Enable Developer mode" ' +
    '3. Click: "Load the unpacked extension..." ' +
    '4. Choose "extension" folder from the ' +
    'repository ' +
    '5. Reload this page over https ' +
    'Note: Make sure the URL permission in manifest.json matches ' +
    'the URL for this page.';
  var startScreenCaptureButton = document.getElementById('start-screencapture');
  var messageDiv = document.createElement('div');
  messageDiv.innerHTML = message;
  messageDiv.id = 'screencapture-info';

  // Dismiss the info box when it is clicked.
  window.onclick = function(event) {
    if (event.target === messageDiv) {
      messageDiv.style.display = 'none';
    }
  };

  document.getElementById('general-gum').insertBefore(messageDiv,
    startScreenCaptureButton);
}
412 |
// Wires up the screen-capture button and the postMessage protocol spoken by
// the companion extension's content script (SS_PING / SS_UI_REQUEST /
// SS_DIALOG_SUCCESS / SS_DIALOG_CANCEL).
function screenCaptureExtensionHandler_() {
  // Copied and modified from desktop capture example.
  var extensionInstalled = false;
  $('start-screencapture').addEventListener('click', function() {
    // send screen-sharer request to content-script
    window.postMessage({type: 'SS_UI_REQUEST', text: 'start'}, '*');
    if (!extensionInstalled) {
      displayScreenCaptureInfo();
    }
  });

  // listen for messages from the content-script
  window.addEventListener('message', function(event) {
    // Ignore messages from other origins.
    if (event.origin !== window.location.origin) {
      return;
    }

    // content-script will send a 'SS_PING' msg if extension is installed
    if (event.data.type && (event.data.type === 'SS_PING')) {
      extensionInstalled = true;
    }

    // user chose a stream
    if (event.data.type && (event.data.type === 'SS_DIALOG_SUCCESS')) {
      // NOTE(review): `adapter` is assumed to be provided globally by
      // adapter.js loaded from the page — confirm the page includes it.
      // Desktop audio capture needs Chrome >= 50.
      var audioConstraints =
        (adapter.browserDetails.browser === 'chrome' &&
         adapter.browserDetails.version >= 50 &&
         event.data.requestAudio) ? {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: event.data.streamId
        }
      } : false;

      var videoConstraints = {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: event.data.streamId,
          maxWidth: window.screen.width,
          maxHeight: window.screen.height
        }
      };

      var constraints = {audio: audioConstraints, video: videoConstraints};
      doGetUserMedia_(JSON.stringify(constraints));
    }

    // user clicked on 'cancel' in choose media dialog
    if (event.data.type && (event.data.type === 'SS_DIALOG_CANCEL')) {
      warning_('User cancelled!');
    }
  });
}
466 |
// Sets the transform to apply just before setting the local description and
// sending to the peer.
// @param {function} transform A function which takes one SDP string as
//     argument and returns the modified SDP string.
function setOutgoingSdpTransform(transform) {
  global.transformOutgoingSdp = transform;
}
474 |
// Sets the MediaConstraints to be used for PeerConnection createAnswer()
// calls.
// @param {string} constraints The constraints, as defined in the
//     PeerConnection JS API spec.
function setCreateAnswerConstraints(constraints) {
  global.createAnswerConstraints = constraints;
}
481 |
// Sets the MediaConstraints to be used for PeerConnection createOffer()
// calls.
// @param {string} constraints The constraints, as defined in the
//     PeerConnection JS API spec.
function setCreateOfferConstraints(constraints) {
  global.createOfferConstraints = constraints;
}
488 |
// Sets the callback functions that will receive DataChannel readyState
// updates and received data.
// @param {function} statusCallback Receives a string with the current
//     DataChannel readyState.
// @param {function} dataCallback Receives a string with data sent by the
//     remote peer.
function setDataCallbacks(statusCallback, dataCallback) {
  global.dataStatusCallback = statusCallback;
  global.dataCallback = dataCallback;
}
499 |
// Sends data on an active DataChannel.
// @param {string} data The string that will be sent to the remote peer.
function sendDataOnChannel(data) {
  if (typeof global.dataChannel === 'undefined') {
    error_('Trying to send data, but there is no DataChannel.');
    // NOTE(review): no early return — presumably error_() throws; confirm,
    // otherwise the send() below fails on undefined.
  }
  global.dataChannel.send(data);
}
508 |
// Sets the callback function that will receive DTMF sender ontonechange
// events.
// @param {function} callback Receives a string with the tone that has just
//     begun playout.
function setOnToneChange(callback) {
  global.dtmfOnToneChange = callback;
}
515 |
// Queues DTMF tones on the active DTMF sender; errors out when no sender
// has been created yet.
// @param {string} tones to be sent.
// @param {string} duration duration of the tones to be sent.
// @param {string} interToneGap gap between the tones to be sent.
function insertDtmf(tones, duration, interToneGap) {
  var sender = global.dtmfSender;
  if (typeof sender === 'undefined') {
    error_('Trying to send DTMF, but there is no DTMF sender.');
  }
  sender.insertDTMF(tones, duration, interToneGap);
}
526 |
// Handles a signaling message from the remote peer: either a session
// description (offer/answer) or a trickled ICE candidate.
// For an offer, an answer is created (with global.createAnswerConstraints)
// and sent back via setLocalAndSendMessage_.
// @param {!RTCPeerConnection} peerConnection The connection to apply the
//     message to.
// @param {string} message JSON-serialized RTCSessionDescription or
//     RTCIceCandidate init dictionary.
function handleMessage(peerConnection, message) {
  var parsedMsg = JSON.parse(message);
  if (parsedMsg.type) {
    var sessionDescription = new RTCSessionDescription(parsedMsg);
    peerConnection.setRemoteDescription(sessionDescription)
      .then(
        function() {
          success_('setRemoteDescription');
        },
        function(error) {
          error_('setRemoteDescription', error);
        }
      );
    if (sessionDescription.type === 'offer') {
      print_('createAnswer with constraints: ' +
          JSON.stringify(global.createAnswerConstraints, null, ' '));
      peerConnection.createAnswer(
        global.createAnswerConstraints
      ).then(
        setLocalAndSendMessage_,
        function(error) {
          error_('createAnswer', error);
        }
      );
    }
    return;
  } else if (parsedMsg.candidate) {
    var candidate = new RTCIceCandidate(parsedMsg);
    // Use the promise-based addIceCandidate, consistent with the
    // setRemoteDescription/createAnswer calls above (the callback form is
    // legacy).
    peerConnection.addIceCandidate(candidate).then(
      function() {
        success_('addIceCandidate');
      },
      function(error) {
        error_('addIceCandidate', error);
      }
    );
    return;
  }
  error_('unknown message received');
}
567 |
// Builds global.pcConstraints from the UI checkboxes and mirrors the result
// into the 'pc-constraints' text field.
// TODO (jansson) Make it possible to use the text field for constraints like
// for getUserMedia.
function setPeerConnectionConstraints() {
  // Only optional constraints are supported for now.
  var optionalConstraints = [
    {googCpuOveruseDetection: $('cpuoveruse-detection').checked},
    {RtpDataChannels: $('data-channel-type-rtp').checked}
  ];
  global.pcConstraints = {optional: optionalConstraints};
  $('pc-constraints').value = JSON.stringify(global.pcConstraints, null, ' ');
}
585 |
// Creates an RTCPeerConnection that uses |stunServer| for ICE and the
// constraints previously built by setPeerConnectionConstraints(), and wires
// up the stream/candidate/datachannel callbacks.
// @param {string} stunServer STUN server host (and optional port), without
//     the 'stun:' scheme.
// @return {!RTCPeerConnection} The newly created connection.
function createPeerConnection(stunServer) {
  // 'urls' is the standard RTCIceServer field; the legacy 'url' spelling has
  // been removed from the spec and from modern browsers.
  var servers = {iceServers: [{urls: 'stun:' + stunServer}]};
  var peerConnection;
  try {
    peerConnection = new RTCPeerConnection(servers, global.pcConstraints);
  } catch (exception) {
    error_('Failed to create peer connection: ' + exception);
  }
  peerConnection.onaddstream = addStreamCallback_;
  peerConnection.onremovestream = removeStreamCallback_;
  peerConnection.onicecandidate = iceCallback_;
  peerConnection.ondatachannel = onCreateDataChannelCallback_;
  return peerConnection;
}
600 |
// Starts (re)negotiation by creating an offer with the configured
// global.createOfferConstraints and handing it to setLocalAndSendMessage_.
// @param {!RTCPeerConnection} peerConnection The connection to negotiate.
function setupCall(peerConnection) {
  print_('createOffer with constraints: ' +
      JSON.stringify(global.createOfferConstraints, null, ' '));
  var onOfferFailed = function(error) {
    error_('createOffer', error);
  };
  peerConnection.createOffer(global.createOfferConstraints)
      .then(setLocalAndSendMessage_, onOfferFailed);
}
613 |
// Answers an incoming call by feeding the remote offer to handleMessage,
// which replies with an answer.
// @param {!RTCPeerConnection} peerConnection The connection to answer on.
// @param {string} message The serialized offer from the remote peer.
function answerCall(peerConnection, message) {
  handleMessage(peerConnection, message);
}
617 |
// Opens a DataChannel labeled |label| on |peerConnection| and stores it in
// global.dataChannel. Only one channel is tracked at a time; complains if a
// non-closed one already exists.
// @param {!RTCPeerConnection} peerConnection Connection to create on.
// @param {string} label Label for the new channel.
function createDataChannel(peerConnection, label) {
  var existing = global.dataChannel;
  if (typeof existing !== 'undefined' && existing.readyState !== 'closed') {
    error_('Creating DataChannel, but we already have one.');
  }

  global.dataChannel = peerConnection.createDataChannel(label,
      {reliable: false});
  print_('DataChannel with label ' + global.dataChannel.label + ' initiated ' +
         'locally.');
  hookupDataChannelEvents();
}
630 |
// Closes the currently tracked DataChannel; errors out when none exists.
function closeDataChannel() {
  if (typeof global.dataChannel === 'undefined') {
    error_('Closing DataChannel, but none exists.');
  }
  // Typo fix: 'beeing' -> 'being' in the user-facing log message.
  print_('DataChannel with label ' + global.dataChannel.label +
         ' is being closed.');
  global.dataChannel.close();
}
639 |
// Creates a DTMF sender for the first local audio track and stores it in
// global.dtmfSender; errors out if a sender already exists or if there is no
// local stream to take the audio track from.
// @param {!RTCPeerConnection} peerConnection Connection to create on.
function createDtmfSender(peerConnection) {
  if (typeof global.dtmfSender !== 'undefined') {
    error_('Creating DTMF sender, but we already have one.');
  }
  if (typeof global.localStream === 'undefined') {
    error_('Creating DTMF sender but local stream is undefined.');
  }
  var audioTrack = global.localStream.getAudioTracks()[0];
  var sender = peerConnection.createDTMFSender(audioTrack);
  sender.ontonechange = global.dtmfOnToneChange;
  global.dtmfSender = sender;
}
651 |
// Signs in to the given peerconnection_server instance; errors out if this
// page is already connected.
// @param {string} serverUrl The server URL in string form without an ending
//     slash, something like http://localhost:8888.
// @param {string} clientName The name to use when connecting to the server.
function connect(serverUrl, clientName) {
  if (typeof global.ourPeerId !== 'undefined') {
    error_('connecting, but is already connected.');
  }
  print_('Connecting to ' + serverUrl + ' as ' + clientName);
  global.serverUrl = serverUrl;
  global.ourClientName = clientName;

  var signInRequest = new XMLHttpRequest();
  signInRequest.open('GET', serverUrl + '/sign_in?' + clientName);
  print_(serverUrl + '/sign_in?' + clientName);
  signInRequest.onreadystatechange = function() {
    connectCallback_(signInRequest);
  };
  signInRequest.send();
}
672 |
// Creates the global peer connection. Must be called before most other
// public functions in this file; errors out if one already exists.
function preparePeerConnection() {
  if (typeof global.peerConnection !== 'undefined') {
    error_('creating peer connection, but we already have one.');
  }
  var connection = createPeerConnection(STUN_SERVER);
  global.peerConnection = connection;
  success_('ok-peerconnection-created');
}
682 |
// Adds the local stream to the peer connection. You will have to
// re-negotiate the call for this to take effect.
function addLocalStream() {
  if (typeof global.peerConnection === 'undefined') {
    error_('adding local stream, but we have no peer connection.');
  }
  addLocalStreamToPeerConnection(global.peerConnection);
  print_('ok-added');
}
692 |
// Removes the local stream from the peer connection. You will have to
// re-negotiate the call for this to take effect.
function removeLocalStream() {
  if (typeof global.peerConnection === 'undefined') {
    error_('attempting to remove local stream, but no call is up');
  }
  removeLocalStreamFromPeerConnection(global.peerConnection);
  print_('ok-local-stream-removed');
}
702 |
// Toggles a track of the first remote stream on the peer connection, given
// that a call is active. Prints ok-[typeToToggle]-toggled-to-[true/false]
// on success.
// @param {function} selectAudioOrVideoTrack Takes a remote stream and
//     returns the track to toggle (audio or video).
// @param {function} typeToToggle Either "audio" or "video" depending on what
//     the selector function selects.
function toggleRemoteStream(selectAudioOrVideoTrack, typeToToggle) {
  if (typeof global.peerConnection === 'undefined') {
    error_('Tried to toggle remote stream, but have no peer connection.');
  }
  var remoteStreams = global.peerConnection.getRemoteStreams();
  if (remoteStreams.length === 0) {
    error_('Tried to toggle remote stream, but not receiving any stream.');
  }
  toggle_(selectAudioOrVideoTrack(remoteStreams[0]), 'remote', typeToToggle);
}
722 |
// See documentation on toggleRemoteStream (this function is the same except
// we are looking at local streams).
function toggleLocalStream(selectAudioOrVideoTrack, typeToToggle) {
  if (typeof global.peerConnection === 'undefined') {
    error_('Tried to toggle local stream, but have no peer connection.');
  }
  var localStreams = global.peerConnection.getLocalStreams();
  if (localStreams.length === 0) {
    error_('Tried to toggle local stream, but there is no local stream in ' +
           'the call.');
  }
  toggle_(selectAudioOrVideoTrack(localStreams[0]), 'local', typeToToggle);
}
737 |
// Hangs up a started call: sends BYE to the remote peer, closes the call and
// stops accepting incoming calls. Prints ok-call-hung-up on success.
function hangUp() {
  if (typeof global.peerConnection === 'undefined') {
    error_('hanging up, but has no peer connection');
  }
  var state = getReadyState_();
  if (state !== 'active') {
    error_('hanging up, but ready state is not active (no call up).');
  }
  sendToPeer(global.remotePeerId, 'BYE');
  closeCall_();
  global.acceptsIncomingCalls = false;
  print_('ok-call-hung-up');
}
752 |
// Re-enables acceptance of incoming calls (see hangUp, which disables it).
function acceptIncomingCalls() {
  global.acceptsIncomingCalls = true;
}
757 |
// Creates a DataChannel on the current PeerConnection, labeled with our
// client name. Only one DataChannel can be created on each PeerConnection.
// Prints ok-datachannel-created on success.
function createDataChannelOnPeerConnection() {
  if (typeof global.peerConnection === 'undefined') {
    error_('Tried to create data channel, but have no peer connection.');
  }
  createDataChannel(global.peerConnection, global.ourClientName);
  print_('ok-datachannel-created');
}
768 |
// Closes the DataChannel on the current PeerConnection.
// Prints ok-datachannel-close on success.
function closeDataChannelOnPeerConnection() {
  if (typeof global.peerConnection === 'undefined') {
    error_('Tried to close data channel, but have no peer connection.');
  }
  closeDataChannel(global.peerConnection);
  print_('ok-datachannel-close');
}
778 |
// Creates a DTMF sender on the current PeerConnection.
// Prints ok-dtmfsender-created on success.
function createDtmfSenderOnPeerConnection() {
  if (typeof global.peerConnection === 'undefined') {
    error_('Tried to create DTMF sender, but have no peer connection.');
  }
  createDtmfSender(global.peerConnection);
  print_('ok-dtmfsender-created');
}
788 |
// Sends DTMF tones on the global.dtmfSender.
// Prints ok-dtmf-sent on success.
function insertDtmfOnSender(tones, duration, interToneGap) {
  if (typeof global.dtmfSender === 'undefined') {
    error_('Tried to insert DTMF tones, but have no DTMF sender.');
  }
  insertDtmf(tones, duration, interToneGap);
  print_('ok-dtmf-sent');
}
798 |
// POSTs |message| to |peer| through the peerconnection_server relay. For SDP
// objects only the sdp text is logged; the full message is always sent.
// @param {number} peer Id of the receiving peer.
// @param {string|Object} message Payload to deliver.
function sendToPeer(peer, message) {
  var messageToLog = message.sdp ? message.sdp : message;
  print_('Sending message ' + messageToLog + ' to peer ' + peer + '.');

  var relayUrl = global.serverUrl + '/message?peer_id=' + global.ourPeerId +
      '&to=' + peer;
  var request = new XMLHttpRequest();
  request.open('POST', relayUrl, true);
  request.setRequestHeader('Content-Type', 'text/plain');
  request.send(message);
}
811 |
// Resizes a video element either to an explicit size or, when width and
// height are both 0, to the size of the stream it renders (skipped while the
// stream size is still 0). Always refreshes the displayed size afterwards.
// @param {!string} videoElementId The ID of the video element to update.
// @param {!number} width New width; 0 means use videoElement.videoWidth.
// @param {!number} height New height; 0 means use videoElement.videoHeight.
/* exported updateVideoElementSize */
function updateVideoElementSize(videoElementId, width, height) {
  var videoElement = $(videoElementId);
  if (width > 0 || height > 0) {
    videoElement.width = width;
    videoElement.height = height;
  } else if (videoElement.videoWidth > 0 || videoElement.videoHeight > 0) {
    videoElement.width = videoElement.videoWidth;
    videoElement.height = videoElement.videoHeight;
    print_('Set video element "' + videoElementId + '" size to ' +
           videoElement.width + 'x' + videoElement.height);
  } else {
    print_('"' + videoElementId + '" video stream size is 0, skipping ' +
           ' resize');
  }
  displayVideoSize(videoElement);
}
836 |
// Signs out from the peerconnection server and marks us disconnected (the
// string 'undefined' is the sentinel isDisconnected_ checks for). Prints
// ok-disconnected on success; no-op when never connected.
function disconnect_() {
  if (typeof global.ourPeerId === 'undefined') {
    return;
  }
  var signOutRequest = new XMLHttpRequest();
  signOutRequest.open('GET', global.serverUrl + '/sign_out?peer_id=' +
      global.ourPeerId, true);
  signOutRequest.send();
  global.ourPeerId = 'undefined';
  print_('ok-disconnected');
}
850 |
// Returns true if we are disconnected from peerconnection_server (i.e. the
// peer id was reset to the string sentinel by disconnect_).
function isDisconnected_() {
  return global.ourPeerId === 'undefined';
}
855 |
// @return {!string} The current peer connection's ready state, or
//     'no-peer-connection' if there is no peer connection up.
// NOTE: The PeerConnection states are changing and until chromium has
// implemented the new states we have to use this interim solution of always
// assuming that the PeerConnection is 'active'.
function getReadyState_() {
  var hasConnection = typeof global.peerConnection !== 'undefined';
  return hasConnection ? 'active' : 'no-peer-connection';
}
867 |
// This function asks permission to use the webcam and mic from the browser.
// It does not mean the request was approved: the test then has to click past
// the permission dialog, which runs either the success or failure path. To
// see which one ran, use obtainGetUserMediaResult_().
// @param {string} constraints What to request, as a JSON string that is
//     parsed into a getUserMedia constraints dictionary.
function doGetUserMedia_(constraints) {
  // Feature-detect the API that is actually called below. The previous check
  // looked at the legacy navigator.getUserMedia, which can be absent even
  // when navigator.mediaDevices.getUserMedia is available (and vice versa).
  if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
    print_('Browser does not support WebRTC.');
    return;
  }
  var evaluatedConstraints;
  try {
    evaluatedConstraints = JSON.parse(constraints);
  } catch (exception) {
    error_('Not valid JavaScript expression: ' + constraints);
  }

  print_('Requesting doGetUserMedia: constraints: ' + constraints);
  navigator.mediaDevices.getUserMedia(evaluatedConstraints)
      .then(function(stream) {
        global.localStream = stream;
        success_('getUserMedia');

        if (stream.getVideoTracks().length > 0) {
          // Show video element if we did request video in the getUserMedia
          // call.
          var videoElement = $('local-view');
          videoElement.srcObject = stream;
          registerVideoTrackEvents(stream);
          window.addEventListener('loadedmetadata', function() {
            displayVideoSize(videoElement);
          }, true);
        }
      }).catch(function(error) {
        error_('GetUserMedia failed with error: ' + error.name);
      });
}
907 |
// Installs diagnostic handlers on the first video track of |stream|:
// onended and onmute raise an error (no frames are arriving from the
// camera), onunmute logs a warning that frames resumed.
function registerVideoTrackEvents(stream) {
  var videoTrack = stream.getVideoTracks()[0];
  // Camera delivered no frames even though getUserMedia returned OK.
  videoTrack.onended = function() {
    error_(stream + ' getUserMedia successful but ' +
           'MediaStreamTrack.onended event fired, no frames from camera.');
  };
  // Track stopped delivering frames.
  videoTrack.onmute = function() {
    error_(stream + ' MediaStreamTrack.onmute event has ' +
           'fired, no frames to the track.');
  };
  // Track resumed delivering frames.
  videoTrack.onunmute = function() {
    warning_(stream + ' MediaStreamTrack.onunmute event has ' +
             'fired.');
  };
}
925 |
// Clears the diagnostic handlers installed by registerVideoTrackEvents from
// the first video track of |stream|.
function removeVideoTrackEvents(stream) {
  var track = stream.getVideoTracks()[0];
  ['onmute', 'onunmute', 'onended'].forEach(function(handlerName) {
    track[handlerName] = null;
  });
}
932 |
// Must be called after calling doGetUserMedia.
// @return {string} ' not called yet' while no WebRTC callback has run yet;
//     afterwards the result string that callback stored (e.g. ok-got-stream
//     or failed-with-error-x).
function obtainGetUserMediaResult_() {
  var result = global.requestWebcamAndMicrophoneResult;
  if (typeof result === 'undefined') {
    result = ' not called yet';
    global.requestWebcamAndMicrophoneResult = result;
  }
  return result;
}
944 |
// Negotiates a call with the other side. This will create a peer connection
// on the other side if there isn't one. Requires a prepared peer connection,
// a server connection and exactly one remote peer. May be called any number
// of times; with no streams added an "empty" call results. Prints
// ok-negotiating once the negotiation was sent.
function negotiateCall_() {
  if (typeof global.peerConnection === 'undefined') {
    error_('Negotiating call, but we have no peer connection.');
  } else if (typeof global.ourPeerId === 'undefined') {
    error_('Negotiating call, but not connected.');
  } else if (typeof global.remotePeerId === 'undefined') {
    error_('Negotiating call, but missing remote peer.');
  }
  setupCall(global.peerConnection);
  print_('ok-negotiating');
}
965 |
// Reads the selected device ids out of the audio and video drop down menus.
// A menu without any HTMLOptions children yields null for that source.
// @param {!object} audioSelect HTML drop down element with audio devices
//     added as HTMLOptionsCollection children.
// @param {!object} videoSelect HTML drop down element with video devices
//     added as HTMLOptionsCollection children.
// @return {!object} {audioId, videoId} from the selected menu entries.
function getSourcesFromField_(audioSelect, videoSelect) {
  var selectedValueOrNull = function(selectElement) {
    if (selectElement.options.length === 0) {
      return null;
    }
    return selectElement.options[selectElement.selectedIndex].value;
  };
  return {
    audioId: selectedValueOrNull(audioSelect),
    videoId: selectedValueOrNull(videoSelect)
  };
}
989 |
// Forwards each gathered ICE candidate to the remote peer as JSON; the
// final null-candidate event is ignored.
function iceCallback_(event) {
  var candidate = event.candidate;
  if (candidate) {
    sendToPeer(global.remotePeerId, JSON.stringify(candidate));
  }
}
995 |
// Applies the configured outgoing-SDP transform to |sessionDescription|,
// installs it as the local description and relays it to the remote peer.
function setLocalAndSendMessage_(sessionDescription) {
  var sdpBeforeTransform = sessionDescription.sdp;
  sessionDescription.sdp =
      global.transformOutgoingSdp(sessionDescription.sdp);

  // Includes both SDP variants so transform-induced failures are debuggable.
  function failedSetLocalDescription(error) {
    error_('SetLocalDescription failure: ' + error + '\n' +
           'SDP before transform:\n ' + sdpBeforeTransform + '\n' +
           'SDP after transform:\n ' + sessionDescription.sdp + '\n');
  }

  global.peerConnection.setLocalDescription(sessionDescription).then(
      function() {
        success_('setLocalDescription');
      },
      failedSetLocalDescription);
  print_('Sending SDP message:\n' + sessionDescription.sdp);
  sendToPeer(global.remotePeerId, JSON.stringify(sessionDescription));
}
1017 |
// onaddstream handler: renders the incoming remote stream in the
// 'remote-view' video element and refreshes the displayed size once
// metadata is available.
function addStreamCallback_(event) {
  print_('Receiving remote stream...');
  var remoteView = document.getElementById('remote-view');
  remoteView.srcObject = event.stream;

  window.addEventListener('loadedmetadata',
      function() {
        displayVideoSize(remoteView);
      }, true);
}
1028 |
// onremovestream handler: the call ended, so blank the remote video element.
function removeStreamCallback_() {
  print_('Call ended.');
  document.getElementById('remote-view').src = '';
}
1033 |
// ondatachannel handler: adopts a DataChannel opened by the remote peer.
// Complains if a non-closed channel is already tracked.
function onCreateDataChannelCallback_(event) {
  var existing = global.dataChannel;
  if (typeof existing !== 'undefined' && existing.readyState !== 'closed') {
    error_('Received DataChannel, but we already have one.');
  }
  global.dataChannel = event.channel;
  print_('DataChannel with label ' + global.dataChannel.label +
         ' initiated by remote peer.');
  hookupDataChannelEvents();
}
1044 |
// Wires the tracked DataChannel's events to the registered callbacks, then
// fires the readyState handler once so the application learns about the new
// channel immediately.
function hookupDataChannelEvents() {
  var channel = global.dataChannel;
  channel.onmessage = global.dataCallback;
  channel.onopen = onDataChannelReadyStateChange_;
  channel.onclose = onDataChannelReadyStateChange_;
  onDataChannelReadyStateChange_();
}
1053 |
// Reports the DataChannel readyState to the registered status callback and
// keeps the 'data-channel-id' field in sync (id shown while open, cleared
// when closed).
function onDataChannelReadyStateChange_() {
  var state = global.dataChannel.readyState;
  print_('DataChannel state:' + state);
  global.dataStatusCallback(state);
  if (state === 'open') {
    $('data-channel-id').value = global.dataChannel.id;
  } else if (state === 'closed') {
    $('data-channel-id').value = '';
  }
}
1064 |
// Writes |videoTag|'s stream dimensions into the '<id>-size' element, but
// only once the stream has a non-zero size.
// @param {string} videoTag The video element whose id names the size div.
function displayVideoSize(videoTag) {
  var hasSize = videoTag.videoWidth > 0 || videoTag.videoHeight > 0;
  if (hasSize) {
    $(videoTag.id + '-size').firstChild.data = videoTag.videoWidth + 'x' +
        videoTag.videoHeight;
  }
}
1073 |
// Watches the 'audiosrc' and 'videosrc' drop down menus and re-runs
// updateGetUserMediaConstraints() whenever device options are appended, so
// the getUserMedia constraints text box is populated once the device lists
// are ready.
function checkIfDeviceDropdownsArePopulated_() {
  if (typeof MutationObserver !== 'undefined') {
    // The DOMNodeInserted mutation event is deprecated and has been removed
    // from modern browsers; observe child-list changes instead.
    var onChildrenChanged = function() {
      updateGetUserMediaConstraints();
    };
    var observer = new MutationObserver(onChildrenChanged);
    observer.observe($('audiosrc'), {childList: true});
    observer.observe($('videosrc'), {childList: true});
  } else if (document.addEventListener) {
    // Fallback for old browsers that still fire mutation events.
    $('audiosrc').addEventListener('DOMNodeInserted',
        updateGetUserMediaConstraints, false);
    $('videosrc').addEventListener('DOMNodeInserted',
        updateGetUserMediaConstraints, false);
  } else {
    print_('addEventListener is not supported by your browser, cannot update ' +
           'device source ID\'s automatically. Select a device from the audio' +
           ' or video source drop down menu to update device source id\'s');
  }
}
1090 |
// Register an input element to use local storage to remember its state
// between sessions. Only input elements are supported.
// @param {!string} elementId Used as the local storage key and as the id of
//     the element to store the state for.
function registerLocalStorage_(elementId) {
  var element = $(elementId);
  if (element.tagName !== 'INPUT') {
    error_('You can only use registerLocalStorage_ for input elements. ' +
           'Element "' + element.tagName + '" is not an input element. ');
  }

  // Seed storage on first sight of the element, otherwise restore its state.
  if (localStorage.getItem(element.id) === null) {
    storeLocalStorageField_(element);
  } else {
    getLocalStorageField_(element);
  }

  // Registers the appropriate events for input elements.
  if (element.type === 'checkbox') {
    element.onclick = function() {
      storeLocalStorageField_(this);
    };
  } else if (element.type === 'text') {
    element.onblur = function() {
      storeLocalStorageField_(this);
    };
  } else {
    // Typo fix: 'Unsupportered' -> 'Unsupported'.
    error_('Unsupported input type: ' + '"' + element.type + '"');
  }
}
1121 |
// Fetches the stored value from local storage and applies it to the element
// (checked state for checkboxes, value for text inputs).
// @param {!Object} element whose id is the key parameter for local storage.
function getLocalStorageField_(element) {
  // Makes sure the checkbox status is matching the local storage value.
  if (element.type === 'checkbox') {
    element.checked = (localStorage.getItem(element.id) === 'true');
  } else if (element.type === 'text') {
    element.value = localStorage.getItem(element.id);
  } else {
    // Typo fix: 'Unsupportered' -> 'Unsupported'.
    error_('Unsupported input type: ' + '"' + element.type + '"');
  }
}
1135 |
// Persists the element's current state under its id in local storage
// (checked for checkboxes, value for text inputs; other types are ignored).
// @param {!Object} element whose id is the key parameter for local storage.
function storeLocalStorageField_(element) {
  var inputType = element.type;
  if (inputType === 'checkbox') {
    localStorage.setItem(element.id, element.checked);
  } else if (inputType === 'text') {
    localStorage.setItem(element.id, element.value);
  }
}
1146 |
// Creates the peer connection if none is up (convenience helper so no
// separate button is needed for that).
function ensureHasPeerConnection_() {
  var noConnection = getReadyState_() === 'no-peer-connection';
  if (noConnection) {
    preparePeerConnection();
  }
}
1154 |
// Prints |text| in the default (black) style.
// @param {string} text Text to print.
function print_(text) {
  printHandler_(text, 'black');
}
1159 |
// Prints |text| as a success (green, gets a ' success' suffix).
// @param {string} text Text to print.
function success_(text) {
  printHandler_(text, 'green');
}
1164 |
// Prints |text| as a warning (orange).
// @param {string} text Text to print.
function warning_(text) {
  printHandler_(text, 'orange');
}
1169 |
// Prints |text| as an error (red); printHandler_ then throws.
// @param {string} text Text to print.
function error_(text) {
  printHandler_(text, 'red');
}
1174 |
// Appends |message| to the 'messages' element and logs it to the console.
// Green messages get a ' success' suffix; red messages additionally throw an
// Error after being appended.
// @param {string} message Text to print.
// @param {string} color Color of the text.
function printHandler_(message, color) {
  var text = message;
  if (color === 'green') {
    text += ' success';
  }
  $('messages').innerHTML += '' + text + ' ';
  if (color === 'red') {
    throw new Error(text);
  }
  console.log(text);
}
1189 |
// Parses |stringRepresentation| as JSON.
// @param {string} stringRepresentation JavaScript as a string.
// @return {Object} The parsed constraints dictionary (error_ throws on
//     invalid input, so nothing is returned in that case).
function getEvaluatedJavaScript_(stringRepresentation) {
  try {
    return JSON.parse(stringRepresentation);
  } catch (exception) {
    error_('Not valid JavaScript expression: ' + stringRepresentation);
  }
}
1201 |
// Installs an outgoing-SDP transform that strips every audio codec except
// iSAC 32K (payload type 104), leaving video codecs untouched.
function forceIsac_() {
  setOutgoingSdpTransform(function(sdp) {
    // Restrict the audio m-line to payload 104 only. This must be a real
    // regex: the previous code passed a string that merely looked like a
    // regex literal, which String.prototype.replace treats as a literal
    // substring and therefore never matched.
    sdp = sdp.replace(/m=audio (\d+) UDP\/TLS\/RTP\/SAVPF.*\r\n/g,
        'm=audio $1 UDP/TLS/RTP/SAVPF 104\r\n');
    sdp = sdp.replace('a=rtcp-fb:111 transport-cc',
        'a=rtcp-fb:104 transport-cc');
    sdp = sdp.replace('a=fmtp:111 minptime=10', 'a=fmtp:104 minptime=10');
    // Drop rtpmap lines for all other audio codecs (video codecs kept).
    var t = /a=rtpmap:(?!104)\d{1,3} (?!VP8|H264|VP9|red|ulpfec|rtx).*\r\n/g;
    sdp = sdp.replace(t, '');
    return sdp;
  });
}
1215 |
// Installs an identity outgoing-SDP transform (no modifications).
function dontTouchSdp_() {
  var identity = function(sdp) {
    return sdp;
  };
  setOutgoingSdpTransform(identity);
}
1221 |
// Registers DataChannel callbacks that surface channel status in the
// 'data-channel-status' field and prepend received data to the
// 'data-channel-receive' field.
function hookupDataChannelCallbacks_() {
  var onStatus = function(status) {
    $('data-channel-status').value = status;
  };
  var onData = function(dataMessage) {
    print_('Received ' + dataMessage.data);
    $('data-channel-receive').value =
        dataMessage.data + '\n' + $('data-channel-receive').value;
  };
  setDataCallbacks(onStatus, onData);
}
1232 |
// Registers an ontonechange handler that logs each DTMF tone as it is sent.
function hookupDtmfSenderCallback_() {
  var logTone = function(tone) {
    print_('Sent DTMF tone: ' + tone.tone);
  };
  setOnToneChange(logTone);
}
1238 |
// Flips |track|.enabled and prints ok-[audioOrVideo]-toggled-to-[true/false];
// errors out when the track is missing.
// @param {Object} track The media track to toggle.
// @param {string} localOrRemote 'local' or 'remote', for the error message.
// @param {string} audioOrVideo 'audio' or 'video', for the messages.
function toggle_(track, localOrRemote, audioOrVideo) {
  if (!track) {
    error_('Tried to toggle ' + localOrRemote + ' ' + audioOrVideo +
           ' stream, but has no such stream.');
  }
  var nowEnabled = !track.enabled;
  track.enabled = nowEnabled;
  print_('ok-' + audioOrVideo + '-toggled-to-' + nowEnabled);
}
1247 |
// readyState handler for the sign_in request: on completion, parses our peer
// id (and the remote peer's, if already connected) and starts the hanging
// GET that receives server notifications and peer messages.
function connectCallback_(request) {
  print_('Connect callback: ' + request.status + ', ' + request.readyState);
  if (request.status === 0) {
    print_('peerconnection_server doesn\'t seem to be up.');
    error_('failed connecting to peerConnection server');
  }
  var isDone = request.readyState === 4 && request.status === 200;
  if (isDone) {
    global.ourPeerId = parseOurPeerId_(request.responseText);
    global.remotePeerId = parseRemotePeerIdIfConnected_(request.responseText);
    startHangingGet_(global.serverUrl, global.ourPeerId);
    print_('ok-connected');
  }
}
1261 |
// Extracts our own peer id from the sign_in response.
// @param {string} responseText Newline-separated 'name,id,connected' lines
//     per peerconnection_server's protocol; the first line is ours.
// @return {number} Our peer id.
function parseOurPeerId_(responseText) {
  // According to peerconnection_server's protocol.
  var peerList = responseText.split('\n');
  // Always pass a radix to parseInt.
  return parseInt(peerList[0].split(',')[1], 10);
}
1267 |
// Extracts the remote peer's id from the sign_in response, if a remote peer
// is already connected.
// @param {string} responseText Newline-separated 'name,id,connected' lines.
// @return {?number} The remote peer id, or null if no peer has connected yet
//     (we will get their id later in a notification). Errors out if more
//     than one remote peer is listed, since this test expects exactly one.
function parseRemotePeerIdIfConnected_(responseText) {
  var peerList = responseText.split('\n');
  if (peerList.length === 1) {
    // No peers have connected yet - we'll get their id later in a
    // notification.
    return null;
  }
  var remotePeerId = null;
  for (var i = 0; i < peerList.length; i++) {
    if (peerList[i].length === 0) {
      continue;
    }
    var parsed = peerList[i].split(',');
    var name = parsed[0];
    // Always pass a radix to parseInt.
    var id = parseInt(parsed[1], 10);
    if (id !== global.ourPeerId) {
      print_('Found remote peer with name ' + name + ', id ' +
             id + ' when connecting.');
      // There should be at most one remote peer in this test.
      if (remotePeerId !== null) {
        error_('Expected just one remote peer in this test: ' +
               'found several.');
      }
      // Found a remote peer.
      remotePeerId = id;
    }
  }
  return remotePeerId;
}
1296 |
// Issues the long-poll ('hanging GET') /wait request to the server; the
// response delivers server notifications and relayed peer messages. No-op
// once we have disconnected.
function startHangingGet_(server, ourId) {
  if (isDisconnected_()) {
    return;
  }
  var hangingGetRequest = new XMLHttpRequest();
  hangingGetRequest.onreadystatechange = function() {
    hangingGetCallback_(hangingGetRequest, server, ourId);
  };
  hangingGetRequest.ontimeout = function() {
    hangingGetTimeoutCallback_(hangingGetRequest, server, ourId);
  };
  var waitUrl = server + '/wait?peer_id=' + ourId;
  print_('Sending ' + waitUrl);
  hangingGetRequest.open('GET', waitUrl);
  hangingGetRequest.send();
}
1313 |
// Completion handler for the hanging GET. The Pragma response header names
// the intended recipient: our own id means a server notification, any other
// id identifies the sending peer. Afterwards the poll is re-issued.
function hangingGetCallback_(hangingGetRequest, server, ourId) {
  if (hangingGetRequest.readyState !== 4 || hangingGetRequest.status === 0) {
    // Not finished yet, or status 0 (which a real server response never
    // produces). Ignore.
    return;
  }
  if (hangingGetRequest.status !== 200) {
    error_('Error ' + hangingGetRequest.status + ' from server: ' +
           hangingGetRequest.statusText);
  }
  var targetId = readResponseHeader_(hangingGetRequest, 'Pragma');
  if (targetId === ourId) {
    handleServerNotification_(hangingGetRequest.responseText);
  } else {
    handlePeerMessage_(targetId, hangingGetRequest.responseText);
  }
  hangingGetRequest.abort();

  restartHangingGet_(server, ourId);
}
1333 |
// Invoked when the hanging GET does not complete in time: logs the event,
// aborts the timed-out request, and schedules a fresh hanging GET.
function hangingGetTimeoutCallback_(hangingGetRequest, server, ourId) {
  print_('Hanging GET times out, re-issuing...');
  hangingGetRequest.abort();
  restartHangingGet_(server, ourId);
}
1339 |
/**
 * Handles a notification message from the peer connection server.
 *
 * The message has the form 'name,id,connected'. If the connected flag is 1,
 * a peer just signed in; it must be our remote peer (there is only one in
 * this test), so we record its id for later signaling.
 *
 * @private
 * @param {string} message Comma-separated notification from the server.
 */
function handleServerNotification_(message) {
  var parsed = message.split(',');
  // Pass the radix explicitly so values are always parsed as base 10.
  if (parseInt(parsed[2], 10) === 1) {
    // Peer connected - this must be our remote peer, and it must mean we
    // connected before them (except if we happened to connect to the server
    // at precisely the same moment).
    print_('Found remote peer with name ' + parsed[0] + ', id ' + parsed[1] +
           ' when connecting.');
    global.remotePeerId = parseInt(parsed[1], 10);
  }
}
1351 |
/**
 * Closes the active call and forgets the peer connection.
 *
 * If no call is active we log a warning and return; previously the code fell
 * through and crashed with a TypeError on the undefined peer connection.
 * @private
 */
function closeCall_() {
  if (typeof global.peerConnection === 'undefined') {
    warning_('Closing call, but no call active.');
    return;
  }
  global.peerConnection.close();
  global.peerConnection = undefined;
}
1359 |
/**
 * Handles a signaling message from the remote peer.
 *
 * A 'BYE' message closes the current call. If we have no peer connection and
 * accept incoming calls, the message is an incoming offer: we create a peer
 * connection (optionally hooking up our local stream) and answer. Otherwise
 * the message is forwarded to the active peer connection's message handler.
 *
 * @private
 * @param {number} peerId Id of the peer the message came from.
 * @param {string} message The raw signaling message.
 */
function handlePeerMessage_(peerId, message) {
  print_('Received message from peer ' + peerId + ': ' + message);
  if (peerId !== global.remotePeerId) {
    // Fixed: the error message previously left the parenthesis unclosed.
    error_('Received notification from unknown peer ' + peerId +
        ' (only know about ' + global.remotePeerId + ').');
  }
  if (message.search('BYE') === 0) {
    print_('Received BYE from peer: closing call');
    closeCall_();
    return;
  }
  if (typeof global.peerConnection === 'undefined' &&
      global.acceptsIncomingCalls) {
    // The other side is calling us.
    print_('We are being called: answer...');

    global.peerConnection = createPeerConnection(STUN_SERVER);

    if ($('auto-add-stream-oncall') &&
        obtainGetUserMediaResult_() === 'ok-got-stream') {
      print_('We have a local stream, so hook it up automatically.');
      addLocalStreamToPeerConnection(global.peerConnection);
    }
    answerCall(global.peerConnection, message);
    return;
  }
  handleMessage(global.peerConnection, message);
}
1388 |
// Schedules a new hanging GET on the next event-loop turn. Deferring via a
// zero-delay timeout keeps the callback chain from growing the stack.
function restartHangingGet_(server, ourId) {
  var reissue = function() {
    startHangingGet_(server, ourId);
  };
  window.setTimeout(reissue, 0);
}
1394 |
/**
 * Reads a response header from a finished request and parses it as a
 * base-10 integer.
 *
 * @private
 * @param {!XMLHttpRequest} request Request whose response is available.
 * @param {string} key Name of the header to read.
 * @return {number} The header value as an integer (NaN if the header did
 *     not contain a number).
 */
function readResponseHeader_(request, key) {
  var value = request.getResponseHeader(key);
  if (value === null || value.length === 0) {
    error_('Received empty value ' + value +
           ' for response header key ' + key + '.');
  }
  // Pass the radix explicitly: header values are decimal ids.
  return parseInt(value, 10);
}
1403 |
--------------------------------------------------------------------------------